gt stringclasses 1
value | context stringlengths 2.49k 119k |
|---|---|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A luigi file system client that wraps around snakebite
Originally written by Alan Brenner <alan@magnetic.com> github.com/alanbbr
"""
from luigi.contrib.hdfs import config as hdfs_config
from luigi.contrib.hdfs import abstract_client as hdfs_abstract_client
from luigi import six
import luigi.contrib.target
import logging
import datetime
import os
logger = logging.getLogger('luigi-interface')
class SnakebiteHdfsClient(hdfs_abstract_client.HdfsFileSystem):
    """
    A hdfs client using snakebite. Since Snakebite has a python API, it'll be
    about 100 times faster than the hadoop cli client, which does shell out to
    a java program on each file system operation.
    """

    def __init__(self):
        super(SnakebiteHdfsClient, self).__init__()
        # The snakebite client is created lazily (see get_bite) and cached
        # together with the PID that created it, so a fork forces a reconnect
        # instead of two processes sharing one socket.
        self._bite = None
        self.pid = -1

    @staticmethod
    def list_path(path):
        """
        Return ``path`` in the list form snakebite's bulk APIs expect.

        Lists/tuples pass through, strings are wrapped, anything else is
        coerced with ``str()`` first.
        """
        if isinstance(path, list) or isinstance(path, tuple):
            return path
        # TODO: Should this be:
        # isinstance(path, (six.text_type, six.binary_type))?
        if isinstance(path, six.string_types):
            return [path, ]
        return [str(path), ]

    def get_bite(self):
        """
        If Luigi has forked, we have a different PID, and need to reconnect.
        """
        config = hdfs_config.hdfs()
        if self.pid != os.getpid() or not self._bite:
            # Drop unset/empty options so snakebite applies its own defaults.
            client_kwargs = dict(filter(
                lambda k_v: k_v[1] is not None and k_v[1] != '', six.iteritems({
                    'hadoop_version': config.client_version,
                    'effective_user': config.effective_user,
                })
            ))
            if config.snakebite_autoconfig:
                # This is fully backwards compatible with the vanilla Client
                # and can be used for a non HA cluster as well. This client
                # tries to read ``${HADOOP_PATH}/conf/hdfs-site.xml`` to get
                # the address of the namenode. The behaviour is the same as
                # Client.
                from snakebite.client import AutoConfigClient
                self._bite = AutoConfigClient(**client_kwargs)
            else:
                from snakebite.client import Client
                self._bite = Client(config.namenode_host, config.namenode_port, **client_kwargs)
            # Record the owning PID. Without this the cached client was
            # rebuilt on every call, because self.pid never left its
            # initial -1 and the condition above was always true.
            self.pid = os.getpid()
        return self._bite

    def exists(self, path):
        """
        Use snakebite.test to check file existence.

        :param path: path to test
        :type path: string
        :return: boolean, True if path exists in HDFS
        """
        return self.get_bite().test(path, exists=True)

    def move(self, path, dest):
        """
        Use snakebite.rename, if available.

        :param path: source file(s)
        :type path: either a string or sequence of strings
        :param dest: destination file (single input) or directory (multiple)
        :type dest: string
        :return: list of renamed items
        """
        parts = dest.rstrip('/').split('/')
        if len(parts) > 1:
            # Make sure the destination's parent directory exists, so a move
            # into a not-yet-created directory behaves like ``hadoop fs -mv``.
            dir_path = '/'.join(parts[0:-1])
            if not self.exists(dir_path):
                self.mkdir(dir_path, parents=True)
        return list(self.get_bite().rename(self.list_path(path), dest))

    def rename_dont_move(self, path, dest):
        """
        Use snakebite.rename_dont_move, if available.

        :param path: source path (single input)
        :type path: string
        :param dest: destination path
        :type dest: string
        :return: None (raises on failure)
        :raises: luigi.target.FileAlreadyExists
        """
        from snakebite.errors import FileAlreadyExistsException
        try:
            self.get_bite().rename2(path, dest, overwriteDest=False)
        except FileAlreadyExistsException:
            # Unfortunately python2 don't allow exception chaining.
            raise luigi.target.FileAlreadyExists()

    def remove(self, path, recursive=True, skip_trash=False):
        """
        Use snakebite.delete, if available.

        :param path: delete-able file(s) or directory(ies)
        :type path: either a string or a sequence of strings
        :param recursive: delete directories trees like \\*nix: rm -r
        :type recursive: boolean, default is True
        :param skip_trash: do or don't move deleted items into the trash first
        :type skip_trash: boolean, default is False (use trash)
        :return: list of deleted items
        """
        # NOTE(review): skip_trash is accepted but not forwarded to
        # snakebite.delete -- confirm whether the installed snakebite
        # version exposes a trash option.
        return list(self.get_bite().delete(self.list_path(path), recurse=recursive))

    def chmod(self, path, permissions, recursive=False):
        """
        Use snakebite.chmod, if available.

        :param path: update-able file(s)
        :type path: either a string or sequence of strings
        :param permissions: \\*nix style permission number
        :type permissions: octal
        :param recursive: change just listed entry(ies) or all in directories
        :type recursive: boolean, default is False
        :return: list of all changed items
        """
        if isinstance(permissions, six.string_types):
            # Accept '755'-style strings (including unicode on python 2).
            permissions = int(permissions, 8)
        return list(self.get_bite().chmod(self.list_path(path),
                                          permissions, recursive))

    def chown(self, path, owner, group, recursive=False):
        """
        Use snakebite.chown/chgrp, if available.
        One of owner or group must be set. Just setting group calls chgrp.

        :param path: update-able file(s)
        :type path: either a string or sequence of strings
        :param owner: new owner, can be blank
        :type owner: string
        :param group: new group, can be blank
        :type group: string
        :param recursive: change just listed entry(ies) or all in directories
        :type recursive: boolean, default is False
        :return: all() of the results for chown, list of items for chgrp
        """
        # NOTE(review): the chown branches return a boolean via all() while
        # the chgrp branch returns a list -- kept as-is for caller
        # compatibility.
        bite = self.get_bite()
        if owner:
            if group:
                return all(bite.chown(self.list_path(path), "%s:%s" % (owner, group),
                                      recurse=recursive))
            return all(bite.chown(self.list_path(path), owner, recurse=recursive))
        return list(bite.chgrp(self.list_path(path), group, recurse=recursive))

    def count(self, path):
        """
        Use snakebite.count, if available.

        :param path: directory to count the contents of
        :type path: string
        :return: dictionary with content_size, dir_count and file_count keys
        """
        try:
            # next() works on both python 2 and 3; the previous generator
            # .next() call broke under python 3.
            res = next(self.get_bite().count(self.list_path(path)))
            dir_count = res['directoryCount']
            file_count = res['fileCount']
            content_size = res['spaceConsumed']
        except StopIteration:
            dir_count = file_count = content_size = 0
        return {'content_size': content_size, 'dir_count': dir_count,
                'file_count': file_count}

    def copy(self, path, destination):
        """
        Raise a NotImplementedError exception.
        """
        raise NotImplementedError("SnakebiteClient in luigi doesn't implement copy")

    def put(self, local_path, destination):
        """
        Raise a NotImplementedError exception.
        """
        raise NotImplementedError("Snakebite doesn't implement put")

    def get(self, path, local_destination):
        """
        Use snakebite.copyToLocal, if available.

        :param path: HDFS file
        :type path: string
        :param local_destination: path on the system running Luigi
        :type local_destination: string
        """
        return list(self.get_bite().copyToLocal(self.list_path(path),
                                                local_destination))

    def mkdir(self, path, parents=True, mode=0o755, raise_if_exists=False):
        """
        Use snakebite.mkdir, if available.

        Snakebite's mkdir method allows control over full path creation, so by
        default, tell it to build a full path to work like ``hadoop fs -mkdir``.

        :param path: HDFS path to create
        :type path: string
        :param parents: create any missing parent directories
        :type parents: boolean, default is True
        :param mode: \\*nix style owner/group/other permissions
        :type mode: octal, default 0755
        :param raise_if_exists: raise FileAlreadyExists if the path is there
        :type raise_if_exists: boolean, default is False
        """
        result = list(self.get_bite().mkdir(self.list_path(path),
                                            create_parent=parents, mode=mode))
        # Substring match deliberately covers both "File exists" and
        # "file exists" spellings of snakebite's error message.
        if raise_if_exists and "ile exists" in result[0].get('error', ''):
            raise luigi.target.FileAlreadyExists("%s exists" % (path, ))
        return result

    def listdir(self, path, ignore_directories=False, ignore_files=False,
                include_size=False, include_type=False, include_time=False,
                recursive=False):
        """
        Use snakebite.ls to get the list of items in a directory.

        :param path: the directory to list
        :type path: string
        :param ignore_directories: if True, do not yield directory entries
        :type ignore_directories: boolean, default is False
        :param ignore_files: if True, do not yield file entries
        :type ignore_files: boolean, default is False
        :param include_size: include the size in bytes of the current item
        :type include_size: boolean, default is False (do not include)
        :param include_type: include the type (d or f) of the current item
        :type include_type: boolean, default is False (do not include)
        :param include_time: include the last modification time of the current item
        :type include_time: boolean, default is False (do not include)
        :param recursive: list subdirectory contents
        :type recursive: boolean, default is False (do not recurse)
        :return: yield with a string, or if any of the include_* settings are
            true, a tuple starting with the path, and include_* items in order
        """
        bite = self.get_bite()
        for entry in bite.ls(self.list_path(path), recurse=recursive):
            if ignore_directories and entry['file_type'] == 'd':
                continue
            if ignore_files and entry['file_type'] == 'f':
                continue
            rval = [entry['path'], ]
            if include_size:
                rval.append(entry['length'])
            if include_type:
                rval.append(entry['file_type'])
            if include_time:
                # snakebite reports modification_time in milliseconds.
                rval.append(datetime.datetime.fromtimestamp(entry['modification_time'] / 1000))
            if len(rval) > 1:
                yield tuple(rval)
            else:
                yield rval[0]

    def touchz(self, path):
        """
        Raise a NotImplementedError exception.
        """
        raise NotImplementedError("SnakebiteClient in luigi doesn't implement touchz")
| |
"""
Python implementation of the TIIP (Thin Industrial Internet Protocol) protocol.
"""
import json
from datetime import datetime as dt
from datetime import timedelta as td
import dateutil.parser as parser
# Python 3 compatibility fixes: alias the Python 2-only builtin names so the
# rest of the module can refer to long/unicode/bytes on either major version.
import sys
PY3 = sys.version_info > (3,)
if PY3:
    long = int
    unicode = str
else:
    # noinspection PyShadowingBuiltins
    bytes = str
__version__ = 'tiip.3.0'  # TIIP protocol version
class TIIPMessage(object):
    """
    A TIIP (Thin Industrial Internet Protocol) message.

    All fields are validated through property setters; ``dict(msg)`` and
    ``str(msg)`` emit ``pv``/``ts`` always and every other key only if set.
    """

    # Keys (in wire order) that are loaded/assigned generically through their
    # property setters by __init__, __iter__ and loadFromDict.
    _OPTIONAL_KEYS = ('ts', 'lat', 'mid', 'sid', 'type', 'src', 'targ',
                      'sig', 'ch', 'arg', 'pl', 'ok', 'ten')

    # noinspection PyShadowingBuiltins
    def __init__(
            self, tiipStr=None, tiipDict=None, ts=None, lat=None, mid=None, sid=None, type=None,
            src=None, targ=None, sig=None, ch=None, arg=None, pl=None, ok=None,
            ten=None, verifyVersion=True):
        """
        @param tiipStr: A string representation of a TIIPMessage to load on init
        @param tiipDict: A dictionary representation of a TIIPMessage to load on init
        @raise: TypeError, ValueError
        All other arguments are keys to set in the TIIPMessage, see TIIP specification for more details:
        https://github.com/whitelizard/tiip
        """
        # Protocol keys; ts always defaults to "now" (UTC).
        self.__pv = __version__
        self.__ts = self.getTimeStamp()
        self.__lat = None
        self.__mid = None
        self.__sid = None
        self.__type = None
        self.__src = None
        self.__targ = None
        self.__sig = None
        self.__ch = None
        self.__arg = None
        self.__pl = None
        self.__ok = None
        self.__ten = None
        # Parse constructor arguments. Explicit keyword arguments are applied
        # last so they override values loaded from tiipStr/tiipDict.
        if tiipStr is not None:
            self.loadFromStr(tiipStr, verifyVersion)
        if tiipDict is not None:
            self.loadFromDict(tiipDict, verifyVersion)
        for key, value in zip(self._OPTIONAL_KEYS,
                              (ts, lat, mid, sid, type, src, targ, sig,
                               ch, arg, pl, ok, ten)):
            if value is not None:
                setattr(self, key, value)  # routed through the property setter

    def __str__(self):
        """Return this message serialized as a JSON object string."""
        return json.dumps(dict(self))

    def __iter__(self):
        """Yield (key, value) pairs: pv/ts always, other keys only if set."""
        yield 'pv', self.__pv
        yield 'ts', self.__ts
        for key in self._OPTIONAL_KEYS[1:]:  # skip 'ts', yielded above
            value = getattr(self, key)
            if value is not None:
                yield key, value

    @staticmethod
    def getTimeStamp():
        """
        Creates a timestamp string representation according to the TIIP-specification for timestamps.
        @return: ISO-8601 UTC string with microseconds and a trailing 'Z'
        """
        return dt.utcnow().isoformat(timespec='microseconds') + 'Z'

    @property
    def pv(self):
        # Protocol version; read-only.
        return self.__pv

    @property
    def ts(self):
        return self.__ts

    @ts.setter
    def ts(self, value):
        if isinstance(value, (str, unicode, bytes)):
            try:
                dateObj = parser.parse(value)
            except ValueError:
                raise ValueError('timestamp string must be parseable to datetime')
            if dateObj.utcoffset() not in [None, td(0)]:
                raise ValueError('timestamp string must be in utc timezone')
            # Enforce the canonical layout: fractional seconds (index 19 is
            # the '.' after YYYY-MM-DDTHH:MM:SS) and a trailing 'Z'.
            if value[-1] != 'Z' or value[19] != '.':
                raise ValueError('seconds must be decimals and end with Z')
            self.__ts = value
        elif isinstance(value, dt):
            if value.utcoffset() not in [None, td(0)]:
                raise ValueError('timestamp string must be in utc timezone')
            iso = value.isoformat(timespec='microseconds')
            if iso.endswith("+00:00"):
                iso = iso[:-6]  # strip explicit UTC offset; 'Z' added below
            self.__ts = iso + 'Z'
        else:
            raise TypeError('timestamp can only be of types datetime or a valid unicode or string representation of a iso 6801')

    @property
    def lat(self):
        return self.__lat

    @lat.setter
    def lat(self, value):
        if value is None:
            self.__lat = None
        elif isinstance(value, (str, unicode, bytes)):
            try:
                float(value)  # Check if string is float representation
            except ValueError:
                raise ValueError('Latency string must be parseable to float')
            else:
                self.__lat = value
        elif isinstance(value, (int, float, long)):
            # Stored as a string, rounded to microsecond precision.
            self.__lat = repr(round(value, 6))
        else:
            raise TypeError('Latency can only be of types None, float, int, long or a valid unicode or string representation of a float')

    @property
    def mid(self):
        return self.__mid

    @mid.setter
    def mid(self, value):
        if value is None:
            self.__mid = None
        elif isinstance(value, (str, unicode, bytes)):
            self.__mid = value
        else:
            raise TypeError('mid can only be of types unicode, str or None')

    @property
    def sid(self):
        return self.__sid

    @sid.setter
    def sid(self, value):
        if value is None:
            self.__sid = None
        elif isinstance(value, (str, unicode, bytes)):
            self.__sid = value
        else:
            raise TypeError('sid can only be of types unicode, str or None')

    @property
    def type(self):
        return self.__type

    @type.setter
    def type(self, value):
        if value is None:
            self.__type = None
        elif isinstance(value, (str, unicode, bytes)):
            self.__type = value
        else:
            raise TypeError('type can only be of types unicode, str or None')

    @property
    def src(self):
        return self.__src

    @src.setter
    def src(self, value):
        if value is None:
            self.__src = None
        elif isinstance(value, list):
            self.__src = value
        else:
            raise TypeError('source can only be of types list or None')

    @property
    def targ(self):
        return self.__targ

    @targ.setter
    def targ(self, value):
        if value is None:
            self.__targ = None
        elif isinstance(value, list):
            self.__targ = value
        else:
            raise TypeError('target can only be of types list or None')

    @property
    def sig(self):
        return self.__sig

    @sig.setter
    def sig(self, value):
        if value is None:
            self.__sig = None
        elif isinstance(value, (str, unicode, bytes)):
            self.__sig = value
        else:
            raise TypeError('signal can only be of types unicode, str or None')

    @property
    def ch(self):
        return self.__ch

    @ch.setter
    def ch(self, value):
        if value is None:
            self.__ch = None
        elif isinstance(value, (str, unicode, bytes)):
            self.__ch = value
        else:
            raise TypeError('channel can only be of types unicode, str or None')

    @property
    def arg(self):
        return self.__arg

    @arg.setter
    def arg(self, value):
        if value is None:
            self.__arg = None
        elif isinstance(value, dict):
            self.__arg = value
        else:
            raise TypeError('arguments can only be of types dict or None')

    @property
    def pl(self):
        return self.__pl

    @pl.setter
    def pl(self, value):
        if value is None:
            self.__pl = None
        elif isinstance(value, list):
            self.__pl = value
        else:
            raise TypeError('payload can only be of types list or None')

    @property
    def ok(self):
        return self.__ok

    @ok.setter
    def ok(self, value):
        if value is None:
            self.__ok = None
        elif isinstance(value, bool):
            self.__ok = value
        else:
            raise TypeError('ok can only be of types bool or None')

    @property
    def ten(self):
        return self.__ten

    @ten.setter
    def ten(self, value):
        if value is None:
            self.__ten = None
        elif isinstance(value, (str, unicode, bytes)):
            self.__ten = value
        else:
            raise TypeError('tenant can only be of types unicode, str or None')

    def loadFromStr(self, tiipStr, verifyVersion=True):
        """
        Loads this object with values from a string or unicode representation of a TIIPMessage.
        @param tiipStr: The string to load properties from.
        @param verifyVersion: True to verify that tiipDict has the right protocol
        @raise: TypeError, ValueError
        @return: None
        """
        tiipDict = json.loads(tiipStr)
        self.loadFromDict(tiipDict, verifyVersion)

    def loadFromDict(self, tiipDict, verifyVersion=True):
        """
        Loads this object with values from a dictionary representation of a TIIPMessage.
        @param tiipDict: The dictionary to load properties from.
        @param verifyVersion: True to verify that tiipDict has the right protocol
        @raise: TypeError, ValueError
        @return: None
        """
        # .get avoids the KeyError the old code raised whenever 'pv' was
        # missing (both in the error message and in the 2.0 upgrade check).
        pv = tiipDict.get('pv')
        if verifyVersion and pv != self.__pv:
            raise ValueError('Incorrect tiip version "' + str(pv) + '" expected "' + self.__pv + '"')
        if pv == "tiip.2.0":
            # Upgrade a tiip.2.0 dict in place: 2.0 carried numeric epoch
            # timestamps ('ct' = client time, 'ts' = send time); 3.0 uses an
            # ISO-8601 'ts' plus a derived 'lat' (latency) field.
            if 'ct' in tiipDict:
                ct = float(tiipDict['ct'])
                ts = float(tiipDict['ts'])
                tiipDict['ts'] = str(ct)
                tiipDict['lat'] = str(ts - ct)
            tiipDict['ts'] = dt.utcfromtimestamp(float(tiipDict['ts'])).isoformat(timespec='microseconds') + 'Z'
        for key in self._OPTIONAL_KEYS:
            if key in tiipDict:
                setattr(self, key, tiipDict[key])

    def asVersion(self, version):
        """
        Serialize this message as JSON for the given protocol version.
        @param version: target protocol version ("tiip.2.0" or "tiip.3.0")
        @raise: ValueError for unsupported versions
        @return: JSON string
        """
        if version == self.__pv:
            return str(self)
        elif version == "tiip.2.0":
            tiipDict = dict(self)
            if "lat" in tiipDict:
                # Reverse the 2.0 -> 3.0 mapping: ct = ts, ts = ct + lat.
                ct = parser.parse(tiipDict["ts"]).timestamp()
                tiipDict["ct"] = str(ct)
                tiipDict["ts"] = str(ct + float(tiipDict["lat"]))
                tiipDict.pop("lat")
            else:
                tiipDict["ts"] = str(parser.parse(tiipDict["ts"]).timestamp())
            tiipDict["pv"] = version
            return json.dumps(tiipDict)
        else:
            raise ValueError('Incorrect tiip version. Can only handle versions: tiip.2.0 and tiip.3.0')
| |
# Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import json
import os
import sys
import textwrap
from typ import main
from typ import test_case
from typ import Host
from typ import VERSION
from typ.fakes import test_result_server_fake
is_python3 = bool(sys.version_info.major == 3)
if is_python3:  # pragma: python3
    # pylint: disable=redefined-builtin,invalid-name
    unicode = str

# Shorthand used throughout the tests to dedent inline file fixtures.
d = textwrap.dedent

# Fixture: one module containing a single always-passing test.
PASS_TEST_PY = """
import unittest
class PassingTest(unittest.TestCase):
def test_pass(self):
pass
"""
PASS_TEST_FILES = {'pass_test.py': PASS_TEST_PY}

# Fixture: one module containing a single always-failing test.
FAIL_TEST_PY = """
import unittest
class FailingTest(unittest.TestCase):
def test_fail(self):
self.fail()
"""
FAIL_TEST_FILES = {'fail_test.py': FAIL_TEST_PY}

# Fixture: tests that write to stdout/stderr, used to check output capture.
OUTPUT_TEST_PY = """
import sys
import unittest
class PassTest(unittest.TestCase):
def test_out(self):
sys.stdout.write("hello on stdout\\n")
sys.stdout.flush()
def test_err(self):
sys.stderr.write("hello on stderr\\n")
class FailTest(unittest.TestCase):
def test_out_err_fail(self):
sys.stdout.write("hello on stdout\\n")
sys.stdout.flush()
sys.stderr.write("hello on stderr\\n")
self.fail()
"""
OUTPUT_TEST_FILES = {'output_test.py': OUTPUT_TEST_PY}

# Fixture: skip decorators, skipTest() in setUp, a failing setUpClass, and
# expected failures -- exercises skip/failure reporting paths.
SF_TEST_PY = """
import sys
import unittest
class SkipMethods(unittest.TestCase):
@unittest.skip('reason')
def test_reason(self):
self.fail()
@unittest.skipIf(True, 'reason')
def test_skip_if_true(self):
self.fail()
@unittest.skipIf(False, 'reason')
def test_skip_if_false(self):
self.fail()
class SkipSetup(unittest.TestCase):
def setUp(self):
self.skipTest('setup failed')
def test_notrun(self):
self.fail()
@unittest.skip('skip class')
class SkipClass(unittest.TestCase):
def test_method(self):
self.fail()
class SetupClass(unittest.TestCase):
@classmethod
def setUpClass(cls):
sys.stdout.write('in setupClass\\n')
sys.stdout.flush()
assert False, 'setupClass failed'
def test_method1(self):
pass
def test_method2(self):
pass
class ExpectedFailures(unittest.TestCase):
@unittest.expectedFailure
def test_fail(self):
self.fail()
@unittest.expectedFailure
def test_pass(self):
pass
"""
SF_TEST_FILES = {'sf_test.py': SF_TEST_PY}

# Fixture: tests injected dynamically via the load_tests protocol.
LOAD_TEST_PY = """
import unittest
class BaseTest(unittest.TestCase):
pass
def method_fail(self):
self.fail()
def method_pass(self):
pass
def load_tests(_, _2, _3):
setattr(BaseTest, "test_fail", method_fail)
setattr(BaseTest, "test_pass", method_pass)
suite = unittest.TestSuite()
suite.addTest(BaseTest("test_fail"))
suite.addTest(BaseTest("test_pass"))
return suite
"""
LOAD_TEST_FILES = {'load_test.py': LOAD_TEST_PY}

# Absolute path of typ's runner.py (two directories above this test file),
# used as the program under test by TestCli.
path_to_main = os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
    'runner.py')
class TestCli(test_case.MainTestCase):
prog = [sys.executable, path_to_main]
files_to_ignore = ['*.pyc']
def test_bad_arg(self):
self.check(['--bad-arg'], ret=2, out='',
rerr='.*: error: unrecognized arguments: --bad-arg\n')
self.check(['-help'], ret=2, out='',
rerr=(".*: error: argument -h/--help: "
"ignored explicit argument 'elp'\n"))
def test_bad_metadata(self):
self.check(['--metadata', 'foo'], ret=2, err='',
out='Error: malformed --metadata "foo"\n')
def test_basic(self):
self.check([], files=PASS_TEST_FILES,
ret=0,
out=('[1/1] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'), err='')
def test_coverage(self):
try:
import coverage # pylint: disable=W0612
files = {
'pass_test.py': PASS_TEST_PY,
'fail_test.py': FAIL_TEST_PY,
}
self.check(['-c', 'pass_test'], files=files, ret=0, err='',
out=d("""\
[1/1] pass_test.PassingTest.test_pass passed
1 test run, 0 failures.
Name Stmts Miss Cover
----------------------------------
fail_test.py 4 4 0%
pass_test.py 4 0 100%
----------------------------------
TOTAL 8 4 50%
"""))
except ImportError: # pragma: no cover
# We can never cover this line, since running coverage means
# that import will succeed.
self.check(['-c'], files=PASS_TEST_FILES, ret=1,
out='Error: coverage is not installed\n', err='')
def test_debugger(self):
if sys.version_info.major == 3: # pragma: python3
return
else: # pragma: python2
_, out, _, _ = self.check(['-d'], stdin='quit()\n',
files=PASS_TEST_FILES, ret=0, err='')
self.assertIn('(Pdb) ', out)
def test_dryrun(self):
self.check(['-n'], files=PASS_TEST_FILES, ret=0, err='',
out=d("""\
[1/1] pass_test.PassingTest.test_pass passed
1 test run, 0 failures.
"""))
def test_error(self):
files = {'err_test.py': d("""\
import unittest
class ErrTest(unittest.TestCase):
def test_err(self):
foo = bar
""")}
_, out, _, _ = self.check([''], files=files, ret=1, err='')
self.assertIn('[1/1] err_test.ErrTest.test_err failed unexpectedly',
out)
self.assertIn('1 test run, 1 failure', out)
def test_fail(self):
_, out, _, _ = self.check([], files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('fail_test.FailingTest.test_fail failed unexpectedly',
out)
def test_fail_then_pass(self):
files = {'fail_then_pass_test.py': d("""\
import unittest
count = 0
class FPTest(unittest.TestCase):
def test_count(self):
global count
count += 1
if count == 1:
self.fail()
""")}
_, out, _, files = self.check(['--retry-limit', '3',
'--write-full-results-to',
'full_results.json'],
files=files, ret=0, err='')
self.assertIn('Retrying failed tests (attempt #1 of 3)', out)
self.assertNotIn('Retrying failed tests (attempt #2 of 3)', out)
self.assertIn('1 test run, 0 failures.\n', out)
results = json.loads(files['full_results.json'])
self.assertEqual(
results['tests'][
'fail_then_pass_test']['FPTest']['test_count']['actual'],
'FAIL PASS')
def test_fail_then_skip(self):
files = {'fail_then_skip_test.py': d("""\
import unittest
count = 0
class FPTest(unittest.TestCase):
def test_count(self):
global count
count += 1
if count == 1:
self.fail()
elif count == 2:
self.skipTest('')
""")}
_, out, _, files = self.check(['--retry-limit', '3',
'--write-full-results-to',
'full_results.json'],
files=files, ret=0, err='')
self.assertIn('Retrying failed tests (attempt #1 of 3)', out)
self.assertNotIn('Retrying failed tests (attempt #2 of 3)', out)
self.assertIn('1 test run, 0 failures.\n', out)
results = json.loads(files['full_results.json'])
self.assertEqual(
results['tests'][
'fail_then_skip_test']['FPTest']['test_count']['actual'],
'FAIL SKIP')
def test_failures_are_not_elided(self):
_, out, _, _ = self.check(['--terminal-width=20'],
files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('[1/1] fail_test.FailingTest.test_fail failed '
'unexpectedly:\n', out)
def test_file_list(self):
files = PASS_TEST_FILES
self.check(['-f', '-'], files=files, stdin='pass_test\n', ret=0)
self.check(['-f', '-'], files=files, stdin='pass_test.PassingTest\n',
ret=0)
self.check(['-f', '-'], files=files,
stdin='pass_test.PassingTest.test_pass\n',
ret=0)
files = {'pass_test.py': PASS_TEST_PY,
'test_list.txt': 'pass_test.PassingTest.test_pass\n'}
self.check(['-f', 'test_list.txt'], files=files, ret=0)
def test_find(self):
files = PASS_TEST_FILES
self.check(['-l'], files=files, ret=0,
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test.py'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', './pass_test.py'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', '.'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test.PassingTest.test_pass'], files=files,
ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', '.'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
def test_find_from_subdirs(self):
files = {
'foo/__init__.py': '',
'foo/pass_test.py': PASS_TEST_PY,
'bar/__init__.py': '',
'bar/tmp': '',
}
self.check(['-l', '../foo/pass_test.py'], files=files, cwd='bar',
ret=0, err='',
out='foo.pass_test.PassingTest.test_pass\n')
self.check(['-l', 'foo'], files=files, cwd='bar',
ret=0, err='',
out='foo.pass_test.PassingTest.test_pass\n')
self.check(['-l', '--path', '../foo', 'pass_test'],
files=files, cwd='bar', ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
def test_help(self):
self.check(['--help'], ret=0, rout='.*', err='')
def test_import_failure_missing_file(self):
_, out, _, _ = self.check(['-l', 'foo'], ret=1, err='')
self.assertIn('Failed to load "foo" in find_tests', out)
self.assertIn('No module named foo', out)
def test_import_failure_missing_package(self):
files = {'foo.py': d("""\
import unittest
import package_that_does_not_exist
class ImportFailureTest(unittest.TestCase):
def test_case(self):
pass
""")}
_, out, _, _ = self.check(['-l', 'foo.py'], files=files, ret=1, err='')
self.assertIn('Failed to load "foo.py" in find_tests', out)
self.assertIn('No module named package_that_does_not_exist', out)
def test_import_failure_no_tests(self):
files = {'foo.py': 'import unittest'}
self.check(['-l', 'foo'], files=files, ret=1, err='',
out='No tests to run.\n')
def test_import_failure_syntax_error(self):
files = {'syn_test.py': d("""\
import unittest
class SyntaxErrorTest(unittest.TestCase):
def test_syntax_error_in_test(self):
syntax error
""")}
_, out, _, _ = self.check([], files=files, ret=1, err='')
self.assertIn('Failed to import test module: syn_test', out)
self.assertIn('SyntaxError: invalid syntax', out)
def test_interrupt(self):
files = {'interrupt_test.py': d("""\
import unittest
class Foo(unittest.TestCase):
def test_interrupt(self):
raise KeyboardInterrupt()
""")}
self.check(['-j', '1'], files=files, ret=130, out='',
err='interrupted, exiting\n')
def test_isolate(self):
self.check(['--isolate', '*test_pass*'], files=PASS_TEST_FILES, ret=0,
out=('[1/1] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'), err='')
def test_load_tests_failure(self):
files = {'foo_test.py': d("""\
import unittest
def load_tests(_, _2, _3):
raise ValueError('this should fail')
""")}
_, out, _, _ = self.check([], files=files, ret=1, err='')
self.assertIn('this should fail', out)
def test_load_tests_single_worker(self):
files = LOAD_TEST_FILES
_, out, _, _ = self.check(['-j', '1', '-v'], files=files, ret=1,
err='')
self.assertIn('[1/2] load_test.BaseTest.test_fail failed', out)
self.assertIn('[2/2] load_test.BaseTest.test_pass passed', out)
self.assertIn('2 tests run, 1 failure.\n', out)
def test_load_tests_multiple_workers(self):
_, out, _, _ = self.check([], files=LOAD_TEST_FILES, ret=1, err='')
# The output for this test is nondeterministic since we may run
# two tests in parallel. So, we just test that some of the substrings
# we care about are present.
self.assertIn('test_pass passed', out)
self.assertIn('test_fail failed', out)
self.assertIn('2 tests run, 1 failure.\n', out)
def test_missing_builder_name(self):
self.check(['--test-results-server', 'localhost'], ret=2,
out=('Error: --builder-name must be specified '
'along with --test-result-server\n'
'Error: --master-name must be specified '
'along with --test-result-server\n'
'Error: --test-type must be specified '
'along with --test-result-server\n'), err='')
def test_ninja_status_env(self):
self.check(['-v', 'output_test.PassTest.test_out'],
files=OUTPUT_TEST_FILES, aenv={'NINJA_STATUS': 'ns: '},
out=d("""\
ns: output_test.PassTest.test_out passed
1 test run, 0 failures.
"""), err='')
def test_output_for_failures(self):
_, out, _, _ = self.check(['output_test.FailTest'],
files=OUTPUT_TEST_FILES,
ret=1, err='')
self.assertIn('[1/1] output_test.FailTest.test_out_err_fail '
'failed unexpectedly:\n'
' hello on stdout\n'
' hello on stderr\n', out)
def test_quiet(self):
self.check(['-q'], files=PASS_TEST_FILES, ret=0, err='', out='')
def test_retry_limit(self):
_, out, _, _ = self.check(['--retry-limit', '2'],
files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('Retrying failed tests', out)
lines = out.splitlines()
self.assertEqual(len([l for l in lines
if 'test_fail failed unexpectedly:' in l]),
3)
def test_skip(self):
self.check(['--skip', '*test_fail*'], files=FAIL_TEST_FILES, ret=1,
out='No tests to run.\n', err='')
files = {'fail_test.py': FAIL_TEST_PY,
'pass_test.py': PASS_TEST_PY}
self.check(['-j', '1', '--skip', '*test_fail*'], files=files, ret=0,
out=('[1/2] fail_test.FailingTest.test_fail was skipped\n'
'[2/2] pass_test.PassingTest.test_pass passed\n'
'2 tests run, 0 failures.\n'), err='')
# This tests that we print test_started updates for skipped tests
# properly. It also tests how overwriting works.
_, out, _, _ = self.check(['-j', '1', '--overwrite', '--skip',
'*test_fail*'], files=files, ret=0,
err='', universal_newlines=False)
# We test this string separately and call out.strip() to
# avoid the trailing \r\n we get on windows, while keeping
# the \r's elsewhere in the string.
self.assertMultiLineEqual(
out.strip(),
('[0/2] fail_test.FailingTest.test_fail\r'
' \r'
'[1/2] fail_test.FailingTest.test_fail was skipped\r'
' \r'
'[1/2] pass_test.PassingTest.test_pass\r'
' \r'
'[2/2] pass_test.PassingTest.test_pass passed\r'
' \r'
'2 tests run, 0 failures.'))
    def test_skips_and_failures(self):
        # Mixed run: expected failures, unexpected passes, setup failures
        # and the various skip flavors are all reported distinctly.
        _, out, _, _ = self.check(['-j', '1', '-v', '-v'], files=SF_TEST_FILES,
                                  ret=1, err='')
        # We do a bunch of assertIn()'s to work around the non-portable
        # tracebacks.
        self.assertIn(('[1/9] sf_test.ExpectedFailures.test_fail failed:\n'
                       '  Traceback '), out)
        self.assertIn(('[2/9] sf_test.ExpectedFailures.test_pass '
                       'passed unexpectedly'), out)
        self.assertIn(('[3/9] sf_test.SetupClass.test_method1 '
                       'failed unexpectedly:\n'
                       '  in setupClass\n'), out)
        self.assertIn(('[4/9] sf_test.SetupClass.test_method2 '
                       'failed unexpectedly:\n'
                       '  in setupClass\n'), out)
        self.assertIn(('[5/9] sf_test.SkipClass.test_method was skipped:\n'
                       '  skip class\n'), out)
        self.assertIn(('[6/9] sf_test.SkipMethods.test_reason was skipped:\n'
                       '  reason\n'), out)
        self.assertIn(('[7/9] sf_test.SkipMethods.test_skip_if_false '
                       'failed unexpectedly:\n'
                       '  Traceback'), out)
        self.assertIn(('[8/9] sf_test.SkipMethods.test_skip_if_true '
                       'was skipped:\n'
                       '  reason\n'
                       '[9/9] sf_test.SkipSetup.test_notrun was skipped:\n'
                       '  setup failed\n'
                       '9 tests run, 4 failures.\n'), out)
    def test_skip_and_all(self):
        # --all should override --skip
        # (-l only lists the matching test names without running them).
        self.check(['-l', '--skip', '*test_pass'],
                   files=PASS_TEST_FILES, ret=1, err='',
                   out='No tests to run.\n')
        self.check(['-l', '--all', '--skip', '*test_pass'],
                   files=PASS_TEST_FILES, ret=0, err='',
                   out='pass_test.PassingTest.test_pass\n')
    def test_skip_decorators_and_all(self):
        # --all forces tests marked with skip decorators to run (and here,
        # fail), without affecting runtime skipTest() calls.
        _, out, _, _ = self.check(['--all', '-j', '1', '-v', '-v'],
                                  files=SF_TEST_FILES, ret=1, err='')
        self.assertIn('sf_test.SkipClass.test_method failed', out)
        self.assertIn('sf_test.SkipMethods.test_reason failed', out)
        self.assertIn('sf_test.SkipMethods.test_skip_if_true failed', out)
        self.assertIn('sf_test.SkipMethods.test_skip_if_false failed', out)
        # --all does not override explicit calls to skipTest(), only
        # the decorators.
        self.assertIn('sf_test.SkipSetup.test_notrun was skipped', out)
def test_sharding(self):
def run(shard_index, total_shards, tests):
files = {'shard_test.py': textwrap.dedent(
"""\
import unittest
class ShardTest(unittest.TestCase):
def test_01(self):
pass
def test_02(self):
pass
def test_03(self):
pass
def test_04(self):
pass
def test_05(self):
pass
""")}
_, out, _, _ = self.check(
['--shard-index', str(shard_index),
'--total-shards', str(total_shards),
'--jobs', '1'],
files=files)
exp_out = ''
total_tests = len(tests)
for i, test in enumerate(tests):
exp_out += ('[%d/%d] shard_test.ShardTest.test_%s passed\n' %
(i + 1, total_tests, test))
exp_out += '%d test%s run, 0 failures.\n' % (
total_tests, "" if total_tests == 1 else "s")
self.assertEqual(out, exp_out)
run(0, 1, ['01', '02', '03', '04', '05'])
run(0, 2, ['01', '03', '05'])
run(1, 2, ['02', '04'])
run(0, 6, ['01'])
    def test_subdir(self):
        # Tests discovered from a package subdirectory are reported with
        # their full dotted module path.
        files = {
            'foo/__init__.py': '',
            'foo/bar/__init__.py': '',
            'foo/bar/pass_test.py': PASS_TEST_PY
        }
        self.check(['foo/bar'], files=files, ret=0, err='',
                   out=d("""\
                         [1/1] foo.bar.pass_test.PassingTest.test_pass passed
                         1 test run, 0 failures.
                         """))
    def test_timing(self):
        # -t appends per-test and total wall-clock timings; matched with a
        # regex (rout=) because the numbers vary between runs.
        self.check(['-t'], files=PASS_TEST_FILES, ret=0, err='',
                   rout=(r'\[1/1\] pass_test.PassingTest.test_pass passed '
                         r'\d+.\d+s\n'
                         r'1 test run in \d+.\d+s, 0 failures.'))
    def test_test_results_server(self):
        # A successful run posts exactly one multipart JSON payload to the
        # configured results server.
        server = test_result_server_fake.start()
        self.assertNotEqual(server, None, 'could not start fake server')
        try:
            self.check(['--test-results-server',
                        'http://%s:%d' % server.server_address,
                        '--master-name', 'fake_master',
                        '--builder-name', 'fake_builder',
                        '--test-type', 'typ_tests',
                        '--metadata', 'foo=bar'],
                       files=PASS_TEST_FILES, ret=0, err='',
                       out=('[1/1] pass_test.PassingTest.test_pass passed\n'
                            '1 test run, 0 failures.\n'))
        finally:
            posts = server.stop()
        self.assertEqual(len(posts), 1)
        payload = posts[0][2].decode('utf8')
        self.assertIn('"test_pass": {"actual": "PASS"',
                      payload)
        # Multipart bodies end with a closing boundary marker.
        self.assertTrue(payload.endswith('--\r\n'))
        self.assertNotEqual(server.log.getvalue(), '')
    def test_test_results_server_error(self):
        # An HTTP 500 from the results server is reported and makes the
        # run exit non-zero, even though all tests passed.
        server = test_result_server_fake.start(code=500)
        self.assertNotEqual(server, None, 'could not start fake server')
        try:
            self.check(['--test-results-server',
                        'http://%s:%d' % server.server_address,
                        '--master-name', 'fake_master',
                        '--builder-name', 'fake_builder',
                        '--test-type', 'typ_tests',
                        '--metadata', 'foo=bar'],
                       files=PASS_TEST_FILES, ret=1, err='',
                       out=('[1/1] pass_test.PassingTest.test_pass passed\n'
                            '1 test run, 0 failures.\n'
                            'Uploading the JSON results raised '
                            '"HTTP Error 500: Internal Server Error"\n'))
        finally:
            _ = server.stop()
    def test_test_results_server_not_running(self):
        # Port 99999 is out of the valid range, so the upload must fail;
        # the exact error text is platform-dependent, hence the regex.
        self.check(['--test-results-server', 'http://localhost:99999',
                    '--master-name', 'fake_master',
                    '--builder-name', 'fake_builder',
                    '--test-type', 'typ_tests',
                    '--metadata', 'foo=bar'],
                   files=PASS_TEST_FILES, ret=1, err='',
                   rout=(r'\[1/1\] pass_test.PassingTest.test_pass passed\n'
                         '1 test run, 0 failures.\n'
                         'Uploading the JSON results raised .*\n'))
    def test_verbose_2(self):
        # -vv echoes captured output even for passing tests.
        self.check(['-vv', '-j', '1', 'output_test.PassTest'],
                   files=OUTPUT_TEST_FILES, ret=0,
                   out=d("""\
                         [1/2] output_test.PassTest.test_err passed:
                           hello on stderr
                         [2/2] output_test.PassTest.test_out passed:
                           hello on stdout
                         2 tests run, 0 failures.
                         """), err='')
    def test_verbose_3(self):
        # -vvv additionally prints a 'queued' line as each test starts.
        self.check(['-vvv', '-j', '1', 'output_test.PassTest'],
                   files=OUTPUT_TEST_FILES, ret=0,
                   out=d("""\
                         [0/2] output_test.PassTest.test_err queued
                         [1/2] output_test.PassTest.test_err passed:
                           hello on stderr
                         [1/2] output_test.PassTest.test_out queued
                         [2/2] output_test.PassTest.test_out passed:
                           hello on stdout
                         2 tests run, 0 failures.
                         """), err='')
    def test_version(self):
        # --version prints the version string and exits cleanly.
        self.check('--version', ret=0, out=(VERSION + '\n'))
    def test_write_full_results_to(self):
        # The full-results JSON file uses the Chromium results format:
        # a trie of test names with actual/expected/times per leaf.
        _, _, _, files = self.check(['--write-full-results-to',
                                     'results.json'], files=PASS_TEST_FILES)
        self.assertIn('results.json', files)
        results = json.loads(files['results.json'])
        self.assertEqual(results['interrupted'], False)
        self.assertEqual(results['path_delimiter'], '.')

        # The time it takes to run the test varies, so we test that
        # we got a single entry greater than zero, but then delete it from
        # the result so we can do an exact match on the rest of the trie.
        result = results['tests']['pass_test']['PassingTest']['test_pass']
        self.assertEqual(len(result['times']), 1)
        self.assertGreater(result['times'][0], 0)
        result.pop('times')
        self.assertEqual(results['tests'],
                         {u'pass_test': {
                             u'PassingTest': {
                                 u'test_pass': {
                                     u'actual': u'PASS',
                                     u'expected': u'PASS',
                                 }
                             }
                         }})
    def test_write_trace_to(self):
        # The trace file follows the Chrome trace-event format: complete
        # ('X') events with name/tid plus expected/actual in args.
        _, _, _, files = self.check(['--write-trace-to', 'trace.json'],
                                    files=PASS_TEST_FILES)
        self.assertIn('trace.json', files)
        trace_obj = json.loads(files['trace.json'])
        self.assertEqual(trace_obj['otherData'], {})
        self.assertEqual(len(trace_obj['traceEvents']), 5)
        event = trace_obj['traceEvents'][0]
        self.assertEqual(event['name'], 'pass_test.PassingTest.test_pass')
        self.assertEqual(event['ph'], 'X')
        self.assertEqual(event['tid'], 1)
        self.assertEqual(event['args']['expected'], ['Pass'])
        self.assertEqual(event['args']['actual'], 'Pass')
class TestMain(TestCli):
    # Reuses all TestCli cases but drives them in-process through main()
    # instead of spawning a subprocess (prog = [] disables the subprocess
    # command line).
    prog = []
    def make_host(self):
        return Host()
    def call(self, host, argv, stdin, env):
        # NOTE(review): `unicode` implies Python 2 (or a compatibility
        # alias defined elsewhere in this file) -- confirm.
        stdin = unicode(stdin)
        host.stdin = io.StringIO(stdin)
        if env:
            host.getenv = env.get
        host.capture_output()
        # Snapshot interpreter state so each in-process run is isolated.
        orig_sys_path = sys.path[:]
        orig_sys_modules = list(sys.modules.keys())
        try:
            ret = main(argv + ['-j', '1'], host)
        finally:
            out, err = host.restore_output()
            # Unload any modules the run imported and restore sys.path so
            # later tests see a clean interpreter.
            modules_to_unload = []
            for k in sys.modules:
                if k not in orig_sys_modules:
                    modules_to_unload.append(k)
            for k in modules_to_unload:
                del sys.modules[k]
            sys.path = orig_sys_path
        return ret, out, err
    def test_debugger(self):
        # TODO: this test seems to hang under coverage.
        pass
| |
import json
import math
import numpy
from colorful.fields import RGBColorField
from django.conf import settings
from django.contrib.gis.db import models
from django.contrib.gis.gdal import Envelope, OGRGeometry, SpatialReference
from django.contrib.postgres.fields import ArrayField
from django.db.models import Max, Min
from django.db.models.signals import m2m_changed, post_save, pre_save
from django.dispatch import receiver
from .const import WEB_MERCATOR_SRID
from .utils import hex_to_rgba
from .valuecount import ValueCountMixin
class LegendSemantics(models.Model):
    """
    Labels for pixel types (urban, forrest, warm, cold, etc)
    """
    name = models.CharField(max_length=50)
    description = models.TextField(null=True, blank=True)
    # NOTE(review): max_length on a TextField is not enforced at the
    # database level by Django -- confirm whether a CharField was intended.
    keyword = models.TextField(null=True, blank=True, max_length=100)
    def __str__(self):
        return self.name
class LegendEntry(models.Model):
    """
    One row in a Legend.

    Maps a numpy expression over pixel values to a semantic label and a
    display color.
    """
    semantics = models.ForeignKey(LegendSemantics)
    # Bug fix: the implicitly-concatenated help_text string was missing a
    # space and rendered "...where x is thepixel value...".
    expression = models.CharField(max_length=500,
        help_text='Use a number or a valid numpy logical expression where x is the '
        'pixel value. For instance: "(-3.0 < x) & (x <= 1)" or "x <= 1".')
    color = RGBColorField()
    def __str__(self):
        return '{}, {}, {}'.format(self.semantics.name,
                                   self.expression,
                                   self.color)
class Legend(models.Model):
    """
    Legend object for Rasters.
    """
    title = models.CharField(max_length=200)
    description = models.TextField(null=True, blank=True)
    entries = models.ManyToManyField(LegendEntry)
    # Cached JSON serialization of `entries`, refreshed by update_json().
    json = models.TextField(null=True, blank=True)
    modified = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.title
    def update_json(self):
        """Serialize all legend entries into the cached `json` field."""
        data = []
        for val in self.entries.all():
            data.append({
                'name': val.semantics.name,
                'expression': val.expression,
                'color': val.color
            })
        self.json = json.dumps(data)
    @property
    def colormap(self):
        """Return a dict mapping entry expression -> RGBA color tuple."""
        legend = json.loads(self.json)
        cmap = {}
        for leg in legend:
            cmap[leg['expression']] = hex_to_rgba(leg['color'])
        return cmap
    def save(self, *args, **kwargs):
        # The m2m relation is only usable once the object has a primary
        # key, so the JSON cache is refreshed on updates only.
        if self.id:
            self.update_json()
        super(Legend, self).save(*args, **kwargs)
def legend_entries_changed(sender, instance, action, **kwargs):
    """
    Updates style json upon adding or removing legend entries.
    """
    # Only react once the m2m change has actually been applied.
    if action in ('post_add', 'post_remove'):
        instance.update_json()
        instance.save()
m2m_changed.connect(legend_entries_changed, sender=Legend.entries.through)
@receiver(post_save, sender=LegendEntry)
def update_dependent_legends_on_entry_change(sender, instance, **kwargs):
    """
    Updates dependent Legends on a change in Legend entries.
    """
    # Refresh the cached JSON of every legend containing this entry.
    for legend in Legend.objects.filter(entries__id=instance.id):
        legend.update_json()
        legend.save()
@receiver(post_save, sender=LegendSemantics)
def update_dependent_legends_on_semantics_change(sender, instance, **kwargs):
    """
    Updates dependent Legends on a change in Semantics.
    """
    # Query the dependent legends directly through the entry relation.
    # The original nested loop saved a legend once per matching entry;
    # distinct() ensures each legend is refreshed and saved exactly once.
    legends = Legend.objects.filter(
        entries__semantics_id=instance.id).distinct()
    for legend in legends:
        legend.update_json()
        legend.save()
class RasterLayer(models.Model, ValueCountMixin):
    """
    Source data model for raster layers
    """
    CONTINUOUS = 'co'
    CATEGORICAL = 'ca'
    MASK = 'ma'
    RANK_ORDERED = 'ro'
    DATATYPES = (
        (CONTINUOUS, 'Continuous'),
        (CATEGORICAL, 'Categorical'),
        (MASK, 'Mask'),
        (RANK_ORDERED, 'Rank Ordered')
    )
    name = models.CharField(max_length=100, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    datatype = models.CharField(max_length=2, choices=DATATYPES, default='co')
    rasterfile = models.FileField(upload_to='rasters', null=True, blank=True)
    # Bug fix: the implicitly-concatenated help_text was missing a space
    # and rendered "...If a nodatavalue is specified...".
    nodata = models.CharField(max_length=100, null=True, blank=True,
        help_text='Leave blank to keep the internal band nodata values. If a nodata '
        'value is specified here, it will be used for all bands of this raster.')
    legend = models.ForeignKey(Legend, blank=True, null=True)
    modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        return '{} {} (type: {})'.format(self.id, self.name, self.datatype)

    @property
    def discrete(self):
        """
        Returns true for discrete rasters.
        """
        return self.datatype in (self.CATEGORICAL, self.MASK, self.RANK_ORDERED)

    # Per-instance cache for extent(); the class attribute only provides
    # the initial "not computed yet" default.
    _bbox = None

    def extent(self, srid=WEB_MERCATOR_SRID):
        """
        Returns bbox for layer.
        """
        if not self._bbox:
            # Get bbox for raster in original coordinates
            meta = self.metadata
            xmin = meta.uperleftx
            ymax = meta.uperlefty
            xmax = xmin + meta.width * meta.scalex
            # assumes scaley is negative for north-up rasters, making this
            # the lower bound -- TODO confirm
            ymin = ymax + meta.height * meta.scaley
            # Create Polygon box
            geom = OGRGeometry(Envelope((xmin, ymin, xmax, ymax)).wkt)
            # Set original srs
            if meta.srs_wkt:
                geom.srs = SpatialReference(meta.srs_wkt)
            else:
                geom.srid = meta.srid
            # Transform to requested srid
            geom.transform(srid)
            # Calculate value range for bbox
            coords = geom.coords[0]
            xvals = [x[0] for x in coords]
            yvals = [x[1] for x in coords]
            # Set bbox
            self._bbox = (min(xvals), min(yvals), max(xvals), max(yvals))
        return self._bbox

    def index_range(self, zoom):
        """
        Returns the min/max tilex and tiley indices present at `zoom`.
        """
        return self.rastertile_set.filter(tilez=zoom).aggregate(
            Min('tilex'), Max('tilex'), Min('tiley'), Max('tiley')
        )
@receiver(pre_save, sender=RasterLayer)
def reset_parse_log_if_data_changed(sender, instance, **kwargs):
    # Clear the parse log when the underlying raster file changes, so the
    # layer is re-parsed (see parse_raster_layer_if_log_is_empty).
    try:
        obj = RasterLayer.objects.get(pk=instance.pk)
    except RasterLayer.DoesNotExist:
        # New instance -- nothing to compare against.
        pass
    else:
        if obj.rasterfile.name != instance.rasterfile.name:
            instance.parsestatus.log = ''
@receiver(post_save, sender=RasterLayer)
def parse_raster_layer_if_log_is_empty(sender, instance, created, **kwargs):
    """
    Create the status/metadata companions for new layers and (re)parse the
    raster whenever a file is attached and the parse log is empty.
    """
    if created:
        RasterLayerParseStatus.objects.create(rasterlayer=instance)
        RasterLayerMetadata.objects.create(rasterlayer=instance)
    if instance.rasterfile.name and instance.parsestatus.log == '':
        # Idiom fix: getattr with a default replaces hasattr-then-access.
        if getattr(settings, 'RASTER_USE_CELERY', False):
            # Imported lazily to avoid a hard dependency on celery.
            from raster.tasks import parse_raster_layer_with_celery
            parse_raster_layer_with_celery.delay(instance)
        else:
            from raster.parser import RasterLayerParser
            parser = RasterLayerParser(instance)
            parser.parse_raster_layer()
class RasterLayerMetadata(models.Model):
    """
    Stores meta data for a raster layer
    """
    rasterlayer = models.OneToOneField(RasterLayer, related_name='metadata')
    # NOTE(review): "uperleft" (sic) is kept as-is; renaming the columns
    # would require a schema migration.
    uperleftx = models.FloatField(null=True, blank=True)
    uperlefty = models.FloatField(null=True, blank=True)
    width = models.IntegerField(null=True, blank=True)
    height = models.IntegerField(null=True, blank=True)
    scalex = models.FloatField(null=True, blank=True)
    scaley = models.FloatField(null=True, blank=True)
    skewx = models.FloatField(null=True, blank=True)
    skewy = models.FloatField(null=True, blank=True)
    numbands = models.IntegerField(null=True, blank=True)
    srs_wkt = models.TextField(null=True, blank=True)
    srid = models.PositiveSmallIntegerField(null=True, blank=True)
    max_zoom = models.PositiveSmallIntegerField(null=True, blank=True)
    def __str__(self):
        return self.rasterlayer.name
class RasterLayerParseStatus(models.Model):
    """
    Tracks the parsing status of the raster layer.
    """
    # Pipeline stages, in processing order, plus terminal states.
    UNPARSED = 0
    DOWNLOADING_FILE = 1
    REPROJECTING_RASTER = 2
    CREATING_TILES = 3
    DROPPING_EMPTY_TILES = 4
    FINISHED = 5
    FAILED = 6
    STATUS_CHOICES = (
        (UNPARSED, 'Layer not yet parsed'),
        (DOWNLOADING_FILE, 'Downloading file'),
        (REPROJECTING_RASTER, 'Reprojecting'),
        (CREATING_TILES, 'Creating tiles'),
        (DROPPING_EMPTY_TILES, 'Dropping empty tiles'),
        (FINISHED, 'Finished parsing'),
        (FAILED, 'Failed parsing'),
    )
    rasterlayer = models.OneToOneField(RasterLayer, related_name='parsestatus')
    status = models.IntegerField(choices=STATUS_CHOICES, default=UNPARSED)
    tile_level = models.IntegerField(null=True, blank=True)
    # Free-form parse log; an empty log triggers (re)parsing on save.
    log = models.TextField(default='', editable=False)
    def __str__(self):
        return '{0} - {1}'.format(self.rasterlayer.name, self.get_status_display())
class RasterLayerBandMetadata(models.Model):
    """Per-band statistics (range and value histogram) for a raster layer."""
    # Number of bins of the precomputed value histogram.
    HISTOGRAM_BINS = 100
    rasterlayer = models.ForeignKey(RasterLayer)
    band = models.PositiveIntegerField()
    nodata_value = models.FloatField(null=True)
    max = models.FloatField()
    min = models.FloatField()
    hist_values = ArrayField(models.FloatField(), size=HISTOGRAM_BINS)
    hist_bins = ArrayField(models.FloatField(), size=HISTOGRAM_BINS + 1)
    def __str__(self):
        return '{} - Min {} - Max {}'.format(self.rasterlayer.name, self.min, self.max)
    def save(self, *args, **kwargs):
        # On first save, seed the histogram with zero counts over the
        # band's [min, max] range -- presumably filled in during parsing;
        # verify against the parser.
        if not self.pk:
            # Construct empty histogram
            hist = numpy.histogram(
                [],
                range=(math.floor(self.min), math.ceil(self.max)),
                bins=self.HISTOGRAM_BINS
            )
            # Set empty histogram values
            self.hist_values = hist[0].tolist()
            self.hist_bins = hist[1].tolist()
        super(RasterLayerBandMetadata, self).save(*args, **kwargs)
class RasterTile(models.Model):
    """
    Store individual tiles of a raster data source layer.
    """
    # Allowed web-mercator zoom levels for tilez.
    ZOOMLEVELS = (
        (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7),
        (8, 8), (9, 9), (10, 10), (11, 11), (12, 12), (13, 13),
        (14, 14), (15, 15), (16, 16), (17, 17), (18, 18)
    )
    rid = models.AutoField(primary_key=True)
    rast = models.RasterField(null=True, blank=True, srid=WEB_MERCATOR_SRID)
    rasterlayer = models.ForeignKey(RasterLayer, null=True, blank=True, db_index=True)
    tilex = models.IntegerField(db_index=True, null=True)
    tiley = models.IntegerField(db_index=True, null=True)
    tilez = models.IntegerField(db_index=True, null=True, choices=ZOOMLEVELS)
    def __str__(self):
        # NOTE(review): no `filename` field is defined on this model, so
        # this raises AttributeError unless set elsewhere -- confirm.
        return '{0} {1}'.format(self.rid, self.filename)
| |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'chat_client1.ui'
#
# Created: Thu Jul 30 16:47:54 2015
# by: PyQt4 UI code generator 4.9.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import sys, socket
from thread import *
def app_version():
    """Show the application version in an informational message box."""
    msg_box("Application Version", "P2P Chat v1.0")
def msg_box(title, data):
    """Pop up a simple informational message box with the given title/text."""
    w = QWidget()
    QMessageBox.information(w, title, data)
def update_list(self, data):
    """Append an incoming message to the chat list widget."""
    self.listWidget.addItem(data)
    # "\a" is the terminal bell character (audible notification).
    print "\a"
def server_socket(self):
    """Receive loop: accept peer connections and log their messages.

    Runs on a background thread (see Ui_MainWindow.start_server).
    """
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # NOTE(review): this listens on port 6190 while client_send_message
        # connects to port 9000 -- looks like a port mismatch; confirm.
        s.bind(('', 6190))
        s.listen(1)
    except socket.error, e:
        msg_box("Socket Error !!",
                "Unable To Setup Local Socket. Port In Use")
        return
    while 1:
        conn, addr = s.accept()
        incoming_ip = str(addr[0])
        current_chat_ip = self.lineEdit.text()
        # Only accept messages from the peer currently entered in the UI.
        if incoming_ip != current_chat_ip:
            conn.close()
        else:
            data = conn.recv(4096)
            update_list(self, data)
            conn.close()
    # NOTE(review): unreachable -- the accept loop above never exits.
    s.close()
# QString.fromUtf8 exists only in PyQt4's QString API (API v1); fall back
# to the identity function when it is unavailable.
try:
    _fromUtf8 = QString.fromUtf8
except AttributeError:
    _fromUtf8 = lambda s: s
class Ui_MainWindow(object):
    """PyQt4-generated main window for the P2P chat client."""
    def setupUi(self, MainWindow):
        """Build all widgets, wire signals, and start the receive thread."""
        self.start_server()
        MainWindow.setObjectName(_fromUtf8("MainWindow"))
        MainWindow.resize(662, 448)
        self.centralwidget = QWidget(MainWindow)
        self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
        # Top frame: peer IP address and nickname inputs.
        self.frame = QFrame(self.centralwidget)
        self.frame.setGeometry(QRect(10, 10, 651, 41))
        self.frame.setFrameShape(QFrame.StyledPanel)
        self.frame.setFrameShadow(QFrame.Raised)
        self.frame.setObjectName(_fromUtf8("frame"))
        self.label = QLabel(self.frame)
        self.label.setGeometry(QRect(10, 10, 131, 21))
        self.label.setObjectName(_fromUtf8("label"))
        self.lineEdit = QLineEdit(self.frame)
        self.lineEdit.setGeometry(QRect(90, 10, 161, 21))
        self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
        self.label_2 = QLabel(self.frame)
        self.label_2.setGeometry(QRect(260, 10, 131, 21))
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.lineEdit_2 = QLineEdit(self.frame)
        self.lineEdit_2.setGeometry(QRect(300, 10, 121, 21))
        self.lineEdit_2.setObjectName(_fromUtf8("lineEdit_2"))
        # Left frame: message composer with send/clear buttons.
        self.frame_2 = QFrame(self.centralwidget)
        self.frame_2.setGeometry(QRect(10, 60, 301, 321))
        self.frame_2.setFrameShape(QFrame.StyledPanel)
        self.frame_2.setFrameShadow(QFrame.Raised)
        self.frame_2.setObjectName(_fromUtf8("frame_2"))
        self.textEdit = QTextEdit(self.frame_2)
        self.textEdit.setGeometry(QRect(10, 10, 281, 261))
        self.textEdit.setObjectName(_fromUtf8("textEdit"))
        self.pushButton_3 = QPushButton(self.frame_2)
        self.pushButton_3.setGeometry(QRect(10, 280, 171, 31))
        self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
        #############################################################
        # Executes When The Send Message Button Is Clicked
        self.pushButton_3.clicked.connect(self.client_send_message)
        ############################################################
        self.pushButton_4 = QPushButton(self.frame_2)
        self.pushButton_4.setGeometry(QRect(190, 280, 93, 31))
        self.pushButton_4.setObjectName(_fromUtf8("pushButton_4"))
        #############################################################
        # Executes When The Clear Logs Button Is Clicked
        self.pushButton_4.clicked.connect(self.clear_logs)
        ##############################################################
        # Right frame: message log list.
        self.frame_3 = QFrame(self.centralwidget)
        self.frame_3.setGeometry(QRect(320, 60, 331, 321))
        self.frame_3.setFrameShape(QFrame.StyledPanel)
        self.frame_3.setFrameShadow(QFrame.Raised)
        self.frame_3.setObjectName(_fromUtf8("frame_3"))
        self.listWidget = QListWidget(self.frame_3)
        self.listWidget.setGeometry(QRect(10, 10, 311, 301))
        self.listWidget.setObjectName(_fromUtf8("listWidget"))
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QStatusBar(MainWindow)
        self.statusbar.setObjectName(_fromUtf8("statusbar"))
        MainWindow.setStatusBar(self.statusbar)
        self.menubar = QMenuBar(MainWindow)
        self.menubar.setGeometry(QRect(0, 0, 662, 29))
        self.menubar.setObjectName(_fromUtf8("menubar"))
        self.menuAction = QMenu(self.menubar)
        self.menuAction.setObjectName(_fromUtf8("menuAction"))
        MainWindow.setMenuBar(self.menubar)
        self.actionExit = QAction(MainWindow)
        self.actionExit.setObjectName(_fromUtf8("actionExit"))
        #######################################################
        # Executes When The SubMenu Item Version Is Clicked
        self.actionExit.triggered.connect(app_version)
        #######################################################
        self.actionExit_2 = QAction(MainWindow)
        self.actionExit_2.setObjectName(_fromUtf8("actionExit_2"))
        #######################################################
        # Executes When The SubMenu Item Exit Is Clicked
        self.actionExit_2.triggered.connect(qApp.quit)
        #######################################################
        self.menuAction.addAction(self.actionExit)
        self.menuAction.addAction(self.actionExit_2)
        self.menubar.addAction(self.menuAction.menuAction())
        self.retranslateUi(MainWindow)
        QMetaObject.connectSlotsByName(MainWindow)
    def clear_logs(self):
        """Clear the message log list."""
        self.listWidget.clear()
    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (PyQt4 translate boilerplate)."""
        MainWindow.setWindowTitle(QApplication.translate("MainWindow",
            "Null Byte P2P Chat", None, QApplication.UnicodeUTF8))
        self.label.setText(QApplication.translate("MainWindow", "IP Address:",
            None, QApplication.UnicodeUTF8))
        self.label_2.setText(QApplication.translate("MainWindow", "Nick: ",
            None, QApplication.UnicodeUTF8))
        self.pushButton_3.setText(QApplication.translate("MainWindow",
            "Send Message", None, QApplication.UnicodeUTF8))
        self.pushButton_4.setText(QApplication.translate("MainWindow",
            "Clear Logs", None, QApplication.UnicodeUTF8))
        self.menuAction.setTitle(QApplication.translate("MainWindow",
            "Menu Actions", None, QApplication.UnicodeUTF8))
        self.actionExit.setText(QApplication.translate("MainWindow",
            "Version", None, QApplication.UnicodeUTF8))
        self.actionExit_2.setText(QApplication.translate("MainWindow",
            "Exit", None, QApplication.UnicodeUTF8))
    def start_server(self):
        """Spawn the receive loop (server_socket) on a background thread."""
        start_new_thread(server_socket, (self,))
        msg_box("Success", "Server Started Sucessfully")
    def client_send_message(self):
        """Send the composed message to the peer and log it locally."""
        ip_address = self.lineEdit.text()
        nick = self.lineEdit_2.text()
        # Strip the field separator from user input so the "nick #> msg"
        # framing stays unambiguous.
        nick = nick.replace("#>","")
        rmessage = self.textEdit.toPlainText()
        rmessage = rmessage.replace("#>","")
        rmsg = nick + " #> " + rmessage
        c = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            # NOTE(review): connects to port 9000 but server_socket binds
            # 6190 -- confirm which port is intended.
            c.connect((ip_address, 9000))
        except Exception, e:
            msg_box("Connection Refused", "The Address You Are Trying To Reach Is Currently Unavailable")
            return
        try:
            c.send(rmsg)
            self.listWidget.addItem(rmsg)
            self.textEdit.setText("")
        except Exception, e:
            msg_box("Connection Refused", "The Message Cannot Be Sent. End-Point Not Connected !!")
        c.close()
if __name__ == "__main__":
    # Application entry point: build the main window and run the Qt loop.
    app = QApplication(sys.argv)
    MainWindow = QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())
| |
# Bits are stored in ascending order.
# Remember: As bitvector is a mutable type, a __hash__ method is not
# appropriate.
from array import array
from collections import defaultdict
from itertools import islice, chain, repeat, izip_longest
__all__ = ["bitvector"]
class bitvector(object):
    def __init__(self, obj=None, width=None, fill=None):
        """Build a bitvector from None, a bool, another bitvector, a str,
        an int/long, or any iterable of bit values.

        `width`, when given, forces the result to exactly that many bits;
        `fill` is the padding bit (defaults to `obj` itself for bools,
        otherwise False).  Bits are stored little-endian within `_blob`,
        an array of bytes; `_size` is the bit count.
        """
        if fill is None:
            fill = obj if isinstance(obj, bool) else False
        if width is not None and width <= 0:
            self._blob = array('B')
            self._size = 0
        elif isinstance(obj, bool):
            # This needs to go before the `obj == 0` case because `False == 0`.
            if width is None:
                self._blob = array('B', [int(obj)])
                self._size = 1
            else:
                # Build a width-long fill vector, then flip bit 0 if needed.
                self.__init__(None, width=width, fill=fill)
                if fill != obj: self._blob[0] ^= 1
        elif obj is None or obj == 0:
            if width is None:
                self._blob = array('B')
                self._size = 0
            else:
                (bytes, extra) = divmod(width, 8)
                self._blob = array('B', [255 if fill else 0] * bytes)
                if extra != 0:
                    self._blob.append((1 << extra) - 1 if fill else 0)
                self._size = width
        elif isinstance(obj, bitvector):
            # Copy constructor.
            self._blob = obj._blob[:]
            self._size = len(obj)
            if width is not None:
                self.setWidth(width, fill)
        elif isinstance(obj, str):
            # Each character contributes its 8 bits (Python 2 byte string).
            self._blob = array('B', map(ord, obj))
            self._size = len(obj) * 8
            if width is not None:
                self.setWidth(width, fill)
        elif isinstance(obj, (int, long)):
            ### TODO: This ignores `fill` - should it?
            # Two's-complement encoding; negative values get a sign bit.
            bytes = []
            if width is None:
                if obj == -1:
                    (bytes, self._size) = ([1], 1)
                else:
                    cutoff = 0 if obj >= 0 else -1
                    while obj != cutoff:
                        (obj, b) = divmod(obj, 256)
                        bytes.append(b)
                    # Count unused high bits of the top byte (inverted for
                    # negatives so leading sign bits don't count).
                    last = bytes[-1]
                    if cutoff == -1: last ^= 255
                    unused = 8
                    while last > 0:
                        last >>= 1
                        unused -= 1
                    self._size = len(bytes) * 8 - unused
                    if cutoff == -1:
                        if unused == 0:
                            bytes.append(1)
                        else:
                            bytes[-1] &= (1 << (9-unused)) - 1
                        self._size += 1
            else:
                for i in xrange((width+7) // 8):
                    (obj, b) = divmod(obj, 256)
                    bytes.append(b)
                if obj < 0 and width % 8 != 0:
                    bytes[-1] &= (1 << (width % 8)) - 1
                self._size = width
            self._blob = array('B', bytes)
        else:
            # Assume `obj` is an iterable of bit values
            obj = iter(obj)
            if width is not None:
                obj = islice(chain(obj, repeat(fill)), width)
            self._size = 0
            bytes = []
            # izip_longest groups the bits 8 at a time, padding with None.
            for byte in izip_longest(*[obj]*8):
                b=0
                for i, bit in enumerate(byte):
                    if bit:
                        b |= 1 << i
                        # NOTE(review): _size is only incremented for set
                        # bits, so falsy bits in the input do not count
                        # toward the width -- verify this is intended.
                        self._size += 1
                bytes.append(b)
            self._blob = array('B', bytes)
    def __getitem__(self, i):
        """Return the bit at index `i` as a bool, or a new bitvector for a
        slice."""
        if isinstance(i, slice):
            (start, stop, step) = i.indices(len(self))
            if start >= stop: return bitvector()
            if step == 1:
                # Contiguous slice: shift off the leading bits, then truncate.
                new = self >> start
                stop -= start
                new.setWidth(stop)
                return new
            else:
                return bitvector([self[j] for j in range(start, stop, step)])
        else:
            if i < 0:
                i += self._size
            if not (0 <= i < self._size):
                raise IndexError('bitvector index out of range')
            (byte, offset) = divmod(i, 8)
            return bool(self._blob[byte] & (1 << offset))
    def __setitem__(self, i, new):
        """Set the bit at index `i`, or replace a slice with `new`."""
        ### TODO: Is this supposed to return something?
        if isinstance(i, slice):
            (start, stop, step) = i.indices(len(self))
            if start > stop:
                ### TODO: Is there a better error or way to handle this? Lists
                ### seem to handle this situation oddly.
                raise ValueError('bitvector.__setitem__: slice start cannot be'
                                 ' after stop')
            if step == 1:
                ### TODO: Make this part more efficient.
                new = bitvector(new)
                # It's best to flush out construction errors before changing
                # `self`'s width.
                beyond = self[stop:]
                # Rebuild as prefix + new + suffix (may change the width).
                self.setWidth(start)
                self.extend(new)
                self.extend(beyond)
            else:
                ### TODO: What types should be permitted for `new` here? This
                ### currently only works for iterable types with __len__
                ### methods, `list` and `bitvector` being the only such types
                ### that are also accepted by the `bitvector` constructor.
                indices = range(start, stop, step)
                if len(indices) != len(new):
                    raise ValueError('attempt to assign sequence of size %d'
                                     ' to extended slice of size %d'
                                     % (len(new), len(indices)))
                # If it's good enough for the `list` type, it's good enough
                # for me!
                for (j,b) in zip(indices, new):
                    (byte, offset) = divmod(j, 8)
                    if b: self._blob[byte] |= 1 << offset
                    else: self._blob[byte] &= ~(1 << offset)
        else:
            if i < 0:
                i += self._size
            if not (0 <= i < self._size):
                raise IndexError('bitvector index out of range')
            (byte, offset) = divmod(i, 8)
            if new: self._blob[byte] |= 1 << offset
            else: self._blob[byte] &= ~(1 << offset)
    def __delitem__(self, i):
        """Delete the bit at index `i`, or all bits in a slice, shifting
        the higher bits down to fill the gap."""
        if isinstance(i, slice):
            (start, stop, step) = i.indices(len(self))
            if start >= stop: return
            if stop >= len(self):
                # Deleting through the end is just truncation.
                self.setWidth(start)
            elif step == 1:
                (byte1, offset1) = divmod(start, 8)
                (byte2, offset2) = divmod(stop, 8)
                if byte1 != byte2:
                    # Remove whole bytes strictly inside the range, then
                    # clear the partial bits at each boundary byte.
                    inter = (stop - start - (8 - offset1)) // 8
                    if inter > 0:
                        del self._blob[byte1+1 : byte1+1+inter]
                        byte2 -= inter
                        self._size -= inter * 8
                    self._blob[byte1] &= (1 << offset1) - 1
                    self._blob[byte2] &= 255 << offset2
                else:
                    # Range falls within a single byte: splice the bits
                    # above `stop` down next to those below `start`.
                    b = self._blob[byte1]
                    above = b & (255 << offset2)
                    self._blob[byte1] = (b & (1 << offset1) - 1) \
                        | (above >> (offset2-offset1))
                    offset1 = 8 - (offset2 - offset1)
                    (byte2, offset2) = (byte2+1, 0)
                if byte2 * 8 >= len(self):
                    self._size -= stop - start
                    return
                # Shift everything above the hole down by `shiftBy` bits.
                shiftBy = 8 - offset1 + offset2
                carry = 0
                for i in xrange(len(self._blob)-1, byte1, -1):
                    (self._blob[i], carry) \
                        = divmod(self._blob[i] | (carry << 8), 1 << shiftBy)
                self._blob[byte1] |= (carry >> offset2) << offset1
                if 0 < self._size % 8 <= shiftBy:
                    self._blob.pop()
                self._size -= shiftBy
            else:
                # Extended slice: delete one bit at a time, compensating
                # for the indices shifting as earlier bits are removed.
                delled = 0
                for j in xrange(start, stop, step):
                    del self[j-delled]
                    if step > 0:
                        delled += 1
        else:
            if i < 0:
                i += self._size
            if not (0 <= i < self._size):
                raise IndexError('bitvector index out of range')
            (byte, offset) = divmod(i, 8)
            # Close the gap within the byte, then ripple the higher bytes
            # down by one bit.
            b = self._blob[byte]
            self._blob[byte] = (b & (1 << offset)-1) \
                | ((b & (255 << offset+1)) >> 1)
            for j in xrange(byte+1, len(self._blob)):
                if self._blob[j] & 1:
                    self._blob[j-1] |= 1 << 7
                self._blob[j] >>= 1
            if self._size % 8 == 1:
                self._blob.pop()
            self._size -= 1
def __invert__(self):
inverse = bitvector()
inverse._blob = array('B', [~b & 255 for b in self._blob])
if self._size % 8 != 0:
inverse._blob[-1] &= (1 << self._size % 8) - 1
inverse._size = self._size
return inverse
    def __int__(self):
        """Interpret the bits as a little-endian unsigned integer."""
        #return int(''.join('1' if b else '0' for b in reversed(self)), 2)
        #return int(''.join(bin(b).zfill(8) for b in reversed(self._blob)), 2)
        return reduce(lambda x,b: x*256 + b, reversed(self._blob), 0)
        #return reduce(operator.__or__, [b << i*8 for (i,b) in enumerate(self._blob)])
        #return sum(b << i*8 for (i,b) in enumerate(self._blob))
    def __long__(self): return long(int(self))  # Python 2 long conversion
    def copy(self): return bitvector(self)  # independent duplicate
    def extend(self, other): self += other #; return None
    def __len__(self): return self._size  # number of bits, not bytes
    def __nonzero__(self): return any(b != 0 for b in self._blob)
def __add__(self, other):
new = bitvector(self)
new.extend(other)
return new
def __radd__(self, other):
new = bitvector(other)
new.extend(self)
return new
def __iadd__(self, other):
other = bitvector(other)
offset = self._size % 8
self._size += other._size
if offset != 0:
other <<= offset
self._blob[-1] |= other._blob[0]
self._blob.extend(other.blob[1:])
else:
self._blob.extend(other.blob)
return self
def __lshift__(self, n):
new = bitvector(self)
new <<= n
return new
    def __ilshift__(self, n):
        """Shift left in place: insert `n` zero bits at the low end
        (negative `n` shifts right instead)."""
        if n < 0:
            self >>= -n
        else:
            (pads, offset) = divmod(n, 8)
            if offset != 0:
                # Ripple each byte up by `offset` bits, carrying overflow
                # into the next byte.
                carry = 0
                for (i,b) in enumerate(self._blob):
                    (carry, self._blob[i]) = divmod((b << offset) | carry, 256)
                self._blob.append(carry)
            if pads != 0:
                # Whole-byte shifts are a simple prepend of zero bytes.
                self._blob[0:0] = array('B', [0] * pads)
            self._size += n
        return self
def __rshift__(self, n):
new = bitvector(self)
new >>= n
return new
def __irshift__(self, n):
if n < 0:
self <<= -n
else:
del self[0:n]
return self
    def __iter__(self):
        """Yield the bits in ascending order as bools."""
        i=0
        for byte in self._blob:
            for j in xrange(8):
                # Stop at _size so unused bits of the last byte are skipped.
                if i >= self._size:
                    break
                yield bool(byte & (1 << j))
                i += 1
    def __cmp__(self, other):
        # Python 2 ordering: compare by type first, then by raw contents
        # (byte array and bit count).
        return cmp(type(self), type(other)) or \
            cmp((self._blob, self._size), (other._blob, other._size))
    def __repr__(self):
        # Shown as the hex value plus the explicit width (leading zero
        # bits would otherwise be ambiguous).
        return 'bitvector(%#x, width=%d)' % (int(self), len(self))
        #return 'bitvector(%s, width=%d)' % (bin(int(self)), len(self))
    def __and__(self, other):
        """Bitwise AND; the result is truncated to the shorter operand."""
        other = bitvector(other)
        other &= self
        return other
    __rand__ = __and__
    def __iand__(self, other):
        """In-place AND; shrinks `self` to the shorter of the two widths."""
        if not isinstance(other, bitvector):
            other = bitvector(other)
        self.setWidth(min(self._size, other._size))
        for i in xrange(len(self._blob)):
            self._blob[i] &= other._blob[i]
        return self
    def __or__(self, other):
        """Bitwise OR; the result is as wide as the longer operand."""
        other = bitvector(other)
        other |= self
        return other
    __ror__ = __or__
    def __ior__(self, other):
        """In-place OR; grows `self` (zero-padded) to the longer width."""
        if not isinstance(other, bitvector):
            other = bitvector(other)
        self.setWidth(max(self._size, other._size), False)
        for i in xrange(len(other._blob)):
            self._blob[i] |= other._blob[i]
        return self
def __xor__(self, other):
other = bitvector(other)
other ^= self
return other
__rxor__ = __xor__
    def __ixor__(self, other):
        """Bitwise XOR in place; the result is widened (zero-filled) to the
        longer operand's width."""
        if not isinstance(other, bitvector):
            other = bitvector(other)
        self.setWidth(max(self._size, other._size), False)
        for i in xrange(len(other._blob)):
            self._blob[i] ^= other._blob[i]
        return self
    def setWidth(self, width, fill=False):
        """Resize to exactly `width` bits.  Shrinking truncates and masks
        the last byte; growing pads with 0-bits, or 1-bits when `fill`."""
        ### TODO: Rename "resize"?
        if width < 0: width = 0
        if width < len(self):
            self._blob = self._blob[0 : (width+7)//8]
            if width % 8 != 0:
                # Clear the now-unused high bits of the final byte.
                self._blob[-1] &= (1 << (width % 8)) - 1
        else:
            extra = width - len(self)
            # padBits: free high bits in the current last byte; padBytes:
            # whole bytes still needed after those are used.
            padBits = 0 if len(self) % 8 == 0 else (8 - len(self) % 8)
            padBytes = (extra - padBits + 7) // 8
            if fill and padBits > 0:
                # NOTE(review): this sets all padBits high bits even when
                # extra < padBits, leaving 1-bits beyond the new width —
                # confirm whether callers rely on bits past _size being 0.
                self._blob[-1] |= ((1 << padBits) - 1) << (8 - padBits)
            self._blob.extend([255 if fill else 0] * padBytes)
        self._size = width
def toggle(self, i):
if isinstance(i, slice):
(start, stop, step) = i.indices(len(self))
if start >= stop: return
if step == 1:
(byte1, offset1) = divmod(start, 8)
(byte2, offset2) = divmod(stop, 8)
if byte1 == byte2:
self._blob[byte1] ^= ((1 << offset2-offset1) - 1) << offset1
else:
self._blob[byte1] ^= 255 << offset1
for j in xrange(byte1+1, byte2):
self._blob[j] ^= 255
self._blob[byte2] ^= (1 << offset2) - 1
else:
for j in xrange(start, stop, step):
self.toggle(j)
else:
if i < 0:
i += self._size
if not (0 <= i < self._size):
raise IndexError('bitvector.toggle index out of range')
(byte, offset) = divmod(i, 8)
self._blob[byte] ^= 1 << offset
def append(self, x):
if self._size % 8 == 0:
self._blob.append(1 if x else 0)
elif x:
(byte, offset) = divmod(self._size, 8)
self._blob[byte] |= 1 << offset
self._size += 1
    def toInts(self, ascending=True):
        """Return the bytes as a list of ints; with ascending=False each
        byte is bit-reversed first.  Relies on the Python 2 idiom
        ``map(None, xs)`` acting as the identity (breaks on Python 3)."""
        return map(None if ascending else revbyte, self._blob)
def toBytes(self, ascending=True):
f = chr if ascending else (lambda b: chr(revbyte(b)))
return ''.join(map(f, self._blob))
    @classmethod
    def fromBytes(cls, blob, ascending=True, width=None, fill=False):
        """Build a bitvector from a byte string.

        Each byte contributes 8 bits (bit-reversed when ascending=False);
        the result is then resized to `width` via setWidth when given.
        """
        # `blob` must be a `str` (`bytes` in Python 3)
        f = ord if ascending else (lambda b: revbyte(ord(b)))
        bv = cls()
        bv._blob = array('B', map(f, blob))
        bv._size = len(bv._blob) * 8
        if width is not None:
            bv.setWidth(width, fill)
        return bv
def pop(self, i=-1):
x = self[i]
del self[i]
return x
def __reversed__(self):
if len(self) == 0: return
maxI = len(self) % 8 or 7
for byte in reversed(self._blob):
for i in xrange(maxI, -1, -1):
yield bool(byte & (1 << i))
maxI = 7
def __mul__(self, n):
if n <= 0: return bitvector()
prod = bitvector(self)
for _ in xrange(n-1):
prod.extend(self)
return prod
__rmul__ = __mul__
def __imul__(self, n):
if n <= 0:
self.clear()
else:
tmp = self.copy()
for _ in xrange(n-1):
self.extend(tmp)
return self
def __contains__(self, other):
if other is True:
return any(self._blob)
elif other is False:
(bytes, offset) = divmod(self._size, 8)
if any(b != 255 for b in self._blob[:bytes]):
return True
return offset != 0 and ~self._blob[bytes] & ((1<<offset) - 1) != 0
else:
return self.find(other) != -1
    def count(self, sub, start=0, end=None):
        """Count non-overlapping occurrences of `sub` in self[start:end].

        An empty `sub` matches at every position, giving len(self) + 1
        (mirroring str.count('')).
        """
        ### TODO: Add an optimization for when `sub` is a bool
        if not isinstance(sub, bitvector):
            sub = bitvector(sub)
        if len(sub) == 0:
            return len(self) + 1
        qty = 0
        while True:
            try:
                start = self.index(sub, start, end)
            except ValueError:
                # No further match: index() raised, so we are done.
                return qty
            else:
                qty += 1
                # Skip past the match to keep the count non-overlapping.
                start += len(sub)
def insert(self, i, x):
### TODO: Should insertion of bitvectors also be supported, or just
### bools?
if i < 0: i += len(self)
if i < 0: i = 0
if i == len(self):
self.append(x)
elif i < len(self):
(byte, offset) = divmod(i, 8)
b = self._blob[byte]
carry = 1 if b & 128 else 0
self._blob[byte] = (b & (1 << offset)-1) \
| ((b & (255 << offset)) << 1)
if x:
self._blob[byte] |= 1 << offset
for j in xrange(byte+1, len(self._blob)):
(carry, self._blob[j]) = divmod(self._blob[j] << 1 | carry, 256)
if len(self) % 8 == 0:
self._blob.append(carry)
self._size += 1
else:
raise IndexError('bitvector.insert index out of range')
def remove(self, sub):
if not isinstance(sub, bitvector):
sub = bitvector(sub)
dex = self.index(sub)
del self[dex : dex+len(sub)]
    def reverse(self):
        """Reverse the bit order in place."""
        if len(self) % 8 != 0:
            # Pad up to a byte boundary so a byte-wise reversal lines up.
            self <<= 8 - len(self) % 8
        self._blob.reverse()
        # Reversing byte order is not enough: flip the bits inside each byte.
        for i in xrange(len(self._blob)):
            self._blob[i] = revbyte(self._blob[i])
    def find(self, sub, start=0, end=None):
        """Return the lowest index >= start where `sub` occurs in
        self[start:end], or -1.  An empty `sub` matches immediately.

        For each of the 8 possible bit offsets a shifted copy of `sub`
        and a matching mask are built lazily and cached, so candidate
        positions can be compared byte-wise.
        """
        if not isinstance(sub, bitvector):
            sub = bitvector(sub)
        if start < 0: start += len(self)
        if start < 0: start = 0
        if len(sub) == 0: return start
        if start > len(self): return -1
        if end is None: end = len(self)
        # NOTE(review): a negative `end` is wrapped only after the
        # greater-than-length clamp; confirm this ordering is intentional.
        if end > len(self): end = len(self)
        if end < 0: end += len(self)
        if end < 0: end = 0
        shifted = [sub] + [None] * 7
        masks = [bitvector(True, width=len(sub))] + [None] * 7
        while start < end and end-start >= len(sub):
            (byte, offset) = divmod(start, 8)
            if shifted[offset] is None:
                # Cache sub shifted to this bit offset plus a mask that
                # selects exactly the bits the pattern occupies.
                shifted[offset] = sub << offset
                masks[offset] = bitvector([False] * offset,
                                          width=(offset+len(sub)),
                                          fill=True)
            maskBytes = masks[offset]._blob
            targetBytes = shifted[offset]._blob
            # zip() stops at the pattern's end, so trailing self bytes are
            # ignored; masked bytes must all equal the shifted pattern.
            if all((slf & mask) == target
                   for (slf, mask, target)
                   in zip(self._blob[byte:], maskBytes, targetBytes)):
                return start
            start += 1
        return -1
def index(self, sub, start=0, end=None):
dex = self.find(sub, start, end)
if dex == -1: raise ValueError('bitvector.index(x): x not in bitvector')
else: return dex
def clear(self):
self._blob = array('B')
self._size = 0
def listSetBits(self):
"""Returns an iterator of the indices of all set bits in the
`bitvector`"""
i=0
for byte in self._blob:
for j in xrange(8):
if i >= self._size:
break
if byte & (1 << j):
yield i
i += 1
def listUnsetBits(self):
"""Returns an iterator of the indices of all unset bits in the
`bitvector`"""
i=0
for byte in self._blob:
for j in xrange(8):
if i >= self._size:
break
if not (byte & (1 << j)):
yield i
i += 1
@classmethod
def fromSetBits(cls, bits, width=None):
"""Constructs a `bitvector` from an iterable of indices of bits to set.
If a width is given, indices greater than or equal to `width` will
be discarded. If a width is not given, the width of the resulting
`bitvector` will be the largest index plus 1.
If a negative index is encountered, `width` is added to it first; if
the value is still negative or if `width` is `None`, a `ValueError`
is raised."""
bytes = defaultdict(int)
maxB = 0
for b in bits:
if b < 0 and width is not None:
b += width
if b < 0:
raise ValueError('negative index')
(byte, offset) = divmod(b,8)
bytes[byte] |= 1 << offset
maxB = max(b+1, maxB)
obj = cls()
if width is not None:
if width < 0: raise ValueError('negative width')
maxB = width
if maxB > 0:
obj._blob = array('B', [bytes[i] for i in range((maxB+7) // 8)])
obj._size = maxB
if width is not None and width % 8 != 0:
obj._blob[width//8] &= (1 << (width % 8)) - 1
return obj
    def rstrip(self, val=False):
        """Removes leading zero bits from the `bitvector`, that is, zero bits
        starting at index 0.  The `bitvector` is modified in place.  If
        `val` is `True`, one-bits are removed instead."""
        val = bool(val)
        for (i,b) in enumerate(self._blob):
            if b != (0xFF if val else 0):
                # This byte contains a bit != val; find the first such bit
                # (the inner break is guaranteed to fire) and cut before it.
                for j in xrange(8):
                    if bool(b & (1 << j)) != val:
                        break
                del self[:i*8+j]
                return
        # Every bit equals val: the vector becomes empty.
        self.clear()
    def lstrip(self, val=False):
        """Removes trailing zero bits from the `bitvector`, that is, zero bits
        starting at index `len(self)-1`.  The `bitvector` is modified in
        place.  If `val` is `True`, one-bits are removed instead."""
        val = bool(val)
        for i in xrange(len(self._blob)-1, -1, -1):
            b = self._blob[i]
            if i == len(self._blob)-1 and self._size % 8 != 0 and val == True:
                # Treat the unused padding bits of a partial last byte as
                # ones so they don't stop the stripping of one-bits.
                b = (b | (0xFF << (self._size % 8))) & 0xFF
            if b != (0xFF if val else 0):
                # Find the highest bit != val and truncate just above it.
                for j in xrange(7, -1, -1):
                    if bool(b & (1 << j)) != val:
                        break
                self.setWidth(i*8+j+1)
                return
        # Every bit equals val: the vector becomes empty.
        self.clear()
    def strip(self, val=False):
        """Remove `val` bits from both ends (trailing first, then leading)."""
        self.lstrip(val)
        self.rstrip(val)
def revbyte(b): # internal helper function
    """Return the 8-bit reversal of the byte value `b` (0 <= b <= 255)."""
    result = 0
    for _ in range(8):
        result = (result << 1) | (b & 1)
        b >>= 1
    return result
| |
#!/usr/bin/env python
"""
* *******************************************************
* Copyright (c) VMware, Inc. 2014, 2016. All Rights Reserved.
* SPDX-License-Identifier: MIT
* *******************************************************
*
* DISCLAIMER. THIS PROGRAM IS PROVIDED TO YOU "AS IS" WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, WHETHER ORAL OR WRITTEN,
* EXPRESS OR IMPLIED. THE AUTHOR SPECIFICALLY DISCLAIMS ANY IMPLIED
* WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY,
* NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
"""
__author__ = 'VMware, Inc.'
__copyright__ = 'Copyright 2014, 2016 VMware, Inc. All rights reserved.'
__vcenter_version__ = '6.0+'
import time
from com.vmware.cis.tagging_client import (
Category, CategoryModel, Tag, TagAssociation)
from com.vmware.vapi.std_client import DynamicID
from samples.vsphere.common.sample_base import SampleBase
from samples.vsphere.common.vim.helpers.get_cluster_by_name import get_cluster_id
class TaggingWorkflow(SampleBase):
    """
    Demonstrates tagging CRUD operations
    Step 1: Create a Tag category.
    Step 2: Create a Tag under the category.
    Step 3: Retrieve the managed object id of an existing cluster from its name.
    Step 4: Assign the tag to the cluster.
    Additional steps when clearData flag is set to TRUE:
    Step 5: Detach the tag from the cluster.
    Step 6: Delete the tag.
    Step 7: Delete the tag category.
    Note: the sample needs an existing cluster
    """

    def __init__(self):
        # The class docstring is passed to SampleBase and shown as the
        # sample's description, so keep it user-readable.
        SampleBase.__init__(self, self.__doc__)
        self.servicemanager = None
        self.category_svc = None
        self.tag_svc = None
        self.tag_association = None
        self.category_name = None
        self.category_desc = None
        self.tag_name = None
        self.tag_desc = None
        self.cluster_name = None
        self.cluster_moid = None
        self.category_id = None
        self.tag_id = None
        self.tag_attached = False
        self.dynamic_id = None

    def _options(self):
        """Register the sample's command line arguments."""
        self.argparser.add_argument('-clustername', '--clustername', help='Name of the cluster to be tagged')
        self.argparser.add_argument('-categoryname', '--categoryname', help='Name of the Category to be created')
        self.argparser.add_argument('-categorydesc', '--categorydesc', help='Description of the Category to be created')
        self.argparser.add_argument('-tagname', '--tagname', help='Name of the tag to be created')
        self.argparser.add_argument('-tagdesc', '--tagdesc', help='Description of the tag to be created')

    def _setup(self):
        """Validate inputs, resolve the cluster moid and build the stubs."""
        if self.cluster_name is None:  # for testing
            self.cluster_name = self.args.clustername
        assert self.cluster_name is not None
        print('Cluster Name: {0}'.format(self.cluster_name))

        if self.category_name is None:
            self.category_name = self.args.categoryname
        assert self.category_name is not None
        print('Category Name: {0}'.format(self.category_name))

        if self.category_desc is None:
            self.category_desc = self.args.categorydesc
        assert self.category_desc is not None
        print('Category Description: {0}'.format(self.category_desc))

        if self.tag_name is None:
            self.tag_name = self.args.tagname
        assert self.tag_name is not None
        print('Tag Name: {0}'.format(self.tag_name))

        if self.tag_desc is None:
            self.tag_desc = self.args.tagdesc
        assert self.tag_desc is not None
        print('Tag Description: {0}'.format(self.tag_desc))

        if self.servicemanager is None:
            self.servicemanager = self.get_service_manager()

        # Sample is not failing if Clustername passed is not valid
        # Validating if Cluster Name passed is Valid
        print('finding the cluster {0}'.format(self.cluster_name))
        self.cluster_moid = get_cluster_id(service_manager=self.servicemanager, cluster_name=self.cluster_name)
        assert self.cluster_moid is not None
        print('Found cluster:{0} mo_id:{1}'.format(self.cluster_name, self.cluster_moid))

        self.category_svc = Category(self.servicemanager.stub_config)
        self.tag_svc = Tag(self.servicemanager.stub_config)
        self.tag_association = TagAssociation(self.servicemanager.stub_config)

    def _execute(self):
        """Run the tagging workflow: list, create, update, attach."""
        print('List all the existing categories user has access to...')
        categories = self.category_svc.list()
        if len(categories) > 0:
            for category in categories:
                print('Found Category: {0}'.format(category))
        else:
            print('No Tag Category Found...')

        print('List all the existing tags user has access to...')
        tags = self.tag_svc.list()
        if len(tags) > 0:
            for tag in tags:
                print('Found Tag: {0}'.format(tag))
        else:
            print('No Tag Found...')

        print('creating a new tag category...')
        self.category_id = self.create_tag_category(self.category_name, self.category_desc,
                                                    CategoryModel.Cardinality.MULTIPLE)
        assert self.category_id is not None
        print('Tag category created; Id: {0}'.format(self.category_id))

        print("creating a new Tag...")
        self.tag_id = self.create_tag(self.tag_name, self.tag_desc, self.category_id)
        assert self.tag_id is not None
        print('Tag created; Id: {0}'.format(self.tag_id))

        print('updating the tag...')
        date_time = time.strftime('%d/%m/%Y %H:%M:%S')
        self.update_tag(self.tag_id, 'Server Tag updated at ' + date_time)
        print('Tag updated; Id: {0}'.format(self.tag_id))

        print('Tagging the cluster {0}...'.format(self.cluster_name))
        self.dynamic_id = DynamicID(type='ClusterComputeResource', id=self.cluster_moid)
        self.tag_association.attach(tag_id=self.tag_id, object_id=self.dynamic_id)
        for tag_id in self.tag_association.list_attached_tags(self.dynamic_id):
            if tag_id == self.tag_id:
                self.tag_attached = True
                break
        assert self.tag_attached
        print('Tagged cluster: {0}'.format(self.cluster_moid))

    def _cleanup(self):
        """Detach the tag and delete the tag and category if they exist.

        Fix: the original wrapped any failure in ``raise Exception(e)``,
        which discarded the exception type and traceback; errors now
        propagate unchanged.
        """
        if self.tag_attached:
            self.tag_association.detach(self.tag_id, self.dynamic_id)
            print('Removed tag from cluster: {0}'.format(self.cluster_moid))
        if self.tag_id is not None:
            self.delete_tag(self.tag_id)
            print('Tag deleted; Id: {0}'.format(self.tag_id))
        if self.category_id is not None:
            self.delete_tag_category(self.category_id)
            print('Tag category deleted; Id: {0}'.format(self.category_id))

    def create_tag_category(self, name, description, cardinality):
        """create a category. User who invokes this needs create category privilege."""
        create_spec = self.category_svc.CreateSpec()
        create_spec.name = name
        create_spec.description = description
        create_spec.cardinality = cardinality
        # Empty set == the category may be attached to any object type.
        associable_types = set()
        create_spec.associable_types = associable_types
        return self.category_svc.create(create_spec)

    def delete_tag_category(self, category_id):
        """Deletes an existing tag category; User who invokes this API needs
        delete privilege on the tag category.
        """
        self.category_svc.delete(category_id)

    def create_tag(self, name, description, category_id):
        """Creates a Tag"""
        create_spec = self.tag_svc.CreateSpec()
        create_spec.name = name
        create_spec.description = description
        create_spec.category_id = category_id
        return self.tag_svc.create(create_spec)

    def update_tag(self, tag_id, description):
        """Update the description of an existing tag.
        User who invokes this API needs edit privilege on the tag.
        """
        update_spec = self.tag_svc.UpdateSpec()
        # Fix: the Python vAPI UpdateSpec exposes a plain `description`
        # attribute; assigning to `setDescription` (Java-style) just created
        # an unused attribute, so the tag description was never updated.
        update_spec.description = description
        self.tag_svc.update(tag_id, update_spec)

    def delete_tag(self, tag_id):
        """Delete an existing tag.
        User who invokes this API needs delete privilege on the tag."""
        self.tag_svc.delete(tag_id)
def main():
    """Entry point: build and run the tagging workflow sample."""
    TaggingWorkflow().main()
# Start program only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| |
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""This module contains a class for handling request queries."""
# flake8: noqa
import collections
import datetime
import functools
import operator
import flask
import sqlalchemy as sa
from ggrc import db
from ggrc import models
from ggrc.login import is_creator
from ggrc.fulltext.mysql import MysqlRecordProperty as Record
from ggrc.models import inflector
from ggrc.models.reflection import AttributeInfo
from ggrc.models.relationship_helper import RelationshipHelper
from ggrc.models.custom_attribute_definition import CustomAttributeDefinition
from ggrc.models.custom_attribute_value import CustomAttributeValue
from ggrc.converters import get_exportables
from ggrc.rbac import context_query_filter
from ggrc.utils import query_helpers, benchmark, convert_date_format
from ggrc_basic_permissions import UserRole
class BadQueryException(Exception):
  """Raised when a request query is malformed or contains invalid values."""
  pass
# pylint: disable=too-few-public-methods
class QueryHelper(object):
"""Helper class for handling request queries
Primary use for this class is to get list of object ids for each object type
defined in the query. All objects must pass the query filters if they are
defined.
query object = [
{
object_name: search class name,
permissions: either read or update, if none are given it defaults to read
order_by: [
{
"name": the name of the field by which to do the sorting
"desc": optional; if True, invert the sorting order
}
]
limit: [from, to] - limit the result list to a slice result[from, to]
filters: {
relevant_filters:
these filters will return all ids of the "search class name" object
that are mapped to objects defined in the dictionary inside the list.
[ list of filters joined by OR expression
[ list of filters joined by AND expression
{
"object_name": class of relevant object,
"slugs": list of relevant object slugs,
optional and if exists will be converted into ids
"ids": list of relevant object ids
}
]
],
object_filters: {
TODO: allow filtering by title, description and other object fields
}
}
}
]
After the query is done (by `get_ids` method), the results are appended to
each query object:
query object with results = [
{
object_name: search class name,
(all other object query fields)
ids: [ list of filtered objects ids ]
}
]
The result fields may or may not be present in the resulting query depending
on the attributes of `get` method.
"""
  def __init__(self, query, ca_disabled=False):
    """Initialize the helper.

    Args:
      query: list of object-query dicts (see the class docstring).
      ca_disabled: if True, custom attribute handling is disabled.
    """
    self.object_map = {o.__name__: o for o in models.all_models.all_models}
    self.query = self._clean_query(query)
    self.ca_disabled = ca_disabled
    self._set_attr_name_map()
    self._count = 0  # used to generate unique aliases for fulltext/CA joins
  def _set_attr_name_map(self):
    """ build a map for attributes names and display names

    Dict containing all display_name to attr_name mappings
    for all objects used in the current query
    Example:
        { Program: {"Program URL": "url", "Code": "slug", ...} ...}
    """
    self.attr_name_map = {}
    for object_query in self.query:
      object_name = object_query["object_name"]
      object_class = self.object_map[object_name]
      aliases = AttributeInfo.gather_aliases(object_class)
      self.attr_name_map[object_class] = {}
      for key, value in aliases.items():
        filter_by = None
        if isinstance(value, dict):
          # Dict-valued aliases may carry a custom filter function name.
          filter_name = value.get("filter_by", None)
          if filter_name is not None:
            filter_by = getattr(object_class, filter_name, None)
          name = value["display_name"]
        else:
          name = value
        if name:
          # Map lowercased display name -> (attr name, optional filter).
          self.attr_name_map[object_class][name.lower()] = (key.lower(),
                                                            filter_by)
def _clean_query(self, query):
""" sanitize the query object """
for object_query in query:
filters = object_query.get("filters", {}).get("expression")
self._clean_filters(filters)
self._macro_expand_object_query(object_query)
return query
  def _clean_filters(self, expression):
    """Prepare the filter expression for building the query.

    Recursively converts any "slugs" into ids and coerces all ids to int.
    """
    if not expression or not isinstance(expression, dict):
      return
    slugs = expression.get("slugs")
    if slugs:
      ids = expression.get("ids", [])
      ids.extend(self._slugs_to_ids(expression["object_name"], slugs))
      expression["ids"] = ids
    try:
      expression["ids"] = [int(id_) for id_ in expression.get("ids", [])]
    except ValueError as error:
      # catch missing relevant filter (undefined id)
      if expression.get("op", {}).get("name", "") == "relevant":
        raise BadQueryException(u"Invalid relevant filter for {}".format(
                                expression.get("object_name", "")))
      raise error
    # Recurse into both operands of a binary expression.
    self._clean_filters(expression.get("left"))
    self._clean_filters(expression.get("right"))
def _expression_keys(self, exp):
"""Return the list of keys specified in the expression."""
operator_name = exp.get("op", {}).get("name", None)
if operator_name in ["AND", "OR"]:
return self._expression_keys(exp["left"]).union(
self._expression_keys(exp["right"]))
left = exp.get("left", None)
if left is not None and isinstance(left, collections.Hashable):
return set([left])
else:
return set()
  def _macro_expand_object_query(self, object_query):
    """Expand object query.

    For TaskGroupTask queries, rewrites "start"/"end" date filters into
    concrete date or relative-day/month comparisons.
    """
    def expand_task_dates(exp):
      """Parse task dates from the specified expression."""
      if not isinstance(exp, dict) or "op" not in exp:
        return
      operator_name = exp["op"]["name"]
      if operator_name in ["AND", "OR"]:
        expand_task_dates(exp["left"])
        expand_task_dates(exp["right"])
      elif isinstance(exp["left"], (str, unicode)):
        key = exp["left"]
        if key in ["start", "end"]:
          parts = exp["right"].split("/")
          if len(parts) == 3:
            # Full MM/DD/YYYY date: compare against the *_date column.
            try:
              month, day, year = [int(part) for part in parts]
            except Exception:
              raise BadQueryException(
                  "Date must consist of numbers")
            exp["left"] = key + "_date"
            exp["right"] = datetime.date(year, month, day)
          elif len(parts) == 2:
            # MM/DD: split into relative month AND relative day filters.
            month, day = parts
            exp["op"] = {"name": u"AND"}
            exp["left"] = {
                "op": {"name": operator_name},
                "left": "relative_" + key + "_month",
                "right": month,
            }
            exp["right"] = {
                "op": {"name": operator_name},
                "left": "relative_" + key + "_day",
                "right": day,
            }
          elif len(parts) == 1:
            # DD only: compare against the relative day.
            exp["left"] = "relative_" + key + "_day"
          else:
            raise BadQueryException(u"Field {} should be a date of one of the"
                                    u" following forms: DD, MM/DD, MM/DD/YYYY"
                                    .format(key))

    if object_query["object_name"] == "TaskGroupTask":
      filters = object_query.get("filters")
      if filters is not None:
        exp = filters["expression"]
        keys = self._expression_keys(exp)
        if "start" in keys or "end" in keys:
          expand_task_dates(exp)
def get_ids(self):
"""Get a list of filtered object IDs.
self.query should contain a list of queries for different objects which
will get evaluated and turned into a list of object IDs.
Returns:
list of dicts: same query as the input with all ids that match the filter
"""
for object_query in self.query:
ids = self._get_ids(object_query)
object_query["ids"] = ids
return self.query
  @staticmethod
  def _get_type_query(model, permission_type):
    """Filter by contexts and resources

    Prepare query to filter models based on the available contexts and
    resources for the given type of object.

    Returns an SQLAlchemy boolean clause, or (implicitly) None when
    `contexts` is None, meaning access is unrestricted — the caller checks
    for None before applying the filter.
    """
    contexts, resources = query_helpers.get_context_resource(
        model_name=model.__name__, permission_type=permission_type
    )

    if contexts is not None:
      if resources:
        resource_sql = model.id.in_(resources)
      else:
        # If no resources are available, only the context filter can match.
        resource_sql = sa.sql.false()

      return sa.or_(
          context_query_filter(model.context_id, contexts),
          resource_sql)
def _get_objects(self, object_query):
"""Get a set of objects described in the filters."""
with benchmark("Get ids: _get_objects -> _get_ids"):
ids = self._get_ids(object_query)
if not ids:
return set()
object_name = object_query["object_name"]
object_class = self.object_map[object_name]
query = object_class.eager_query()
query = query.filter(object_class.id.in_(ids))
with benchmark("Get objects by ids: _get_objects -> obj in query"):
id_object_map = {obj.id: obj for obj in query}
with benchmark("Order objects by ids: _get_objects"):
objects = [id_object_map[id_] for id_ in ids]
return objects
  def _get_ids(self, object_query):
    """Get a set of ids of objects described in the filters.

    Applies, in order: permission filtering, the parsed filter expression,
    optional ordering and an optional limit.  Also stores the total match
    count under object_query["total"].
    """
    object_name = object_query["object_name"]
    expression = object_query.get("filters", {}).get("expression")
    if expression is None:
      return set()
    object_class = self.object_map[object_name]
    query = db.session.query(object_class.id)

    requested_permissions = object_query.get("permissions", "read")
    with benchmark("Get permissions: _get_ids > _get_type_query"):
      type_query = self._get_type_query(object_class, requested_permissions)
      if type_query is not None:
        query = query.filter(type_query)
    with benchmark("Parse filter query: _get_ids > _build_expression"):
      filter_expression = self._build_expression(
          expression,
          object_class,
      )
      if filter_expression is not None:
        query = query.filter(filter_expression)
    if object_query.get("order_by"):
      with benchmark("Sorting: _get_ids > order_by"):
        query = self._apply_order_by(
            object_class,
            query,
            object_query["order_by"],
        )
    with benchmark("Apply limit"):
      limit = object_query.get("limit")
      if limit:
        ids, total = self._apply_limit(query, limit)
      else:
        ids = [obj.id for obj in query]
        total = len(ids)
      object_query["total"] = total

    if hasattr(flask.g, "similar_objects_query"):
      # delete similar_objects_query for the case when several queries are
      # POSTed in one request, the first one filters by similarity and the
      # second one doesn't but tries to sort by __similarity__
      delattr(flask.g, "similar_objects_query")
    return ids
  @staticmethod
  def _apply_limit(query, limit):
    """Apply limits for pagination.

    Args:
      query: filter query;
      limit: a tuple of indexes in format (from, to); objects is sliced to
            objects[from, to].

    Returns:
      matched objects ids and total count.

    Raises:
      BadQueryException: when the limit pair is non-numeric, negative, or
        not strictly increasing.
    """
    try:
      first, last = limit
      first, last = int(first), int(last)
    except (ValueError, TypeError):
      raise BadQueryException("Invalid limit operator. Integers expected.")

    if first < 0 or last < 0:
      raise BadQueryException("Limit cannot contain negative numbers.")
    elif first >= last:
      raise BadQueryException("Limit start should be smaller than end.")
    else:
      page_size = last - first
      with benchmark("Apply limit: _apply_limit > query_limit"):
        # Note: limit request syntax is limit:[0,10]. We are counting
        # offset from 0 as the offset of the initial row for sql is 0 (not 1).
        ids = [obj.id for obj in query.limit(page_size).offset(first)]
      with benchmark("Apply limit: _apply_limit > query_count"):
        if len(ids) < page_size:
          # A short page means we reached the end: total is known for free.
          total = len(ids) + first
        else:
          # Note: using func.count() as query.count() is generating additional
          # subquery
          count_q = query.statement.with_only_columns([sa.func.count()])
          total = db.session.execute(count_q).scalar()

    return ids, total
  def _apply_order_by(self, model, query, order_by):
    """Add ordering parameters to a query for objects.

    This works only on direct model properties and related objects defined with
    foreign keys and fails if any CAs are specified in order_by.

    Args:
      model: the model instances of which are requested in query;
      query: a query to get objects from the db;
      order_by: a list of dicts with keys "name" (the name of the field by which
                to sort) and "desc" (optional; do reverse sort if True).
                If order_by["name"] == "__similarity__" (a special non-field value),
                similarity weights returned by get_similar_objects_query are used for
                sorting.
                If sorting by a relationship field is requested, the following sorting is
                applied:
                1. If the field is a relationship to a Titled model, sort by its title.
                2. If the field is a relationship to Person, sort by its name or email (if
                   name is None or empty string for a person object).
                3. Otherwise, raise a NotImplementedError.

    Returns:
      the query with sorting parameters.
    """
    def sorting_field_for_person(person):
      """Get right field to sort people by: name if defined or email."""
      return sa.case([(sa.not_(sa.or_(person.name.is_(None),
                                      person.name == '')),
                       person.name)],
                     else_=person.email)

    def joins_and_order(clause):
      """Get join operations and ordering field from item of order_by list.

      Args:
        clause: {"name": the name of model's field,
                 "desc": reverse sort on this field if True}

      Returns:
        ([joins], order) - a tuple of joins required for this ordering to work
                           and ordering clause itself; join is None if no join
                           required or [(aliased entity, relationship field)]
                           if joins required.
      """
      def by_similarity():
        """Join similar_objects subquery, order by weight from it."""
        join_target = flask.g.similar_objects_query.subquery()
        join_condition = model.id == join_target.c.id
        joins = [(join_target, join_condition)]
        order = join_target.c.weight
        return joins, order

      def by_ca():
        """Join fulltext index table, order by indexed CA value."""
        # self._count makes each alias unique when several CA sorts are used.
        alias = sa.orm.aliased(Record, name=u"fulltext_{}".format(self._count))
        joins = [(alias, sa.and_(
            alias.key == model.id,
            alias.type == model.__name__,
            alias.property == key)
        )]
        order = alias.content
        return joins, order

      def by_foreign_key():
        """Join the related model, order by title or name/email."""
        related_model = attr.property.mapper.class_
        if issubclass(related_model, models.mixins.Titled):
          joins = [(alias, _)] = [(sa.orm.aliased(attr), attr)]
          order = alias.title
        elif issubclass(related_model, models.Person):
          joins = [(alias, _)] = [(sa.orm.aliased(attr), attr)]
          order = sorting_field_for_person(alias)
        else:
          raise NotImplementedError(u"Sorting by {model.__name__} is "
                                    u"not implemented yet."
                                    .format(model=related_model))
        return joins, order

      def by_m2m():
        """Join the Person model, order by name/email.

        Implemented only for ObjectOwner mapping.
        """
        if issubclass(attr.target_class, models.object_owner.ObjectOwner):
          # NOTE: In the current implementation we sort only by the first
          # assigned owner if multiple owners defined
          oo_alias_1 = sa.orm.aliased(models.object_owner.ObjectOwner)
          oo_alias_2 = sa.orm.aliased(models.object_owner.ObjectOwner)
          # Anti-join trick: keep only the ObjectOwner row with the lowest
          # id per ownable (no other row with a smaller id may exist).
          oo_subq = db.session.query(
              oo_alias_1.ownable_id,
              oo_alias_1.ownable_type,
              oo_alias_1.person_id,
          ).filter(
              oo_alias_1.ownable_type == model.__name__,
              ~sa.exists().where(sa.and_(
                  oo_alias_2.ownable_id == oo_alias_1.ownable_id,
                  oo_alias_2.ownable_type == oo_alias_1.ownable_type,
                  oo_alias_2.id < oo_alias_1.id,
              )),
          ).subquery()

          owner = sa.orm.aliased(models.Person, name="owner")

          joins = [
              (oo_subq, sa.and_(model.__name__ == oo_subq.c.ownable_type,
                                model.id == oo_subq.c.ownable_id)),
              (owner, oo_subq.c.person_id == owner.id),
          ]
          order = sorting_field_for_person(owner)
        else:
          raise NotImplementedError(u"Sorting by m2m-field '{key}' "
                                    u"is not implemented yet."
                                    .format(key=key))
        return joins, order

      # transform clause["name"] into a model's field name
      key = clause["name"].lower()

      if key == "__similarity__":
        # special case
        if hasattr(flask.g, "similar_objects_query"):
          joins, order = by_similarity()
        else:
          raise BadQueryException("Can't order by '__similarity__' when no ",
                                  "'similar' filter was applied.")
      else:
        key, _ = self.attr_name_map[model].get(key, (key, None))
        attr = getattr(model, key.encode('utf-8'), None)
        if attr is None:
          # non object attributes are treated as custom attributes
          self._count += 1
          joins, order = by_ca()
        elif (isinstance(attr, sa.orm.attributes.InstrumentedAttribute) and
              isinstance(attr.property,
                         sa.orm.properties.RelationshipProperty)):
          joins, order = by_foreign_key()
        elif isinstance(attr, sa.ext.associationproxy.AssociationProxy):
          joins, order = by_m2m()
        else:
          # a simple attribute
          joins, order = None, attr

      if clause.get("desc", False):
        order = order.desc()

      return joins, order

    join_lists, orders = zip(*[joins_and_order(clause) for clause in order_by])
    for join_list in join_lists:
      if join_list is not None:
        for join in join_list:
          query = query.outerjoin(*join)

    return query.order_by(*orders)
def _build_expression(self, exp, object_class):
"""Make an SQLAlchemy filtering expression from exp expression tree."""
if "op" not in exp:
return None
def autocast(o_key, operator_name, value):
  """Try to guess the type of `value` and parse it from the string.

  Args:
    o_key (basestring): the name of the field being compared; the `value`
      is converted to the type of that field.
    operator_name: the name of the operator being applied.
    value: the value being compared.

  Returns:
    a list of one or several possible meanings of `value` type compliant
    with `getattr(object_class, o_key)`.
  """
  def has_date_or_non_date_cad(title, definition_type):
    """Check if there is a date and a non-date CA named title.

    Returns:
      (bool, bool) - flags indicating the presence of date and non-date CA.
    """
    cad_query = db.session.query(CustomAttributeDefinition).filter(
        CustomAttributeDefinition.title == title,
        CustomAttributeDefinition.definition_type == definition_type,
    )
    date_cad = bool(cad_query.filter(
        CustomAttributeDefinition.
        attribute_type == CustomAttributeDefinition.ValidTypes.DATE,
    ).count())
    non_date_cad = bool(cad_query.filter(
        CustomAttributeDefinition.
        attribute_type != CustomAttributeDefinition.ValidTypes.DATE,
    ).count())
    return date_cad, non_date_cad

  # Non-string keys cannot name a field; compare the value as-is.
  if not isinstance(o_key, basestring):
    return [value]
  key, custom_filter = (self.attr_name_map[object_class]
                        .get(o_key, (o_key, None)))
  date_attr = date_cad = non_date_cad = False
  try:
    # Own column: inspect its SQLAlchemy type directly.
    attr_type = getattr(object_class, key).property.columns[0].type
  except AttributeError:
    # Not a column; fall back to custom attribute definitions with this title.
    date_cad, non_date_cad = has_date_or_non_date_cad(
        title=key,
        definition_type=object_class.__name__,
    )
    if not (date_cad or non_date_cad) and not custom_filter:
      # TODO: this logic fails on CA search for Snapshots
      pass
      # no CA with this name and no custom filter for the field
      # raise BadQueryException(u"Model {} has no field or CA {}"
      #                         .format(object_class.__name__, o_key))
  else:
    if isinstance(attr_type, sa.sql.sqltypes.Date):
      date_attr = True

  converted_date = None
  # Only attempt date parsing when some date-typed target exists.
  if (date_attr or date_cad) and isinstance(value, basestring):
    try:
      converted_date = convert_date_format(
          value,
          CustomAttributeValue.DATE_FORMAT_JSON,
          CustomAttributeValue.DATE_FORMAT_DB,
      )
    except (TypeError, ValueError):
      # wrong format or not a date
      if not non_date_cad:
        # o_key is not a non-date CA
        raise BadQueryException(u"Field '{}' expects a '{}' date"
                                .format(
                                    o_key,
                                    CustomAttributeValue.DATE_FORMAT_JSON,
                                ))

  if date_attr or (date_cad and not non_date_cad):
    # Filter by converted date
    return [converted_date]
  elif date_cad and non_date_cad and converted_date is None:
    # Filter by unconverted string as date conversion was unsuccessful
    return [value]
  elif date_cad and non_date_cad:
    if operator_name in ("<", ">"):
      # "<" and ">" works incorrectly when searching by CA in both formats
      return [converted_date]
    else:
      # Since we can have two local CADs with same name when one is Date
      # and another is Text, we should handle the case when the user wants
      # to search by the Text CA that should not be converted
      return [converted_date, value]
  else:
    # Filter by unconverted string
    return [value]
def _backlink(object_name, ids):
  """Convert ("__previous__", [query_id]) into (model_name, ids).

  When `object_name` is the special marker "__previous__", reuse the
  object_name and result ids of the already-executed query found at
  index `ids[0]` in self.query.

  Example:
    self.query[0] = {object_name: "Assessment",
                     type: "ids",
                     expression: {something}}
    _backlink("__previous__", [0]) -> ("Assessment", ids from query[0])

  Returns:
    (object_name, ids), resolved against self.query when needed.
  """
  if object_name != "__previous__":
    return object_name, ids
  previous_query = self.query[ids[0]]
  return previous_query["object_name"], previous_query["ids"]
def relevant(object_name, ids):
  """Filter by relevant object.

  Args:
    object_name (basestring): the name of the related model.
    ids ([int]): the ids of related objects of type `object_name`.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression matching objects of
    `object_class` related (via a Relationship or another m2m) to one of
    the given objects.
  """
  related_ids = RelationshipHelper.get_ids_related_to(
      object_class.__name__,
      object_name,
      ids,
  )
  return object_class.id.in_(related_ids)
def similar(object_name, ids):
  """Filter by relationships similarity.

  Note: only the first id from the list of ids is used.

  Args:
    object_name: the name of the class of the objects to which similarity
      will be computed.
    ids: the ids of similar objects of type `object_name`.

  Raises:
    BadQueryException: if `object_name`'s class does not define weights
      for relationship similarity.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression if an object of `object_class`
    is similar to one the given objects.
  """
  similar_class = self.object_map[object_name]
  if not hasattr(similar_class, "get_similar_objects_query"):
    # BUG FIX: this exception was previously `return`ed instead of raised,
    # so the error object leaked into the expression tree instead of
    # aborting the query with a proper error.
    raise BadQueryException(u"{} does not define weights to count "
                            u"relationships similarity"
                            .format(similar_class.__name__))
  similar_objects_query = similar_class.get_similar_objects_query(
      id_=ids[0],
      types=[object_class.__name__],
  )
  # Stash the query so ordering by "__similarity__" can reuse it later.
  flask.g.similar_objects_query = similar_objects_query
  similar_objects_ids = [obj.id for obj in similar_objects_query]
  if similar_objects_ids:
    return object_class.id.in_(similar_objects_ids)
  return sa.sql.false()
def unknown():
  """A fake operator: raise for an operator name with no implementation."""
  raise BadQueryException(
      u"Unknown operator \"{}\"".format(exp["op"]["name"]))
def default_filter_by(object_class, key, predicate):
  """Default filter option that tries to match predicate in fulltext index.

  This function tries to match the predicate for a given key with entries
  in the full text index table.

  Args:
    object_class: class of the object we are querying for.
    key: string containing attribute name on which we are filtering.
    predicate: function containing the correct comparison predicate for
      the attribute value.

  Returns:
    Query predicate if the given predicate matches a value for the correct
    custom attribute.
  """
  matching_record_keys = db.session.query(Record.key).filter(
      Record.type == object_class.__name__,
      Record.property == key,
      predicate(Record.content)
  )
  return object_class.id.in_(matching_record_keys)
def with_key(key, predicate):
  """Apply keys to the filter expression.

  Args:
    key: string containing attribute name on which we are filtering.
    predicate: function containing a comparison for attribute value.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression with:
    `filter_by(predicate)` if there is custom filtering logic for `key`,
    `predicate(getattr(object_class, key))` for own attributes,
    `predicate(value of corresponding custom attribute)` otherwise.
  """
  normalized = key.lower()
  normalized, filter_by = self.attr_name_map[object_class].get(
      normalized, (normalized, None))
  if callable(filter_by):
    return filter_by(predicate)
  attr = getattr(object_class, normalized, None)
  if attr:
    return predicate(attr)
  return default_filter_by(object_class, normalized, predicate)
def lift_bin(operation):
  """Combine the built left and right subtrees with a binary SQL operation."""
  return operation(self._build_expression(exp["left"], object_class),
                   self._build_expression(exp["right"], object_class))
def text_search(text):
  """Filter by fulltext search.

  The search is done only in fields indexed for fulltext search.

  Args:
    text: the text we are searching for.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression if an object of `object_class`
    has an indexed property that contains `text`.
  """
  matching_record_keys = db.session.query(Record.key).filter(
      Record.type == object_class.__name__,
      Record.content.ilike(u"%{}%".format(text)),
  )
  return object_class.id.in_(matching_record_keys)
def rhs_variants():
  """Build the typed interpretations of the expression's right operand."""
  return autocast(exp["left"], exp["op"]["name"], exp["right"])
def owned(ids):
  """Get objects for which the user is owner.

  Note: only the first id from the list of ids is used.

  Args:
    ids: the ids of owners.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression if an object of `object_class`
    is owned by one of the given users.
  """
  my_objects = query_helpers.get_myobjects_query(
      types=[object_class.__name__],
      contact_id=ids[0],
      is_creator=is_creator(),
  ).alias()
  rows = db.session.query(my_objects.c.id).all()
  if rows:
    return object_class.id.in_([row.id for row in rows])
  return sa.sql.false()
def related_people(related_type, related_ids):
  """Get people related to the specified object.

  Returns the following people:
    for each object type: the users mapped via PeopleObjects,
    for Program: the users that have a Program-wide role,
    for Audit: the users that have a Program-wide or Audit-wide role,
    for Workflow: the users mapped via WorkflowPeople and
                  the users that have a Workflow-wide role.

  Args:
    related_type: the name of the class of the related objects.
    related_ids: the ids of related objects.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression if an object of `object_class`
    is related to the given users.
  """
  # One side of the relation must be Person for this filter to make sense.
  if "Person" not in [object_class.__name__, related_type]:
    return sa.sql.false()
  model = inflector.get_model(related_type)
  res = []

  # Users mapped directly to the objects.
  res.extend(RelationshipHelper.person_object(
      object_class.__name__,
      related_type,
      related_ids,
  ))

  if related_type in ('Program', 'Audit'):
    # Users holding a role in the object's own context.
    res.extend(
        db.session.query(UserRole.person_id).join(model, sa.and_(
            UserRole.context_id == model.context_id,
            model.id.in_(related_ids),
        ))
    )
    if related_type == "Audit":
      # Audits also inherit people with a role in the parent Program.
      res.extend(
          db.session.query(UserRole.person_id).join(
              models.Program,
              UserRole.context_id == models.Program.context_id,
          ).join(model, sa.and_(
              models.Program.id == model.program_id,
              model.id.in_(related_ids),
          ))
      )
  if "Workflow" in (object_class.__name__, related_type):
    try:
      # Imported lazily because the workflows extension is optional.
      from ggrc_workflows.models import (relationship_helper as
                                         wf_relationship_handler)
    except ImportError:
      # ggrc_workflows module is not enabled
      return sa.sql.false()
    else:
      res.extend(wf_relationship_handler.workflow_person(
          object_class.__name__,
          related_type,
          related_ids,
      ))
  if res:
    return object_class.id.in_([obj[0] for obj in res])
  return sa.sql.false()
def build_op(exp_left, predicate, rhs_variants):
  """Apply predicate to `exp_left` and each `rhs` and join them with SQL OR.

  Args:
    exp_left: description of left operand from the expression tree.
    predicate: a comparison function between a field and a value.
    rhs_variants: a list of possible interpretations of right operand,
      typically a list of strings.

  Raises:
    ValueError if rhs_variants is empty.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression if predicate matches exp_left
    and any of rhs variants.
  """
  if not rhs_variants:
    raise ValueError("Expected non-empty sequence in 'rhs_variants', got "
                     "{!r} instead".format(rhs_variants))

  def any_variant_matches(lhs):
    # OR together predicate(lhs, rhs) over every rhs interpretation.
    return functools.reduce(
        sa.or_,
        (predicate(lhs, rhs) for rhs in rhs_variants),
    )

  return with_key(exp_left, any_variant_matches)
def build_op_shortcut(predicate):
  """A shortcut to call build_op with default lhs and rhs."""
  return build_op(exp["left"], predicate, rhs_variants())

def like(left, right):
  """Handle ~ operator with SQL LIKE."""
  return left.ilike(u"%{}%".format(right))

# Dispatch table: maps the operator name from the expression tree to a
# thunk that builds the corresponding SQLAlchemy expression.
ops = {
    "AND": lambda: lift_bin(sa.and_),
    "OR": lambda: lift_bin(sa.or_),
    "=": lambda: build_op_shortcut(operator.eq),
    "!=": lambda: sa.not_(build_op_shortcut(operator.eq)),
    "~": lambda: build_op_shortcut(like),
    "!~": lambda: sa.not_(build_op_shortcut(like)),
    "<": lambda: build_op_shortcut(operator.lt),
    ">": lambda: build_op_shortcut(operator.gt),
    "relevant": lambda: relevant(*_backlink(exp["object_name"],
                                            exp["ids"])),
    "text_search": lambda: text_search(exp["text"]),
    "similar": lambda: similar(exp["object_name"], exp["ids"]),
    "owned": lambda: owned(exp["ids"]),
    "related_people": lambda: related_people(exp["object_name"],
                                             exp["ids"]),
}

# Unrecognized operator names fall through to `unknown`, which raises.
return ops.get(exp["op"]["name"], unknown)()
def _slugs_to_ids(self, object_name, slugs):
"""Convert SLUG to proper ids for the given objec."""
object_class = self.object_map.get(object_name)
if not object_class:
return []
ids = [c.id for c in object_class.query.filter(
object_class.slug.in_(slugs)).all()]
return ids
| |
#!/usr/bin/env python
import argparse
import shutil
import os
import time
import csv
import json
import random
from PIL import (Image, ImageOps)
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.utils import shuffle
import plyvel
from caffe_pb2 import Datum
import utils
def parse_command_line():
    """Parse CLI options, seed RNG deterministically and run preparation."""
    parser = argparse.ArgumentParser(description="""Prepares data for training via Caffe""")
    parser.add_argument("--input_metadata", help="Path to where our bounding box metadata is",
        type=str, default="data/planetlab/metadata/annotated.json")
    parser.add_argument("--input_images", help="Path to where our unbounded images are",
        type=str, default="data/planetlab/metadata")
    parser.add_argument("--output_images", help="Path to place our cropped, bounded images",
        type=str, default="data/planetlab/images/bounded")
    parser.add_argument("--output_leveldb", help="Path to place our prepared leveldb directories",
        type=str, default="data/leveldb")
    parser.add_argument("--width", help="Width of image at training time (it will be scaled to this)",
        type=int, default=256)
    parser.add_argument("--height", help="Height of image at training time (it will be scaled to this)",
        type=int, default=256)
    parser.add_argument("--log_path", help="The path to where to place log files",
        type=str, default="logs")
    parser.add_argument("--log_num", help="""Number that will be appended to log files; this will
        be automatically padded and added with zeros, such as output00001.log""", type=int,
        default=1)
    parser.add_argument("--do_augmentation", help="Whether to do data augmentation",
        dest="do_augmentation", action="store_true")
    parser.set_defaults(do_augmentation=False)

    options = vars(parser.parse_args())

    utils.assert_caffe_setup()

    # Ensure the random number generator always starts from the same place for consistent tests.
    random.seed(0)

    # Resolve the log prefix first since prepare_data writes statistics next to it.
    (output_ending, output_log_prefix, output_log_file) = utils.get_log_path_details(
        os.path.abspath(options["log_path"]), options["log_num"])

    prepare_data(
        os.path.abspath(options["input_metadata"]),
        os.path.abspath(options["input_images"]),
        os.path.abspath(options["output_images"]),
        os.path.abspath(options["output_leveldb"]),
        options["width"],
        options["height"],
        options["do_augmentation"],
        output_log_prefix)
def prepare_data(input_metadata, input_images, output_images, output_leveldb, width, height,
        do_augmentation, output_log_prefix):
    """
    Prepares our training and validation data sets for use by Caffe.

    Pipeline: parse annotations -> crop bounding boxes -> shuffle/split ->
    (optional) augmentation -> write train/validation LevelDBs -> copy the
    validation images aside for later inspection.

    Args:
        input_metadata: path to the annotated JSON metadata file.
        input_images: directory containing the raw, unbounded images.
        output_images: directory to receive the cropped, bounded images.
        output_leveldb: directory to receive the generated LevelDBs.
        width, height: dimensions every image is scaled to for training.
        do_augmentation: whether to expand the training set with rotations.
        output_log_prefix: path prefix used when saving statistics logs.
    """
    print "Preparing data..."

    print "\tParsing Planet Labs data into independent cropped bounding boxes using %s..." % input_metadata
    details = _crop_planetlab_images(_get_planetlab_details(input_metadata, input_images), output_images)
    train_paths, validation_paths, train_targets, validation_targets = _split_data_sets(details)

    # Augmentation applies only to the training split, never to validation.
    if do_augmentation == True:
        print "\tDoing data augmentation..."
        train_paths, train_targets = _do_augmentation(output_images, train_paths, train_targets)
    else:
        print "\tNot doing data augmentation"

    # TODO(brad): Balance classes if command-line option provided to do so.
    #_balance_classes(details)

    _print_input_details(details, train_paths, train_targets, output_log_prefix, do_augmentation)

    print "\tSaving prepared data..."
    training_file = os.path.join(output_leveldb, "train_leveldb")
    validation_file = os.path.join(output_leveldb, "validation_leveldb")
    _generate_leveldb(training_file, train_paths, train_targets, width, height)
    _generate_leveldb(validation_file, validation_paths, validation_targets, width, height)

    _copy_validation_images(validation_paths, output_images)
def _get_planetlab_details(input_metadata, input_images):
"""
Loads available image paths and image filenames for planetlab, along with any bounding boxes
that might be present for clouds in them.
"""
print "Using the following metadata file: %s" % input_metadata
with open(input_metadata) as data_file:
details = json.load(data_file)
for entry in details:
entry["image_path"] = os.path.join(input_images, entry["image_name"])
entry["target"] = 0
if len(entry["image_annotation"]):
entry["target"] = 1
bboxes = []
for bbox in entry["image_annotation"]:
bbox = bbox.split(",")
x = int(bbox[0])
y = int(bbox[1])
width = int(bbox[2])
height = int(bbox[3])
bboxes.append({
"left": x,
"upper": y,
"right": x + width,
"lower": y + height
})
entry["image_annotation"] = bboxes
return details
# The first early iteration of the system used Landsat data to confirm the pipeline; left here
# commented out for future reference.
# def _get_landsat_details():
# """
# Loads available image paths and image filenames for landsat, along with their target values if
# they contain clouds or not (1 if there is a cloud, 0 otherwise).
# """
#
# LANDSAT_ROOT = ROOT_DIR + "/data/landsat"
# LANDSAT_IMAGES = LANDSAT_ROOT + "/images"
# LANDSAT_METADATA = LANDSAT_ROOT + "/metadata/training-validation-set.csv"
#
# image_paths = []
# targets = []
# with open(LANDSAT_METADATA, 'r') as csvfile:
# entryreader = csv.reader(csvfile, delimiter=',', quotechar='"')
# firstline = True
# for row in entryreader:
# if firstline:
# firstline = False
# continue
# filename = row[0]
# has_cloud = 0
# if row[1] == "1":
# has_cloud = 1
#
# image_paths.append(os.path.join(LANDSAT_IMAGES, filename))
# targets.append(has_cloud)
#
# return {
# "image_paths": image_paths,
# "targets": targets,
# }
def _crop_planetlab_images(details, output_images):
"""
Generates cropped cloud and non-cloud images from our annotated bounding boxes, dumping
them into the file system and returning their full image paths with whether they are targets
or not.
"""
image_paths = []
targets = []
raw_input_images_count = 0
# Remove the directory to ensure we don't get old data runs included.
shutil.rmtree(output_images, ignore_errors=True)
os.makedirs(output_images)
for entry in details:
raw_input_images_count = raw_input_images_count + 1
if entry["target"] == 0:
# Nothing to crop, but remove the alpha channel.
new_path = os.path.join(output_images, entry["image_name"])
im = Image.open(entry["image_path"])
im = _rgba_to_rgb(im)
im.save(new_path)
image_paths.append(new_path)
targets.append(entry["target"])
print "\t\tProcessed non-cloud image %s" % new_path
elif entry["target"] == 1:
(root, ext) = os.path.splitext(entry["image_name"])
cloud_num = 1
for bbox in entry["image_annotation"]:
im = Image.open(entry["image_path"])
try:
new_path = os.path.join(output_images, "%s_cloud_%03d%s" % (root, cloud_num, ext))
new_im = im.crop((bbox["left"], bbox["upper"], bbox["right"], bbox["lower"]))
new_im = _rgba_to_rgb(new_im)
new_im.save(new_path)
image_paths.append(new_path)
targets.append(1)
print "\t\tProcessed cloud cropped image %s" % new_path
cloud_num += 1
except:
print "\t\tInvalid crop value: {}".format(bbox)
return {
"image_paths": image_paths,
"targets": targets,
"raw_input_images_count": raw_input_images_count,
}
def _print_input_details(details, train_paths, train_targets, output_log_prefix, do_augmentation):
"""
Prints out statistics about our input data.
"""
positive_cloud_class = 0
negative_cloud_class = 0
for entry in train_targets:
if entry == 1:
positive_cloud_class = positive_cloud_class + 1
else:
negative_cloud_class = negative_cloud_class + 1
ratio = min(float(positive_cloud_class), float(negative_cloud_class)) / \
max(float(positive_cloud_class), float(negative_cloud_class))
statistics = """\t\tInput data details during data preparation:
\t\tTotal # of raw input images for training/validation: %d
\t\tTotal # of generated bounding box images for training/validation: %d
\t\tPositive cloud count (# of images with clouds) in training data: %d
\t\tNegative cloud count (# of images without clouds) in training data: %d
\t\tRatio: %.2f
\t\tTotal # of input images including data augmentation: %d
\t\tBalanced classes: no
\t\tData augmentation: %r
\t\tAdding inference bounding boxes into training data: no""" \
% ( \
details["raw_input_images_count"],
len(details["image_paths"]),
positive_cloud_class,
negative_cloud_class,
ratio,
len(train_paths),
do_augmentation,
)
print statistics
statistics_log_file = output_log_prefix + ".preparation_statistics.txt"
print "\t\tSaving preparation statistics to %s..." % statistics_log_file
with open(statistics_log_file, "w") as f:
f.write(statistics)
# def _balance_classes(details):
# """
# Ensures we have the same number of positive and negative cloud/not cloud classes.
# """
def _split_data_sets(details):
"""
Shuffles and splits our datasets into training and validation sets.
"""
image_paths = details["image_paths"]
targets = details["targets"]
print "\tShuffling data..."
(image_paths, targets) = shuffle(image_paths, targets, random_state=0)
print "\tSplitting data 80% training, 20% validation..."
return train_test_split(image_paths, targets, train_size=0.8, test_size=0.2, \
random_state=0)
def _copy_validation_images(validation_paths, output_images):
"""
Takes bounded validation images and copies them to a separate directory so we can distinguish
training from validation images later on.
"""
validation_images = os.path.join(output_images, "validation")
shutil.rmtree(validation_images, ignore_errors=True)
os.makedirs(validation_images)
print "\tCopying validation images to %s..." % validation_images
for i in xrange(len(validation_paths)):
old_path = validation_paths[i]
filename = os.path.basename(old_path)
new_path = os.path.join(validation_images, filename)
shutil.copyfile(old_path, new_path)
# TODO: We really should be doing this at training time instead as on-demand transformations, via a
# Python-based layer right after input data is loaded. Example:
# https://github.com/BVLC/caffe/blob/master/python/caffe/test/test_python_layer.py
def _do_augmentation(output_images, train_paths, train_targets):
"""
Augments our training data through cropping, rotations, and mirroring.
"""
result_train_paths = []
result_train_targets = []
augmentation_dir = os.path.join(output_images, "augmentation")
shutil.rmtree(augmentation_dir, ignore_errors=True)
os.makedirs(augmentation_dir)
# Note: our Caffe train_val.prototxt already does mirroring and basic cropping, so just
# do 90 degree rotations.
for i in xrange(len(train_paths)):
input_path = train_paths[i]
input_target = train_targets[i]
print "\t\tDoing data augmentation for %s" % input_path
try:
im = Image.open(input_path)
(width, height) = im.size
process_me = []
result_train_paths.append(input_path)
result_train_targets.append(input_target)
# Only crop if our image is above some size or else it gets nonsensical.
process_me.append(im)
# if width >= 100 and height >= 100:
# _crop_image(im, width, height, process_me)
# Now rotate all of these four ways 90 degrees and then mirror them.
process_me = _rotate_images(process_me)
#process_me = _mirror_images(process_me)
# Note: the original image is the first entry. Remove it since its already saved to disk
# so we don't accidentally duplicate it again.
del process_me[0]
_, base_filename = os.path.split(input_path)
base_filename, file_extension = os.path.splitext(base_filename)
for idx in xrange(len(process_me)):
entry = process_me[idx]
new_path = os.path.join(augmentation_dir,
base_filename + "_augment_" + str(idx + 1) + file_extension)
result_train_paths.append(new_path)
result_train_targets.append(input_target)
entry.save(new_path)
except:
print "\t\tWarning: Unable to work with %s" % input_path
return result_train_paths, result_train_targets
def _crop_image(im, width, height, process_me):
"""
Crops an image into its four corners and its center, adding them to process_me.
"""
process_me.append(im.crop((0, 0, width / 2, height / 2)))
process_me.append(im.crop((width / 2, 0, width, height / 2)))
process_me.append(im.crop((0, height / 2, width / 2, height)))
process_me.append(im.crop((width / 2, height / 2, width, height)))
# Crop the center.
center_width = width / 2
center_height = height / 2
center_left = (width - center_width) / 2
center_top = (height - center_height) / 2
center_right = (width + center_width) / 2
center_bottom = (height + center_height) / 2
process_me.append(im.crop((center_left, center_top, center_right, center_bottom)))
def _rotate_images(process_me):
"""
Rotates the images given in process_me by all four possible 90 degrees.
"""
results = []
for orig_im in process_me:
results.append(orig_im)
rotated_im = orig_im
for i in range(3):
rotated_im = rotated_im.rotate(90)
results.append(rotated_im)
return results
def _mirror_images(process_me):
    """
    Mirrors the given images horizontally.

    Each input image is kept and immediately followed by its mirror.
    """
    results = []
    for original in process_me:
        results.extend([original, ImageOps.mirror(original)])
    return results
def _generate_leveldb(file_path, image_paths, targets, width, height):
"""
Caffe uses the LevelDB format to efficiently load its training and validation data; this method
writes paired out faces in an efficient way into this format.
"""
print "\t\tGenerating LevelDB file at %s..." % file_path
shutil.rmtree(file_path, ignore_errors=True)
db = plyvel.DB(file_path, create_if_missing=True)
wb = db.write_batch()
commit_every = 10000
start_time = int(round(time.time() * 1000))
for idx in range(len(image_paths)):
# Each image is a top level key with a keyname like 00000000011, in increasing
# order starting from 00000000000.
key = utils.get_key(idx)
# Do common normalization that might happen across both testing and validation.
try:
image = _preprocess_data(_load_numpy_image(image_paths[idx], width, height))
except:
print "\t\t\tWarning: Unable to process leveldb image %s" % image_paths[idx]
continue
# Each entry in the leveldb is a Caffe protobuffer "Datum" object containing details.
datum = Datum()
datum.channels = 3 # RGB
datum.height = height
datum.width = width
datum.data = image.tostring()
datum.label = targets[idx]
value = datum.SerializeToString()
wb.put(key, value)
if (idx + 1) % commit_every == 0:
wb.write()
del wb
wb = db.write_batch()
end_time = int(round(time.time() * 1000))
total_time = end_time - start_time
print "\t\t\tWrote batch, key: %s, time for batch: %d ms" % (key, total_time)
start_time = int(round(time.time() * 1000))
end_time = int(round(time.time() * 1000))
total_time = end_time - start_time
print "\t\t\tWriting final batch, time for batch: %d ms" % total_time
wb.write()
db.close()
def _preprocess_data(data):
"""
Applies any standard preprocessing we might do on data, whether it is during
training or testing time. 'data' is a numpy array of unrolled pixel vectors with
a remote sensing image.
"""
# Do nothing for now.
# We don't scale it's values to be between 0 and 1 as our Caffe model will do that.
return data
def _load_numpy_image(image_path, width, height):
    """
    Turns one of our testing image paths into an actual image, converted into a numpy array.
    """
    im = Image.open(image_path)
    # Scale the image to the size required by our neural network.
    im = im.resize((width, height))
    data = np.asarray(im)
    # NOTE(review): PIL arrays are laid out (height, width, channels);
    # np.reshape does not reorder axes, so this interleaves pixels rather than
    # producing true channel planes. If Caffe expects channel-planar (C, H, W)
    # input, data.transpose(2, 0, 1) is presumably intended — confirm against
    # the trained model before changing, since training used this same layout.
    data = np.reshape(data, (3, height, width))
    return data
def _rgba_to_rgb(im):
"""
Drops the alpha channel in an RGB image.
"""
return im.convert("RGB")
# Script entry point: parse CLI arguments and run the preparation pipeline.
if __name__ == "__main__":
    parse_command_line()
| |
# coding=utf8
import sublime
import os
import re
import shutil
class Object():
    # Empty placeholder class used as a plain attribute container.
    pass
def expandVars(path):
    """Expand %VAR% and %var% markers in `path` from the environment."""
    for name, value in list(os.environ.items()):
        # Substitute both the as-declared and the lowercased spelling.
        path = path.replace('%' + name + '%', value)
        path = path.replace('%' + name.lower() + '%', value)
    return path
def escapeCMDWindows(string):
    """Escape the cmd.exe metacharacter '^' by doubling it."""
    return string.replace('^', '^^')
# File extensions treated as binary (not text-editable), matched
# case-insensitively at the end of a file name.
# FIX: made the pattern a raw string — '\.' in a plain string is an invalid
# escape sequence (a DeprecationWarning and future SyntaxError in Python 3).
BINARY = re.compile(r'\.(psd|ai|cdr|ico|cache|sublime-package|eot|svgz|ttf|woff|zip|tar|gz|rar|bz2|jar|xpi|mov|mpeg|avi|mpg|flv|wmv|mp3|wav|aif|aiff|snd|wma|asf|asx|pcm|pdf|doc|docx|xls|xlsx|ppt|pptx|rtf|sqlite|sqlitedb|fla|swf|exe)$', re.I)
class SideBarSelection:
    """Wraps the sidebar's currently selected paths and answers questions
    about them (files vs. directories, project membership, extensions).

    Selection details are computed lazily and cached on first access via the
    _obtainSelectionInformation* methods.
    """

    def __init__(self, paths = []):
        # With no explicit selection, fall back to the active view's file.
        if not paths or len(paths) < 1:
            try:
                path = sublime.active_window().active_view().file_name()
                if self.isNone(path):
                    paths = []
                else:
                    paths = [path]
            except:
                paths = []

        self._paths = paths
        self._paths.sort()
        # Lazy caches, filled on demand.
        self._obtained_selection_information_basic = False
        self._obtained_selection_information_extended = False

    def len(self):
        # Number of selected paths.
        return len(self._paths)

    def hasDirectories(self):
        self._obtainSelectionInformationBasic()
        return self._has_directories

    def hasFiles(self):
        self._obtainSelectionInformationBasic()
        return self._has_files

    def hasOnlyDirectories(self):
        self._obtainSelectionInformationBasic()
        return self._only_directories

    def hasOnlyFiles(self):
        self._obtainSelectionInformationBasic()
        return self._only_files

    def hasProjectDirectories(self):
        # True when a selected directory is itself a top-level project folder.
        if self.hasDirectories():
            project_directories = SideBarProject().getDirectories()
            for item in self.getSelectedDirectories():
                if item.path() in project_directories:
                    return True
            return False
        else:
            return False

    def hasItemsUnderProject(self):
        for item in self.getSelectedItems():
            if item.isUnderCurrentProject():
                return True
        return False

    def hasImages(self):
        return self.hasFilesWithExtension('gif|jpg|jpeg|png')

    def hasFilesWithExtension(self, extensions):
        # `extensions` is a '|'-separated list, matched case-insensitively.
        extensions = re.compile('('+extensions+')$', re.I);
        for item in self.getSelectedFiles():
            if extensions.search(item.path()):
                return True;
        return False

    def getSelectedItems(self):
        self._obtainSelectionInformationExtended()
        return self._files + self._directories;

    def getSelectedItemsWithoutChildItems(self):
        # Selected items with any item contained in another selected item removed.
        self._obtainSelectionInformationExtended()
        items = []
        for item in self._items_without_containing_child_items:
            items.append(SideBarItem(item, os.path.isdir(item)))
        return items

    def getSelectedDirectories(self):
        self._obtainSelectionInformationExtended()
        return self._directories;

    def getSelectedFiles(self):
        self._obtainSelectionInformationExtended()
        return self._files;

    def getSelectedDirectoriesOrDirnames(self):
        # Selected directories plus the parent directories of selected files.
        self._obtainSelectionInformationExtended()
        return self._directories_or_dirnames;

    def getSelectedImages(self):
        return self.getSelectedFilesWithExtension('gif|jpg|jpeg|png')

    def getSelectedFilesWithExtension(self, extensions):
        items = []
        extensions = re.compile('('+extensions+')$', re.I);
        for item in self.getSelectedFiles():
            if extensions.search(item.path()):
                items.append(item)
        return items

    def _obtainSelectionInformationBasic(self):
        # Compute and cache the has/only file-vs-directory flags once.
        if not self._obtained_selection_information_basic:
            self._obtained_selection_information_basic = True

            self._has_directories = False
            self._has_files = False
            self._only_directories = False
            self._only_files = False

            for path in self._paths:
                if self._has_directories == False and os.path.isdir(path):
                    self._has_directories = True
                if self._has_files == False and os.path.isdir(path) == False:
                    self._has_files = True
                if self._has_files and self._has_directories:
                    break

            if self._has_files and self._has_directories:
                self._only_directories = False
                self._only_files = False
            elif self._has_files:
                self._only_files = True
            elif self._has_directories:
                self._only_directories = True

    def _obtainSelectionInformationExtended(self):
        # Compute and cache per-item lists (deduplicated, order-preserving).
        if not self._obtained_selection_information_extended:
            self._obtained_selection_information_extended = True

            self._directories = []
            self._files = []
            self._directories_or_dirnames = []
            self._items_without_containing_child_items = []

            # Plain-path shadow lists used for deduplication checks.
            _directories = []
            _files = []
            _directories_or_dirnames = []
            _items_without_containing_child_items = []

            for path in self._paths:
                if os.path.isdir(path):
                    item = SideBarItem(path, True)
                    if item.path() not in _directories:
                        _directories.append(item.path())
                        self._directories.append(item)
                    if item.path() not in _directories_or_dirnames:
                        _directories_or_dirnames.append(item.path())
                        self._directories_or_dirnames.append(item)
                    _items_without_containing_child_items = self._itemsWithoutContainingChildItems(_items_without_containing_child_items, item.path())
                else:
                    item = SideBarItem(path, False)
                    if item.path() not in _files:
                        _files.append(item.path())
                        self._files.append(item)
                    _items_without_containing_child_items = self._itemsWithoutContainingChildItems(_items_without_containing_child_items, item.path())
                    # For a file, also record its containing directory.
                    item = SideBarItem(os.path.dirname(path), True)
                    if item.path() not in _directories_or_dirnames:
                        _directories_or_dirnames.append(item.path())
                        self._directories_or_dirnames.append(item)

            self._items_without_containing_child_items = _items_without_containing_child_items

    def _itemsWithoutContainingChildItems(self, items, item):
        # Merge `item` into `items`, dropping any entry that lives inside
        # another entry (checked with both '\' and '/' separators).
        new_list = []
        add = True
        for i in items:
            if i.find(item+'\\') == 0 or i.find(item+'/') == 0:
                continue
            else:
                new_list.append(i)

            if (item+'\\').find(i+'\\') == 0 or (item+'/').find(i+'/') == 0:
                add = False
        if add:
            new_list.append(item)
        return new_list

    def isNone(self, path):
        # Treat empty, relative-dot and bare-separator paths as "no path".
        if path == None or path == '' or path == '.' or path == '..' or path == './' or path == '../' or path == '/' or path == '//' or path == '\\' or path == '\\\\' or path == '\\\\\\\\' or path == '\\\\?\\' or path == '\\\\?' or path == '\\\\\\\\?\\\\':
            return True
        else:
            return False
class SideBarProject:
    """Helpers for reading and mutating the active Sublime Text project."""

    def getDirectories(self):
        """Return the top-level folder paths of the active window."""
        return sublime.active_window().folders()

    def hasDirectories(self):
        return len(self.getDirectories()) > 0

    def hasOpenedProject(self):
        # FIX: identity comparison with None instead of `!= None`.
        return self.getProjectFile() is not None

    def getDirectoryFromPath(self, path):
        """Return the project folder containing `path`, or None."""
        for directory in self.getDirectories():
            maybe_path = path.replace(directory, '', 1)
            if maybe_path != path:
                return directory

    def getProjectFile(self):
        return sublime.active_window().project_file_name()

    def getProjectJson(self):
        return sublime.active_window().project_data()

    def setProjectJson(self, data):
        return sublime.active_window().set_project_data(data)

    def _addExcludePattern(self, path, exclude, key):
        # Shared implementation for excludeDirectory/excludeFile: append
        # `exclude` to the `key` pattern list of every project folder that
        # contains `path`. Deduplicates the previously copy-pasted logic.
        data = self.getProjectJson()
        for folder in data['folders']:
            project_folder = folder['path']
            if project_folder == '.':
                project_folder = SideBarItem(self.getProjectFile(), False).dirname()
            if path.find(project_folder) == 0:
                try:
                    folder[key].append(exclude)
                # FIX: narrowed from a bare `except:`; still creates the
                # list when the key is missing or not appendable.
                except Exception:
                    folder[key] = [exclude]
                self.setProjectJson(data)

    def excludeDirectory(self, path, exclude):
        """Hide a directory via the project's folder_exclude_patterns."""
        self._addExcludePattern(path, exclude, 'folder_exclude_patterns')

    def excludeFile(self, path, exclude):
        """Hide a file via the project's file_exclude_patterns."""
        self._addExcludePattern(path, exclude, 'file_exclude_patterns')

    def add(self, path):
        """Add `path` as a project folder, creating project data if absent."""
        data = self.getProjectJson()
        if data:
            data['folders'].append({'follow_symlinks': True, 'path': path})
        else:
            data = {'folders': [{'follow_symlinks': True, 'path': path}]}
        self.setProjectJson(data)

    def refresh(self):
        sublime.active_window().run_command('refresh_folder_list')
class SideBarItem:
	"""A file-system item (file or directory) used by the side-bar commands.

	Bundles path arithmetic, URL mapping, open/reveal actions, copy/move
	with view retargeting, and content helpers around a single path."""
	def __init__(self, path, is_directory):
		self._path = path
		self._is_directory = is_directory
	def path(self, path = ''):
		# Getter when called without an argument, setter otherwise.
		if path == '':
			return self._path
		else:
			self._path = path
			self._is_directory = os.path.isdir(path)
			return path
	def pathWithoutProject(self):
		# Path with the containing project folder stripped, forward slashes.
		path = self.path()
		for directory in SideBarProject().getDirectories():
			path = path.replace(directory, '', 1)
		return path.replace('\\', '/')
	def pathProject(self):
		# The project folder containing this item, or False.
		path = self.path()
		for directory in SideBarProject().getDirectories():
			path2 = path.replace(directory, '', 1)
			if path2 != path:
				return directory
		return False
	def url(self, type):
		# Map this item to a public URL via SideBarEnhancements.json files
		# found by walking up from the item, plus the user Settings folder.
		filenames = []
		# scans a la htaccess
		item = SideBarItem(self.path(), self.isDirectory())
		while not os.path.exists(item.join('.sublime/SideBarEnhancements.json')):
			if item.dirname() == item.path():
				break;
			item.path(item.dirname())
		item = SideBarItem(item.join('.sublime/SideBarEnhancements.json'), False);
		if item.exists():
			filenames.append(item.path())
		filenames.append(os.path.dirname(sublime.packages_path())+'/Settings/SideBarEnhancements.json')
		import collections
		for filename in filenames:
			if os.path.lexists(filename):
				import json
				data = open(filename, 'r').read()
				# Normalize separators and tolerate sloppy JSON before parsing.
				data = data.replace('\t', ' ').replace('\\', '/').replace('\\', '/').replace('//', '/').replace('//', '/').replace('http:/', 'http://').replace('https:/', 'https://')
				data = json.loads(data, strict=False, object_pairs_hook=collections.OrderedDict)
				for key in list(data.keys()):
					# print('-------------------------------------------------------')
					# print(key);
					# The last entry is the global Settings file: keys there are
					# absolute; elsewhere they are relative to the JSON's parent dir.
					if filename == filenames[len(filenames)-1]:
						base = expandVars(key)
					else:
						base = os.path.normpath(expandVars(os.path.dirname(os.path.dirname(filename))+'/'+key))
					base = base.replace('\\', '/').replace('\\', '/').replace('//', '/').replace('//', '/')
					# print(base)
					current = self.path().replace('\\', '/').replace('\\', '/').replace('//', '/').replace('//', '/')
					# print(current)
					url_path = re.sub(re.compile("^"+re.escape(base), re.IGNORECASE), '', current);
					# print(url_path)
					if url_path != current:
						url = data[key][type]
						if url:
							if url[-1:] != '/':
								url = url+'/'
							import urllib.request, urllib.parse, urllib.error
							return url+(re.sub("^/", '', urllib.parse.quote(url_path)));
		return False
	def isUnderCurrentProject(self):
		path = self.path()
		path2 = self.path()
		for directory in SideBarProject().getDirectories():
			path2 = path2.replace(directory, '', 1)
		return path != path2
	def pathRelativeFromProject(self):
		return re.sub('^/+', '', self.pathWithoutProject())
	def pathRelativeFromProjectEncoded(self):
		import urllib.request, urllib.parse, urllib.error
		return urllib.parse.quote(self.pathRelativeFromProject())
	def pathRelativeFromView(self):
		return os.path.relpath(self.path(), os.path.dirname(sublime.active_window().active_view().file_name())).replace('\\', '/')
	def pathRelativeFromViewEncoded(self):
		import urllib.request, urllib.parse, urllib.error
		return urllib.parse.quote(os.path.relpath(self.path(), os.path.dirname(sublime.active_window().active_view().file_name())).replace('\\', '/'))
	def pathAbsoluteFromProject(self):
		return self.pathWithoutProject()
	def pathAbsoluteFromProjectEncoded(self):
		import urllib.request, urllib.parse, urllib.error
		return urllib.parse.quote(self.pathAbsoluteFromProject())
	def uri(self):
		uri = 'file:'+(self.path().replace('\\', '/').replace('//', '/'));
		return uri
	def join(self, name):
		return os.path.join(self.path(), name)
	def dirname(self):
		branch, leaf = os.path.split(self.path())
		return branch;
	def forCwdSystemPath(self):
		# Directory to use as cwd for a spawned process.
		if self.isDirectory():
			return self.path()
		else:
			return self.dirname()
	def forCwdSystemName(self):
		if self.isDirectory():
			return '.'
		else:
			path = self.path()
			branch = self.dirname()
			leaf = path.replace(branch, '', 1).replace('\\', '').replace('/', '')
			return leaf
	def forCwdSystemPathRelativeFrom(self, relativeFrom):
		relative = SideBarItem(relativeFrom, os.path.isdir(relativeFrom))
		path = self.path().replace(relative.path(), '', 1).replace('\\', '/')
		if path == '':
			return '.'
		else:
			return re.sub('^/+', '', path)
	def forCwdSystemPathRelativeFromRecursive(self, relativeFrom):
		# Like forCwdSystemPathRelativeFrom but directories get a trailing '/'.
		relative = SideBarItem(relativeFrom, os.path.isdir(relativeFrom))
		path = self.path().replace(relative.path(), '', 1).replace('\\', '/')
		if path == '':
			return '.'
		else:
			if self.isDirectory():
				return re.sub('^/+', '', path)+'/'
			else:
				return re.sub('^/+', '', path)
	def dirnameCreate(self):
		# Best-effort: the parent may already exist.
		try:
			self._makedirs(self.dirname())
		except:
			pass
	def name(self):
		branch, leaf = os.path.split(self.path())
		return leaf;
	def nameEncoded(self):
		import urllib.request, urllib.parse, urllib.error
		return urllib.parse.quote(self.name());
	def namePretty(self):
		return self.name().replace(self.extension(), '').replace('-', ' ').replace('_', ' ').strip();
	def open(self, use_powershell = True):
		# Directories open a terminal at the location; files open with the
		# platform's default application.
		if self.isDirectory():
			import subprocess
			if sublime.platform() == 'osx':
				subprocess.Popen(['/Applications/Utilities/Terminal.app/Contents/MacOS/Terminal', '.'], cwd=self.forCwdSystemPath())
			elif sublime.platform() == 'windows':
				if use_powershell:
					try:
						subprocess.Popen(['start', 'powershell'], cwd=self.forCwdSystemPath(), shell=True)
					except:
						subprocess.Popen(['start', 'cmd', '.'], cwd=self.forCwdSystemPath(), shell=True)
				else:
					subprocess.Popen(['start', 'cmd', '.'], cwd=self.forCwdSystemPath(), shell=True)
			elif sublime.platform() == 'linux':
				subprocess.Popen(['gnome-terminal', '.'], cwd=self.forCwdSystemPath())
		else:
			if sublime.platform() == 'osx':
				import subprocess
				subprocess.Popen(['open', self.name()], cwd=self.dirname())
			elif sublime.platform() == 'windows':
				import subprocess
				subprocess.Popen(['start', '', escapeCMDWindows(self.path())], cwd=self.dirname(), shell=True)
			else:
				from . import desktop
				desktop.open(self.path())
				print('using desktop')
	def edit(self):
		# Refuses binary files (BINARY regex defined elsewhere in this file).
		if BINARY.search(self.path()):
			return None
		else:
			view = sublime.active_window().open_file(self.path())
			view.settings().set('open_with_edit', True);
			return view
	def isDirectory(self):
		return self._is_directory
	def isFile(self):
		return self.isDirectory() == False
	def contentUTF8(self):
		return open(self.path(), 'r', newline='', encoding='utf-8').read()
	def contentBinary(self):
		return open(self.path(), "rb").read()
	def contentBase64(self):
		# data: URI with the file's guessed MIME type.
		import base64
		base64text = base64.b64encode(self.contentBinary()).decode('utf-8')
		return 'data:'+self.mime()+';charset=utf-8;base64,'+(base64text.replace('\n', ''))
	def reveal(self):
		if sublime.platform() == 'windows':
			import subprocess
			if self.isDirectory():
				subprocess.Popen(["explorer", escapeCMDWindows(self.path())])
			else:
				subprocess.Popen(["explorer", '/select,', escapeCMDWindows(self.path())])
		else:
			sublime.active_window().run_command("open_dir", {"dir": self.dirname(), "file": self.name()} )
	def write(self, content):
		with open(self.path(), 'w+', encoding='utf8', newline='') as f:
			f.write(str(content))
		if 3000 <= int(sublime.version()) < 3088:
			# Fixes as best as possible a new file permissions issue
			# See https://github.com/titoBouzout/SideBarEnhancements/issues/203
			# See https://github.com/SublimeTextIssues/Core/issues/239
			oldmask = os.umask(0o000)
			if oldmask == 0:
				os.chmod(self.path(), 0o644)
			os.umask(oldmask)
	def mime(self):
		import mimetypes
		return mimetypes.guess_type(self.path())[0] or 'application/octet-stream'
	def extension(self):
		# 'name' prefix guards against dotfiles being treated as pure extension.
		try:
			return re.compile('(\.[^\.]+(\.[^\.]{2,4})?)$').findall('name'+self.name())[0][0].lower()
		except:
			return os.path.splitext('name'+self.name())[1].lower()
	def exists(self):
		return os.path.isdir(self.path()) or os.path.isfile(self.path())
	def overwrite(self):
		# Asks the user; on confirmation the existing item goes to the trash.
		overwrite = sublime.ok_cancel_dialog("Destination exists", "Delete, and overwrite")
		if overwrite:
			from SideBarEnhancements.send2trash import send2trash
			send2trash(self.path())
			return True
		else:
			return False
	def create(self):
		if self.isDirectory():
			self.dirnameCreate()
			self._makedirs(self.path())
		else:
			self.dirnameCreate()
			self.write('')
	def _makedirs(self, path):
		if 3000 <= int(sublime.version()) < 3088:
			# Fixes as best as possible a new directory permissions issue
			# See https://github.com/titoBouzout/SideBarEnhancements/issues/203
			# See https://github.com/SublimeTextIssues/Core/issues/239
			oldmask = os.umask(0o000)
			if oldmask == 0:
				os.makedirs(path, 0o755);
			else:
				os.makedirs(path);
			os.umask(oldmask)
		else:
			os.makedirs(path)
	def copy(self, location, replace = False):
		# Returns False when the destination exists and replace is off.
		location = SideBarItem(location, os.path.isdir(location));
		if location.exists() and replace == False:
			return False
		elif location.exists() and location.isFile():
			os.remove(location.path())
		location.dirnameCreate();
		if self.isDirectory():
			if location.exists():
				# Merge into the existing directory instead of copytree
				# (which requires a non-existing destination).
				self.copyRecursive(self.path(), location.path())
			else:
				shutil.copytree(self.path(), location.path())
		else:
			shutil.copy2(self.path(), location.path())
		return True
	def copyRecursive(self, _from, _to):
		# Depth-first copy that overwrites existing files.
		if os.path.isfile(_from) or os.path.islink(_from):
			try:
				self._makedirs(os.path.dirname(_to));
			except:
				pass
			if os.path.exists(_to):
				os.remove(_to)
			shutil.copy2(_from, _to)
		else:
			try:
				self._makedirs(_to);
			except:
				pass
			for content in os.listdir(_from):
				__from = os.path.join(_from, content)
				__to = os.path.join(_to, content)
				self.copyRecursive(__from, __to)
	def move(self, location, replace = False):
		location = SideBarItem(location, os.path.isdir(location));
		if location.exists() and replace == False:
			if self.path().lower() == location.path().lower():
				# Case-only rename on a case-insensitive FS: allowed below.
				pass
			else:
				return False
		elif location.exists() and location.isFile():
			os.remove(location.path())
		if self.path().lower() == location.path().lower():
			# Two-step rename via a temp name so case-insensitive
			# filesystems accept a case-only rename.
			location.dirnameCreate();
			os.rename(self.path(), location.path()+'.sublime-temp')
			os.rename(location.path()+'.sublime-temp', location.path())
			self._moveMoveViews(self.path(), location.path())
		else:
			location.dirnameCreate();
			if location.exists():
				self.moveRecursive(self.path(), location.path())
			else:
				os.rename(self.path(), location.path())
				self._moveMoveViews(self.path(), location.path())
		return True
	def moveRecursive(self, _from, _to):
		# Depth-first move that merges into existing directories.
		if os.path.isfile(_from) or os.path.islink(_from):
			try:
				self._makedirs(os.path.dirname(_to));
			except:
				pass
			if os.path.exists(_to):
				os.remove(_to)
			os.rename(_from, _to)
		else:
			try:
				self._makedirs(_to);
			except:
				pass
			for content in os.listdir(_from):
				__from = os.path.join(_from, content)
				__to = os.path.join(_to, content)
				self.moveRecursive(__from, __to)
			os.rmdir(_from)
	def _moveMoveViews(self, old, location):
		# Retarget every open view whose file lived at (or under) `old`.
		for window in sublime.windows():
			active_view = window.active_view()
			views = []
			for view in window.views():
				if view.file_name():
					views.append(view)
			views.reverse();
			for view in views:
				if old == view.file_name():
					active_view = self._moveMoveView(window, view, location, active_view)
				elif view.file_name().find(old+'\\') == 0:
					active_view = self._moveMoveView(window, view, view.file_name().replace(old+'\\', location+'\\', 1), active_view)
				elif view.file_name().find(old+'/') == 0:
					active_view = self._moveMoveView(window, view, view.file_name().replace(old+'/', location+'/', 1), active_view)
	def _moveMoveView(self, window, view, location, active_view):
		# NOTE(review): returns None, so _moveMoveViews' active_view
		# becomes None after the first retarget — looks unintended; confirm.
		view.retarget(location)
	def closeViews(self):
		# Close every view at or under this path; returns
		# [file_name, window, view_index] for each closed view so
		# callers can reopen them later.
		path = self.path()
		closed_items = []
		for window in sublime.windows():
			active_view = window.active_view()
			views = []
			for view in window.views():
				if view.file_name():
					views.append(view)
			views.reverse();
			for view in views:
				if path == view.file_name() or view.file_name().find(path+'\\') == 0 or view.file_name().find(path+'/') == 0:
					if view.window():
						closed_items.append([view.file_name(), view.window(), view.window().get_view_index(view)])
					if len(window.views()) == 1:
						window.new_file()
					window.focus_view(view)
					window.run_command('revert')
					window.run_command('close')
			# try to repaint
			try:
				window.focus_view(active_view)
				window.focus_view(window.active_view())
			except:
				try:
					window.focus_view(window.active_view())
				except:
					pass
		return closed_items
| |
# Copyright 2013 Locaweb.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: Francisco Freire, Locaweb.
# @author: Thiago Morello (morellon), Locaweb.
# @author: Willian Molinari (PotHix), Locaweb.
# @author: Juliano Martinez (ncode), Locaweb.
from gevent import monkey
monkey.patch_all()
import os
import grp
import pwd
import json
import time
import base64
import logging
import bottle
from bottle import delete, put, get, post, error, redirect, run, debug
from bottle import abort, request, ServerAdapter, response, static_file
from bottle import error, HTTPError
from simplestack.common.config import config
from simplestack.common.logger import set_logger
app = bottle.app()
LOG = logging.getLogger('simplestack.server')
@error(500)
def custom500(error):
    """Render any unhandled exception as a JSON error document.

    Bottle wraps uncaught exceptions in an HTTPError; unwrap it so the
    client sees the original exception class and message, plus the
    traceback when one was captured.
    """
    response.content_type = "application/json"
    traceback = None
    # isinstance() also covers HTTPError subclasses, which the previous
    # `type(error) is HTTPError` check silently missed.
    if isinstance(error, HTTPError):
        traceback = error.traceback
        error = error.exception
    error_class = error.__class__.__name__
    LOG.error("%s: %s", error_class, error)
    return json.dumps({
        "error": error_class,
        "message": str(error),
        "traceback": traceback
    })
@error(404)
def error404(error):
    """Render a 404 as a JSON error document."""
    response.content_type = 'application/json'
    return json.dumps({
        "error": error.__class__.__name__,
        "message": error.output
    })
@get('/:hypervisor/:host')
def pool_info(hypervisor, host):
    """Return pool information as JSON.

    ::

        GET /:hypervisor/:host
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.pool_info()
    manager.logout()
    return json.dumps(info)
@get('/:hypervisor/:host/hosts')
def host_list(hypervisor, host):
    """List the hosts of the pool as JSON.

    ::

        GET /:hypervisor/:host/hosts
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    hosts = manager.host_list()
    manager.logout()
    return json.dumps(hosts)
@get('/:hypervisor/:host/hosts/:host_id')
def host_info(hypervisor, host, host_id):
    """Return information on one host as JSON.

    ::

        GET /:hypervisor/:host/hosts/:host_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.host_info(host_id)
    manager.logout()
    return json.dumps(info)
@get('/:hypervisor/:host/storages')
def storage_list(hypervisor, host):
    """List the storages of the pool as JSON.

    ::

        GET /:hypervisor/:host/storages
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    storages = manager.storage_list()
    manager.logout()
    return json.dumps(storages)
@get('/:hypervisor/:host/storages/:storage_id')
def storage_info(hypervisor, host, storage_id):
    """Return information on one storage as JSON.

    ::

        GET /:hypervisor/:host/storages/:storage_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.storage_info(storage_id)
    manager.logout()
    return json.dumps(info)
@post('/:hypervisor/:host/storages/:storage_id/guests')
def storage_guest_import(hypervisor, host, storage_id):
    """Import a guest image into a specific storage.

    The raw request body is streamed straight to the hypervisor.

    ::

        POST /:hypervisor/:host/storages/:storage_id/guests
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    guest = manager.guest_import(
        request.environ['wsgi.input'],
        request.content_length,
        storage_id
    )
    response.set_header(
        "Location", "/%s/%s/guests/%s" % (hypervisor, host, guest["id"])
    )
    manager.logout()
    return json.dumps(guest)
@get('/:hypervisor/:host/guests')
def guest_list(hypervisor, host):
    """List the guests of the pool as JSON.

    ::

        GET /:hypervisor/:host/guests
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    guests = manager.guest_list()
    manager.logout()
    return json.dumps(guests)
@post('/:hypervisor/:host/guests')
def guest_create(hypervisor, host):
    """Create a new guest from a JSON description.

    ::

        POST /:hypervisor/:host/guests
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    guest = manager.guest_create(json.loads(payload))
    response.set_header(
        "Location", "/%s/%s/guests/%s" % (hypervisor, host, guest["id"])
    )
    manager.logout()
    return json.dumps(guest)
# NOTE(review): this route duplicates guest_create's
# @post('/:hypervisor/:host/guests'); only one of the two handlers can be
# dispatched by bottle. Route kept as-is because changing it would change
# the public API — confirm which handler is intended.
@post('/:hypervisor/:host/guests')
def guest_import(hypervisor, host):
    """Import a guest image streamed in the request body.

    ::

        POST /:hypervisor/:host/guests
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    guest = manager.guest_import(
        request.environ['wsgi.input'],
        request.content_length
    )
    response.set_header(
        "Location", "/%s/%s/guests/%s" % (hypervisor, host, guest["id"])
    )
    manager.logout()
    return json.dumps(guest)
@get('/:hypervisor/:host/guests/:guest_id')
def guest_info(hypervisor, host, guest_id):
    """Return information on one guest as JSON.

    ::

        GET /:hypervisor/:host/guests/:guest_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.guest_info(guest_id)
    manager.logout()
    return json.dumps(info)
@put('/:hypervisor/:host/guests/:guest_id')
def guest_update(hypervisor, host, guest_id):
    """Update a guest from a JSON description.

    ::

        PUT /:hypervisor/:host/guests/:guest_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    updated = manager.guest_update(guest_id, json.loads(payload))
    manager.logout()
    return json.dumps(updated)
@post('/:hypervisor/:host/guests/:guest_id/clone')
def guest_clone(hypervisor, host, guest_id):
    """Clone an existing guest.

    ::

        POST /:hypervisor/:host/guests/:guest_id/clone
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    guest = manager.guest_clone(guest_id, json.loads(payload))
    response.set_header(
        "Location", "/%s/%s/guests/%s" % (hypervisor, host, guest["id"])
    )
    manager.logout()
    return json.dumps(guest)
@get('/:hypervisor/:host/guests/:guest_id/export')
def guest_export(hypervisor, host, guest_id):
    """Stream the exported guest image to the client in 1 KiB chunks.

    ::

        GET /:hypervisor/:host/guests/:guest_id/export
    """
    response.content_type = "application/octet-stream"
    manager = create_manager(hypervisor, host)
    export_stream, export_length = manager.guest_export(guest_id)
    while True:
        chunk = export_stream.read(1024)
        if not chunk:
            break
        yield chunk
    manager.logout()
@delete('/:hypervisor/:host/guests/:guest_id')
def guest_delete(hypervisor, host, guest_id):
    """Delete a guest.

    ::

        DELETE /:hypervisor/:host/guests/:guest_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    result = manager.guest_delete(guest_id)
    manager.logout()
    return json.dumps(result)
@get('/:hypervisor/:host/guests/:guest_id/disks')
def disk_list(hypervisor, host, guest_id):
    """List the disks attached to a guest.

    ::

        GET /:hypervisor/:host/guests/:guest_id/disks
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    disks = manager.disk_list(guest_id)
    manager.logout()
    return json.dumps(disks)
@post('/:hypervisor/:host/guests/:guest_id/disks')
def disk_create(hypervisor, host, guest_id):
    """Attach a new disk to a guest, described by the JSON body.

    ::

        POST /:hypervisor/:host/guests/:guest_id/disks
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    disk = manager.disk_create(guest_id, json.loads(payload))
    response.set_header(
        "Location",
        "/%s/%s/guests/%s/disks/%s" % (hypervisor, host, guest_id, disk["id"])
    )
    manager.logout()
    return json.dumps(disk)
@get('/:hypervisor/:host/guests/:guest_id/disks/:disk_id')
def disk_info(hypervisor, host, guest_id, disk_id):
    """Return information on one disk of a guest.

    ::

        GET /:hypervisor/:host/guests/:guest_id/disks/:disk_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.disk_info(guest_id, disk_id)
    manager.logout()
    return json.dumps(info)
@put('/:hypervisor/:host/guests/:guest_id/disks/:disk_id')
def disk_update(hypervisor, host, guest_id, disk_id):
    """Update one disk of a guest from the JSON body.

    ::

        PUT /:hypervisor/:host/guests/:guest_id/disks/:disk_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    updated = manager.disk_update(guest_id, disk_id, json.loads(payload))
    manager.logout()
    return json.dumps(updated)
@delete('/:hypervisor/:host/guests/:guest_id/disks/:disk_id')
def disk_delete(hypervisor, host, guest_id, disk_id):
    """Detach and delete a disk from a guest. Returns no body.

    ::

        DELETE /:hypervisor/:host/guests/:guest_id/disks/:disk_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    manager.disk_delete(guest_id, disk_id)
    manager.logout()
@put('/:hypervisor/:host/guests/:guest_id/media_device')
def media_mount(hypervisor, host, guest_id):
    """Mount an ISO (described by the JSON body) into the guest's drive.

    ::

        PUT /:hypervisor/:host/guests/:guest_id/media_device
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    manager.media_mount(guest_id, json.loads(payload))
    manager.logout()
@get('/:hypervisor/:host/guests/:guest_id/media_device')
def media_info(hypervisor, host, guest_id):
    """Return the mounted media device name.

    ::

        GET /:hypervisor/:host/guests/:guest_id/media_device
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.media_info(guest_id)
    manager.logout()
    return json.dumps(info)
@get('/:hypervisor/:host/networks')
def network_list(hypervisor, host):
    """List the networks of the pool as JSON.

    ::

        GET /:hypervisor/:host/networks
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    networks = manager.network_list()
    manager.logout()
    return json.dumps(networks)
@post('/:hypervisor/:host/networks')
def network_vlan_create(hypervisor, host):
    """Create a network with a tagged VLAN.

    ::

        POST /:hypervisor/:host/networks

    The JSON body's required keys vary per hypervisor. Xen example:

        {"name": "VLAN2",
         "description": "VLAN 2 storage",
         "from_network": "BOND1",
         "vlan": 2,
         "other_config": {}}
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    payload = json.loads(payload)
    network_ref = manager.network_vlan_create(
        payload["name"], payload["description"], payload["from_network"],
        payload["vlan"], payload["other_config"])
    response.set_header(
        "Location", "/%s/%s/networks/%s" % (hypervisor, host, network_ref)
    )
    manager.logout()
    return json.dumps(network_ref)
@get('/:hypervisor/:host/networks/:network')
def network_info(hypervisor, host, network):
    """Return information on one network as JSON.

    ::

        GET /:hypervisor/:host/networks/:network
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.network_info(network)
    manager.logout()
    return json.dumps(info)
@get('/:hypervisor/:host/guests/:guest_id/network_interfaces')
def network_interface_list(hypervisor, host, guest_id):
    """List the network interfaces attached to a guest.

    ::

        GET /:hypervisor/:host/guests/:guest_id/network_interfaces
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    interfaces = manager.network_interface_list(guest_id)
    manager.logout()
    return json.dumps(interfaces)
@post('/:hypervisor/:host/guests/:guest_id/network_interfaces')
def network_interface_create(hypervisor, host, guest_id):
    """Attach a network interface to a guest.

    ::

        POST /:hypervisor/:host/guests/:guest_id/network_interfaces

    The JSON body's required keys vary per hypervisor. Xen example:

        {"network": "THE NETWORK NAME"}
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    interface = manager.network_interface_create(guest_id, json.loads(payload))
    response.set_header(
        "Location",
        "/%s/%s/guests/%s/network_interfaces/%s" % (
            hypervisor, host, guest_id, interface["id"]
        )
    )
    manager.logout()
    return json.dumps(interface)
@get('/:hypervisor/:host/guests/:guest_id/network_interfaces/:interface_id')
def network_interface_info(hypervisor, host, guest_id, interface_id):
    """Return information on one network interface of a guest.

    ::

        GET /:hypervisor/:host/guests/:guest_id/network_interfaces/:interface_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.network_interface_info(guest_id, interface_id)
    manager.logout()
    return json.dumps(info)
@put('/:hypervisor/:host/guests/:guest_id/network_interfaces/:interface_id')
def network_interface_update(hypervisor, host, guest_id, interface_id):
    """Update one network interface of a guest from the JSON body.

    ::

        PUT /:hypervisor/:host/guests/:guest_id/network_interfaces/:interface_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    updated = manager.network_interface_update(
        guest_id, interface_id, json.loads(payload))
    manager.logout()
    return json.dumps(updated)
@delete('/:hypervisor/:host/guests/:guest_id/network_interfaces/:interface_id')
def network_interface_delete(hypervisor, host, guest_id, interface_id):
    """Detach a network interface from a guest. Returns no body.

    ::

        DELETE /:hypervisor/:host/guests/:guest_id/network_interfaces/:if_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    manager.network_interface_delete(guest_id, interface_id)
    manager.logout()
@get('/:hypervisor/:host/guests/:guest_id/tags')
def tag_list(hypervisor, host, guest_id):
    """List the tags of a guest.

    ::

        GET /:hypervisor/:host/guests/:guest_id/tags
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    tags = manager.tag_list(guest_id)
    manager.logout()
    return json.dumps(tags)
@post('/:hypervisor/:host/guests/:guest_id/tags')
def tag_create(hypervisor, host, guest_id):
    """Create a tag on a guest; the tag name comes from the JSON body.

    ::

        POST /:hypervisor/:host/guests/:guest_id/tags
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    tag = manager.tag_create(guest_id, json.loads(payload).get('name'))
    #TODO: Should we return the Location for the first tag?
    response.set_header(
        "Location",
        "/%s/%s/guests/%s/tags/%s" % (hypervisor, host, guest_id, tag[0])
    )
    manager.logout()
    return json.dumps(tag)
@delete('/:hypervisor/:host/guests/:guest_id/tags/:tag_name')
def tag_delete(hypervisor, host, guest_id, tag_name):
    """Delete a tag from a guest.

    ::

        DELETE /:hypervisor/:host/guests/:guest_id/tags
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    result = manager.tag_delete(guest_id, tag_name)
    manager.logout()
    return json.dumps(result)
@get('/:hypervisor/:host/guests/:guest_id/snapshots')
def snapshot_list(hypervisor, host, guest_id):
    """List the snapshots of a guest.

    ::

        GET /:hypervisor/:host/guests/:guest_id/snapshots
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    snapshots = manager.snapshot_list(guest_id)
    manager.logout()
    return json.dumps(snapshots)
@post('/:hypervisor/:host/guests/:guest_id/snapshots')
def snapshot_create(hypervisor, host, guest_id):
    """Create a snapshot of a guest; the name comes from the JSON body.

    ::

        POST /:hypervisor/:host/guests/:guest_id/snapshots
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    if not payload:
        abort(400, 'No data received')
    snapshot = manager.snapshot_create(guest_id, json.loads(payload).get('name'))
    response.set_header(
        "Location",
        "/%s/%s/guests/%s/snapshots/%s" % (
            hypervisor, host, guest_id, snapshot["id"]
        )
    )
    manager.logout()
    return json.dumps(snapshot)
@get('/:hypervisor/:host/guests/:guest_id/snapshots/:snapshot_id')
def snapshot_info(hypervisor, host, guest_id, snapshot_id):
    """Return information on one snapshot of a guest.

    ::

        GET /:hypervisor/:host/guests/:guest_id/snapshots/:snapshot_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    info = manager.snapshot_info(guest_id, snapshot_id)
    manager.logout()
    return json.dumps(info)
@put('/:hypervisor/:host/guests/:guest_id/snapshots/:snapshot_id/revert')
def snapshot_revert(hypervisor, host, guest_id, snapshot_id):
    """Revert a guest to the given snapshot.

    ::

        PUT /:hypervisor/:host/guests/:guest_id/snapshots/:snapshot_id/revert
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    result = manager.snapshot_revert(guest_id, snapshot_id)
    manager.logout()
    return json.dumps(result)
@delete('/:hypervisor/:host/guests/:guest_id/snapshots/:snapshot_id')
def snapshot_delete(hypervisor, host, guest_id, snapshot_id):
    """Delete a snapshot of a guest.

    ::

        DELETE /:hypervisor/:host/guests/:guest_id/snapshots/:snapshot_id
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    result = manager.snapshot_delete(guest_id, snapshot_id)
    manager.logout()
    return json.dumps(result)
@put('/:hypervisor/:host/guests/:guest_id/reboot')
def reboot_guest(hypervisor, host, guest_id):
    """Reboot a guest; an optional JSON body may set {"force": true}.

    ::

        PUT /:hypervisor/:host/guests/:guest_id/reboot
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    payload = request.body.readline()
    # An empty body means a plain (non-forced) reboot.
    force = json.loads(payload).get('force') if payload else False
    manager.guest_reboot(guest_id, force=force)
    manager.logout()
    return json.dumps({"action": "reboot", "message": "ok"})
@put('/:hypervisor/:host/guests/:guest_id/power')
def power_guest(hypervisor, host, guest_id):
    """
    Turn a guest on/off based on a given guest_id.

    Body must be JSON with a "state" key, one of: force_stop, start,
    stop, pause, resume. Unknown or missing states are now rejected
    with 400 instead of silently reporting success.
    ::
    PUT /:hypervisor/:host/guests/:guest_id/power
    """
    response.content_type = "application/json"
    manager = create_manager(hypervisor, host)
    data = request.body.readline()
    if not data:
        abort(400, 'No data received')
    data = json.loads(data)
    # .get() avoids a KeyError (500) when "state" is absent; the
    # explicit else branch below turns that into a clean 400.
    state = data.get('state')
    if state == "force_stop":
        manager.guest_shutdown(guest_id, force=True)
    elif state == "start":
        manager.guest_start(guest_id)
    elif state == "stop":
        manager.guest_shutdown(guest_id, force=False)
    elif state == "pause":
        manager.guest_suspend(guest_id)
    elif state == "resume":
        manager.guest_resume(guest_id)
    else:
        # BUG FIX: previously an unrecognized state fell through and the
        # handler still answered {"message": "ok"} without doing anything.
        manager.logout()
        abort(400, 'Invalid state: %s' % state)
    manager.logout()
    return json.dumps({"action": state, "message": "ok"})
def parse_token(token):
    """
    Decode a base64-encoded "username:password" token.

    Only the first ':' separates the fields, so the password may itself
    contain colons. Returns a (username, password) tuple; password is ''
    when the token contains no ':'.
    """
    decoded = base64.b64decode(token)
    # BUG FIX: on Python 3 b64decode returns bytes, and bytes.split(':')
    # raises TypeError. Decode to text first (no-op path on Python 2,
    # where bytes is str).
    if isinstance(decoded, bytes) and not isinstance(decoded, str):
        decoded = decoded.decode('utf-8')
    username, _, password = decoded.partition(':')
    return (username, password)
def create_manager(hypervisor, host):
    """
    Build a hypervisor Stack manager for `host`.

    Credentials come from the x-simplestack-hypervisor-token request
    header (base64 "user:password"); a missing header aborts with 401.
    The hypervisor driver module is resolved dynamically by name.
    """
    hypervisor_token = request.headers.get("x-simplestack-hypervisor-token")
    if not hypervisor_token:
        abort(401, 'No x-simplestack-hypervisor-token header provided')
    username, password = parse_token(hypervisor_token)
    # __import__ returns the top-level package; walk down to the driver.
    package = __import__("simplestack.hypervisors.%s" % hypervisor)
    driver = getattr(package.hypervisors, hypervisor)
    return driver.Stack({
        "api_server": host,
        "username": username,
        "password": password,
    })
def main(action):
    """Configure logging/privileges and start the Simplestack HTTP server."""
    # Unless running in the foreground (development mode), drop root
    # privileges: group first, then user — the order matters, setgid
    # would fail after setuid.
    if not action == "foreground":
        os.setgid(grp.getgrnam('nogroup')[2])
        os.setuid(pwd.getpwnam(config.get("server", "user"))[2])
    # Bottle debug flag and bind parameters come from the [server]
    # section of the config file.
    debug(config.getboolean("server", "debug"))
    port = config.getint("server", "port")
    bind_addr = config.get("server", "bind_addr")
    set_logger()
    LOG.info("Starting Simplestack server")
    # Blocks here serving requests on the gevent backend.
    run(host=bind_addr, port=port, server="gevent")
| |
"""Support for Denon AVR receivers using their HTTP interface."""
from collections import namedtuple
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL, MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOUND_MODE, SUPPORT_SELECT_SOURCE, SUPPORT_TURN_OFF,
SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP)
from homeassistant.const import (
CONF_HOST, CONF_NAME, CONF_TIMEOUT, CONF_ZONE, STATE_OFF, STATE_ON,
STATE_PAUSED, STATE_PLAYING)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_SOUND_MODE_RAW = 'sound_mode_raw'
CONF_INVALID_ZONES_ERR = 'Invalid Zone (expected Zone2 or Zone3)'
CONF_SHOW_ALL_SOURCES = 'show_all_sources'
CONF_VALID_ZONES = ['Zone2', 'Zone3']
CONF_ZONES = 'zones'
DEFAULT_SHOW_SOURCES = False
DEFAULT_TIMEOUT = 2
KEY_DENON_CACHE = 'denonavr_hosts'
SUPPORT_DENON = SUPPORT_VOLUME_STEP | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | \
SUPPORT_SELECT_SOURCE | SUPPORT_VOLUME_SET
SUPPORT_MEDIA_MODES = SUPPORT_PLAY_MEDIA | \
SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_VOLUME_SET | SUPPORT_PLAY
DENON_ZONE_SCHEMA = vol.Schema({
vol.Required(CONF_ZONE): vol.In(CONF_VALID_ZONES, CONF_INVALID_ZONES_ERR),
vol.Optional(CONF_NAME): cv.string,
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_SHOW_ALL_SOURCES, default=DEFAULT_SHOW_SOURCES):
cv.boolean,
vol.Optional(CONF_ZONES):
vol.All(cv.ensure_list, [DENON_ZONE_SCHEMA]),
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
})
NewHost = namedtuple('NewHost', ['host', 'name'])
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Denon platform."""
import denonavr
# Initialize list with receivers to be started
receivers = []
cache = hass.data.get(KEY_DENON_CACHE)
if cache is None:
cache = hass.data[KEY_DENON_CACHE] = set()
# Get config option for show_all_sources and timeout
show_all_sources = config.get(CONF_SHOW_ALL_SOURCES)
timeout = config.get(CONF_TIMEOUT)
# Get config option for additional zones
zones = config.get(CONF_ZONES)
if zones is not None:
add_zones = {}
for entry in zones:
add_zones[entry[CONF_ZONE]] = entry.get(CONF_NAME)
else:
add_zones = None
# Start assignment of host and name
new_hosts = []
# 1. option: manual setting
if config.get(CONF_HOST) is not None:
host = config.get(CONF_HOST)
name = config.get(CONF_NAME)
new_hosts.append(NewHost(host=host, name=name))
# 2. option: discovery using netdisco
if discovery_info is not None:
host = discovery_info.get('host')
name = discovery_info.get('name')
new_hosts.append(NewHost(host=host, name=name))
# 3. option: discovery using denonavr library
if config.get(CONF_HOST) is None and discovery_info is None:
d_receivers = denonavr.discover()
# More than one receiver could be discovered by that method
for d_receiver in d_receivers:
host = d_receiver["host"]
name = d_receiver["friendlyName"]
new_hosts.append(
NewHost(host=host, name=name))
for entry in new_hosts:
# Check if host not in cache, append it and save for later
# starting
if entry.host not in cache:
new_device = denonavr.DenonAVR(
host=entry.host, name=entry.name,
show_all_inputs=show_all_sources, timeout=timeout,
add_zones=add_zones)
for new_zone in new_device.zones.values():
receivers.append(DenonDevice(new_zone))
cache.add(host)
_LOGGER.info("Denon receiver at host %s initialized", host)
# Add all freshly discovered receivers
if receivers:
add_entities(receivers)
class DenonDevice(MediaPlayerDevice):
    """Representation of a Denon Media Player Device.

    Thin state mirror over a denonavr receiver/zone object: all values
    are copied from the receiver in __init__ and refreshed in update(),
    so the properties below only read cached attributes.
    """

    def __init__(self, receiver):
        """Initialize the device."""
        self._receiver = receiver
        self._name = self._receiver.name
        self._muted = self._receiver.muted
        self._volume = self._receiver.volume
        self._current_source = self._receiver.input_func
        self._source_list = self._receiver.input_func_list
        self._state = self._receiver.state
        self._power = self._receiver.power
        self._media_image_url = self._receiver.image_url
        self._title = self._receiver.title
        self._artist = self._receiver.artist
        self._album = self._receiver.album
        self._band = self._receiver.band
        self._frequency = self._receiver.frequency
        self._station = self._receiver.station
        # Sound-mode attributes are only meaningful when the receiver
        # reports support; otherwise they stay None.
        self._sound_mode_support = self._receiver.support_sound_mode
        if self._sound_mode_support:
            self._sound_mode = self._receiver.sound_mode
            self._sound_mode_raw = self._receiver.sound_mode_raw
            self._sound_mode_list = self._receiver.sound_mode_list
        else:
            self._sound_mode = None
            self._sound_mode_raw = None
            self._sound_mode_list = None
        # `x and FLAG` evaluates to the flag when supported and to
        # False (i.e. |= 0, a no-op) when not.
        self._supported_features_base = SUPPORT_DENON
        self._supported_features_base |= (self._sound_mode_support and
                                          SUPPORT_SELECT_SOUND_MODE)

    def update(self):
        """Get the latest status information from device."""
        self._receiver.update()
        self._name = self._receiver.name
        self._muted = self._receiver.muted
        self._volume = self._receiver.volume
        self._current_source = self._receiver.input_func
        self._source_list = self._receiver.input_func_list
        self._state = self._receiver.state
        self._power = self._receiver.power
        self._media_image_url = self._receiver.image_url
        self._title = self._receiver.title
        self._artist = self._receiver.artist
        self._album = self._receiver.album
        self._band = self._receiver.band
        self._frequency = self._receiver.frequency
        self._station = self._receiver.station
        if self._sound_mode_support:
            self._sound_mode = self._receiver.sound_mode
            self._sound_mode_raw = self._receiver.sound_mode_raw

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def is_volume_muted(self):
        """Return boolean if volume is currently muted."""
        return self._muted

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        # Volume is sent in a format like -50.0. Minimum is -80.0,
        # maximum is 18.0
        return (float(self._volume) + 80) / 100

    @property
    def source(self):
        """Return the current input source."""
        return self._current_source

    @property
    def source_list(self):
        """Return a list of available input sources."""
        return self._source_list

    @property
    def sound_mode(self):
        """Return the current matched sound mode."""
        return self._sound_mode

    @property
    def sound_mode_list(self):
        """Return a list of available sound modes."""
        return self._sound_mode_list

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        # Network-audio sources additionally get the transport controls
        # (play/pause/next/...).
        if self._current_source in self._receiver.netaudio_func_list:
            return self._supported_features_base | SUPPORT_MEDIA_MODES
        return self._supported_features_base

    @property
    def media_content_id(self):
        """Content ID of current playing media."""
        return None

    @property
    def media_content_type(self):
        """Content type of current playing media."""
        if self._state == STATE_PLAYING or self._state == STATE_PAUSED:
            return MEDIA_TYPE_MUSIC
        return MEDIA_TYPE_CHANNEL

    @property
    def media_duration(self):
        """Duration of current playing media in seconds."""
        return None

    @property
    def media_image_url(self):
        """Image url of current playing media."""
        if self._current_source in self._receiver.playing_func_list:
            return self._media_image_url
        return None

    @property
    def media_title(self):
        """Title of current playing media."""
        # Non-playing sources show the source name; tuner sources fall
        # back to the frequency when no title is available.
        if self._current_source not in self._receiver.playing_func_list:
            return self._current_source
        if self._title is not None:
            return self._title
        return self._frequency

    @property
    def media_artist(self):
        """Artist of current playing media, music track only."""
        if self._artist is not None:
            return self._artist
        return self._band

    @property
    def media_album_name(self):
        """Album name of current playing media, music track only."""
        if self._album is not None:
            return self._album
        return self._station

    @property
    def media_album_artist(self):
        """Album artist of current playing media, music track only."""
        return None

    @property
    def media_track(self):
        """Track number of current playing media, music track only."""
        return None

    @property
    def media_series_title(self):
        """Title of series of current playing media, TV show only."""
        return None

    @property
    def media_season(self):
        """Season of current playing media, TV show only."""
        return None

    @property
    def media_episode(self):
        """Episode of current playing media, TV show only."""
        return None

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        attributes = {}
        if (self._sound_mode_raw is not None and self._sound_mode_support and
                self._power == 'ON'):
            attributes[ATTR_SOUND_MODE_RAW] = self._sound_mode_raw
        return attributes

    def media_play_pause(self):
        """Simulate play pause media player."""
        return self._receiver.toggle_play_pause()

    def media_previous_track(self):
        """Send previous track command."""
        return self._receiver.previous_track()

    def media_next_track(self):
        """Send next track command."""
        return self._receiver.next_track()

    def select_source(self, source):
        """Select input source."""
        return self._receiver.set_input_func(source)

    def select_sound_mode(self, sound_mode):
        """Select sound mode."""
        return self._receiver.set_sound_mode(sound_mode)

    def turn_on(self):
        """Turn on media player."""
        if self._receiver.power_on():
            self._state = STATE_ON

    def turn_off(self):
        """Turn off media player."""
        if self._receiver.power_off():
            self._state = STATE_OFF

    def volume_up(self):
        """Volume up the media player."""
        return self._receiver.volume_up()

    def volume_down(self):
        """Volume down media player."""
        return self._receiver.volume_down()

    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        # Volume has to be sent in a format like -50.0. Minimum is -80.0,
        # maximum is 18.0
        volume_denon = float((volume * 100) - 80)
        if volume_denon > 18:
            volume_denon = float(18)
        try:
            if self._receiver.set_volume(volume_denon):
                self._volume = volume_denon
        except ValueError:
            pass

    def mute_volume(self, mute):
        """Send mute command."""
        return self._receiver.mute(mute)
| |
from async_geocoder import AsyncGeocoder
import asyncio
from shapely.geometry import Point, LineString
import json
import sys
import logging
import re
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
log = logging.getLogger()
class ElasticGeocoder(AsyncGeocoder):
    """
    Implements AsyncGeocoder with an Elasticsearch instance managed through a
    docker-compose container. Works for two types of geocoding from the CFPB
    grasshopper-loader repo, address point and TIGER ADDRFEAT data. Interpolates
    any ADDRFEAT address range in order to get a single point.
    """
    conn_limit = 100
    q_type = 'census'
    es_host = None

    # Mapping of columns to desired ones here, substitute other columns names
    # as the new keys, while the values should remain the same
    col_map = {
        'ID': 'id',
        'household_id': 'id',
        'HOUSEHOLD_ID': 'id',
        'ADDRESS_NUMBER': 'address_number',
        'STREET_NAME': 'street_name',
        'STREET_NAME_POST_TYPE': 'street_name_post_type',
        'PLACE_NAME': 'place_name',
        'STATE_NAME': 'state_name',
        'ZIP_CODE': 'zip_code'
    }

    def __init__(self, *args, **kwargs):
        # BUG FIX: the original called
        #   super(ElasticGeocoder, self).__init__(self, *args, **kwargs)
        # passing `self` through explicitly, which hands the parent a
        # duplicate `self` as its first positional argument.
        super(ElasticGeocoder, self).__init__(*args, **kwargs)
        self.es_url = 'http://{}:9200/{}/_search'.format(self.es_host, self.q_type)

    async def request_geocoder(self, client, row):
        """
        POST a query for `row` to Elasticsearch and return (row_id, geom).

        geom is a {'lat': ..., 'lon': ...} dict, or None when no hit was
        found.
        """
        # Replace col names
        row = dict(row)
        for k, v in self.col_map.items():
            if k in row:
                row[v] = row.pop(k, None)
        if self.q_type == 'census':
            query_data = await self.create_census_query(row)
        elif self.q_type == 'address':
            query_data = await self.create_point_query(row)
        async with client.post(self.es_url, data=json.dumps(query_data)) as response:
            response_json = await response.json()
        # Collapse the original three-step membership/length checks:
        # missing 'hits' key, missing inner 'hits' list, or an empty
        # list all mean "no match".
        hits = response_json.get('hits', {}).get('hits')
        if not hits:
            return row['id'], None
        addr_hit = hits[0]
        if self.q_type == 'address':
            geom_dict = dict(lon=addr_hit['geometry']['coordinates'][0],
                             lat=addr_hit['geometry']['coordinates'][1])
        elif self.q_type == 'census':
            geom_dict = await self.interpolate_census(row, addr_hit)
        return row['id'], geom_dict

    async def handle_census_range(self, range_from, range_to):
        """
        Normalize one side (L or R) of a TIGER ADDRFEAT address range.

        Non-numeric or missing bounds are treated as 0. Returns the
        parity of the range, both bounds as ints, and their absolute
        difference.
        """
        from_int = 0
        to_int = 0
        if range_from:
            from_int = int(range_from) if range_from.isdigit() else 0
        if range_to:
            to_int = int(range_to) if range_to.isdigit() else 0
        range_even = from_int % 2 == 0 and to_int % 2 == 0
        if from_int > to_int:
            range_diff = from_int - to_int
        else:
            range_diff = to_int - from_int
        return {
            'is_even': range_even,
            'from_int': from_int,
            'to_int': to_int,
            'range_diff': range_diff
        }

    async def interpolate_census(self, data, res_data):
        """
        Interpolate a point along a TIGER ADDRFEAT line for an address.

        Picks the street side (L/R range) whose parity matches the house
        number, then interpolates proportionally along the feature's
        LineString. Returns {'lat': ..., 'lon': ...}.
        """
        tiger_feat = res_data['_source']
        data_line = LineString(
            [Point(*p) for p in tiger_feat['geometry']['coordinates']]
        )
        line_len = data_line.length
        # Strip any non-digit characters (unit letters etc.) from the
        # house number before converting.
        if data['address_number']:
            addr_int = int(re.sub('[^0-9]', '', str(data['address_number'])))
        else:
            addr_int = 0
        addr_is_even = addr_int % 2 == 0
        l_range = await self.handle_census_range(
            tiger_feat['properties']['LFROMHN'],
            tiger_feat['properties']['LTOHN']
        )
        r_range = await self.handle_census_range(
            tiger_feat['properties']['RFROMHN'],
            tiger_feat['properties']['RTOHN']
        )
        if addr_is_even == l_range['is_even']:
            tiger_range = l_range
        elif addr_is_even == r_range['is_even']:
            tiger_range = r_range
        else:
            # TODO: Throw error, for now default to l_range
            tiger_range = l_range
        # Check for divide by zero errors, otherwise create distance
        if tiger_range['range_diff'] == 0:
            range_dist = 0
        elif tiger_range['from_int'] > tiger_range['to_int']:
            range_dist = ((tiger_range['from_int'] - addr_int) /
                          tiger_range['range_diff']) * line_len
        else:
            range_dist = ((addr_int - tiger_range['from_int']) /
                          tiger_range['range_diff']) * line_len
        inter_pt = data_line.interpolate(range_dist)
        return {'lat': inter_pt.y, 'lon': inter_pt.x}

    async def create_point_query(self, data):
        """Build the ES bool query for address-point ('address') data."""
        point_query = {
            'query': {
                'bool': {
                    'must': [
                        {'term': {'properties.number': data['address_number']}},
                        {'term': {'properties.state': data['state_name'].lower()}}
                    ],
                    'should': [
                        {'term': {'properties.zip': str(data['zip_code'])}},
                        {'term': {'properties.city': data['place_name'].lower()}}
                    ]
                }
            }
        }
        if data.get('street_name_post_type'):
            point_query['query']['bool']['should'].append(
                {'term': {'properties.street': data['street_name_post_type'].lower()}}
            )
        if data.get('street_name'):
            for s in data['street_name'].split(' '):
                point_query['query']['bool']['must'].append(
                    {'term': {"properties.street": s.lower()}}
                )
        return point_query

    async def create_census_query(self, data):
        """
        Build the ES bool query for TIGER ADDRFEAT ('census') data.

        The nested should/must pair matches the address number against
        the L/R from-to ranges in either direction (ranges may be stored
        ascending or descending).
        """
        census_query = {
            'query': {
                'bool': {
                    'must': [
                        {
                            'bool': {
                                'should': [
                                    {
                                        'bool': {
                                            'must': [
                                                {
                                                    'bool': {
                                                        'should': [
                                                            {'range': {
                                                                'properties.LFROMHN': {'lte': data['address_number']}}},
                                                            {'range': {
                                                                'properties.RFROMHN': {'lte': data['address_number']}}}
                                                        ]
                                                    }
                                                },
                                                {
                                                    'bool': {
                                                        'should': [
                                                            {'range': {
                                                                'properties.LTOHN': {'gte': data['address_number']}}},
                                                            {'range': {
                                                                'properties.RTOHN': {'gte': data['address_number']}}}
                                                        ]
                                                    }
                                                }
                                            ]
                                        }
                                    },
                                    {
                                        'bool': {
                                            'must': [
                                                {
                                                    'bool': {
                                                        'should': [
                                                            {'range': {
                                                                'properties.LFROMHN': {'gte': data['address_number']}}},
                                                            {'range': {
                                                                'properties.RFROMHN': {'gte': data['address_number']}}}
                                                        ]
                                                    }
                                                },
                                                {
                                                    'bool': {
                                                        'should': [
                                                            {'range': {
                                                                'properties.LTOHN': {'lte': data['address_number']}}},
                                                            {'range': {
                                                                'properties.RTOHN': {'lte': data['address_number']}}}
                                                        ]
                                                    }
                                                }
                                            ]
                                        }
                                    }
                                ]
                            }
                        }
                    ],
                    'should': [],
                    'filter': {
                        'bool': {
                            'must': [
                                {'term': {'properties.STATE': data['state_name'].lower()}},
                                {
                                    'bool': {
                                        'should': [
                                            {'term': {'properties.ZIPL': str(data['zip_code'])}},
                                            {'term': {'properties.ZIPR': str(data['zip_code'])}}
                                        ]
                                    }
                                }
                            ]
                        }
                    }
                }
            }
        }
        if data.get('street_name_post_type'):
            census_query['query']['bool']['should'].append(
                {'term': {'properties.FULLNAME': data['street_name_post_type'].lower()}}
            )
        if data.get('street_name'):
            for s in data['street_name'].split(' '):
                census_query['query']['bool']['must'].append(
                    {'term': {"properties.FULLNAME": s.lower()}}
                )
        # Consistency fix: use .get() like the other optional keys so a
        # missing 'place_name' no longer raises KeyError.
        if data.get('place_name'):
            for p in data['place_name'].split(' '):
                census_query['query']['bool']['should'].append(
                    {'term': {'properties.PLACE': p.lower()}}
                )
        return census_query
| |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Drop the Indicator model and attach Question directly to Batch."""
        # Deleting model 'Indicator'
        db.delete_table(u'survey_indicator')
        # Deleting field 'Question.indicator'
        db.delete_column(u'survey_question', 'indicator_id')
        # Adding field 'Question.batch' (nullable FK so existing rows survive)
        db.add_column(u'survey_question', 'batch',
                      self.gf('django.db.models.fields.related.ForeignKey')(related_name='questions', null=True, to=orm['survey.Batch']),
                      keep_default=False)
    def backwards(self, orm):
        """Reverse of forwards(): recreate Indicator and re-link Question."""
        # Adding model 'Indicator'
        db.create_table(u'survey_indicator', (
            ('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
            ('batch', self.gf('django.db.models.fields.related.ForeignKey')(related_name='indicators', null=True, to=orm['survey.Batch'])),
            ('order', self.gf('django.db.models.fields.PositiveIntegerField')(max_length=2, null=True)),
            ('identifier', self.gf('django.db.models.fields.CharField')(max_length=100, null=True)),
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ))
        db.send_create_signal('survey', ['Indicator'])
        # Adding field 'Question.indicator'
        db.add_column(u'survey_question', 'indicator',
                      self.gf('django.db.models.fields.related.ForeignKey')(related_name='questions', null=True, to=orm['survey.Indicator']),
                      keep_default=False)
        # Deleting field 'Question.batch'
        db.delete_column(u'survey_question', 'batch_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'locations.location': {
'Meta': {'object_name': 'Location'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'parent_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'point': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Point']", 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['locations.Location']"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations'", 'null': 'True', 'to': u"orm['locations.LocationType']"})
},
u'locations.locationtype': {
'Meta': {'object_name': 'LocationType'},
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'primary_key': 'True'})
},
u'locations.point': {
'Meta': {'object_name': 'Point'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'max_digits': '13', 'decimal_places': '10'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'max_digits': '13', 'decimal_places': '10'})
},
'survey.answerrule': {
'Meta': {'object_name': 'AnswerRule'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'condition': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'next_question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parent_question_rules'", 'null': 'True', 'to': "orm['survey.Question']"}),
'question': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'rule'", 'unique': 'True', 'null': 'True', 'to': "orm['survey.Question']"}),
'validate_with_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.QuestionOption']", 'null': 'True'}),
'validate_with_question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Question']", 'null': 'True'}),
'validate_with_value': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'null': 'True'})
},
'survey.backend': {
'Meta': {'object_name': 'Backend'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
},
'survey.batch': {
'Meta': {'object_name': 'Batch'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'null': 'True'})
},
'survey.batchlocationstatus': {
'Meta': {'object_name': 'BatchLocationStatus'},
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'open_locations'", 'null': 'True', 'to': "orm['survey.Batch']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'open_batches'", 'null': 'True', 'to': u"orm['locations.Location']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
'survey.children': {
'Meta': {'object_name': 'Children'},
'aged_between_0_5_months': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'aged_between_12_23_months': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'aged_between_13_17_years': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'aged_between_24_59_months': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'aged_between_5_12_years': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'aged_between_6_11_months': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'children'", 'unique': 'True', 'null': 'True', 'to': "orm['survey.Household']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
'survey.formula': {
'Meta': {'object_name': 'Formula'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'denominator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'as_denominator'", 'to': "orm['survey.Question']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'numerator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'as_numerator'", 'to': "orm['survey.Question']"})
},
'survey.household': {
'Meta': {'object_name': 'Household'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'households'", 'null': 'True', 'to': "orm['survey.Investigator']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'number_of_females': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'number_of_males': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'survey.householdbatchcompletion': {
'Meta': {'object_name': 'HouseholdBatchCompletion'},
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'completed_households'", 'null': 'True', 'to': "orm['survey.Batch']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'completed_batches'", 'null': 'True', 'to': "orm['survey.Household']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'completed_batches'", 'null': 'True', 'to': "orm['survey.Investigator']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
'survey.householdhead': {
'Meta': {'object_name': 'HouseholdHead'},
'age': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'household': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'head'", 'unique': 'True', 'null': 'True', 'to': "orm['survey.Household']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'default': "'Primary'", 'max_length': '100', 'null': 'True'}),
'male': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'default': "'16'", 'max_length': '100'}),
'resident_since_month': ('django.db.models.fields.PositiveIntegerField', [], {'default': '5'}),
'resident_since_year': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1984'}),
'surname': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'})
},
'survey.investigator': {
'Meta': {'object_name': 'Investigator'},
'age': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'backend': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Backend']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'English'", 'max_length': '100', 'null': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'default': "'Primary'", 'max_length': '100', 'null': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']", 'null': 'True'}),
'male': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mobile_number': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'weights': ('django.db.models.fields.FloatField', [], {'default': '0'})
},
'survey.locationautocomplete': {
'Meta': {'object_name': 'LocationAutoComplete'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']", 'null': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'survey.multichoiceanswer': {
'Meta': {'object_name': 'MultiChoiceAnswer'},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.QuestionOption']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Household']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Investigator']", 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Question']", 'null': 'True'}),
'rule_applied': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.AnswerRule']", 'null': 'True'})
},
'survey.numericalanswer': {
'Meta': {'object_name': 'NumericalAnswer'},
'answer': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '5', 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Household']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Investigator']", 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Question']", 'null': 'True'}),
'rule_applied': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.AnswerRule']", 'null': 'True'})
},
'survey.question': {
'Meta': {'object_name': 'Question'},
'answer_type': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'null': 'True', 'to': "orm['survey.Batch']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'null': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'children'", 'null': 'True', 'to': "orm['survey.Question']"}),
'subquestion': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
'survey.questionoption': {
'Meta': {'object_name': 'QuestionOption'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'null': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'null': 'True', 'to': "orm['survey.Question']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
'survey.randomhouseholdselection': {
'Meta': {'object_name': 'RandomHouseHoldSelection'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile_number': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'no_of_households': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'selected_households': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'survey.textanswer': {
'Meta': {'object_name': 'TextAnswer'},
'answer': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Household']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Investigator']", 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Question']", 'null': 'True'}),
'rule_applied': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.AnswerRule']", 'null': 'True'})
},
'survey.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile_number': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': u"orm['auth.User']"})
},
'survey.women': {
'Meta': {'object_name': 'Women'},
'aged_between_15_19_years': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'aged_between_20_49_years': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'women'", 'unique': 'True', 'null': 'True', 'to': "orm['survey.Household']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
}
}
complete_apps = ['survey']
| |
#!/usr/bin/python
"""
Author : Abraham Cabrera
Date : Sunday July 10, 2016.
Description : Yahoo weather json object respresentation.
"""
import pprint
import os
import urllib2
import urllib
import json
from alarm.plugin import CustomPluginBase
from gtts import gTTS
class YahooWeatherPlugin(CustomPluginBase):
    """
    Alarm plugin that fetches the current Yahoo weather, renders a spoken
    summary with Google's Text To Speech (gTTS), and plays the audio file.
    """
    def __init__(self):
        # Spoken weather summary; built by load().
        self.msg = None
        # File the text-to-speech audio is saved to.
        self.weatherReadingMedia = "weather.mp3"
        # External audio player command (afplay is macOS-specific).
        self.mediaPlayer = "afplay"

    def load(self):
        """
        Pre-fetches the weather and saves a spoken summary of it, via
        Google's Text To Speech module, to self.weatherReadingMedia.

        :returns: True if successful else False
        :rtype: boolean
        """
        # BUG FIX: start pessimistic. The original initialised success to
        # True, so an exception raised before the save still reported success.
        success = False
        try:
            weather = Weather()
            self.msg = "{0} where the temperature is {1} degrees {2} and {3}. Forecast ".format(
                weather.results.item.title,
                weather.results.item.condition.temperature,
                weather.results.units.temperature,
                weather.results.item.condition.text)
            tts = gTTS(text=self.msg, lang='en')
            tts.save(self.weatherReadingMedia)
            if os.path.exists(self.weatherReadingMedia):
                success = True
        except Exception as exception:
            print(exception)
        return success

    def run(self):
        """
        Plays the pre-fetched weather media.

        :returns: True if successful else False
        :rtype: boolean
        """
        success = False
        try:
            if os.path.exists(self.weatherReadingMedia):
                cmd = "{0} {1}".format(self.mediaPlayer, self.weatherReadingMedia)
                os.system(cmd)
                success = True
        except Exception as exception:
            print(exception)
        return success

    def unload(self):
        """
        Removes the weather media produced by the last load().

        :returns: True if successful else False
        :rtype: boolean
        """
        success = False
        try:
            if os.path.exists(self.weatherReadingMedia):
                os.remove(self.weatherReadingMedia)
                success = True
            else:
                print("No {0}".format(self.weatherReadingMedia))
        except Exception as exception:
            print(exception)
        return success
class Unit(object):
    """
    Weather measurement units.

    :param distance: distance unit.
    :type distance: int
    :param pressure: pressure unit
    :type pressure: int
    :param speed: speed unit
    :type speed: int
    :param temperature: one-letter temperature unit code ("F" or "C")
    :type temperature: str
    """
    # One-letter API unit codes mapped to their spoken names.
    _TEMPERATURE_NAMES = {"F": "fahrenheit", "C": "celsius"}

    def __init__(self, distance, pressure, speed, temperature):
        self.distance = distance
        self.pressure = pressure
        self.speed = speed
        # BUG FIX: the original only recognised "F" and mapped "C" (and
        # anything else) to None, which produced "degrees None" in the
        # spoken summary. Unknown codes still map to None.
        self.temperature = self._TEMPERATURE_NAMES.get(temperature)

    def __repr__(self):
        return "Distance:{0}, Pressure:{1}, Speed:{2}, Temperature:{3}".format(self.distance, self.pressure, self.speed, self.temperature)
class Forecast(object):
    """
    One day of forecast data returned by the Yahoo weather API.

    :param code: descriptor code
    :type code: int
    :param date: date of the forecast
    :type date: str
    :param high: expected high temperature
    :type high: int
    :param low: expected low temperature
    :type low: int
    :param text: human-readable forecast description
    :type text: str
    :param unit: temperature unit name used when spoken
    :type unit: str
    """
    def __init__(self, code, date, high, low, text, unit="fahrenheit"):
        self.code = code
        self.date = date
        self.high = high
        self.low = low
        self.text = text
        self.unit = unit

    def __repr__(self):
        template = "For {0} will be at high {1}, low {2} degrees {3} and {4}."
        return template.format(self.date, self.high, self.low, self.unit, self.text)

    def __str__(self):
        return repr(self)
class Astronomy(object):
    """
    Sunrise and sunset times for the day.

    :param sunrise: time of sunrise.
    :type sunrise: str
    :param sunset: time of sunset.
    :type sunset: str
    """
    def __init__(self, sunrise, sunset):
        self.sunrise = sunrise
        self.sunset = sunset

    def __repr__(self):
        return "Sunrise:%s, Sunset:%s" % (self.sunrise, self.sunset)
class Atmosphere(object):
    """
    Atmospheric readings for the day.

    :param humidity: humidity of the day
    :type humidity: int
    :param pressure: pressure of the day
    :type pressure: float
    :param rising: pressure trend indicator
    :type rising: int
    :param visibility: visibility of the day
    :type visibility: float
    """
    def __init__(self, humidity, pressure, rising, visibility):
        self.humidity = humidity
        self.pressure = pressure
        self.rising = rising
        self.visibility = visibility

    def __repr__(self):
        return "humidity:%s, pressure:%s, rising:%s, visibility:%s" % (
            self.humidity, self.pressure, self.rising, self.visibility)
class Location(object):
    """
    Location the weather report applies to.

    :param city: city of location
    :type city: str
    :param country: country of location
    :type country: str
    :param region: region (stored with surrounding whitespace stripped)
    :type region: str
    """
    def __init__(self, city, country, region):
        self.city = city
        self.country = country
        # The API pads the region with whitespace; trim it once here.
        self.region = region.strip()

    def __repr__(self):
        return "City:%s, Region:%s, Country:%s" % (self.city, self.region, self.country)
class Wind(object):
    """
    Wind reading.

    :param chill: wind chill
    :type chill: int
    :param direction: wind direction
    :type direction: int
    :param speed: wind speed
    :type speed: int
    """
    def __init__(self, chill, direction, speed):
        self.chill = chill
        self.direction = direction
        self.speed = speed

    def __repr__(self):
        return "Chill:%s, Direction:%s, Speed:%s" % (self.chill, self.direction, self.speed)
class YahooWeatherImage(object):
    """
    Image metadata attached to a Yahoo weather channel (url, size,
    link and title).
    """
    def __init__(self, url, width, height, link, title):
        self.url = url
        self.width = width
        self.height = height
        self.link = link
        self.title = title

    def __repr__(self):
        fields = ("Url:{0}".format(self.url),
                  "Width:{0}".format(self.width),
                  "Height:{0}".format(self.height),
                  "Link:{0}".format(self.link),
                  "Title:{0}".format(self.title))
        return ", ".join(fields)
class YahooWeatherQueryResult(object):
    """
    Object representation of a Yahoo weather YQL query response.

    :param cnt: query row count
    :type cnt: int
    :param ln: language of the query
    :type ln: str
    :param results: raw 'results' mapping from the decoded JSON response
    :type results: dict
    :param crt: creation timestamp of the response
    :type crt: str
    """
    def __init__(self, cnt, ln, results, crt):
        self.count = cnt
        self.lang = ln
        self.created = crt
        # All payload fields live under the 'channel' mapping.
        channel = results['channel']
        self.results = Channel(channel['lastBuildDate'],
                               channel['atmosphere'],
                               channel['description'],
                               channel['language'],
                               channel['title'],
                               channel['image'],
                               channel['item'],
                               channel['link'],
                               channel['location'],
                               channel['ttl'],
                               channel['units'],
                               channel['astronomy'],
                               channel['wind'])

    def __repr__(self):
        return "{0}\n {1}\n {2}\n {3}".format(self.count, self.lang, self.results, self.created)
class Channel(object):
    """
    Weather channel payload: plain metadata plus typed wrappers parsed
    out of the raw Yahoo response dictionaries.

    :type buildDate: str
    :type atmosphere: class::~Atmosphere
    :type desc: str
    :type lan: str
    :type title: str
    :type img: class::~YahooWeatherImage
    :type item: class::~ChannelItem
    :type link: str
    :type loc: class::~Location
    :param ttl: time-to-live value from the feed
    :type ttl: int
    :type units: class::~Unit
    :type astronomy: class::~Astronomy
    :type wind: class::~Wind
    """
    def __init__(self, buildDate, atmosphere, desc, lan, title, img, item, link, loc, ttl, units, astronomy, wind):
        # Plain metadata fields.
        self.lastBuildDate = buildDate
        self.description = desc
        # BUG FIX: 'lan' was accepted (and documented) but never stored.
        self.language = lan
        self.title = title
        self.link = link
        self.ttl = ttl
        # Typed wrappers around the raw sub-dictionaries.
        self.atmosphere = Atmosphere(atmosphere['humidity'], atmosphere['pressure'],
                                     atmosphere['rising'], atmosphere['visibility'])
        self.image = YahooWeatherImage(img['url'], img['width'], img['height'],
                                       img['link'], img['title'])
        self.item = ChannelItem(item['description'], item['pubDate'], item['title'],
                                item['long'], item['forecast'], item['link'],
                                item['lat'], item['guid'], item['condition'])
        self.location = Location(loc['city'], loc['country'], loc['region'])
        self.units = Unit(units['distance'], units['pressure'], units['speed'],
                          units['temperature'])
        self.astronomy = Astronomy(astronomy['sunrise'], astronomy['sunset'])
        self.wind = Wind(wind['chill'], wind['direction'], wind['speed'])
class ChannelItem(object):
    """
    The 'item' element of a weather channel: current conditions, the
    multi-day forecast, and feed metadata (link, coordinates, guid).

    NOTE: the parameter name 'long' shadows a builtin but is kept to
    preserve the constructor's keyword interface.
    """
    def __init__(self, desc, pubDate, title, long, forecast, link, lat, guid, condition):
        self.description = desc
        self.pubDate = pubDate
        self.title = title
        self.long = long
        self.link = link
        self.lat = lat
        self.guid = guid
        # Wrap each raw forecast dict in a Forecast instance.
        self.forecast = [
            Forecast(day['code'], day['date'], day['high'], day['low'], day['text'])
            for day in forecast
        ]
        # Current condition, wrapped in its own type.
        self.condition = ItemCondition(condition['code'], condition['date'],
                                       condition['text'], condition['temp'])
class ItemCondition(object):
    """
    Current weather condition, built from a dict shaped like:
    {u'date': u'Sun, 10 Jul 2016 05:00 PM CDT', u'text': u'Sunny', u'code': u'32', u'temp': u'90'}
    """
    def __init__(self, code, date, text, temperature):
        self.code = code
        self.date = date
        self.text = text
        self.temperature = temperature

    def __repr__(self):
        return "Today %s with temperature of %s and %s" % (
            self.date, self.temperature, self.text)
class YahooWeatherManager(object):
    """
    Queries Yahoo's public YQL endpoint for weather data and keeps the
    parsed result on the instance.
    """
    def __init__(self, weatherLocation):
        # Free-form description of the desired location.
        self.location = weatherLocation
        # Base URL of Yahoo's public YQL service.
        self.endpoint = "https://query.yahooapis.com/v1/public/yql?"
        # Populated by setWeather() with a YahooWeatherQueryResult.
        self.yahooWeatherQueryResult = None

    @classmethod
    def getWoeid(cls, location):
        """
        Gets woeid (Where On Earth ID) of desired location.
        Not implemented yet; always returns None.

        :param location: desired location.
        :type location: str
        """
        # BUG FIX: the original omitted the 'cls' parameter required by
        # @classmethod and returned the undefined name 'none'.
        woeid = None
        return woeid

    def setWeather(self, woeid, includeForecast=True):
        """
        Queries Yahoo's Weather API and stores the parsed result in
        self.yahooWeatherQueryResult.

        :param woeid: location of weather desired.
        :type woeid: int
        :param includeForecast: flag to include forecast
        :type includeForecast: boolean
        """
        # BUG FIX: the original signature omitted 'self' even though the
        # body used self.endpoint and self.yahooWeatherQueryResult.
        try:
            query = None
            format = "json"
            if includeForecast:
                query = "select * from weather.forecast where woeid={0}".format(woeid)
            # BUG FIX: was 'if not query', which skipped the request exactly
            # when a query string had been built.
            if query and format == "json":
                yql_url = "{0}{1}&format={2}".format(self.endpoint,
                                                     urllib.urlencode({'q': query}), format)
                data = urllib2.urlopen(yql_url).read()
                # BUG FIX: the response nests its fields under 'query' (see
                # the Weather() helper in this module); the original indexed
                # the top-level dict directly.
                result = json.loads(data)['query']
                self.yahooWeatherQueryResult = YahooWeatherQueryResult(result["count"],
                                                                       result["lang"],
                                                                       result["results"],
                                                                       result["created"])
        except Exception as exception:
            print(exception)
def yahooWeather(woeid="2438265", format="json"):
    """
    Queries the Yahoo Weather YQL endpoint and returns the decoded response.

    :param woeid: weather location id
    :type woeid: int
    :param format: query output format
    :type format: str
    :returns: dictionary of query
    :rtype: ~dict
    """
    endpoint = "https://query.yahooapis.com/v1/public/yql?"
    statement = "select * from weather.forecast where woeid={0}".format(woeid)
    url = endpoint + urllib.urlencode({'q': statement}) + "&format=" + format
    response = urllib2.urlopen(url).read()
    return json.loads(response)
def Weather(woeid="2438265", format="json"):
    """
    Calls the Yahoo weather API and wraps the returned data in typed objects.

    :param woeid: weather location id
    :type woeid: int
    :param format: query output format
    :type format: str
    :returns: object of type YahooWeatherQueryResult
    :rtype: class::~YahooWeatherQueryResult
    """
    # BUG FIX: the original called yahooWeather() with no arguments, so the
    # woeid/format parameters passed to this function were silently ignored.
    yahooWeatherDataJson = yahooWeather(woeid, format)
    query = yahooWeatherDataJson['query']
    return YahooWeatherQueryResult(query["count"],
                                   query["lang"],
                                   query["results"],
                                   query["created"])
def main():
    """Fetch the current weather, print and speak a summary, then play it."""
    weather = Weather()
    summary = "{0} where the temperature is {1} degrees {2} and {3}. Forecast ".format(
        weather.results.item.title,
        weather.results.item.condition.temperature,
        weather.results.units.temperature,
        weather.results.item.condition.text)
    summary += ' '.join(str(day) for day in weather.results.item.forecast)
    print(summary)
    speech = gTTS(text=summary, lang='en')
    speech.save("hello.mp3")
    os.system("afplay hello.mp3")


if __name__ == "__main__":
    main()
| |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Note.order'
db.add_column(u'catalog_note', 'order',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Note.order'
db.delete_column(u'catalog_note', 'order')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalog.cfistoreitem': {
'Meta': {'object_name': 'CfiStoreItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'item': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.Product']", 'unique': 'True'}),
'likers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'cfi_store_item_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeCfiStoreItem']", 'to': u"orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.comment': {
'Meta': {'object_name': 'Comment'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.documentation': {
'Meta': {'object_name': 'Documentation'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.emailcollect': {
'Meta': {'object_name': 'EmailCollect'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'catalog.image': {
'Meta': {'object_name': 'Image'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'large_url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'small_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'images'", 'null': 'True', 'to': u"orm['auth.User']"})
},
'catalog.like': {
'Meta': {'object_name': 'Like'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likecfistoreitem': {
'Meta': {'unique_together': "(('user', 'cfi_store_item'),)", 'object_name': 'LikeCfiStoreItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'cfi_store_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.CfiStoreItem']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likemakey': {
'Meta': {'unique_together': "(('user', 'makey'),)", 'object_name': 'LikeMakey'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likenote': {
'Meta': {'unique_together': "(('user', 'note'),)", 'object_name': 'LikeNote'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'note': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Note']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeproduct': {
'Meta': {'unique_together': "(('user', 'product'),)", 'object_name': 'LikeProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeproductdescription': {
'Meta': {'unique_together': "(('user', 'product_description'),)", 'object_name': 'LikeProductDescription'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product_description': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductDescription']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeproductimage': {
'Meta': {'unique_together': "(('user', 'image'),)", 'object_name': 'LikeProductImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductImage']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeproducttutorial': {
'Meta': {'unique_together': "(('user', 'tutorial', 'product'),)", 'object_name': 'LikeProductTutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.likeshop': {
'Meta': {'unique_together': "(('user', 'shop'),)", 'object_name': 'LikeShop'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.list': {
'Meta': {'object_name': 'List'},
'access': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'access'", 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_private': ('django.db.models.fields.BooleanField', [], {}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.ListItem']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.listgroup': {
'Meta': {'object_name': 'ListGroup'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'lists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.List']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.listitem': {
'Meta': {'object_name': 'ListItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'createdby': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.location': {
'Meta': {'object_name': 'Location'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.logidenticalproduct': {
'Meta': {'object_name': 'LogIdenticalProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product1'", 'to': "orm['catalog.Product']"}),
'product2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product2'", 'to': "orm['catalog.Product']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.makey': {
'Meta': {'object_name': 'Makey'},
'about': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'collaborators'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeycomments'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Comment']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'documentations': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeydocumentations'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Documentation']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeyimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'new_parts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeys'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewProduct']"}),
'new_users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeys'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewUser']"}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeynotes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Note']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeyvideos'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Video']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'catalog.makeyimage': {
'Meta': {'object_name': 'MakeyImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey_id': ('django.db.models.fields.IntegerField', [], {}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.newproduct': {
'Meta': {'object_name': 'NewProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Image']", 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.newuser': {
'Meta': {'object_name': 'NewUser'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.note': {
'Meta': {'object_name': 'Note'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Comment']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.product': {
'Meta': {'object_name': 'Product'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identicalto': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']", 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'product_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeProduct']", 'to': u"orm['auth.User']"}),
'makeys': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'partsused'", 'blank': 'True', 'to': "orm['catalog.Makey']"}),
'makeys_as_tools': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'tools_used'", 'blank': 'True', 'to': "orm['catalog.Makey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sku': ('django.db.models.fields.IntegerField', [], {}),
'tutorials': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'products'", 'blank': 'True', 'to': "orm['catalog.Tutorial']"})
},
'catalog.productdescription': {
'Meta': {'object_name': 'ProductDescription'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productdescriptions'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'blank': 'True'}),
'user_or_shop': ('django.db.models.fields.BooleanField', [], {})
},
'catalog.productimage': {
'Meta': {'object_name': 'ProductImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productimages'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.productreview': {
'Meta': {'object_name': 'ProductReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product_reviews'", 'to': "orm['catalog.Product']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.productshopurl': {
'Meta': {'object_name': 'ProductShopUrl'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productshopurls'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.searchlog': {
'Meta': {'object_name': 'SearchLog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.shop': {
'Meta': {'object_name': 'Shop'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'shopimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shop_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeShop']", 'to': u"orm['auth.User']"}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.shopreview': {
'Meta': {'object_name': 'ShopReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shop_reviews'", 'to': "orm['catalog.Shop']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.toindexstore': {
'Meta': {'object_name': 'ToIndexStore'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.topmakeys': {
'Meta': {'object_name': 'TopMakeys'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.topproducts': {
'Meta': {'object_name': 'TopProducts'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.topshops': {
'Meta': {'object_name': 'TopShops'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"})
},
'catalog.toptutorials': {
'Meta': {'object_name': 'TopTutorials'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"})
},
'catalog.topusers': {
'Meta': {'object_name': 'TopUsers'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.tutorial': {
'Meta': {'object_name': 'Tutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tutorialimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.userflags': {
'Meta': {'object_name': 'UserFlags'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_maker_intro': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_makey_intro': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.userinteraction': {
'Meta': {'object_name': 'UserInteraction'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'event': ('django.db.models.fields.IntegerField', [], {}),
'event_id': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': u"orm['auth.User']"})
},
'catalog.video': {
'Meta': {'object_name': 'Video'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'embed_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'site': ('django.db.models.fields.IntegerField', [], {}),
'thumb_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.votemakey': {
'Meta': {'unique_together': "(('user', 'makey'),)", 'object_name': 'VoteMakey'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.voteproductreview': {
'Meta': {'unique_together': "(('user', 'review'),)", 'object_name': 'VoteProductReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductReview']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.voteshopreview': {
'Meta': {'unique_together': "(('user', 'review'),)", 'object_name': 'VoteShopReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ShopReview']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.votetutorial': {
'Meta': {'unique_together': "(('user', 'tutorial'),)", 'object_name': 'VoteTutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['catalog']
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import kivy
from kivy.app import App
from flat_kivy.flatapp import FlatApp
import mc_objects
from mc_data import minecraft, minekampf, basemetals
from flat_kivy.font_definitions import style_manager
from flat_kivy.uix.flatcheckbox import FlatCheckBox
from kivy.uix.screenmanager import ScreenManager
from plyer import filechooser
from uix.pixelimage import PixelImage
from uix.itemtypecard import ItemTypeCard
from uix.enchantwidget import EnchantWidget
from uix.attributewidget import AttributeWidget
from uix.sliderwithvalues import SliderWithValues
from uix.namedvalueslider import NamedValueSlider
from screens.createitem import CreateItemScreen
from screens.home import HomeScreen
from screens.itemtypes import ItemTypesScreen
from screens.itemcategory import ItemCategoryScreen
from screens.mobtypes import MobTypesScreen
from screens.createmob import CreateMobScreen
from screens.createspawner import CreateSpawnerScreen
from mc_serialization import Item, Mob
from uix.namedcheckbox import NamedCheckBox
from uix.itemslotwidget import ItemSlotWidget
import kv
from kivy.clock import Clock
# Register the bundled data packs up front so their item/mob definitions
# are available before the app starts (presumably this populates
# mc_objects.ITEMS / mc_objects.MOBS, which on_start reads -- confirm
# against mc_data).
minecraft.register()
basemetals.register()
minekampf.register()
class KivyNBTRoot(ScreenManager):
    """Root widget of the application: a plain ScreenManager.

    No extra state or behavior is added here; screens are attached via kv
    rules and at runtime (see ``KivyNBTApp.on_start``). The previous
    ``__init__`` only forwarded ``**kwargs`` to ``super`` and was removed
    as redundant -- construction behavior is unchanged.
    """
class KivyNBTApp(FlatApp):
    """Main application for the Minecraft NBT item/mob/spawner editor.

    Provides the navigation callbacks (open_* / change_*) that the kv
    rules bind to, and installs the 'aqua' theme variants plus the font
    ramp used throughout the UI. NOTE(review): ``build`` returns None, so
    the root ScreenManager is presumably created from kv rules pulled in
    by ``import kv`` -- confirm.
    """
    def build(self):
        """Install themes and font ramps; the root widget comes from kv."""
        self.setup_themes()
        self.setup_font_ramps()
    def open_create_item(self, item_name):
        """Switch to the 'create_item' screen, reset it, and seed it with
        a fresh Item for ``item_name`` and the item's texture (empty
        string when the item has no texture)."""
        self.root.current = 'create_item'
        screen = self.root.get_screen('create_item')
        screen.clear_old()
        screen.current_item = Item(item_name)
        texture = mc_objects.ITEMS[item_name].texture_name
        if texture is None:
            texture = ''
        screen.item_image = texture
        screen.item_name = item_name
    def open_create_mob(self, mob_name):
        """Switch to the 'create_mob' screen for ``mob_name``.

        ``mob_name`` is expected to carry a ':'-separated namespace prefix
        (e.g. 'minecraft:creeper') which is stripped before the MOBS
        lookup -- TODO confirm with the kv callers.
        """
        mob_name = mob_name.split(':', 1)[1]
        self.root.current = 'create_mob'
        screen = self.root.get_screen('create_mob')
        screen.current_mob = mc_objects.MOBS[mob_name]
        texture = mc_objects.MOBS[mob_name].image
        if texture is None:
            texture = ''
        screen.mob_image = texture
        screen.mob_name = mob_name
    def open_create_spawner(self):
        """Reset the spawner screen, then switch to it."""
        self.root.ids.create_spawner.clear_old()
        self.root.current = 'create_spawner'
    def open_load_mob(self):
        """Switch to the 'create_mob' screen and open its load dialog."""
        self.root.current = 'create_mob'
        self.root.ids.create_mob.load_file()
    def open_load(self):
        """Switch to the 'create_item' screen and open its load dialog."""
        self.root.current = 'create_item'
        self.root.ids.create_item.load_file()
    def change_screen(self, screen_name):
        """Plain screen switch by name."""
        self.root.current = screen_name
    def change_to_item_types(self, screen_name, item_group):
        """Switch to ``screen_name`` and set the item group it displays."""
        self.root.current = screen_name
        self.root.get_screen(screen_name).current_group = item_group
    def on_start(self):
        """Create the item-category screen and feed registered data into
        the screens (runs after the kv root has been built)."""
        categories = ItemCategoryScreen(name='item_categories')
        self.root.add_widget(categories)
        # NOTE(review): under Python 3 this is a dict-keys view, not a
        # list; the screen presumably copies/iterates it -- confirm
        # whether a list is required.
        categories.categories = mc_objects.ITEM_TYPES.keys()
        self.root.ids.mob_types.mobs = mc_objects.MOBS
    def setup_themes(self):
        """Register the 'aqua' theme variants with the theme manager.

        Each variant maps flat_kivy widget class names to style keyword
        dicts; the variant names are referenced from the kv rules.
        """
        variant_1 = {
            'FlatLabel':{
                'color_tuple': ('Cyan', '900'),
            },
            'FlatToggleButton':{
                'color_tuple': ('Purple', '500'),
                'ripple_color_tuple': ('Cyan', '100'),
                'font_color_tuple': ('Gray', '1000'),
                'ripple_scale': 2.0,
            },
            'FlatButton':{
                'color_tuple': ('Cyan', '800'),
                'ripple_color_tuple': ('Cyan', '100'),
                'font_color_tuple': ('Cyan', '200'),
                'ripple_scale': 2.0,
            },
            'FlatCheckBox': {
                'color_tuple': ('Cyan', '800'),
                'ripple_color_tuple': ('Cyan', '100'),
                'outline_color_tuple': ('Cyan', '200'),
                'check_color_tuple': ('Cyan', '200'),
                'ripple_scale': 2.0,
            }
        }
        variant_2 = {
            'FlatLabel':{
                'color_tuple': ('Cyan', '200'),
            },
            'FlatToggleButton':{
                'color_tuple': ('Cyan', '500'),
                'ripple_color_tuple': ('Cyan', '100'),
                'font_color_tuple': ('Gray', '0000'),
                'font_ramp_tuple': ('Screen', '1'),
                'ripple_scale': 2.0,
                'multiline': True,
            },
            'FlatButton':{
                'color_tuple': ('Cyan', '800'),
                'ripple_color_tuple': ('Cyan', '100'),
                'font_color_tuple': ('Cyan', '200'),
                'ripple_scale': 2.0,
            },
        }
        titles = {
            'FlatLabel':{
                'color_tuple': ('Gray', '1000'),
            },
        }
        variant_3 = {
            'FlatLabel': {
                'color_tuple': ('Cyan', '800')
            }
        }
        subtitles = {
            'FlatLabel':{
                'color_tuple': ('Cyan', '900'),
            },
        }
        values = {
            'FlatLabel':{
                'color_tuple': ('Cyan', '900'),
            },
            'FlatButton':{
                'ripple_color_tuple': ('Cyan', '900'),
                'font_color_tuple': ('Cyan', '900'),
                'ripple_scale': 2.0,
            },
            'FlatSlider': {
                'color_tuple': ('Cyan', '900'),
                'slider_color_tuple': ('Purple', '500'),
                'outline_color_tuple': ('Cyan', '100'),
                'slider_outline_color_tuple': ('Cyan', '100'),
                'ripple_color_tuple': ('Cyan', '100'),
                'ripple_scale': 10.,
            },
            'FlatToggleButton':{
                'color_tuple': ('Purple', '600'),
                'ripple_color_tuple': ('Cyan', '100'),
                'font_color_tuple': ('Gray', '0000'),
                'ripple_scale': 2.0,
            },
        }
        self.theme_manager.add_theme('aqua', 'variant_1', variant_1)
        self.theme_manager.add_theme('aqua', 'variant_2', variant_2)
        self.theme_manager.add_theme('aqua', 'variant_3', variant_3)
        self.theme_manager.add_theme('aqua', 'titles', titles)
        self.theme_manager.add_theme('aqua', 'subtitles', subtitles)
        self.theme_manager.add_theme('aqua', 'values', values)
    def setup_font_ramps(self):
        """Register the Material-Design-style font styles and ramp '1'.

        Styles follow the Material typography scale names; only a subset
        of them is placed on ramp '1' (the ramp auto-sizes text inside a
        group of widgets).
        """
        font_styles = {
            'Display 4': {
                'font': 'Roboto-Light.ttf',
                'sizings': {'mobile': (112, 'sp'), 'desktop': (112, 'sp')},
                'alpha': .8,
                'wrap': False,
            },
            'Display 3': {
                'font': 'Roboto-Regular.ttf',
                'sizings': {'mobile': (56, 'sp'), 'desktop': (56, 'sp')},
                'alpha': .8,
                'wrap': False,
            },
            'Display 2': {
                'font': 'Roboto-Regular.ttf',
                'sizings': {'mobile': (45, 'sp'), 'desktop': (45, 'sp')},
                'alpha': .8,
                'wrap': True,
                'wrap_id': '1',
                'leading': (48, 'pt'),
            },
            'Display 1': {
                'font': 'Roboto-Regular.ttf',
                'sizings': {'mobile': (34, 'sp'), 'desktop': (34, 'sp')},
                'alpha': .8,
                'wrap': True,
                'wrap_id': '2',
                'leading': (40, 'pt'),
            },
            'Headline': {
                'font': 'Roboto-Regular.ttf',
                'sizings': {'mobile': (24, 'sp'), 'desktop': (24, 'sp')},
                'alpha': .9,
                'wrap': True,
                'wrap_id': '3',
                'leading': (32, 'pt'),
            },
            'Title': {
                'font': 'Roboto-Medium.ttf',
                'sizings': {'mobile': (20, 'sp'), 'desktop': (20, 'sp')},
                'alpha': .9,
                'wrap': False,
            },
            'Subhead': {
                'font': 'Roboto-Regular.ttf',
                'sizings': {'mobile': (16, 'sp'), 'desktop': (15, 'sp')},
                'alpha': .9,
                'wrap': True,
                'wrap_id': '4',
                'leading': (28, 'pt'),
            },
            'Body 2': {
                'font': 'Roboto-Medium.ttf',
                'sizings': {'mobile': (14, 'sp'), 'desktop': (13, 'sp')},
                'alpha': .9,
                'wrap': True,
                'wrap_id': '5',
                'leading': (24, 'pt'),
            },
            'Body 1': {
                'font': 'Roboto-Regular.ttf',
                'sizings': {'mobile': (14, 'sp'), 'desktop': (13, 'sp')},
                'alpha': .9,
                'wrap': True,
                'wrap_id': '6',
                'leading': (20, 'pt'),
            },
            'Body 0': {
                'font': 'Roboto-Regular.ttf',
                'sizings': {'mobile': (10, 'sp'), 'desktop': (9, 'sp')},
                'alpha': .9,
                'wrap': True,
                'wrap_id': '7',
                'leading': (20, 'pt'),
            },
            'Caption': {
                'font': 'Roboto-Regular.ttf',
                'sizings': {'mobile': (12, 'sp'), 'desktop': (12, 'sp')},
                'alpha': .8,
                'wrap': False,
            },
            'Menu': {
                'font': 'Roboto-Medium.ttf',
                'sizings': {'mobile': (14, 'sp'), 'desktop': (13, 'sp')},
                'alpha': .9,
                'wrap': False,
            },
            'Button': {
                'font': 'Roboto-Medium.ttf',
                'sizings': {'mobile': (14, 'sp'), 'desktop': (14, 'sp')},
                'alpha': .9,
                'wrap': False,
            },
        }
        for each in font_styles:
            style = font_styles[each]
            sizings = style['sizings']
            style_manager.add_style(style['font'], each, sizings['mobile'],
                                    sizings['desktop'], style['alpha'])
        style_manager.add_font_ramp('1', ['Display 2', 'Display 1',
            'Headline', 'Subhead', 'Body 2', 'Body 1', 'Body 0',])
# Script entry point: launch the Kivy application.
if __name__ == '__main__':
    KivyNBTApp().run()
| |
"""
A question generator.
Author: Mengye Ren Email: mren@cs.toronto.edu
Usage:
python question_generator.py -parser_path {Stanford parser path} \
-sentence {Single sentence} \
-list {List of sentences} \
-parsed_file {Parsed file} \
-output {Output file}
Prerequisites:
1. You need to download NLTK WordNet package.
>> python
>> import nltk
>> nltk.download()
>> d
>> wordnet
2. You need to download Stanford Parser
at http://nlp.stanford.edu/software/lex-parser.shtml#Download
Extract the zip into a folder and remember the path
3. You need to copy lexparser_sentence.sh into the Stanford Parser folder.
>> cp lexparser_sentence.sh stanford-parser/lexparser_sentence.sh
Examples:
1. Run single sentence.
>> python question_generator.py -sentence "A man is riding a horse"
2. Run a list of sentences.
Provide a file with each line in the file to be a sentence.
Output is a pickle file, storing a list. Each element in the list is a
tuple of five fields:
(1) Original sentence ID (0-based)
(2) Original sentence
(3) Generated question
(4) Answer to the generated question
(5) Type of the generated question
>> python question_generator.py -list sentences.txt -output questions.pkl
3. Run a pre-parsed file.
Run stanford parser to pre-compute the parse trees.
>> lexparser.sh sentences.txt > sentences_parsed.txt
>> python question_generator.py -parsed_file sentences_parsed.txt \
-output questions.pkl
"""
from nltk.corpus import wordnet
from nltk.stem.wordnet import WordNetLemmatizer
import argparse
import copy
import cPickle as pkl
import logger
import os
import re
import subprocess
import sys
import time
# A white list for color adjectives.
whiteListColorAdj = set(['red', 'yellow', 'orange', 'brown', 'green', 'blue',
'purple', 'black', 'white', 'gray', 'grey', 'violet'])
# A white list for lex names that can appear in object type questions.
whiteListLexname = set(['noun.animal', 'noun.artifact', 'noun.food',
'noun.object', 'noun.plant', 'noun.possession',
'noun.shape'])
# A black list for nouns with color.
blackListColorNoun = set(['ride', 'riding', 'past', 'stand', 'standing',
'eating', 'holding', 'frosting', 'glow', 'glowing',
'resting', 'parked'])
# A black list for nouns to appear in object type questions.
blackListNoun = set(['female', 'females', 'male', 'males', 'commuter',
'commuters', 'player', 'players', 'match', 'matches',
'rider', 'riders', 'doll', 'dolls', 'ride', 'rides',
'riding', 'past', 'pasts', 'teddy', 'fan', 'fans',
'street', 'streets', 'arm', 'arms', 'head', 'heads',
'slope', 'slopes', 'shoot', 'shoots', 'photo', 'photos',
'space', 'spaces', 'stand', 'stands', 'standing',
'cross', 'crosses', 'crossing', 'eating', 'walking',
'driving', 'upright', 'structure', 'turn', 'system',
'arrangement', 'set', 'top', 'while', 'well', 'area',
'produce', 'thing', 'things', 'cut', 'cuts', 'holding',
'frosting', 'glow', 'glowing', 'ground', 'parked'])
# A black list for compound nouns that appear in object type questions.
blackListCompoundNoun = set(['tennis', 'delivery', 'soccer', 'baseball',
'fighter', 'mother', 'window'])
# A black list of verbs that should never be parsed as verbs.
blackListVerb = set(['sink', 'sinks', 'counter', 'counters', 'cupboard',
'cupboards', 'has', 'have', 'contain', 'contains',
'containing', 'contained', 'spaniel', 'spaniels',
'mirror', 'mirrors', 'shower', 'showers', 'stove',
'stoves', 'bowl', 'bowls', 'tile', 'tiles', 'mouthwash',
'mouthwashes', 'smoke', 'smokes'])
# A black list of prepositions that we avoid asking questions within the
# clause.
blackListPrep = set(['with', 'of', 'in', 'down', 'as'])
# A black list of locations that we avoid asking location type questions upon.
blackListLocation = set(['t-shirt', 't-shirts', 'jeans', 'shirt', 'shirts',
'uniform', 'uniforms', 'jacket', 'jackets', 'dress',
'dresses', 'hat', 'hats', 'tie', 'ties', 'costume',
'costumes', 'attire', 'attires', 'match', 'matches',
'coat', 'coats', 'cap', 'caps', 'gear', 'gears',
'sweatshirt', 'sweatshirts', 'helmet', 'helmets',
'clothing', 'clothings', 'cloth', 'clothes',
'blanket', 'blankets', 'enclosure', 'enclosures',
'suit', 'suits', 'photo', 'photos', 'picture',
'pictures', 'round', 'rounds', 'area', 'well',
'skirt', 'snowsuit', 'sunglasses', 'sweater', 'mask',
'frisbee', 'frisbees', 'shoe', 'umbrella', 'towel',
'scarf', 'phone', 'cellphone', 'motorcycle',
'device', 'computer', 'cake', 'hydrant', 'desk',
'stove', 'sculpture', 'lamp', 'fireplace', 'bags',
'laptop', 'trolley', 'toy', 'bus', 'counter',
'buffet', 'engine', 'graffiti', 'clock', 'jet',
'ramp', 'brick', 'taxi', 'knife', 'flag', 'screen',
'parked'])
# A black list of verbs that are not asked in the location type questions.
blackListVerbLocation = set(['sink', 'sinks', 'counter', 'counters',
'cupboard', 'cupboards', 'has', 'have',
'contain', 'contains', 'containing', 'contained',
'can', 'cans'])
# A black list of nouns that are not asked in the how many type questions.
blackListNumberNoun = set(['pole', 'vase', 'kite', 'hay', 'shower', 'paddle',
'buffet', 'bicycle', 'bike', 'elephants'])
# A dictionary of synonyms to convert to.
synonymConvert = {'busses': 'buses', 'plane': 'airplane',
'planes': 'airplanes', 'aircraft': 'airplane',
'aircrafts': 'airplane', 'jetliner': 'airliner',
'jetliners': 'airliners', 'bike': 'bicycle',
'bikes': 'bicycles', 'cycle': 'bicycle',
'cycles': 'bicycles', 'motorbike': 'motorcycle',
'motorbikes': 'motorcycles', 'grey': 'gray',
'railroad': 'rail', 'cell': 'cellphone',
'doughnut': 'donut', 'doughnuts': 'donuts'}
# Compound nouns
compoundNoun = set(['fighter jet', 'soccer ball', 'tennis ball'])
# Special characters that may appear in the text.
charText = set(['.', ',', '-', '\'', '`', '/', '>', ':', ';', '\\', '!', '?',
'&', '-', '=', '#', '$', '@', '_', '*', '+', '%', chr(194),
chr(160)])
# Special characters that may appear in the class name.
charClassName = set(['.', ',', '$', '\'', '`', ':', '-', '#'])
# WordNet lemmatizer.
lemmatizer = WordNetLemmatizer()
# Logger
log = logger.get()
class TreeNode:
    """A node of a constituency parse tree.

    Attributes:
        className: constituent label (e.g. 'NP', 'VBZ', 'ROOT').
        text: terminal text; empty string for non-leaf nodes.
        children: list of child TreeNode objects.
        level: depth of the node (root is 0), used for pretty-printing.
    """
    def __init__(self, className, text, children, level):
        """Store the label, terminal text, children and depth."""
        self.className = className
        self.text = text
        self.children = children
        self.level = level
    def __str__(self):
        """Render the subtree as indented bracket notation, one node per line."""
        parts = ['    ' * self.level, '(', self.className]
        if self.children:
            parts.append('\n')
            for child in self.children:
                parts.append(str(child))
            # Closing bracket indent: one level deeper when the node also
            # carries terminal text of its own.
            closeIndent = self.level + 1 if self.text else self.level
            parts.append('    ' * closeIndent)
        else:
            parts.append(' ')
        parts.append(self.text)
        parts.append(')\n')
        return ''.join(parts)
    def toSentence(self):
        """Flatten the subtree back into a space-joined sentence string."""
        words = []
        for child in self.children:
            sent = child.toSentence()
            if sent:
                words.append(sent)
        if self.text:
            words.append(self.text)
        return ' '.join(words)
    def relevel(self, level):
        """Recursively overwrite the depth values starting at *level*."""
        self.level = level
        for child in self.children:
            child.relevel(level + 1)
    def copy(self):
        """Return a deep copy of this subtree."""
        return TreeNode(self.className, self.text,
                        [child.copy() for child in self.children], self.level)
class TreeParser:
    """Finite state machine implementation of syntax tree parser.

    Feed Stanford-parser bracketed output to ``parse`` one line at a time;
    every completed top-level tree is appended to ``rootsList``.  Each
    stateN handler consumes one character and returns the next state id.
    """
    def __init__(self):
        self.state = 0
        # Start index (into self.raw) of the node label being scanned.
        self.currentClassStart = 0
        # Start index (into self.raw) of the terminal text being scanned.
        self.currentTextStart = 0
        # Stack of labels of currently-open nodes.
        self.classNameStack = []
        # Stack of child lists for each open node; the sentinel list at
        # index 0 collects finished top-level trees.
        self.childrenStack = [[]]
        self.root = None
        # Completed top-level trees, in input order.
        self.rootsList = []
        # Current bracket nesting depth.
        self.level = 0
        # Dispatch table: state number -> handler returning the next state.
        self.stateTable = [self.state0, self.state1, self.state2,
                           self.state3, self.state4, self.state5, self.state6]
        self.raw = None
        self.state = 0
    def parse(self, raw):
        # Lines beginning with a letter are the parser's plain-text echo of
        # the sentence, not bracketed tree output, so they are skipped.
        if not self.isAlpha(raw[0]):
            self.raw = raw
            for i in range(len(raw)):
                self.state = self.stateTable[self.state](i)
    @staticmethod
    def isAlpha(c):
        # ASCII A-Z / a-z only.
        return 65 <= ord(c) <= 90 or 97 <= ord(c) <= 122
    @staticmethod
    def isNumber(c):
        # ASCII 0-9.
        return 48 <= ord(c) <= 57
    @staticmethod
    def exception(raw, i):
        # NOTE: Python 2 print statement -- the whole module is Python 2.
        print raw
        raise Exception(
            'Unexpected character "%c" (%d) at position %d'
            % (raw[i], ord(raw[i]), i))
    @staticmethod
    def isClassName(s):
        # Characters permitted in a constituent label (e.g. NP, -LRB-).
        if TreeParser.isAlpha(s) or s in charClassName:
            return True
        else:
            return False
    @staticmethod
    def isText(s):
        # Characters permitted in terminal (leaf) text.
        if TreeParser.isAlpha(s) or TreeParser.isNumber(s) or s in charText:
            return True
        else:
            return False
    def state0(self, i):
        # Waiting for an opening bracket.
        if self.raw[i] == '(':
            return 1
        else:
            return 0
    def state1(self, i):
        # First character of a node label: open a new node.
        if self.isClassName(self.raw[i]):
            self.currentClassStart = i
            self.level += 1
            self.childrenStack.append([])
            return 2
        else:
            self.exception(self.raw, i)
    def state2(self, i):
        # Scanning the remainder of a node label.
        if self.isClassName(self.raw[i]):
            return 2
        else:
            self.classNameStack.append(self.raw[self.currentClassStart:i])
            if self.raw[i] == ' ' and self.raw[i + 1] == '(':
                return 0
            elif self.raw[i] == ' ' and self.isText(self.raw[i + 1]):
                return 4
            elif self.raw[i] == '\n':
                return 3
            else:
                self.exception(self.raw, i)
    def state3(self, i):
        # Skipping indentation whitespace at the start of a line.
        if self.raw[i] == ' ' and self.raw[i + 1] == '(':
            return 0
        elif self.raw[i] == ' ' and self.raw[i + 1] == ' ':
            return 3
        elif self.raw[i] == ' ' and self.isText(self.raw[i + 1]):
            return 4
        else:
            return 3
    def state4(self, i):
        # First character of terminal (leaf) text.
        if self.isText(self.raw[i]):
            # global currentTextStart
            self.currentTextStart = i
            return 5
        else:
            self.exception(self.raw, i)
    def state5(self, i):
        # Scanning the remainder of terminal text.
        if self.isText(self.raw[i]):
            return 5
        elif i == len(self.raw) - 1:
            return 5
        elif self.raw[i] == ')':
            # Leaf complete: close the innermost open node.
            self.wrapup(self.raw[self.currentTextStart:i])
            if self.level == 0:
                return 0
            elif self.raw[i + 1] == ')':
                return 6
            else:
                return 3
        else:
            self.exception(self.raw, i)
    def state6(self, i):
        # Closing any number of enclosing nodes after a leaf.
        if self.level == 0:
            return 0
        elif self.raw[i] == ')':
            self.wrapup('')
            return 6
        else:
            return 3
    def wrapup(self, text):
        # Pop the innermost open node and attach it to its parent; when the
        # bracket depth returns to zero the tree is complete.
        self.level -= 1
        root = TreeNode(self.classNameStack[-1], text,
                        self.childrenStack[-1][:], self.level)
        del self.childrenStack[-1]
        del self.classNameStack[-1]
        self.childrenStack[-1].append(root)
        if self.level == 0:
            self.rootsList.append(root)
class QuestionGenerator:
"""Generates 4 types of questions.
1. Object - what
2. Number - how many
3. Color - what color
4. Location - where
"""
    def __init__(self):
        """Initialize with an empty WordNet lexname cache (see lookupLexname)."""
        self.lexnameDict = {}
        pass
@staticmethod
def escapeNumber(line):
"""Convert numbers into English."""
line = re.sub('^11$', 'eleven', line)
line = re.sub('^12$', 'twelve', line)
line = re.sub('^13$', 'thirteen', line)
line = re.sub('^14$', 'fourteen', line)
line = re.sub('^15$', 'fifteen', line)
line = re.sub('^16$', 'sixteen', line)
line = re.sub('^17$', 'seventeen', line)
line = re.sub('^18$', 'eighteen', line)
line = re.sub('^19$', 'nineteen', line)
line = re.sub('^20$', 'twenty', line)
line = re.sub('^10$', 'ten', line)
line = re.sub('^0$', 'zero', line)
line = re.sub('^1$', 'one', line)
line = re.sub('^2$', 'two', line)
line = re.sub('^3$', 'three', line)
line = re.sub('^4$', 'four', line)
line = re.sub('^5$', 'five', line)
line = re.sub('^6$', 'six', line)
line = re.sub('^7$', 'seven', line)
line = re.sub('^8$', 'eight', line)
line = re.sub('^9$', 'nine', line)
return line
    def whMovement(self, root):
        """Performs WH-movement on a tree, in place.

        Fronts the WHNP phrase (inserted earlier by one of the ask*
        methods) and moves or synthesizes the auxiliary verb, turning a
        declarative parse into a question.  Returns True on success, False
        when there is no WHNP or movement is unsafe (WHNP inside a clause
        or a VP under an NP).
        """
        stack = [[]]  # A hack for closure support
        found = [False]
        def traverseFindTopClass(node, className):
            # DFS for the shallowest node labelled className; on success
            # stack[0] holds the root-to-node path, found[0] is True.
            if not found[0]:
                stack[0].append(node)
                if node.className == className:
                    found[0] = True
                else:
                    for child in node.children:
                        traverseFindTopClass(child, className)
                    if not found[0]:
                        del stack[0][-1]
        # Find the subject (first NP) and change determiner to 'the'
        traverseFindTopClass(root, 'NP')
        topNoun = None
        if found[0]:
            np = stack[0][-1]
            while np.className != 'DT' and len(np.children) > 0:
                np = np.children[0]
            if np.className == 'DT' and np.text.lower() == 'a':
                np.text = 'the'
            np = stack[0][-1]
            def lookForNoun(np):
                # First NN/NNS leaf under the subject NP, or None.
                if len(np.children) > 0:
                    for child in np.children:
                        answer = lookForNoun(child)
                        if (answer != None):
                            return answer
                    return None
                else:
                    if np.className == 'NN' or np.className == 'NNS':
                        return np
                    else:
                        return None
            topNoun = lookForNoun(np)
        # Find the top verb
        found[0] = False
        stack[0] = []
        traverseFindTopClass(root, 'VP')
        topVP = None
        if found[0]:
            topVP = stack[0][-1]
        # First look for the position of WHNP
        found[0] = False
        stack[0] = []
        traverseFindTopClass(root, 'WHNP')
        if not found[0]:
            return False
        # Check if the WHNP is inside an SBAR, not handling this case for now.
        insideSBar = False
        # Check if inside NP, violates A-over-A principal
        insideNP = False
        insideVP = False
        whStack = stack[0][:]
        whPosition = len(whStack) - 1
        for item in whStack:
            if item.className == 'SBAR':
                insideSBar = True
            elif item.className == 'NP' and item.level > 1:
                insideNP = True
            elif insideNP and item.className == 'VP':
                insideVP = True
        # Look for VP
        found[0] = False
        stack[0] = []
        traverseFindTopClass(root, 'VP')
        # Walk down the left spine to find the first leaf's parent.
        node = root
        parent = root
        while len(node.children) > 0:
            parent = node
            node = node.children[0]
        if parent.className == 'WHNP':
            # Sentence already begins with the WH phrase; no movement needed.
            if found[0]:
                # Add in missing verbs if possible
                vpnode = stack[0][-1]
                vpchild = vpnode.children[0]
                frontWord = None
                if vpchild.className == 'VBG':  # only doing present, no is/are
                    # NOTE(review): root.answer is attached by the calling
                    # ask* method before whMovement runs -- confirm all
                    # callers set it on this path.
                    verb = 'are' if root.answer.className == 'NNS' else 'is'
                    verbnode = TreeNode('VB', verb, [], vpchild.level)
                    vpnode.children.insert(0, verbnode)
            return True
        if insideSBar:
            return False
        if insideVP:
            return False
        if not found[0]:
            return False
        # Look for the verb that needs to be moved to the front.
        vpnode = stack[0][-1]
        vpchild = vpnode.children[0]
        frontWord = None
        if vpchild.className == 'VBZ':  # is, has, singular present
            if vpchild.text == 'is':
                frontWord = vpchild
                vpnode.children.remove(vpchild)
            elif vpchild.text == 'has':  # Could be has something or has done
                done = False
                for child in vpnode.children:
                    if child.className == 'VP':
                        done = True
                        break
                if done:
                    frontWord = vpchild
                    vpnode.children.remove(vpchild)
                else:
                    frontWord = TreeNode('VBZ', 'does', [], 0)
                    vpchild.text = 'have'
                    vpchild.className = 'VB'
            else:
                # need to lemmatize the verb and separate does
                frontWord = TreeNode('VBZ', 'does', [], 0)
                vpchild.className = 'VB'
                vpchild.text = lemmatizer.lemmatize(vpchild.text, 'v')
            pass
        elif vpchild.className == 'VBP':  # do, have, present
            if vpchild.text == 'are':
                frontWord = vpchild
                vpnode.children.remove(vpchild)
            else:
                frontWord = TreeNode('VBP', 'do', [], 0)
                vpchild.className = 'VB'
            pass
        elif vpchild.className == 'VBD':  # did, past tense
            if vpchild.text == 'was' or vpchild.text == 'were':
                frontWord = vpchild
                vpnode.children.remove(vpchild)
            elif vpchild.text == 'had':  # Could be had something or had done
                done = False
                for child in vpnode.children:
                    if child.className == 'VP':
                        done = True
                        break
                if done:
                    frontWord = vpchild
                    vpnode.children.remove(vpchild)
                else:
                    frontWord = TreeNode('VBD', 'did', [], 0)
                    vpchild.text = 'have'
                    vpchild.className = 'VB'
            else:
                # need to lemmatize the verb and separate did
                frontWord = TreeNode('VBD', 'did', [], 0)
                vpchild.className = 'VB'
                vpchild.text = lemmatizer.lemmatize(vpchild.text, 'v')
            pass
        elif vpchild.className == 'MD':  # will, may, shall
            frontWord = vpchild
            vpnode.children.remove(vpchild)
            pass
        elif vpchild.className == 'VBG':  # only doing present, no is/are
            verb = 'are' if topNoun != None and topNoun.className == 'NNS' else 'is'
            frontWord = TreeNode('VBZ', verb, [], 0)
        # Verb not found
        if frontWord is None:
            return False
        # Remove WHNP from its parent.
        whStack[whPosition - 1].children.remove(whStack[whPosition])
        # Rebuild the top: new S = [WHNP, original S] under the root.
        bigS = TreeNode('S', '', [whStack[whPosition], stack[0][1]], 0)
        stack[0][0].children = [bigS]
        bigS.children[1].children.insert(0, frontWord)
        # Reassign levels to the new tree.
        root.relevel(0)
        return True
    def splitCCStructure(self, root):
        """Split composite sentences.

        If the top-most S directly contains coordinated S children
        ("A and B"), wrap each S child in its own ROOT and return the
        list; otherwise return [root] unchanged.
        """
        roots = []
        # Directly search for the top-most S.
        node = root.children[0]
        if node.className == 'S':
            if len(node.children) >= 3:
                childrenClasses = []
                for child in node.children:
                    childrenClasses.append(child.className)
                # NOTE(review): renew starts True and is never set back to
                # False, so every S child is split regardless of whether a
                # CC was seen -- possibly intended, possibly a latent bug.
                renew = True
                index = 0
                for c in childrenClasses:
                    if c == 'S' and renew:
                        root_ = TreeNode('ROOT', '', [node.children[index]], 0)
                        root_.relevel(0)
                        roots.append(root_)
                    elif c == 'CC':
                        renew = True
                    index += 1
        if len(roots) == 0:
            roots.append(root)
        return roots
def lookupLexname(self, word):
"""Look up lex name of a word in WordNet."""
if word in self.lexnameDict:
return self.lexnameDict[word]
else:
synsets = wordnet.synsets(word)
# Just pick the first definition
if len(synsets) > 0:
self.lexnameDict[word] = synsets[0].lexname()
return self.lexnameDict[word]
else:
return None
    def askWhere(self, root):
        """Ask location type questions.

        Finds an "in <artifact>" PP, replaces it with a WHNP 'where',
        applies WH-movement and returns a list with at most one
        (question, answer) pair, or [] when nothing qualifies.
        """
        found = [False]
        answer = ['']
        def traverse(node, parent):
            # Ask one question for now.
            cont = True
            # Blank out deictic words so they don't survive into the question.
            if node.text.lower() == 'this' or \
                    node.text.lower() == 'that' or \
                    node.text.lower() == 'there':
                node.text = ''
            # Skip subtrees whose main verb is blacklisted for locations.
            if len(node.children) > 1 and \
                    node.children[1].className == 'VP':
                c = node.children[1]
                while(len(c.children) > 0):
                    c = c.children[0]
                if c.text.lower() in blackListVerbLocation:
                    cont = False
            if not found[0] and cont and node.className != 'PP':
                for child in node.children:
                    traverse(child, node)
            if node.className == 'PP' and \
                    node.children[0].text == 'in':
                # Descend along the rightmost NP/NN spine to the head noun.
                c = node
                while(len(c.children) > 0 and
                      (c.children[-1].className == 'NP'
                       or c.children[-1].className == 'NN')):
                    c = c.children[-1]
                # (sic: missing space after 'NN' -- still valid Python)
                if c.className == 'NN'and \
                        self.lookupLexname(c.text) == 'noun.artifact' and \
                        not c.text.lower() in blackListLocation:
                    found[0] = True
                    answer[0] = c.text
                    # Treat ``where'' as WHNP for now.
                    where = TreeNode('WRB', 'where', [], 0)
                    parent.children.insert(parent.children.index(node),
                                           TreeNode('WHNP', '', [where], 0))
                    parent.children.remove(node)
                    # Remove other PP and ADVP in the parent
                    # NOTE(review): removing from parent.children while
                    # iterating it skips the element after each removal --
                    # some PP/ADVP siblings may survive; verify intent.
                    for child in parent.children:
                        if child.className == 'PP' or \
                                child.className == 'ADVP':
                            parent.children.remove(child)
        traverse(root, None)
        if found[0]:
            if self.whMovement(root):
                if root.children[0].children[-1].className != '.':
                    root.children[0].children.append(TreeNode('.', '?', [], 2))
                return [(root.toSentence().lower(), answer[0])]
            else:
                return []
        else:
            return []
    def askWhoWhat(self, root):
        """Ask object type questions.

        Generator: for every whitelisted NN/NNS noun it yields one
        (question, answer) pair, replacing the noun phrase with a WHNP
        'what' on a copied tree and applying WH-movement.
        """
        found = [False]  # A hack for closure support in python 2.7
        answer = ['']
        # Unlike in 'how many', here we enumerate all possible 'what's
        rootsReplaceWhat = [[]]
        def traverse(node, parent):
            # if node.className != 'PP':
            cont = True
            # For now, not asking any questions inside PP.
            if node.className == 'PP' and node.text.lower() in blackListPrep:
                cont = False
            if (node.level > 1 and node.className == 'S') or \
                    node.className == 'SBAR':
                # Ignore possible answers in any clauses.
                cont = False
            # Coordinated noun phrases ("cats and dogs") are skipped.
            ccNoun = False
            for child in node.children:
                if child.className == 'CC' or child.className == ',':
                    ccNoun = True
                    break
            if node.className == 'NP' and ccNoun:
                cont = False
            if len(node.children) > 1 and \
                    node.children[1].className == 'PP':
                cont = False
            # Skip subtrees whose main verb is blacklisted.
            if len(node.children) > 1 and \
                    node.children[1].className == 'VP':
                c = node.children[1]
                while(len(c.children) > 0):
                    c = c.children[0]
                if c.text.lower() in blackListVerb:
                    cont = False
            if node.className == 'VP' and \
                    (node.children[0].text.startswith('attach') or
                     node.children[0].text.startswith('take')):
                cont = False
            # TRUNCATE SBAR!!!!!
            # NOTE(review): removes while iterating; consecutive clause
            # children may be skipped -- verify intent.
            for child in node.children:
                if child.className == 'SBAR' or \
                        (child.level > 1 and child.className == 'S'):
                    node.children.remove(child)
            if cont:
                for child in node.children:
                    if child.className != 'PP' and \
                            child.className != 'ADVP':
                        traverse(child, node)
                if node.className == 'NP' and not ccNoun:
                    replace = None
                    whword = None
                    for child in node.children:
                        # A wide ``angle'' view of the kitchen work area
                        if parent is not None:
                            if node.children.index(child) == len(node.children) - 1:
                                # Skip when the following sibling is another
                                # NP (compound noun split across phrases).
                                if parent.children.index(node) != \
                                        len(parent.children) - 1:
                                    if parent.children[
                                        parent.children.index(node) + 1]\
                                            .className == 'NP':
                                        break
                                # The two people are walking down the ``beach''
                                foundDown = False
                                if parent.children.index(node) != 0:
                                    for sib in parent.children[
                                            parent.children.index(node) - 1].children:
                                        if sib.text == 'down':
                                            foundDown = True
                                if foundDown:
                                    break
                        if child.className == 'NN' or child.className == 'NNS':
                            lexname = self.lookupLexname(child.text)
                            if lexname is not None:
                                if lexname in whiteListLexname and \
                                        not child.text.lower() in blackListNoun:
                                    whword = 'what'
                            if whword is not None:
                                answer[0] = child.text
                                found[0] = True
                                replace = child
                    if replace != None and not answer[0].lower() in blackListNoun:
                        what = TreeNode('WP', whword, [], node.level + 1)
                        # Remember the original children so the live tree can
                        # be restored after snapshotting the question copy.
                        children_bak = copy.copy(node.children)
                        toremove = []
                        for child in node.children:
                            lexname = self.lookupLexname(child.text)
                            if child != replace and (
                                    lexname != 'noun.act' or
                                    child.className != 'NN' or
                                    child.text.lower() in blackListCompoundNoun):
                                toremove.append(child)
                        for item in toremove:
                            node.children.remove(item)
                        if len(node.children) == 1:
                            node.children = [what]
                            node.className = 'WHNP'
                        else:
                            node.children[node.children.index(replace)] = TreeNode(
                                'WHNP', '', [what], node.level + 2)
                        # Snapshot the mutated tree as one question candidate.
                        rootcopy = root.copy()
                        rootcopy.answer = replace
                        rootsReplaceWhat[0].append(rootcopy)
                        # Restore the live tree for further candidates.
                        node.className = 'NP'
                        node.children = children_bak
        rootsSplitCC = self.splitCCStructure(root)
        for r in rootsSplitCC:
            traverse(r, None)
            for r2 in rootsReplaceWhat[0]:
                if r2.children[0].children[-1].className != '.':
                    r2.children[0].children.append(TreeNode('.', '?', [], 2))
                else:
                    r2.children[0].children[-1].text = '?'
                if found[0]:
                    self.whMovement(r2)
                    yield (r2.toSentence().lower(),
                           self.escapeNumber(r2.answer.text.lower()))
                else:
                    pass
            found[0] = False
            answer[0] = None
            rootsReplaceWhat[0] = []
    def askHowMany(self, root):
        """Ask counting questions.

        Generator: finds the first NP ending in NN/NNS that contains a
        cardinal number (CD), replaces "<number> <noun>" with "how many
        <noun>", applies WH-movement and yields (question, answer).
        """
        # A hack for closure support in python 2.7
        found = [False]
        answer = [None]
        def traverse(node):
            if not found[0]:
                ccNoun = False
                cont = True
                for child in node.children:
                    if child.className == 'CC' or child.className == ',':
                        ccNoun = True
                        break
                # Skip coordinated NPs and anything inside a PP.
                if node.className == 'NP' and ccNoun:
                    cont = False
                if node.className == 'PP':
                    cont = False
                if cont:
                    for child in node.children:
                        traverse(child)
                    if node.className == 'NP' and (
                            node.children[-1].className == 'NNS' or
                            node.children[-1].className == 'NN') and \
                            not node.children[-1].text.startswith('end'):
                        count = None
                        for child in node.children:
                            if child.className == 'CD':
                                if not child.text.lower() in \
                                        blackListNumberNoun:
                                    found[0] = True
                                    answer[0] = child
                                    count = child
                        if found[0] and count is not None:
                            # Replace "<CD> ..." prefix with "how many".
                            how = TreeNode('WRB', 'how', [], node.level + 2)
                            many = TreeNode('JJ', 'many', [], node.level + 2)
                            howmany = TreeNode('WHNP', '', [how, many],
                                               node.level + 1)
                            children = [howmany]
                            children.extend(node.children[
                                node.children.index(count) + 1:])
                            node.children = children
                            node.className = 'WHNP'
                            return
        roots = self.splitCCStructure(root)
        for r in roots:
            traverse(r)
            if r.children[0].children[-1].className != '.':
                r.children[0].children.append(TreeNode('.', '?', [], 2))
            else:
                r.children[0].children[-1].text = '?'
            if found[0] and \
                    not answer[0].text.lower() in blackListNumberNoun:
                r.answer = answer[0]
                self.whMovement(r)
                yield (r.toSentence().lower(), self.escapeNumber(
                    answer[0].text.lower()))
            found[0] = False
            answer[0] = None
    def askColor(self, root):
        """Ask color questions.

        Template-based: for every NP pairing a whitelisted color adjective
        (JJ) with a noun, returns a list of
        ('what is the color of the <noun> ?', <color>) pairs.
        """
        found = [False]
        answer = [None]
        obj = [None]
        qa = [[]]
        template = 'what is the color of the %s ?'
        def traverse(node):
            for child in node.children:
                traverse(child)
            if node.className == 'NP':
                for child in node.children:
                    if child.className == 'JJ' and \
                            child.text.lower() in whiteListColorAdj:
                        found[0] = True
                        answer[0] = child
                    if child.className == 'CC' and \
                            child.text == 'and':
                        # Blue and white? No.
                        found[0] = False
                        answer[0] = None
                        break
                    if (child.className == 'NN' or
                            child.className == 'NNS') and \
                            not child.text.lower() in blackListColorNoun:
                        obj[0] = child
                if found[0] and obj[0] is not None:
                    qa[0].append(((template % obj[0].text).lower(),
                                  answer[0].text.lower()))
                    found[0] = False
                    obj[0] = None
                    answer[0] = None
        traverse(root)
        return qa[0]
def lookupSynonym(word):
    """Normalize *word* via the synonym table; unknown words pass through."""
    return synonymConvert.get(word, word)
def questionGen(parseFilename, outputFilename=None):
    """Generates questions from a pre-parsed (Stanford bracketed) file.

    Streams the file through TreeParser; for each completed sentence tree
    runs all four question generators and collects
    (sentId, origSent, question, answer, type) tuples, optionally pickled
    to outputFilename.  Types: 0 what/who, 1 how-many, 2 color, 3 where.
    """
    startTime = time.time()
    qCount = 0
    numSentences = 0
    parser = TreeParser()
    gen = QuestionGenerator()
    questionAll = []
    def newTree():
        # The ask* methods mutate their tree, so hand out fresh copies.
        return parser.rootsList[0].copy()
    def addQuestion(sentId, origSent, question, answer, typ):
        questionAll.append((sentId, origSent, question, answer, typ))
    def addItem(qaitem, origSent, typ):
        ques = qaitem[0]
        ans = lookupSynonym(qaitem[1])
        log.info('Question {:d}: {} Answer: {}'.format(
            qCount, ques, ans))
        addQuestion(numSentences, origSent, ques, ans, typ)
    with open(parseFilename) as f:
        for line in f:
            # A finished tree from previous lines is processed before the
            # current line is fed to the parser.
            if len(parser.rootsList) > 0:
                origSent = parser.rootsList[0].toSentence()
                # 0 is what-who question type
                for qaitem in gen.askWhoWhat(newTree()):
                    # Ignore too short questions
                    if len(qaitem[0].split(' ')) < 5:
                        continue
                    qCount += 1
                    addItem(qaitem, origSent, 0)
                # 1 is how-many question type
                for qaitem in gen.askHowMany(newTree()):
                    qCount += 1
                    addItem(qaitem, origSent, 1)
                # 2 is color question type
                for qaitem in gen.askColor(newTree()):
                    qCount += 1
                    addItem(qaitem, origSent, 2)
                # 3 is location question type
                for qaitem in gen.askWhere(newTree()):
                    qCount += 1
                    addItem(qaitem, origSent, 3)
                del(parser.rootsList[0])
                numSentences += 1
            parser.parse(line)
    log.info('Number of sentences: {:d}'.format(numSentences))
    log.info('Time elapsed: {:f} seconds'.format(time.time() - startTime))
    log.info('Number of questions: {:d}'.format(qCount))
    if outputFilename is not None:
        log.info('Writing to output {}'.format(
            os.path.abspath(outputFilename)))
        with open(outputFilename, 'wb') as f:
            pkl.dump(questionAll, f)
    pass
def printQAs(qaiter, qid=0):
    """Log every question/answer pair produced by *qaiter*."""
    template = 'Question {:d}: {} Answer: {}'
    for qaitem in qaiter:
        log.info(template.format(qid, qaitem[0], qaitem[1]))
def stanfordParseSingle(parserFolder, sentence):
    """Call stanford parser on a single sentence and return its raw output.

    Fix: the original used fixed names ('tmp.txt'/'tmpout.txt') in the
    current directory, so concurrent runs clobbered each other, and the
    temp files leaked if the parser call raised.  Unique files from
    tempfile plus a finally block address both.
    """
    import tempfile
    fd, tmpFname = tempfile.mkstemp(suffix='.txt')
    os.close(fd)
    fd, tmpOutFname = tempfile.mkstemp(suffix='.txt')
    os.close(fd)
    try:
        with open(tmpFname, 'w') as f:
            f.write(sentence)
        stanfordParseFile(parserFolder, tmpFname, tmpOutFname)
        with open(tmpOutFname) as f:
            result = f.read()
    finally:
        os.remove(tmpFname)
        os.remove(tmpOutFname)
    return result
def stanfordParseFile(parserFolder, inputFilename, outputFilename):
    """Run the Stanford parser's lexparser.sh over *inputFilename*,
    capturing its stdout into *outputFilename*."""
    scriptPath = os.path.join(parserFolder, 'lexparser.sh')
    with open(outputFilename, 'w') as fout:
        subprocess.call([scriptPath, inputFilename], stdout=fout)
def runSentence(parserFolder, sentence):
    """Run a single sentence: parse it with the Stanford parser, then log
    the questions produced by all four generators."""
    s = stanfordParseSingle(parserFolder, sentence)
    s = s.split('\n')
    parser = TreeParser()
    gen = QuestionGenerator()
    for i in range(len(s)):
        # TreeParser expects newline-terminated lines.
        parser.parse(s[i] + '\n')
    tree = parser.rootsList[0]
    log.info('Parser result:')
    log.info(tree)
    # Each ask* mutates its tree, so hand every generator a fresh copy.
    qaiter = gen.askWhoWhat(tree.copy())
    printQAs(qaiter)
    qaiter = gen.askHowMany(tree.copy())
    printQAs(qaiter)
    qaiter = gen.askColor(tree.copy())
    printQAs(qaiter)
    qaiter = gen.askWhere(tree.copy())
    printQAs(qaiter)
    pass
def runList(parserFolder, inputFilename, outputFilename=None):
    """Parse a file of sentences, then generate questions from the parses."""
    parsedName = inputFilename + '.parse.txt'
    stanfordParseFile(parserFolder, inputFilename, parsedName)
    questionGen(parsedName, outputFilename)
def parseArgs():
    """Parse command-line arguments for the question generator."""
    parser = argparse.ArgumentParser(description='Question Generator')
    options = (
        ('-parser_path',
         '/home/mren/third_party/stanford-parser-full-2015-04-20',
         'Path to stanford parser'),
        ('-sentence', None, 'Single sentence input'),
        ('-list', None, 'List file input'),
        ('-parsed_file', None, 'Parsed file input'),
        ('-output', None, 'Output file name'))
    for flag, default, helpText in options:
        parser.add_argument(flag, default=default, help=helpText)
    return parser.parse_args()
if __name__ == '__main__':
    args = parseArgs()
    if args.sentence:
        log.info('Single sentence mode')
        runSentence(parserFolder=args.parser_path, sentence=args.sentence)
    elif args.list:
        log.info('List of sentences mode')
        runList(parserFolder=args.parser_path,
                inputFilename=args.list, outputFilename=args.output)
    elif args.parsed_file:
        log.info('Pre-parsed file mode')
        questionGen(parseFilename=args.parsed_file, outputFilename=args.output)
    else:
        # Fix: the original passed a 2-tuple of strings to Exception (a
        # stray comma split the message), so the error printed as a tuple.
        raise Exception(
            'You must provide one of the three options: -sentence, -list, '
            'or -parsed_file')
| |
# -*- coding: utf-8 -*-
from cms import constants
from cms.apphook_pool import apphook_pool
from cms.forms.widgets import UserSelectAdminWidget
from cms.models import (Page, PagePermission, PageUser, ACCESS_PAGE,
PageUserGroup, titlemodels)
from cms.utils.conf import get_cms_setting
from cms.utils.i18n import get_language_tuple, get_language_list
from cms.utils.mail import mail_page_user_change
from cms.utils.page import is_valid_page_slug
from cms.utils.page_resolver import get_page_from_path, is_valid_url
from cms.utils.permissions import (get_current_user, get_subordinate_users,
get_subordinate_groups, get_user_permission_level)
from cms.utils.urlutils import any_path_re
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.db.models.fields import BooleanField
from django.forms.util import ErrorList
from django.forms.widgets import HiddenInput
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _, get_language
from menus.menu_pool import menu_pool
def get_permission_acessor(obj):
    """Return the permission m2m manager for *obj*: users expose it as
    ``user_permissions``, groups as ``permissions``."""
    attr = 'user_permissions' if isinstance(obj, (PageUser, User,)) else 'permissions'
    return getattr(obj, attr)
def save_permissions(data, obj):
    """Grant or revoke add/change/delete permissions on the CMS models for
    *obj* (user or group) according to the ``can_<action>_<name>`` boolean
    flags in *data*."""
    modelNames = (
        (Page, 'page'),
        (PageUser, 'pageuser'),
        (PageUserGroup, 'pageuser'),
        (PagePermission, 'pagepermission'),
    )
    if not obj.pk:
        # save obj, otherwise we can't assign permissions to him
        obj.save()
    accessor = get_permission_acessor(obj)
    for model, name in modelNames:
        content_type = ContentType.objects.get_for_model(model)
        for action in ('add', 'change', 'delete'):
            # Resolve the codename of permission `action` on `model`.
            codename = getattr(model._meta, 'get_%s_permission' % action)()
            permission = Permission.objects.get(content_type=content_type,
                                                codename=codename)
            if data.get('can_%s_%s' % (action, name), None):
                accessor.add(permission)
            else:
                accessor.remove(permission)
class PageAddForm(forms.ModelForm):
    """Admin form for creating a CMS page (title/slug/language subset)."""
    title = forms.CharField(label=_("Title"), widget=forms.TextInput(),
        help_text=_('The default title'))
    slug = forms.CharField(label=_("Slug"), widget=forms.TextInput(),
        help_text=_('The part of the title that is used in the URL'))
    language = forms.ChoiceField(label=_("Language"), choices=get_language_tuple(),
        help_text=_('The current language of the content fields.'))

    class Meta:
        model = Page
        exclude = ["created_by", "changed_by", "placeholders"]

    def __init__(self, *args, **kwargs):
        super(PageAddForm, self).__init__(*args, **kwargs)
        self.fields['parent'].widget = HiddenInput()
        self.fields['site'].widget = HiddenInput()
        if not self.fields['site'].initial:
            self.fields['site'].initial = Site.objects.get_current().pk
        site_id = self.fields['site'].initial
        languages = get_language_tuple(site_id)
        self.fields['language'].choices = languages
        if not self.fields['language'].initial:
            self.fields['language'].initial = get_language()
        if (self.fields['parent'].initial and
                get_cms_setting('TEMPLATE_INHERITANCE') in
                [name for name, value in get_cms_setting('TEMPLATES')]):
            # non-root pages default to inheriting their template
            self.fields['template'].initial = constants.TEMPLATE_INHERITANCE_MAGIC

    def clean(self):
        """Cross-field validation: site/parent consistency, slug uniqueness
        and (for published pages) URL validity of the resulting path."""
        cleaned_data = self.cleaned_data
        slug = cleaned_data.get('slug', '')
        page = self.instance
        lang = cleaned_data.get('language', None)
        # No language, can not go further, but validation failed already
        if not lang:
            return cleaned_data
        if 'parent' not in cleaned_data:
            cleaned_data['parent'] = None
        parent = cleaned_data.get('parent', None)
        try:
            site = self.cleaned_data.get('site', Site.objects.get_current())
        except Site.DoesNotExist:
            site = None
            raise ValidationError("No site found for current settings.")
        if parent and parent.site != site:
            raise ValidationError("Site doesn't match the parent's page site")
        if site and not is_valid_page_slug(page, parent, lang, slug, site):
            self._errors['slug'] = ErrorList([_('Another page with this slug already exists')])
            del cleaned_data['slug']
        if self.cleaned_data.get('published') and page.title_set.count():
            # Check for titles attached to the page makes sense only because
            # AdminFormsTests.test_clean_overwrite_url validates the form with
            # no page instance available. Looks like just a theoretical corner
            # case.
            try:
                title = page.get_title_obj(lang, fallback=False)
            except titlemodels.Title.DoesNotExist:
                title = None
            if title and not isinstance(title, titlemodels.EmptyTitle) and slug:
                oldslug = title.slug
                title.slug = slug
                title.save()
                try:
                    is_valid_url(title.path, page)
                # Fix: ``except ValidationError, e`` is Python-2-only syntax;
                # ``as`` is valid on Python 2.6+ and required on Python 3.
                except ValidationError as e:
                    title.slug = oldslug
                    title.save()
                    if 'slug' in cleaned_data:
                        del cleaned_data['slug']
                    self._errors['slug'] = ErrorList(e.messages)
        return cleaned_data

    def clean_slug(self):
        """Slugify the submitted slug and reject an empty result."""
        slug = slugify(self.cleaned_data['slug'])
        if not slug:
            raise ValidationError("Slug must not be empty.")
        return slug

    def clean_language(self):
        """Only accept languages enabled in the CMS settings."""
        language = self.cleaned_data['language']
        if language not in get_language_list():
            raise ValidationError("Given language does not match language settings.")
        return language
class PageForm(PageAddForm):
    """Full page change form: adds SEO, apphook and URL-override fields on
    top of the basic add form."""
    menu_title = forms.CharField(label=_("Menu Title"), widget=forms.TextInput(),
                                 help_text=_('Overwrite what is displayed in the menu'), required=False)
    page_title = forms.CharField(label=_("Page Title"), widget=forms.TextInput(),
                                 help_text=_('Overwrites what is displayed at the top of your browser or in bookmarks'), required=False)
    application_urls = forms.ChoiceField(label=_('Application'),
                                         choices=(), required=False,
                                         help_text=_('Hook application to this page.'))
    overwrite_url = forms.CharField(label=_('Overwrite URL'), max_length=255, required=False,
                                    help_text=_('Keep this field empty if standard path should be used.'))
    redirect = forms.CharField(label=_('Redirect'), max_length=255, required=False,
                               help_text=_('Redirects to this URL.'))
    meta_description = forms.CharField(label='Description meta tag', required=False, widget=forms.Textarea,
                                       help_text=_('A description of the page sometimes used by search engines.'))
    meta_keywords = forms.CharField(label='Keywords meta tag', max_length=255, required=False,
                                    help_text=_('A list of comma seperated keywords sometimes used by search engines.'))

    def __init__(self, *args, **kwargs):
        """Populate the dynamic choice lists (menus and apphooks)."""
        super(PageForm, self).__init__(*args, **kwargs)
        empty_choice = [('', "---------")]
        if 'navigation_extenders' in self.fields:
            menus = menu_pool.get_menus_by_attribute("cms_enabled", True)
            self.fields['navigation_extenders'].widget = forms.Select({}, empty_choice + menus)
        if 'application_urls' in self.fields:
            self.fields['application_urls'].choices = empty_choice + apphook_pool.get_apphooks()

    def clean(self):
        """Additionally reject duplicate reverse URL ids on the same site."""
        cleaned_data = super(PageForm, self).clean()
        if 'reverse_id' in self.fields:
            reverse_id_value = cleaned_data['reverse_id']
            site_id = cleaned_data['site']
            if reverse_id_value:
                conflicts = Page.objects.filter(
                    reverse_id=reverse_id_value, site=site_id, publisher_is_draft=True
                ).exclude(pk=self.instance.pk)
                if conflicts.count():
                    self._errors['reverse_id'] = self.error_class(
                        [_('A page with this reverse URL id exists already.')])
        return cleaned_data

    def clean_overwrite_url(self):
        """Validate an explicitly overwritten URL against this page."""
        if 'overwrite_url' in self.fields:
            url = self.cleaned_data['overwrite_url']
            is_valid_url(url, self.instance)
        # TODO: Check what happens if 'overwrite_url' is NOT in self.fields
        return url
class PagePermissionInlineAdminForm(forms.ModelForm):
    """
    Page permission inline admin form used in inline admin. Required, because
    user and group queryset must be changed. User can see only users on the same
    level or under him in choosen page tree, and users which were created by him,
    but aren't assigned to higher page level than current user.
    """
    # NOTE(review): the label reads 'user' although this is the page field -
    # looks like a copy/paste slip; confirm before changing the UI string.
    page = forms.ModelChoiceField(Page, label=_('user'), widget=HiddenInput(), required=True)
    def __init__(self, *args, **kwargs):
        """Restrict the user/group choices to subordinates of the current
        admin user, switching to a raw-id widget for large user counts."""
        super(PagePermissionInlineAdminForm, self).__init__(*args, **kwargs)
        user = get_current_user() # current user from threadlocals
        sub_users = get_subordinate_users(user)
        limit_choices = True
        use_raw_id = False
        # Unfortunately, if there are > 500 users in the system, non-superusers
        # won't see any benefit here because if we ask Django to put all the
        # user PKs in limit_choices_to in the query string of the popup we're
        # in danger of causing 414 errors so we fall back to the normal input
        # widget.
        if get_cms_setting('RAW_ID_USERS'):
            if sub_users.count() < 500:
                # If there aren't too many users, proceed as normal and use a
                # raw id field with limit_choices_to
                limit_choices = True
                use_raw_id = True
            elif get_user_permission_level(user) == 0:
                # If there are enough choices to possibly cause a 414 request
                # URI too large error, we only proceed with the raw id field if
                # the user is a superuser & thus can legitimately circumvent
                # the limit_choices_to condition.
                limit_choices = False
                use_raw_id = True
        # We don't use the fancy custom widget if the admin form wants to use a
        # raw id field for the user
        if use_raw_id:
            from django.contrib.admin.widgets import ForeignKeyRawIdWidget
            # This check will be False if the number of users in the system
            # is less than the threshold set by the RAW_ID_USERS setting.
            if isinstance(self.fields['user'].widget, ForeignKeyRawIdWidget):
                # We can't set a queryset on a raw id lookup, but we can use
                # the fact that it respects the limit_choices_to parameter.
                if limit_choices:
                    self.fields['user'].widget.rel.limit_choices_to = dict(
                        id__in=list(sub_users.values_list('pk', flat=True))
                    )
        else:
            self.fields['user'].widget = UserSelectAdminWidget()
            self.fields['user'].queryset = sub_users
            self.fields['user'].widget.user = user # assign current user
        self.fields['group'].queryset = get_subordinate_groups(user)
    def clean(self):
        """Default all can_* boolean fields to False, then enforce that
        page-add permission is only granted together with child/descendant
        access and with edit permission."""
        super(PagePermissionInlineAdminForm, self).clean()
        # normalize every can_* boolean on the model to an explicit value
        for field in self.Meta.model._meta.fields:
            if not isinstance(field, BooleanField) or not field.name.startswith('can_'):
                continue
            name = field.name
            self.cleaned_data[name] = self.cleaned_data.get(name, False)
        can_add = self.cleaned_data['can_add']
        can_edit = self.cleaned_data['can_change']
        # check if access for childrens, or descendants is granted
        if can_add and self.cleaned_data['grant_on'] == ACCESS_PAGE:
            # this is a missconfiguration - user can add/move page to current
            # page but after he does this, he will not have permissions to
            # access this page anymore, so avoid this
            raise forms.ValidationError(_("Add page permission requires also "
                "access to children, or descendants, otherwise added page "
                "can't be changed by its creator."))
        if can_add and not can_edit:
            raise forms.ValidationError(_('Add page permission also requires edit page permission.'))
        # TODO: finish this, but is it really required? might be nice to have
        # check if permissions assigned in cms are correct, and display
        # a message if not - correctness mean: if user has add permisson to
        # page, but he does'nt have auth permissions to add page object,
        # display warning
        return self.cleaned_data
    def save(self, commit=True):
        """
        Makes sure the boolean fields are set to False if they aren't
        available in the form.
        """
        instance = super(PagePermissionInlineAdminForm, self).save(commit=False)
        for field in self._meta.model._meta.fields:
            if isinstance(field, BooleanField) and field.name.startswith('can_'):
                setattr(instance, field.name, self.cleaned_data.get(field.name, False))
        if commit:
            instance.save()
        return instance
    class Meta:
        model = PagePermission
class ViewRestrictionInlineAdminForm(PagePermissionInlineAdminForm):
    """Permission inline that always grants view access (the field is
    hidden and forced to True)."""
    can_view = forms.BooleanField(label=_('can_view'), widget=HiddenInput(), initial=True)

    def clean_can_view(self):
        """Force ``can_view`` to True.

        A ``clean_<field>`` method must return the *field's* value; the
        previous implementation returned the whole ``cleaned_data`` dict,
        which Django then stored under ``can_view``.
        """
        self.cleaned_data["can_view"] = True
        return True
class GlobalPagePermissionAdminForm(forms.ModelForm):
    """Admin form for global page permissions."""

    def clean(self):
        """Require at least one of user/group to be selected.

        Uses ``.get()`` so that a key missing from ``cleaned_data``
        (because the field itself failed validation) does not raise
        ``KeyError`` here.
        """
        super(GlobalPagePermissionAdminForm, self).clean()
        if not self.cleaned_data.get('user') and not self.cleaned_data.get('group'):
            raise forms.ValidationError(_('Please select user or group first.'))
        return self.cleaned_data
class GenericCmsPermissionForm(forms.ModelForm):
    """Generic form for User & Grup permissions in cms
    """
    can_add_page = forms.BooleanField(label=_('Add'), required=False, initial=True)
    can_change_page = forms.BooleanField(label=_('Change'), required=False, initial=True)
    can_delete_page = forms.BooleanField(label=_('Delete'), required=False)
    can_recover_page = forms.BooleanField(label=_('Recover (any) pages'), required=False)
    # pageuser is for pageuser & group - they are combined together,
    # and read out from PageUser model
    can_add_pageuser = forms.BooleanField(label=_('Add'), required=False)
    can_change_pageuser = forms.BooleanField(label=_('Change'), required=False)
    can_delete_pageuser = forms.BooleanField(label=_('Delete'), required=False)
    can_add_pagepermission = forms.BooleanField(label=_('Add'), required=False)
    can_change_pagepermission = forms.BooleanField(label=_('Change'), required=False)
    can_delete_pagepermission = forms.BooleanField(label=_('Delete'), required=False)

    def populate_initials(self, obj):
        """Derive initial checkbox values for ``obj`` from the auth
        permission system; returns a dict keyed ``can_<action>_<model>``."""
        accessor = get_permission_acessor(obj)
        initials = {}
        for model in (Page, PageUser, PagePermission):
            model_name = model.__name__.lower()
            ctype = ContentType.objects.get_for_model(model)
            granted = accessor.filter(content_type=ctype).values_list('codename', flat=True)
            for action in ('add', 'change', 'delete'):
                codename = getattr(model._meta, 'get_%s_permission' % action)()
                initials['can_%s_%s' % (action, model_name)] = codename in granted
        return initials
class PageUserForm(UserCreationForm, GenericCmsPermissionForm):
    """Create/change form for CMS page users, combining Django's
    UserCreationForm with the generic CMS permission checkboxes."""
    notify_user = forms.BooleanField(label=_('Notify user'), required=False,
        help_text=_('Send email notification to user about username or password change. Requires user email.'))

    class Meta:
        model = PageUser

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        if instance:
            initial = initial or {}
            initial.update(self.populate_initials(instance))
        super(PageUserForm, self).__init__(data, files, auto_id, prefix,
            initial, error_class, label_suffix, empty_permitted, instance)
        # Default to the password-setting save path. Previously this flag was
        # only set on change forms, so save() could hit an AttributeError when
        # no instance was passed in.
        self._password_change = True
        if instance:
            # if it is a change form, keep those fields as not required
            # password will be changed only if there is something entered inside
            self.fields['password1'].required = False
            self.fields['password1'].label = _('New password')
            self.fields['password2'].required = False
            self.fields['password2'].label = _('New password confirmation')

    def clean_username(self):
        """Skip the uniqueness check when editing an existing user."""
        if self.instance:
            return self.cleaned_data['username']
        return super(PageUserForm, self).clean_username()

    def clean_password2(self):
        """Allow both password fields to stay empty on a change form,
        meaning 'do not change the password'."""
        if self.instance and self.cleaned_data['password1'] == '' and self.cleaned_data['password2'] == '':
            self._password_change = False
            return u''
        return super(PageUserForm, self).clean_password2()

    def clean(self):
        """Enforce that notification needs an email address and that each
        'add' permission comes with the matching 'change' permission."""
        cleaned_data = super(PageUserForm, self).clean()
        notify_user = self.cleaned_data['notify_user']
        if notify_user and not self.cleaned_data.get('email', None):
            raise forms.ValidationError(_("Email notification requires valid email address."))
        if self.cleaned_data['can_add_page'] and not self.cleaned_data['can_change_page']:
            raise forms.ValidationError(_("The permission to add new pages requires the permission to change pages!"))
        if self.cleaned_data['can_add_pageuser'] and not self.cleaned_data['can_change_pageuser']:
            raise forms.ValidationError(_("The permission to add new users requires the permission to change users!"))
        if self.cleaned_data['can_add_pagepermission'] and not self.cleaned_data['can_change_pagepermission']:
            raise forms.ValidationError(_("To add permissions you also need to edit them!"))
        return cleaned_data

    def save(self, commit=True):
        """Create user, assign him to staff users, and create permissions for
        him if required. Also assigns creator to user.
        """
        # Skip UserCreationForm.save (which sets the password) when the
        # password fields were deliberately left empty on a change form.
        Super = PageUserForm if self._password_change else UserCreationForm
        user = super(Super, self).save(commit=False)
        user.is_staff = True
        created = not bool(user.pk)
        # assign creator to user (the old code called get_current_user()
        # twice and discarded the first result)
        if created:
            user.created_by = get_current_user()
        if commit:
            user.save()
            save_permissions(self.cleaned_data, user)
            if self.cleaned_data['notify_user']:
                mail_page_user_change(user, created, self.cleaned_data['password1'])
        return user
class PageUserGroupForm(GenericCmsPermissionForm):
    """Create/change form for CMS page user groups."""
    class Meta:
        model = PageUserGroup
        fields = ('name', )

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        if instance:
            initial = initial or {}
            initial.update(self.populate_initials(instance))
        super(PageUserGroupForm, self).__init__(data, files, auto_id, prefix,
            initial, error_class, label_suffix, empty_permitted, instance)

    def save(self, commit=True):
        """Save the group, record its creator on first save, and persist
        the permission checkboxes."""
        # Was super(GenericCmsPermissionForm, ...), which silently skipped
        # the direct parent in the MRO; harmless while the parent defines no
        # save(), but fragile - use the conventional super target.
        group = super(PageUserGroupForm, self).save(commit=False)
        created = not bool(group.pk)
        # assign creator to user
        if created:
            group.created_by = get_current_user()
        if commit:
            group.save()
            save_permissions(self.cleaned_data, group)
        return group
# ---- (removed stray '|' residue left over from file concatenation) ----
##for Raleigh & Grant
##who contributed more than they know
################################################################################
############################## WHEEL OF FORTUNE ################################
################################################################################
import random
import string
import time
import pickle
PUZZLE_FILENAME = "puzzles_and_clues.txt"
# Open in binary mode ("rb"): pickle payloads are byte streams and text mode
# corrupts them on platforms that translate line endings.  The with-block
# guarantees the file handle is closed even if unpickling fails.
with open(PUZZLE_FILENAME, "rb") as inFile:
    puzzles_and_clues = pickle.load(inFile)
def get_puzzle_and_clue(puzzles_and_clues):
    """
    puzzles_and_clues: dictionary of puzzles and clues,
    whose keys are clues and values are lists of puzzles.
    Returns tuple of length two, where first element is
    clue and second element is the upper-cased puzzle.
    """
    # random.choice replaces the manual randint-indexing; list() makes the
    # key view indexable on both Python 2 and 3.
    clue = random.choice(list(puzzles_and_clues.keys()))
    puzzle = random.choice(puzzles_and_clues[clue])
    return (clue, puzzle.upper())
def start():
"""
Starts game, initializes important variables, and calls function:
gameSetup(playerNames_hum, playerNames_comp, playerOrder_val, rounds)
"""
print string.center(("*" * 80), 80)
print string.center(("*" * 80), 80)
print string.center((("*" * 5) + (" " * 70) + ("*" * 5)), 80)
print string.center((("*" * 5) + (" " * 21) + "Welcome to WHEEL OF FORTUNE!" + (" " * 21) + ("*" * 5)), 80)
print string.center((("*" * 5) + (" " * 7) + "I'm your host, Pat Sajak, with your hostess Vanna White." + (" " * 7) + ("*" * 5)), 80)
print string.center((("*" * 5) + (" " * 70) + ("*" * 5)), 80)
print string.center(("*" * 80), 80)
print string.center(("*" * 80), 80)
playerNames_hum = ["Player 1", "Player 2", "Player 3"]
playerNames_comp = ["Chad Ledouche", "Braxton Beauregard"]
playerOrder_val = [[0, 0], [0, 0], [0, 0]]
rounds = ["first", "second", "third", "fourth"]
gameSetup(playerNames_hum, playerNames_comp, playerOrder_val, rounds)
def gameSetup(playerNames_hum, playerNames_comp, playerOrder_val, rounds):
    """
    Calls game setup functions: get_numPlayers() and
    get_playerNames(numPlayers, playerNames_hum, playerNames_comp)
    Also calls function: game(players, playerOrder_val)
    """
    player_count = get_numPlayers()
    roster = get_playerNames(player_count, playerNames_hum, playerNames_comp)
    game(roster, playerOrder_val)
def disp_scores(playerOrder_val):
print "playerOrder_val in disp_scores is:", playerOrder_val
playerOrder_val_round = playerOrder_val[:]
playerOrder_val_round.sort(reverse = True)
print "playerOrder_val in disp_scores is:", playerOrder_val
first = playerOrder_val[0]
second = playerOrder_val[1]
third = playerOrder_val[2]
if first[0] != second[0] and second[0] != third[0]:
print first[1], "in first place with $" + str(first[0]) + "."
print second[1], "in second place with $" + str(second[0]) + "."
print third[1], "in third place with $" + str(third[0]) + "."
if first[0] > second[0] and second[0] == third[0]:
print first[1], "in first place with $" + str(first[0]) + "."
print second[1], "and", third[1], "tied for second with $" + str(third[0]) + " each."
if first[0] == second[0] and second[0] > third[0]:
print second[1], "and", first[1], "tied for the lead with $" + str(third[0]) + " each ."
print third[1], "in second place with $" + str(third[0]) + "."
if first[0] == second[0] and second[0] == third[0]:
print second[1] + ", " + third[1] + ", and,", first[1], "tied for the lead with $" + str(third[0]) + " each ."
print "Surely, this is more improbable than the Big Bang (Theory's merciful cancellation.)"
def game(players, playerOrder_val):
"""
Calls function: get_playerOrder(players, playerOrder_val) and saves
result to variable: playerOrder
Calls function: game_round(players, playerOrder_val) and saves
result to variable: playerOrder_val
Iterates through function: game_round(playerOrder, playerOrder_val)
four times, each time returning variable: playerOrder_val
"""
# sets the number of rounds in the game
num_rounds = 4
# tracks the game's round number
round_num = 1
# list that tracks the starting order of players throughout game
playerOrder = get_playerOrder(players, playerOrder_val)
playerOrder_val = playerOrder_val
while round_num <= num_rounds:
## print "playerOrder_val is:", playerOrder_val
if round_num == 1:
playerOrder_val = [[200, 'Grant'], [3100, 'Raleigh'], [0, 'Eric']]
playerOrder_val = game_round(playerOrder, playerOrder_val, round_num)
## print "playerOrder_val:", playerOrder_val
print ""
print "At the end of ROUND", round_num, "the scores are:"
disp_scores(playerOrder_val)
print ""
round_num += 1
end_game(players)
def disp_puzzle_init(puzzle_and_clue):
    """Build the player-facing board for the puzzle in
    ``puzzle_and_clue[1]``: a blank ("_") per hidden letter, punctuation
    and spaces shown as-is, every cell padded with one trailing space."""
    cells = []
    for ch in puzzle_and_clue[1]:
        if ch in string.punctuation or ch == " ":
            cells.append(ch + " ")
        else:
            cells.append("_ ")
    return "".join(cells)
def incom_puzzle_init(puzzle_and_clue):
    """Build the internal solution tracker: one "_" per hidden letter,
    punctuation and spaces copied through unchanged (no padding).  As
    letters are guessed the blanks are filled in until it matches the
    solution string."""
    return "".join(
        ch if (ch in string.punctuation or ch == " ") else "_"
        for ch in puzzle_and_clue[1]
    )
def disp_remaining_letters(alpha):
vowels = ["A", "E", "I", "O", "U"]
consonants = ['B', 'C', 'D', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'V', 'W', 'X', 'Y', 'Z']
disp_vowels = ""
disp_consonants = ""
for i in range(len(vowels)):
if vowels[i] in alpha:
disp_vowels += vowels[i] + " "
for i in range(len(consonants)):
if consonants[i] in alpha:
disp_consonants += consonants[i] + " "
print ""
print "Remaining letters:"
print "Vowels: " + disp_vowels
print "Consonants: " + disp_consonants
print ""
def disp_puzzle_and_clue(puzzle_and_clue, disp_puzzle):
print ""
print 'Clue: "' + puzzle_and_clue[0] + '"'
print "Puzzle: " + disp_puzzle
def game_round(playerOrder, playerOrder_val, round_num):
    """Play a single round of Wheel of Fortune.

    playerOrder     -- three player names in spin order
    playerOrder_val -- running [score, name] pairs carried across rounds
                       (indices parallel to playerOrder)
    round_num       -- 1-based round number (banner text, starting player)
    Returns playerOrder_val; when the puzzle is solved, the solver's
    per-round winnings are added to their running total first.
    """
    print string.center(("*" * 80), 80)
    print string.center(("*" * 80), 80)
    print ""
    print string.center("ROUND " + str(round_num), 80)
    print ""
    print string.center(("*" * 80), 80)
    print string.center(("*" * 80), 80)
    # per-round winnings, zeroed each round; only the solver's entry is
    # banked into playerOrder_val at the end of the round
    playerOrder_val_round = [[0, 0], [0, 0], [0, 0]]
    # retrieves and stores tuple, length two, whose first element
    # is round clue and second element is puzzle
    puzzle_and_clue = get_puzzle_and_clue(puzzles_and_clues)
    # retrieves and stores string of empty puzzle
    disp_puzzle = disp_puzzle_init(puzzle_and_clue)
    # fills out as letters are guessed; not meant to be printed
    incom_puzzle = incom_puzzle_init(puzzle_and_clue)
    # stores uppercase alphabet in variable alpha
    alpha = string.ascii_uppercase
    turn_num = 1
    num_turns = 10
    # NOTE(review): this prints the solution to the console - presumably a
    # debug leftover; confirm before removing
    print "The puzzle and clue for ROUND", round_num, "are:"
    print puzzle_and_clue[1]
    # starting player rotates with the round number
    player_turn = (round_num - 1) % 3
    while incom_puzzle != puzzle_and_clue[1]:
        if turn_num == num_turns:
            break
        # inner loop: one iteration per turn, up to the num_turns limit
        while num_turns >= turn_num:
            turn_num += 1
            if turn_num == num_turns:
                break
            disp_puzzle_and_clue(puzzle_and_clue, disp_puzzle)
            disp_remaining_letters(alpha)
            # 0 = solve the puzzle, 1 = spin the wheel, 2 = buy a vowel
            player_selection = get_player_selection(playerOrder, player_turn, playerOrder_val_round)
            if player_selection == 0:
                print ""
                print "You chose to solve the puzzle."
                guess = string.upper(get_guessWord())
                if guess == puzzle_and_clue[1]:
                    # solved: outer-loop check below banks the winnings
                    incom_puzzle = puzzle_and_clue[1]
                else:
                    print ""
                    print "Sorry, " + playerOrder[player_turn] + ", that is not the solution to the puzzle."
                    print "Possession of the Wheel passes to " + playerOrder[(player_turn + 1) % 3] + "."
                    print ""
                    print string.center(("-" * 80), 80)
                    time.sleep(1)
                    player_turn = (player_turn + 1) % 3
            if player_selection == 1:
                print ""
                print "You chose to spin The Wheel."
                # BUG(review): passes the *function object* game_round, not
                # round_num, so get_prize's equality checks never match and
                # (in CPython 2's arbitrary cross-type ordering) the round-4
                # prize table is used every round - should be
                # get_prize(round_num)
                prize = get_prize(game_round)
                subPrize = prize
                if prize == "bankrupt":
                    # NOTE(review): overwrites the running totals with
                    # bankrupt()'s result computed from the *round* scores -
                    # verify intent against bankrupt()'s definition
                    playerOrder_val = bankrupt(player_turn, playerOrder, playerOrder_val_round)
                    player_turn = (player_turn + 1) % 3
                if prize == "loseATurn":
                    lose_a_turn(player_turn, playerOrder)
                    player_turn = (player_turn + 1) % 3
                if prize == "freePlay":
                    # freePlay_choice appears unused after this assignment
                    freePlay_choice = 0
                    print ""
                    print playerOrder[player_turn], "spun for a FREE PLAY!"
                    print playerOrder[player_turn] + ", you may solve or guess a letter (including vowels) without penalty."
                    print ""
                    # 1 = pick a letter, 2 = solve
                    selection_freePlay = get_freePlayChoice(playerOrder[player_turn])
                    # free-play letters are valued at a flat $500
                    subPrize = 500
                    if selection_freePlay == 1:
                        guess = string.upper(get_guessfreePlay())
                        print string.center(("-" * 80), 80)
                        print ""
                        print string.center(("Vanna, does the puzzle contain any '" + guess + "'s?"), 80)
                        print ""
                        print string.center(("-" * 80), 80)
                        time.sleep(0.7)
                        letter_app = 0
                        print ""
                        print disp_puzzle
                        # reveal each occurrence; disp_puzzle cells are two
                        # characters wide, hence the i * 2 indexing
                        for i in range(len(puzzle_and_clue[1])):
                            if puzzle_and_clue[1][i] == guess:
                                time.sleep(0.7)
                                disp_puzzle = disp_puzzle[0:(i * 2)] + guess + disp_puzzle[((i * 2) + 1):]
                                incom_puzzle = incom_puzzle[0:i] + guess + incom_puzzle[(i + 1):]
                                print ""
                                print disp_puzzle
                                letter_app += 1
                        playerOrder_val_round[player_turn][0] = guess_result(player_turn, playerOrder, playerOrder_val_round, guess, subPrize, letter_app)
                        if incom_puzzle == puzzle_and_clue[1]:
                            break
                    if selection_freePlay == 2:
                        guess_word = get_guessWord()
                        guess_word = string.upper(guess_word)
                        if guess_word == puzzle_and_clue[1]:
                            incom_puzzle = guess_word
                            break
                        else:
                            print ""
                            print "Sorry, that is not the solution to the puzzle."
                            print "Your Free Play spin, however, means that you keep possession of The Wheel."
                            print ""
                            print string.center(("-" * 80), 80)
                # a dollar wedge: guess a consonant for prize per occurrence
                if type(prize) is int:
                    print ""
                    print playerOrder[player_turn] + " spun for $" + str(prize) + "!"
                    print ""
                    guess = get_guessConsonant()
                    # alpha tracks letters not yet called this round
                    if guess in alpha:
                        alpha = alpha.replace(guess, "")
                        print string.center(("-" * 80), 80)
                        print ""
                        print string.center(("Vanna, does the puzzle contain any '" + guess + "'s?"), 80)
                        print ""
                        print string.center(("-" * 80), 80)
                        time.sleep(0.7)
                        letter_app = 0
                        print ""
                        print disp_puzzle
                        time.sleep(0.7)
                        if guess in puzzle_and_clue[1]:
                            # first pass reveals, second pass counts
                            for i in range(len(puzzle_and_clue[1])):
                                if puzzle_and_clue[1][i] == guess:
                                    disp_puzzle = disp_puzzle[0:(i * 2)] + guess + disp_puzzle[((i * 2) + 1):]
                                    incom_puzzle = incom_puzzle[0:i] + guess + incom_puzzle[(i + 1):]
                                    print ""
                                    print disp_puzzle
                                    time.sleep(0.7)
                            for i in range(len(puzzle_and_clue[1])):
                                if puzzle_and_clue[1][i] == guess:
                                    letter_app += 1
                            playerOrder_val_round[player_turn][0] = guess_result(player_turn, playerOrder, playerOrder_val_round, guess, subPrize, letter_app)
                            if incom_puzzle == puzzle_and_clue[1]:
                                break
                        else:
                            print ""
                            print string.center(("-" * 80), 80)
                            print ""
                            print "I'm sorry", playerOrder[player_turn] + ", but there are no '" + guess + "'s in the puzzle."
                            print ""
                            print "Possession of The Wheel passes to " + playerOrder[(player_turn + 1) % 3] + "."
                            print ""
                            print string.center(("-" * 80), 80)
                            time.sleep(1.5)
                            player_turn = (player_turn + 1) % 3
                    else:
                        guess_previously_called(player_turn, playerOrder, guess)
            if player_selection == 2:
                print ""
                print "You chose to buy a vowel."
                print ""
                # vowels cost a flat $250, charged before the guess
                playerOrder_val_round[player_turn][0] = (playerOrder_val_round[player_turn][0] - 250)
                guess = string.upper(get_guessVowel())
                if guess in alpha:
                    alpha = alpha.replace(guess, "")
                else:
                    guess_previously_called(player_turn, playerOrder, guess)
                    player_turn = (player_turn + 1) % 3
                    break
                print ""
                print string.center(("-" * 80), 80)
                print ""
                print string.center(("Vanna, does the puzzle contain any '" + guess + "'s?"), 80)
                print ""
                print string.center(("-" * 80), 80)
                time.sleep(0.7)
                print ""
                print disp_puzzle
                letter_app = 0
                if guess in puzzle_and_clue[1]:
                    for i in range(len(puzzle_and_clue[1])):
                        if puzzle_and_clue[1][i] == guess:
                            time.sleep(0.7)
                            disp_puzzle = disp_puzzle[0:(i * 2)] + guess + disp_puzzle[((i * 2) + 1):]
                            incom_puzzle = incom_puzzle[0:i] + guess + incom_puzzle[(i + 1):]
                            print ""
                            print disp_puzzle
                            letter_app += 1
                if letter_app == 0:
                    print ""
                    print string.center(("-" * 80), 80)
                    print ""
                    print "I'm sorry", playerOrder[player_turn] + ", but there are no '" + guess + "'s in the puzzle."
                    print ""
                    print "Possession of The Wheel passes to " + playerOrder[(player_turn + 1) % 3] + "."
                    print ""
                    print string.center(("-" * 80), 80)
                    time.sleep(1.5)
                    player_turn = (player_turn + 1) % 3
                    break
                if letter_app == 1:
                    print ""
                    print "Good guess,", playerOrder[player_turn] + "! There is 1", guess, "in the puzzle!"
                    print ""
                    print string.center(("-" * 80), 80)
                    print ""
                if letter_app >= 2:
                    print ""
                    print "Good guess,", playerOrder[player_turn] + "! There are", letter_app, "'" + guess + "'s in the puzzle!"
                    print ""
                    print string.center(("-" * 80), 80)
                    print ""
        # puzzle solved: bank the solver's round winnings into the totals
        if incom_puzzle == puzzle_and_clue[1]:
            playerOrder_val[player_turn][0] = playerOrder_val_round[player_turn][0] + playerOrder_val[player_turn][0]
            print string.center(("-" * 80), 80)
            time.sleep(2.5)
            print ""
            print "Congratulations,", playerOrder[player_turn] + ". You correctly solved the puzzle:"
            print string.upper(puzzle_and_clue[1])
            print ""
            break
    # NOTE(review): debug print left in place
    print "playerOrder_val right before func return:", playerOrder_val
    return playerOrder_val
def end_game(players):
print "----------------------"
print "GAME OVER!"
print "----------------------"
print "Would you like to play again? (y/n)"
selection = string.lower(raw_input())
if selection == "y" or selection == "yes":
playerOrder_val = [[0, 0], [0, 0], [0, 0]]
game(players, playerOrder_val)
def get_numPlayers():
numPlayers = 0
while numPlayers <= 0 or numPlayers > 3:
print ""
print "How many contestants (max: 3) will be playing today?"
numPlayers = raw_input("Number of players: ",)
if numPlayers == "One" or numPlayers == "one" or numPlayers == "ONE" or numPlayers == "1":
numPlayers = 1
print "You have selected play for 1 player."
if numPlayers == "Two" or numPlayers == "two" or numPlayers == "TWO" or numPlayers == "2":
numPlayers = 2
print "You have selected play for 2 players."
if numPlayers == "Three" or numPlayers == "three" or numPlayers == "THREE" or numPlayers == "3":
numPlayers = 3
print "You have selected play for 3 players."
if numPlayers < 1 or numPlayers > 3 or numPlayers == type(int):
print ""
print string.center(("-" * 80), 80)
print "ERROR: INVALID PLAYER NUMBER"
print string.center(("-" * 80), 80)
return numPlayers
def get_playerNames(numPlayers, playerNames_hum, playerNames_comp):
players = ["Player 1", "Player 2", "Player 3"]
print ""
## print string.center(("-" * 80), 80)
## print string.center(("-" * 80), 80)
for i in range(numPlayers):
name = ""
while name == "":
name = raw_input(players[i] + ", what is your name? ")
name = name.title()
if name == "":
print ""
print string.center(("-" * 80), 80)
print string.expandtabs("ERROR, FIELD EMPTY")
print string.expandtabs("Please try again.")
print string.center(("-" * 80), 80)
print ""
players[i] = name
if numPlayers == 3:
print ""
## print string.center(("-" * 80), 80)
print string.center(("-" * 80), 80)
print ""
print "Welcome", players[0] + ",", players[1] + ", and", players[2] + "!"
print ""
if numPlayers == 2:
players[2] = playerNames_comp[0]
print ""
## print string.center(("-" * 80), 80)
print "Welcome", players[0] + " and", players[1] + "! Today you will be playing against", players[2] + "."
if numPlayers == 1:
players[1] = playerNames_comp[0]
players[2] = playerNames_comp[1]
print ""
## print string.center(("-" * 80), 80)
print "Welcome", players[0] + "! Today you will be playing against", players[1], "and", players[2] + "."
return players
def get_playerOrder(players, playerOrder_val):
    """Run the Toss-Up spin and return the three player names ordered by
    their toss-up winnings (highest first).

    Side effect: fills playerOrder_val with [amount, name] pairs, sorted
    descending, which the caller keeps as the running score list.

    NOTE(review): the "spin" amount is simply i * 100 (player index times
    100), so it is deterministic and the last-listed player always wins
    the toss-up - presumably a placeholder for a real random spin.
    """
    playerOrder = [0, 0, 0]
    print "We will now play the Toss-Up Puzzle for possession of The Wheel in the first"
    print "round."
    print ""
    print players[0] + " will spin first."
    print ""
    print string.center(("-" * 80), 80)
    raw_input ("Press 'ENTER' to continue: ")
    for i in (0, 1, 2):
        print ""
        print players[i] + ", get ready. You're up next!"
        print players[i] + " prepares to spin The Wheel."
        print ""
        raw_input("Press 'ENTER' to spin The Wheel. ")
        print ""
        print string.center(("-" * 80), 80)
        print string.center((players[i] + " received $" + str(i * 100) + "."), 80)
        print string.center(("-" * 80), 80)
        # record [amount, name] for this player
        for j in (0, 1):
            if j == 0:
                playerOrder_val[i][j] = (i * 100)
            else:
                playerOrder_val[i][j] = players[i]
    # sort descending by amount; winner spins first
    playerOrder_val.sort(reverse=True)
    for i in range(3):
        playerOrder[i] = playerOrder_val[i][1]
    print ""
    print "Congratulations,", playerOrder[0] + "! You have won the Toss-Up Spin and will take possession"
    print "of The Wheel at the beginning of the first round."
    print ""
    print playerOrder[1] + " will Take possession of The Wheel after", playerOrder[0] + ", followed by", playerOrder[2] + "."
    print ""
    print string.center(("-" * 80), 80)
    raw_input ("Press 'ENTER' to begin the first round: ")
    print ""
    return playerOrder
def get_playerOrder_val(playerOrder_val):
    # NOTE(review): dead/broken helper - 'j' and 'player_turn' are undefined
    # here, so calling it raises NameError; nothing in this part of the file
    # calls it.  Kept as-is pending removal; see get_playerOrder() for the
    # working version of this logic.
    for i in (0, 1):
        if j == 0:
            playerOrder_val[i][player_turn] = (i * 100)
def get_guessConsonant():
check = False
while check == False:
guess = string.upper(raw_input("Please guess a consonant: ",))
if len(guess) == 1 and guess in ['B', 'C', 'D', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'V', 'W', 'X', 'Y', 'Z']:
check = True
if len(guess) != 1:
print ""
print string.center(("-" * 80) , 80)
print "ERROR: INVALID ENTRY!"
print "Please enter one letter per guess."
print string.center(("-" * 80) , 80)
print ""
check = False
if guess in ["A", "E", "I", "O", "U"]:
print ""
print string.center(("-" * 80) , 80)
print "ERROR: INVALID ENTRY!"
print "Entry must be a consonant."
print string.center(("-" * 80) , 80)
print ""
check = False
return guess
def get_guessfreePlay():
check = False
while check == False:
guess = string.upper(raw_input("Please guess a letter: ",))
if len(guess) == 1 and guess in string.ascii_uppercase:
check = True
if len(guess) != 1:
print ""
print string.center(("-" * 80) , 80)
print "ERROR: INVALID ENTRY!"
print "Please enter one letter per guess."
print string.center(("-" * 80) , 80)
print ""
check = False
return guess
def get_guessVowel():
check = False
while check == False:
guess = string.upper(raw_input("Please guess a vowel: ",))
if len(guess) == 1 and guess in ["A", "E", "I", "O", "U"]:
check = True
if len(guess) != 1:
print ""
print string.center(("-" * 80) , 80)
print "ERROR: INVALID ENTRY!"
print "Please enter one letter per guess."
print string.center(("-" * 80) , 80)
print ""
check = False
if guess in ['B', 'C', 'D', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'V', 'W', 'X', 'Y', 'Z']:
print ""
print string.center(("-" * 80) , 80)
print "ERROR: INVALID ENTRY!"
print "Entry must be a vowel."
print string.center(("-" * 80) , 80)
print ""
check = False
return guess
def get_prize(game_round):
    """Spin the wheel: return either a dollar amount (int) or one of the
    special outcomes "bankrupt", "loseATurn", "freePlay".

    All rounds share the same 72-wedge wheel except for the jackpot wedge,
    which grows from $2500 (round 1) through $3500 (rounds 2-3) to $5000
    (round 4 and anything else); the old version duplicated the entire
    72-entry list four times just to vary that one value, and silently
    returned 0 for unmatched inputs.
    """
    if game_round == 1:
        jackpot = 2500
    elif game_round in (2, 3):
        jackpot = 3500
    else:
        # round 4+ (also reached for any non-round-number argument, which
        # preserves the old fall-through behaviour)
        jackpot = 5000
    # wedge counts match the original 72-entry lists exactly
    prizes = (["bankrupt"] * 8
              + [500] * 12
              + [550] * 3
              + [600] * 6
              + [650] * 6
              + [700] * 9
              + [800] * 6
              + [900] * 9
              + [jackpot] * 3
              + ["loseATurn"] * 3
              + ["freePlay"] * 3
              + [750] * 4)
    return random.choice(prizes)
def get_guessWord():
print ""
guess = string.lower(raw_input("Input puzzle solution: ",))
print ""
return guess
def get_freePlayChoice(player):
selection_freePlay = 0
choice = False
while choice is False:
while selection_freePlay != "letter" or selection_freePlay != "choose" or selection_freePlay != "s" or selection_freePlay != "solve" or selection_freePlay != "choose a letter" or selection_freePlay != "pick" or selection_freePlay != "pick a letter" or selection_freePlay == "solve the puzzle":
print string.center(("-" * 80), 80)
print ""
print player + ", would you like to solve the puzzle or choose a letter?"
selection_freePlay = raw_input("Selection: ")
selection_freePlay = selection_freePlay.lower()
if selection_freePlay == "letter" or selection_freePlay == "choose" or selection_freePlay == "s" or selection_freePlay == "solve the puzzle" or selection_freePlay == "solve" or selection_freePlay == "choose a letter" or selection_freePlay == "pick" or selection_freePlay == "pick a letter":
break
else:
print ""
print string.center(("-" * 80) , 80)
print "ERROR: UNRECOGNIZED COMMAND."
print "Please select from the following and try again:"
print "'SOLVE'"
print "'LETTER'"
print "'CHOOSE'"
print "'CHOOSE A LETTER'"
print "'PICK'"
print "'PICK A LETTER'"
print string.center(("-" * 80) , 80)
print ""
if selection_freePlay == "pick a letter" or selection_freePlay == "pick" or selection_freePlay == "letter" or selection_freePlay == "choose":
selection_freePlay = 1
if selection_freePlay == "solve" or selection_freePlay == "solve the puzzle" or selection_freePlay == "s":
selection_freePlay = 2
return selection_freePlay
def get_player_selection(playerOrder, player_turn, playerOrder_val_round):
selection = 0
choice = False
while choice is False:
while selection != "solve" or selection != "spin" or selection != "s" or selection != "pick" or selection != "solve the puzzle" or selection != "buy" or selection != "buy a vowel" or selection != "vowel" or selection != "v":
print string.center(("-" * 80), 80)
if playerOrder_val_round[player_turn][0] >= 250:
print ""
print playerOrder[player_turn] + ", would you like to SPIN, BUY A VOWEL, or SOLVE THE PUZZLE?"
else:
print ""
print playerOrder[player_turn] + ", would you like to SPIN or SOLVE THE PUZZLE?"
selection = raw_input("Selection: ")
selection = selection.lower()
if selection == "solve" or selection == "pick" or selection == "spin" or selection == "solve the puzzle" or selection == "buy" or selection == "buy a vowel" or selection == "vowel" or selection == "v":
break
else:
print ""
print string.center(("-" * 80) , 80)
print "ERROR: UNRECOGNIZED COMMAND."
print "Please select from the following and try again:"
print "'SOLVE'"
print "'BUY A VOWEL'"
print "'SPIN'"
if selection == "pick a letter" or selection == "pick" or selection == "spin" or selection == "letter":
selection = 1
return selection
if selection == "buy" or selection == "buy a vowel" or selection == "vowel":
if playerOrder_val_round[player_turn][0] >= 250:
selection = 2
return selection
else:
print ""
print "You need a round prize of at least $250 in order to buy a vowel."
print "Please try again."
print ""
if selection == "solve" or selection == "solve the puzzle":
selection = 0
return selection
def bankrupt(player_turn, playerOrder, playerOrder_val_round):
print ""
print playerOrder[player_turn], "spun for BANKRUPT, bringing his total prize for this round to $0."
playerOrder_val_round[player_turn][0] = 0
print "Possession of The Wheel passes to", playerOrder[((player_turn + 1) % 3)] + "."
print ""
print string.center(("-" * 80), 80)
time.sleep(2.5)
return playerOrder_val_round
def lose_a_turn(player_turn, playerOrder):
print ""
print playerOrder[player_turn], "spun for LOSE A TURN!"
print ""
print "Sorry, " + playerOrder[player_turn] + ". Possession of The Wheel passes to " + playerOrder[(player_turn + 1) % 3] + "."
print string.center(("-" * 80), 80)
time.sleep(2.5)
def letter_app_sing(player_turn, playerOrder, playerOrder_val_round, guess, subPrize, letter_app):
time.sleep(0.7)
print ""
print "Good guess,", playerOrder[player_turn] + "! There is 1", guess, "in the puzzle!"
print "That adds $" + str(subPrize) + " to your total prize score!"
print ""
playerOrder_val_round[player_turn][0] = playerOrder_val_round[player_turn][0] + (subPrize * letter_app)
print string.center(("-" * 80), 80)
print ""
print string.center((playerOrder[player_turn] + "'s total prize score for this round is now $" + str(playerOrder_val_round[player_turn][0]) + "!"), 80)
print ""
print string.center(("-" * 80), 80)
time.sleep(2.5)
return playerOrder_val_round[player_turn][0]
def letter_app_plur(player_turn, playerOrder, playerOrder_val_round, guess, subPrize, letter_app):
time.sleep(0.7)
print ""
print "Good guess,", playerOrder[player_turn] + "! There are", letter_app, "'" + guess + "'s in the puzzle!"
print "That adds $" + str(subPrize * letter_app) + " to your total prize score!"
print ""
playerOrder_val_round[player_turn][0] = playerOrder_val_round[player_turn][0] + (subPrize * letter_app)
print string.center(("-" * 80), 80)
print ""
print string.center((playerOrder[player_turn] + "'s total prize score is now $" + str(playerOrder_val_round[player_turn][0]) + "!"), 80)
print ""
print string.center(("-" * 80), 80)
time.sleep(2.5)
return playerOrder_val_round[player_turn][0]
def guess_result(player_turn, playerOrder, playerOrder_val_round, guess, subPrize, letter_app):
if letter_app == 0:
print ""
print "I'm sorry", playerOrder[player_turn] + ", but there are no '" + guess + "s in the puzzle."
print "Your Free Play, however, means that you keep possession of The Wheel."
print ""
print string.center(("-" * 80), 80)
if letter_app == 1:
playerOrder_val_round[player_turn][0] = letter_app_sing(player_turn, playerOrder, playerOrder_val_round, guess, subPrize, letter_app)
if letter_app >= 2:
playerOrder_val_round[player_turn][0] = letter_app_plur(player_turn, playerOrder, playerOrder_val_round, guess, subPrize, letter_app)
return playerOrder_val_round[player_turn][0]
def guess_previously_called(player_turn, playerOrder, guess):
print ""
print "Sorry, '" + guess + "' has already been called in this round."
print playerOrder[(player_turn + 1) % 3] + " now takes possession of The Wheel."
print ""
print string.center(("-" * 80), 80)
time.sleep(1.5)
return playerOrder
# Entry point: launch the game loop.
start()
| |
"""
ui views
"""
import json
import logging
from urllib.parse import urlencode
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from django.shortcuts import Http404, redirect, render
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View, TemplateView
from rolepermissions.permissions import available_perm_status
from rolepermissions.checkers import has_role
from cms.util import get_coupon_code
from courses.models import Program, Course
from ecommerce.models import Coupon
from micromasters.utils import webpack_dev_server_host
from micromasters.serializers import serialize_maybe_user
from profiles.permissions import CanSeeIfNotPrivate
from roles.models import Instructor, Staff
from ui.decorators import require_mandatory_urls
from ui.templatetags.render_bundle import public_path
log = logging.getLogger(__name__)
class ReactView(View):
    """
    Abstract view for templates rendered client-side by React.

    GET renders ``template_name`` with the JS settings payload built by
    ``get_context``; POST simply redirects back to GET.
    """
    template_name = "dashboard.html"

    def get_context(self, request):
        """
        Build the template context for the view.

        Args:
            request (Request): the incoming request

        Returns:
            dict: the context object as a dictionary
        """
        current_user = request.user
        roles = []
        if not current_user.is_anonymous:
            for role in current_user.role_set.all():
                roles.append({
                    'program': role.program.id,
                    'role': role.role,
                    'permissions': [
                        perm for perm, enabled
                        in available_perm_status(current_user).items()
                        if enabled is True
                    ],
                })

        # Settings serialized into the page for the React frontend.
        js_settings = {
            "gaTrackingID": settings.GA_TRACKING_ID,
            "reactGaDebug": settings.REACT_GA_DEBUG,
            "host": webpack_dev_server_host(request),
            "edx_base_url": settings.EDXORG_BASE_URL,
            "mitxonline_base_url": settings.MITXONLINE_BASE_URL,
            "mitxonline_url": settings.MITXONLINE_URL,
            "roles": roles,
            "release_version": settings.VERSION,
            "environment": settings.ENVIRONMENT,
            "sentry_dsn": settings.SENTRY_DSN,
            "search_url": reverse('search_api', kwargs={"elastic_url": ""}),
            "support_email": settings.EMAIL_SUPPORT,
            "user": serialize_maybe_user(request.user),
            "es_page_size": settings.ELASTICSEARCH_DEFAULT_PAGE_SIZE,
            "public_path": public_path(request),
            "FEATURES": {
                "PROGRAM_LEARNERS": settings.FEATURES.get('PROGRAM_LEARNERS_ENABLED', False),
                "DISCUSSIONS_POST_UI": settings.FEATURES.get('OPEN_DISCUSSIONS_POST_UI', False),
                "DISCUSSIONS_CREATE_CHANNEL_UI": settings.FEATURES.get('OPEN_DISCUSSIONS_CREATE_CHANNEL_UI', False),
                "PROGRAM_RECORD_LINK": settings.FEATURES.get('PROGRAM_RECORD_LINK', False),
                "ENABLE_PROGRAM_LETTER": settings.FEATURES.get('ENABLE_PROGRAM_LETTER', False),
            },
            "open_discussions_redirect_url": settings.OPEN_DISCUSSIONS_REDIRECT_URL,
        }
        return {
            "has_zendesk_widget": True,
            "is_public": False,
            "google_maps_api": False,
            "js_settings_json": json.dumps(js_settings),
            "ga_tracking_id": "",
            "support_email": settings.EMAIL_SUPPORT,
        }

    def get(self, request, *args, **kwargs):  # pylint: disable=unused-argument
        """
        Handle GET requests to templates using React
        """
        context = self.get_context(request)
        return render(request, self.template_name, context=context)

    def post(self, request, *args, **kwargs):  # pylint: disable=unused-argument
        """Redirect to GET. This assumes there's never any good reason to POST to these views."""
        return redirect(request.build_absolute_uri())
# Dispatch is CSRF-exempt, requires an authenticated user, and runs the
# require_mandatory_urls checks before handing off to ReactView rendering.
@method_decorator(require_mandatory_urls, name='dispatch')
@method_decorator(login_required, name='dispatch')
@method_decorator(csrf_exempt, name='dispatch')
class DashboardView(ReactView):
    """
    Wrapper for dashboard view which asserts certain logged in requirements
    """
class UsersView(ReactView):
    """
    View for learner pages. This gets handled by the dashboard view like all other
    React handled views, but we also want to return a 404 if the user does not exist.
    """

    def get(self, request, *args, **kwargs):
        """
        Handle GET requests, raising Http404 for hidden or unresolvable learners.
        """
        user = kwargs.pop('user')
        if user is None:
            # /learner/ redirects to logged in user's page, but user is not logged in here
            if request.user.is_anonymous:
                raise Http404
        elif not CanSeeIfNotPrivate().has_permission(request, self):
            raise Http404
        return super().get(request, *args, **kwargs)
class SignInView(ReactView):
    """Sign In view"""
    template_name = "signin.html"

    def get_context(self, request):
        """
        Get the context for the view

        Args:
            request (Request): the incoming request

        Returns:
            dict: the context object as a dictionary, including the resolved
                Program (or None) and the login querystring carrying ?next=
        """
        context = super().get_context(request)
        program_id = request.GET.get('program', None)
        next_url = request.GET.get('next', None)
        mitxonline_enabled = settings.FEATURES.get("MITXONLINE_LOGIN", False)
        # Fixed: request.GET.get() returns the raw string value, so the
        # previous `program_id[0]` looked up only the first *character* of
        # the id (e.g. "123" -> "1") and matched the wrong program.
        program = Program.objects.filter(id=program_id).first() if mitxonline_enabled and program_id else None
        params = {"next": next_url}
        return {
            **context,
            "program": program,
            "login_qs": f"?{urlencode(params)}" if next_url else '',
        }

    def get(self, request, *args, **kwargs):  # pylint: disable=unused-argument
        """
        Handle GET requests to templates using React
        """
        context = self.get_context(request)
        coupon_code = get_coupon_code(request)
        # if we didn't get a program in the context, look it up via the coupon code
        if (
            settings.FEATURES.get("MITXONLINE_LOGIN", False)
            and coupon_code
            and context["program"] is None
        ):
            program = None
            coupon = Coupon.objects.filter(coupon_code=coupon_code).first()
            if coupon is not None:
                if isinstance(coupon.content_object, Program):
                    program = coupon.content_object
                elif isinstance(coupon.content_object, Course):
                    # a course coupon applies to its parent program
                    program = coupon.content_object.program
            if program:
                # re-enter the signin page with the resolved program id attached
                params = request.GET.copy()
                params["program"] = program.id
                return redirect(
                    f"{reverse('signin')}?{params.urlencode()}",
                )
        return render(
            request,
            self.template_name,
            context=context,
        )
def standard_error_page(request, status_code, template_filename):
    """
    Returns an error page with a given template filename and provides necessary context variables
    """
    user = request.user
    authenticated = not user.is_anonymous
    name = user.profile.preferred_name if authenticated else ""
    js_settings = {
        "release_version": settings.VERSION,
        "environment": settings.ENVIRONMENT,
        "sentry_dsn": settings.SENTRY_DSN,
        "user": serialize_maybe_user(user),
    }
    context = {
        "has_zendesk_widget": True,
        "is_public": True,
        "js_settings_json": json.dumps(js_settings),
        "authenticated": authenticated,
        "name": name,
        "username": user.username,
        "is_staff": has_role(user, [Staff.ROLE_ID, Instructor.ROLE_ID]),
        "support_email": settings.EMAIL_SUPPORT,
        "sentry_dsn": settings.SENTRY_DSN,
    }
    response = render(request, template_filename, context=context)
    response.status_code = status_code
    return response
def page_404(request, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Overridden handler for the 404 error pages.

    Returns a rendered 404.html response with status code 404.
    """
    return standard_error_page(request, 404, "404.html")
def page_500(request, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Overridden handler for the 500 error pages.

    Returns a rendered 500.html response with status code 500.
    """
    return standard_error_page(request, 500, "500.html")
def need_verified_email(request, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Returns error page for unverified email on edX

    Rendered with status code 401 (unauthorized).
    """
    return standard_error_page(request, 401, "verify_email.html")
def oauth_maintenance(request, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Returns maintenance page during oauth downtime

    Served with status 200 so clients do not treat it as an error.
    """
    return standard_error_page(request, 200, "oauth_maintenance.html")
class BackgroundImagesCSSView(TemplateView):
    """
    Pass a CSS file through Django's template system, so that we can make
    the URLs point to a CDN.
    """
    # The stylesheet lives with the HTML templates but is served as CSS.
    template_name = "background-images.css"
    content_type = "text/css"
| |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers that are not directly related to course content."""
__author__ = 'Saifu Angto (saifu@google.com)'
import datetime
import gettext
import HTMLParser
import os
import re
import urlparse
import jinja2
import sites
import webapp2
import appengine_config
from common import jinja_utils
from common import locales
from common import safe_dom
from common import tags
from common import utils as common_utils
from common.crypto import XsrfTokenManager
from models import courses
from models import models
from models import transforms
from models.config import ConfigProperty
from models.courses import Course
from models.models import Student
from models.models import StudentProfileDAO
from models.models import TransientStudent
from models.roles import Roles
from modules import courses as courses_module
from google.appengine.api import users
# The name of the template dict key that stores a course's base location.
COURSE_BASE_KEY = 'gcb_course_base'

# The name of the template dict key that stores data from course.yaml.
COURSE_INFO_KEY = 'course_info'

# The name of the cookie used to store the locale prefs for users out of session
GUEST_LOCALE_COOKIE = 'cb-user-locale'

# How long a guest's locale choice is remembered.
GUEST_LOCALE_COOKIE_MAX_AGE_SEC = 48 * 60 * 60  # 48 hours

# Shared instance used to represent a visitor who is not a registered student.
TRANSIENT_STUDENT = TransientStudent()

# Whether to output debug info into the page.
CAN_PUT_DEBUG_INFO_INTO_PAGES = ConfigProperty(
    'gcb_can_put_debug_info_into_pages', bool, (
        'Whether or not to put debugging information into the web pages. '
        'This may be useful for debugging purposes if you develop custom '
        'Course Builder features or extensions.'), False)

# Whether to record page load/unload events in a database.
CAN_PERSIST_PAGE_EVENTS = ConfigProperty(
    'gcb_can_persist_page_events', bool, (
        'Whether or not to record student page interactions in a '
        'datastore. Without event recording, you cannot analyze student '
        'page interactions. On the other hand, no event recording reduces '
        'the number of datastore operations and minimizes the use of Google '
        'App Engine quota. Turn event recording on if you want to analyze '
        'this data.'),
    False)

# Whether to record tag events in a database.
CAN_PERSIST_TAG_EVENTS = ConfigProperty(
    'gcb_can_persist_tag_events', bool, (
        'Whether or not to record student tag interactions in a '
        'datastore. Without event recording, you cannot analyze student '
        'tag interactions. On the other hand, no event recording reduces '
        'the number of datastore operations and minimizes the use of Google '
        'App Engine quota. Turn event recording on if you want to analyze '
        'this data.'),
    False)

# Whether to record events in a database.
CAN_PERSIST_ACTIVITY_EVENTS = ConfigProperty(
    'gcb_can_persist_activity_events', bool, (
        'Whether or not to record student activity interactions in a '
        'datastore. Without event recording, you cannot analyze student '
        'activity interactions. On the other hand, no event recording reduces '
        'the number of datastore operations and minimizes the use of Google '
        'App Engine quota. Turn event recording on if you want to analyze '
        'this data.'),
    False)

# Date format string for displaying datetimes in UTC.
# Example: 2013-03-21 13:00 UTC
HUMAN_READABLE_DATETIME_FORMAT = '%Y-%m-%d, %H:%M UTC'

# Date format string for displaying dates. Example: 2013-03-21
HUMAN_READABLE_DATE_FORMAT = '%Y-%m-%d'

# Time format string for displaying times. Example: 01:16:40 UTC.
HUMAN_READABLE_TIME_FORMAT = '%H:%M:%S UTC'
class PageInitializer(object):
    """Abstract class that defines an interface to initialize page headers."""

    @classmethod
    def initialize(cls, template_value):
        # Subclasses populate `template_value` in place (see
        # DefaultPageInitializer below for the no-op default).
        raise NotImplementedError
class DefaultPageInitializer(PageInitializer):
    """Implements default page initializer."""

    @classmethod
    def initialize(cls, template_value):
        # Intentionally a no-op: leaves template_value untouched.
        pass
class PageInitializerService(object):
    """Installs the appropriate PageInitializer."""

    # Module-wide registry slot; holds a PageInitializer *class*, not an
    # instance. DefaultPageInitializer until set() overrides it.
    _page_initializer = DefaultPageInitializer

    @classmethod
    def get(cls):
        # Returns the currently-installed initializer class.
        return cls._page_initializer

    @classmethod
    def set(cls, page_initializer):
        # Replaces the installed initializer class.
        cls._page_initializer = page_initializer
class ReflectiveRequestHandler(object):
    """Uses reflection to handle custom get() and post() requests.

    Mix this class into any webapp2.RequestHandler to dispatch requests to
    multiple get()/post() methods keyed by the 'action' GET/POST parameter.

    To use it, define these class variables on the handler:

        default_action = 'list'
        get_actions = ['default_action', 'edit']
        post_actions = ['save']

    and add instance methods named get_list(self), get_edit(self),
    post_save(self). They are then invoked automatically based on the
    'action' parameter.
    """

    def create_xsrf_token(self, action):
        """Mint an XSRF token bound to the given action name."""
        return XsrfTokenManager.create_xsrf_token(action)

    def get(self):
        """Handles GET."""
        action = self.request.get('action') or self.default_action
        if action not in self.get_actions:
            self.error(404)
            return
        handler = getattr(self, 'get_%s' % action)
        if not handler:
            self.error(404)
            return
        return handler()

    def post(self):
        """Handles POST."""
        action = self.request.get('action')
        if not action or action not in self.post_actions:
            self.error(404)
            return
        handler = getattr(self, 'post_%s' % action)
        if not handler:
            self.error(404)
            return
        # Each POST request must carry a valid XSRF token for its action.
        xsrf_token = self.request.get('xsrf_token')
        if not XsrfTokenManager.is_xsrf_token_valid(xsrf_token, action):
            self.error(403)
            return
        return handler()
def _get_course_properties():
    """Return the course environ (course.yaml data) for the current request."""
    return Course.get_environ(sites.get_course_for_current_request())
def display_unit_title(unit, course_properties=None):
    """Prepare an internationalized display for the unit title."""
    if course_properties is None or not course_properties:
        course_properties = _get_course_properties()
    no_index = course_properties['course'].get(
        'display_unit_title_without_index')
    if no_index:
        return unit.title
    # I18N: Message displayed as title for unit within a course.
    return gettext.gettext('Unit %s - %s' % (unit.index, unit.title))
def display_short_unit_title(unit, course_properties=None):
    """Prepare a short unit title."""
    if not course_properties:
        course_properties = _get_course_properties()
    hide_index = course_properties['course'].get(
        'display_unit_title_without_index')
    if hide_index or unit.type != 'U':
        return unit.title
    # I18N: Message displayed as title for unit within a course.
    return '%s %s' % (gettext.gettext('Unit'), unit.index)
def display_lesson_title(unit, lesson, course_properties=None):
    """Prepare an internationalized display for the lesson title."""
    if not course_properties:
        course_properties = _get_course_properties()
    content = safe_dom.NodeList()
    span = safe_dom.Element('span')
    content.append(span)
    if lesson.auto_index:
        # Prefix with "<lesson>" or "<unit>.<lesson>" depending on settings.
        if course_properties['course'].get('display_unit_title_without_index'):
            prefix = '%s ' % lesson.index
        else:
            prefix = '%s.%s ' % (unit.index, lesson.index)
        span.add_text(prefix)
        _class = ''
    else:
        _class = 'no-index'
    span.add_text(lesson.title)
    span.set_attribute('className', _class)
    return content
class HtmlHooks(object):
    """Expands named HTML hook points from course.yaml into page markup."""

    def __init__(self, course, prefs=None):
        self.course = course
        self.prefs = prefs
        if self.prefs is None:
            # No explicit prefs given: fall back to the stored preferences.
            self.prefs = models.StudentPreferencesDAO.load_or_create()

    def _has_visible_content(self, html_text):
        """Return True when html_text would render anything user-visible."""
        class VisibleHtmlParser(HTMLParser.HTMLParser):
            """Flags any start tag, non-blank text, entity or char reference."""

            def __init__(self, *args, **kwargs):
                HTMLParser.HTMLParser.__init__(self, *args, **kwargs)
                self._has_visible_content = False

            def handle_starttag(self, unused_tag, unused_attrs):
                # Not 100% guaranteed; e.g., <p> does not guarantee content,
                # but <button> does -- even if the <button> does not contain
                # data/entity/char. I don't want to spend a lot of logic
                # looking for specific cases, and this behavior is enough.
                self._has_visible_content = True

            def handle_data(self, data):
                if data.strip():
                    self._has_visible_content = True

            def handle_entityref(self, unused_data):
                self._has_visible_content = True

            def handle_charref(self, unused_data):
                self._has_visible_content = True

            def has_visible_content(self):
                return self._has_visible_content

        parser = VisibleHtmlParser()
        parser.feed(html_text)
        parser.close()
        return parser.has_visible_content()

    def insert(self, name):
        """Render the hook content registered under `name` as safe markup."""
        # Do we want page markup to permit course admins to edit hooks?
        show_admin_content = False
        if (self.prefs and self.prefs.show_hooks and
            Roles.is_course_admin(self.course.app_context)):
            show_admin_content = True
        if self.course.version == courses.CourseModel12.VERSION:
            show_admin_content = False

        # Look up desired content chunk in course.yaml dict/sub-dict.
        # The colon-separated `name` walks nested dicts; a str leaf is the
        # content. NOTE(review): `type(item) == str` will not match unicode
        # values -- presumably intentional under Python 2; confirm.
        content = ''
        environ = self.course.app_context.get_environ()
        for part in name.split(':'):
            if part in environ:
                item = environ[part]
                if type(item) == str:
                    content = item
                else:
                    environ = item

        # Empty hooks get their own name appended so admins can find them.
        if show_admin_content and not self._has_visible_content(content):
            content += name

        # Add the content to the page in response to the hook call.
        hook_div = safe_dom.Element('div', className='gcb-html-hook',
                                    id=re.sub('[^a-zA-Z-]', '-', name))
        hook_div.add_child(tags.html_to_safe_dom(content, self))

        # Mark up content to enable edit controls
        if show_admin_content:
            hook_div.add_attribute(onclick='gcb_edit_hook_point("%s")' % name)
            hook_div.add_attribute(className='gcb-html-hook-edit')
        return jinja2.Markup(hook_div.sanitized)
class ApplicationHandler(webapp2.RequestHandler):
    """A handler that is aware of the application context."""

    RIGHT_LINKS = []
    EXTRA_GLOBAL_CSS_URLS = []
    EXTRA_GLOBAL_JS_URLS = []

    @classmethod
    def is_absolute(cls, url):
        """True when `url` carries an explicit scheme component."""
        return bool(urlparse.urlparse(url).scheme)

    @classmethod
    def get_base_href(cls, handler):
        """Computes current course <base> href."""
        slug = handler.app_context.get_slug()
        base = slug if slug.endswith('/') else '%s/' % slug
        # For IE to work with the <base> tag, its href must be an absolute URL.
        if not cls.is_absolute(base):
            parts = urlparse.urlparse(handler.request.url)
            base = urlparse.urlunparse(
                (parts.scheme, parts.netloc, base, None, None, None))
        return base

    def render_template_to_html(self, template_values, template_file,
                                additional_dirs=None):
        """Render a template to markup inside a read-only memcache scope."""
        courses.Course.set_current(self.get_course())
        models.MemcacheManager.begin_readonly()
        try:
            template = self.get_template(template_file, additional_dirs)
            return jinja2.utils.Markup(
                template.render(template_values, autoescape=True))
        finally:
            models.MemcacheManager.end_readonly()
            courses.Course.clear_current()

    def get_template(self, template_file, additional_dirs=None, prefs=None):
        """Template lookup is supplied by subclasses (see CourseHandler)."""
        raise NotImplementedError()

    @classmethod
    def canonicalize_url_for(cls, app_context, location):
        """Adds the current namespace URL prefix to the relative 'location'."""
        slug = app_context.get_slug()
        is_relative = (
            not cls.is_absolute(location) and
            not location.startswith(slug))
        has_slug = bool(slug and slug != '/')
        if is_relative and has_slug:
            location = '%s%s' % (slug, location)
        return location

    def canonicalize_url(self, location):
        """Namespace-prefix `location` when this handler has an app_context."""
        if hasattr(self, 'app_context'):
            return self.canonicalize_url_for(self.app_context, location)
        return location

    def redirect(self, location, normalize=True):
        """Redirect, canonicalizing `location` unless normalize is False."""
        if normalize:
            location = self.canonicalize_url(location)
        super(ApplicationHandler, self).redirect(location)
class CourseHandler(ApplicationHandler):
    """Base handler that is aware of the current course."""

    def __init__(self, *args, **kwargs):
        super(CourseHandler, self).__init__(*args, **kwargs)
        self.course = None  # lazily built by get_course()
        self.template_value = {}  # values handed to the template on render

    def get_user(self):
        """Get the current user."""
        return users.get_current_user()

    def get_student(self):
        """Get the current student."""
        user = self.get_user()
        if user is None:
            return None
        return Student.get_by_email(user.email())

    def _pick_first_valid_locale_from_list(self, desired_locales):
        """Return the first desired locale the course allows, else None.

        Matching is case-insensitive; the caller's spelling is returned.
        """
        available_locales = self.app_context.get_allowed_locales()
        for lang in desired_locales:
            for available_locale in available_locales:
                if lang.lower() == available_locale.lower():
                    return lang
        return None

    def get_locale_for(self, request, app_context, student=None, prefs=None):
        """Returns a locale that should be used by this request.

        Precedence, first match wins: the signed-in student's locale
        labels, then stored user preferences, then the guest locale
        cookie, then the Accept-Language header, then the course default.
        """
        if self.get_user():
            # check if student has any locale labels assigned
            if student is None:
                student = self.get_student()
            if student and student.is_enrolled and not student.is_transient:
                student_label_ids = student.get_labels_of_type(
                    models.LabelDTO.LABEL_TYPE_LOCALE)
                if student_label_ids:
                    all_labels = models.LabelDAO.get_all_of_type(
                        models.LabelDTO.LABEL_TYPE_LOCALE)
                    student_locales = []
                    for label in all_labels:
                        if label.type != models.LabelDTO.LABEL_TYPE_LOCALE:
                            continue
                        if label.id in student_label_ids:
                            student_locales.append(label.title)
                    locale = self._pick_first_valid_locale_from_list(
                        student_locales)
                    if locale:
                        return locale
            # check if user preferences have been set
            if prefs is None:
                prefs = models.StudentPreferencesDAO.load_or_create()
            if prefs is not None and prefs.locale is not None:
                return prefs.locale
        # out-of-session visitors: honor a previously-set locale cookie
        locale_cookie = self.request.cookies.get(GUEST_LOCALE_COOKIE)
        if locale_cookie and (
                locale_cookie in self.app_context.get_allowed_locales()):
            return locale_cookie
        # check if accept language has been set
        accept_langs = request.headers.get('Accept-Language')
        locale = self._pick_first_valid_locale_from_list(
            [lang for lang, _ in locales.parse_accept_language(accept_langs)])
        if locale:
            return locale
        return app_context.default_locale

    def get_course(self):
        """Get current course."""
        if not self.course:
            self.course = Course(self)
        return self.course

    def get_track_matching_student(self, student):
        """Gets units whose labels match those on the student."""
        return self.get_course().get_track_matching_student(student)

    def get_progress_tracker(self):
        """Gets the progress tracker for the course."""
        return self.get_course().get_progress_tracker()

    def find_unit_by_id(self, unit_id):
        """Gets a unit with a specific id or fails with an exception."""
        return self.get_course().find_unit_by_id(unit_id)

    def get_units(self):
        """Gets all units in the course."""
        return self.get_course().get_units()

    def get_lessons(self, unit_id):
        """Gets all lessons (in order) in the specific course unit."""
        return self.get_course().get_lessons(unit_id)

    @classmethod
    def _cache_debug_info(cls, cache):
        """Format one 'entry: <key>, <updated_on>' line per cache entry."""
        items = []
        for key, entry in cache.items.iteritems():
            updated_on = None
            if entry:
                updated_on = entry.updated_on()
            items.append('entry: %s, %s' % (key, updated_on))
        return items

    @classmethod
    def debug_info(cls):
        """Generates a debug info for this request."""
        # we only want to run import if this method is called; most of the
        # it is not; we also have circular import dependencies if we were to
        # put them at the top...
        # pylint: disable-msg=g-import-not-at-top
        from models import vfs
        from modules.i18n_dashboard import i18n_dashboard
        vfs_items = cls._cache_debug_info(
            vfs.ProcessScopedVfsCache.instance().cache)
        rb_items = cls._cache_debug_info(
            i18n_dashboard.ProcessScopedResourceBundleCache.instance().cache)
        return ''.join([
            '\nDebug Info: %s' % datetime.datetime.utcnow(),
            '\n\nServer Environment Variables: %s' % '\n'.join([
                'item: %s, %s'% (key, value)
                for key, value in os.environ.iteritems()]),
            '\n\nVfsCacheKeys:\n%s' % '\n'.join(vfs_items),
            '\n\nResourceBundlesCache:\n%s' % '\n'.join(rb_items),
        ])

    def init_template_values(self, environ, prefs=None):
        """Initializes template variables with common values."""
        self.template_value[COURSE_INFO_KEY] = environ
        self.template_value[
            'page_locale'] = self.app_context.get_current_locale()
        self.template_value['html_hooks'] = HtmlHooks(
            self.get_course(), prefs=prefs)
        self.template_value['is_course_admin'] = Roles.is_course_admin(
            self.app_context)
        self.template_value['can_see_drafts'] = (
            courses_module.courses.can_see_drafts(self.app_context))
        self.template_value[
            'is_read_write_course'] = self.app_context.fs.is_read_write()
        self.template_value['is_super_admin'] = Roles.is_super_admin()
        self.template_value[COURSE_BASE_KEY] = self.get_base_href(self)
        self.template_value['right_links'] = (
            [('/admin', 'Admin')] if Roles.is_super_admin() else [])
        for func in self.RIGHT_LINKS:
            self.template_value['right_links'].extend(func(self.app_context))
        if not prefs:
            prefs = models.StudentPreferencesDAO.load_or_create()
        self.template_value['student_preferences'] = prefs
        # Expose the raw jinja context to course admins outside production
        # when the preference asks for it (debug aid).
        if (Roles.is_course_admin(self.app_context) and
            not appengine_config.PRODUCTION_MODE and
            prefs and prefs.show_jinja_context):
            @jinja2.contextfunction
            def get_context(context):
                return context
            self.template_value['context'] = get_context
        if CAN_PUT_DEBUG_INFO_INTO_PAGES.value:
            self.template_value['debug_info'] = self.debug_info()
        self.template_value[
            'extra_global_css_urls'] = self.EXTRA_GLOBAL_CSS_URLS
        self.template_value[
            'extra_global_js_urls'] = self.EXTRA_GLOBAL_JS_URLS
        # Common template information for the locale picker (only shown for
        # user in session)
        can_student_change_locale = (
            self.get_course().get_course_setting('can_student_change_locale')
            or self.get_course().app_context.can_pick_all_locales())
        if can_student_change_locale:
            self.template_value['available_locales'] = [
                {
                    'name': locales.get_locale_display_name(loc),
                    'value': loc
                } for loc in self.app_context.get_allowed_locales()]
            self.template_value['locale_xsrf_token'] = (
                XsrfTokenManager.create_xsrf_token(
                    StudentLocaleRESTHandler.XSRF_TOKEN_NAME))
            self.template_value['selected_locale'] = self.get_locale_for(
                self.request, self.app_context, prefs=prefs)

    def get_template(self, template_file, additional_dirs=None, prefs=None):
        """Computes location of template files for the current namespace."""
        _p = self.app_context.get_environ()
        self.init_template_values(_p, prefs=prefs)
        template_environ = self.app_context.get_template_environ(
            self.app_context.get_current_locale(), additional_dirs)
        template_environ.filters[
            'gcb_tags'] = jinja_utils.get_gcb_tags_filter(self)
        # Title helpers close over this request's environ so templates need
        # not pass course properties explicitly.
        template_environ.globals.update({
            'display_unit_title': (
                lambda unit: display_unit_title(unit, _p)),
            'display_short_unit_title': (
                lambda unit: display_short_unit_title(unit, _p)),
            'display_lesson_title': (
                lambda unit, lesson: display_lesson_title(unit, lesson, _p))})
        return template_environ.get_template(template_file)
class BaseHandler(CourseHandler):
    """Base handler: locale scoping, personalization and template rendering."""

    def __init__(self, *args, **kwargs):
        super(BaseHandler, self).__init__(*args, **kwargs)
        # Locale in effect before this request; restored in after_method().
        self._old_locale = None

    def before_method(self, verb, path):
        """Modify global locale value for the duration of this handler."""
        self._old_locale = self.app_context.get_current_locale()
        new_locale = self.get_locale_for(self.request, self.app_context)
        self.app_context.set_current_locale(new_locale)

    def after_method(self, verb, path):
        """Restore original global locale value."""
        self.app_context.set_current_locale(self._old_locale)

    def personalize_page_and_get_user(self):
        """If the user exists, add personalized fields to the navbar.

        Returns:
            The signed-in user object, or None when nobody is signed in.
        """
        user = self.get_user()
        PageInitializerService.get().initialize(self.template_value)

        if hasattr(self, 'app_context'):
            self.template_value['can_register'] = self.app_context.get_environ(
                )['reg_form']['can_register']

        if user:
            email = user.email()
            # Show only the local part of the address in the navbar.
            self.template_value['email_no_domain_name'] = (
                email[:email.find('@')] if '@' in email else email)
            self.template_value['email'] = email
            self.template_value['logoutUrl'] = (
                users.create_logout_url(self.request.uri))
            self.template_value['transient_student'] = False

            # configure page events
            self.template_value['record_tag_events'] = (
                CAN_PERSIST_TAG_EVENTS.value)
            self.template_value['record_page_events'] = (
                CAN_PERSIST_PAGE_EVENTS.value)
            self.template_value['record_events'] = (
                CAN_PERSIST_ACTIVITY_EVENTS.value)
            self.template_value['event_xsrf_token'] = (
                XsrfTokenManager.create_xsrf_token('event-post'))
        else:
            self.template_value['loginUrl'] = users.create_login_url(
                self.request.uri)
            self.template_value['transient_student'] = True
            return None

        return user

    def personalize_page_and_get_enrolled(
        self, supports_transient_student=False):
        """If the user is enrolled, add personalized fields to the navbar.

        Returns:
            The enrolled Student; TRANSIENT_STUDENT when transient access is
            supported and the course is browsable; None after issuing a
            redirect (login page or '/preview').
        """
        user = self.personalize_page_and_get_user()
        if user is None:
            student = TRANSIENT_STUDENT
        else:
            student = Student.get_enrolled_student_by_email(user.email())
            if not student:
                self.template_value['transient_student'] = True
                student = TRANSIENT_STUDENT

        if student.is_transient:
            if supports_transient_student and (
                    self.app_context.get_environ()['course']['browsable']):
                return TRANSIENT_STUDENT
            elif user is None:
                self.redirect(
                    users.create_login_url(self.request.uri), normalize=False
                )
                return None
            else:
                self.redirect('/preview')
                return None

        # Patch Student models which (for legacy reasons) do not have a user_id
        # attribute set.
        if not student.user_id:
            student.user_id = user.user_id()
            student.put()

        return student

    def assert_xsrf_token_or_fail(self, request, action):
        """Asserts the current request has proper XSRF token or fails (403)."""
        token = request.get('xsrf_token')
        if not token or not XsrfTokenManager.is_xsrf_token_valid(token, action):
            self.error(403)
            return False
        return True

    @appengine_config.timeandlog('BaseHandler.render')
    def render(self, template_file):
        """Renders a template."""
        prefs = models.StudentPreferencesDAO.load_or_create()

        # Pin the current course and make memcache read-only for the duration
        # of rendering, so template evaluation cannot mutate cached state.
        courses.Course.set_current(self.get_course())
        models.MemcacheManager.begin_readonly()
        try:
            template = self.get_template(template_file, prefs=prefs)
            self.response.out.write(template.render(self.template_value))
        finally:
            models.MemcacheManager.end_readonly()
            courses.Course.clear_current()

        # If the page displayed successfully, save the location for registered
        # students so future visits to the course's base URL sends the student
        # to the most-recently-visited page.
        # TODO(psimakov): method called render() must not have mutations
        user = self.get_user()
        if user:
            student = models.Student.get_enrolled_student_by_email(
                user.email())
            if student:
                prefs.last_location = self.request.path_qs
                models.StudentPreferencesDAO.save(prefs)

    def get_redirect_location(self, student):
        """Return the student's saved last location for '/' visits, or None."""
        if (not student.is_transient and
            (self.request.path == self.app_context.get_slug() or
             self.request.path == self.app_context.get_slug() + '/' or
             self.request.get('use_last_location'))):  # happens on '/' page
            prefs = models.StudentPreferencesDAO.load_or_create()
            # Belt-and-suspenders: prevent infinite self-redirects
            if (prefs.last_location and
                prefs.last_location != self.request.path_qs):
                return prefs.last_location
        return None
class BaseRESTHandler(CourseHandler):
    """Common plumbing for JSON/REST handlers in a course context."""

    def __init__(self, *args, **kwargs):
        super(BaseRESTHandler, self).__init__(*args, **kwargs)

    def assert_xsrf_token_or_fail(self, token_dict, action, args_dict):
        """Check the XSRF token in token_dict; reply with a 403 when bad.

        Returns:
            True when the token is present and valid, False otherwise.
        """
        token = token_dict.get('xsrf_token')
        token_ok = bool(token) and XsrfTokenManager.is_xsrf_token_valid(
            token, action)
        if token_ok:
            return True
        transforms.send_json_response(
            self, 403,
            'Bad XSRF token. Please reload the page and try again',
            args_dict)
        return False

    def validation_error(self, message, key=None):
        """Deliver a 412 validation message, tagging the offending key."""
        if not key:
            transforms.send_json_response(self, 412, message)
        else:
            transforms.send_json_response(
                self, 412, message, payload_dict={'key': key})
class PreviewHandler(BaseHandler):
    """Handler for viewing course preview."""

    def get(self):
        """Handles GET requests."""
        user = self.personalize_page_and_get_user()
        if user is None:
            student = TRANSIENT_STUDENT
        else:
            student = Student.get_enrolled_student_by_email(user.email())
            if not student:
                student = TRANSIENT_STUDENT

        # If the course is browsable, or the student is logged in and
        # registered, redirect to the main course page.
        if ((student and not student.is_transient) or
            self.app_context.get_environ()['course']['browsable']):
            self.redirect('/course')
            return

        self.template_value['transient_student'] = True
        self.template_value['can_register'] = self.app_context.get_environ(
            )['reg_form']['can_register']
        self.template_value['navbar'] = {'course': True}
        self.template_value['units'] = self.get_units()
        self.template_value['show_registration_page'] = True

        # Surface the promo video/image only when the course settings supply
        # a non-empty URL for them.
        course = self.app_context.get_environ()['course']
        self.template_value['video_exists'] = bool(
            'main_video' in course and
            'url' in course['main_video'] and
            course['main_video']['url'])
        self.template_value['image_exists'] = bool(
            'main_image' in course and
            'url' in course['main_image'] and
            course['main_image']['url'])

        if user:
            # Skip the registration form when the user already has a shared
            # profile and the course asks for no extra registration fields.
            profile = StudentProfileDAO.get_profile_by_user_id(user.user_id())
            additional_registration_fields = self.app_context.get_environ(
                )['reg_form']['additional_registration_fields']
            if profile is not None and not additional_registration_fields:
                self.template_value['show_registration_page'] = False
                self.template_value['register_xsrf_token'] = (
                    XsrfTokenManager.create_xsrf_token('register-post'))
        self.render('preview.html')
class RegisterHandler(BaseHandler):
    """Handler for course registration."""

    def get(self):
        """Renders the registration form, redirecting when it is moot.

        Anonymous users go to login, already-enrolled students go to the
        course page, and closed registration goes to an explanatory anchor.
        """
        user = self.personalize_page_and_get_user()
        if not user:
            self.redirect(
                users.create_login_url(self.request.uri), normalize=False)
            return

        student = Student.get_enrolled_student_by_email(user.email())
        if student:
            self.redirect('/course')
            return

        can_register = self.app_context.get_environ(
            )['reg_form']['can_register']
        if not can_register:
            self.redirect('/course#registration_closed')
            return

        # pre-fill nick name from the profile if available
        self.template_value['current_name'] = ''
        profile = StudentProfileDAO.get_profile_by_user_id(user.user_id())
        if profile and profile.nick_name:
            self.template_value['current_name'] = profile.nick_name

        self.template_value['navbar'] = {}
        self.template_value['transient_student'] = True
        self.template_value['register_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('register-post'))
        self.render('register.html')

    def post(self):
        """Handles POST requests: creates the Student record and redirects."""
        user = self.personalize_page_and_get_user()
        if not user:
            self.redirect(
                users.create_login_url(self.request.uri), normalize=False)
            return

        if not self.assert_xsrf_token_or_fail(self.request, 'register-post'):
            return

        can_register = self.app_context.get_environ(
            )['reg_form']['can_register']
        if not can_register:
            self.redirect('/course#registration_closed')
            return

        # Idiom fix: test membership on the POST multidict directly instead
        # of materializing .keys().
        if 'name_from_profile' in self.request.POST:
            # NOTE(review): assumes a profile exists whenever the form posts
            # name_from_profile; a missing profile would raise here — confirm.
            profile = StudentProfileDAO.get_profile_by_user_id(user.user_id())
            name = profile.nick_name
        else:
            name = self.request.get('form01')

        Student.add_new_student_for_current_user(
            name, transforms.dumps(self.request.POST.items()), self,
            labels=self.request.get('labels'))

        # Render registration confirmation page
        self.redirect('/course#registration_confirmation')
class ForumHandler(BaseHandler):
    """Serves the course discussion forum page."""

    def get(self):
        """Render the forum for enrolled (or transiently browsing) users."""
        student = self.personalize_page_and_get_enrolled(
            supports_transient_student=True)
        if student:
            self.template_value['navbar'] = {'forum': True}
            self.render('forum.html')
class StudentProfileHandler(BaseHandler):
    """Handles the click to 'Progress' link in the nav bar."""

    # A list of functions which will provide extra rows in the Student Progress
    # table.  Each function will be passed the current handler, student, and
    # course object and should return a pair of strings; the first being the
    # title of the data and the second the value to display.
    EXTRA_STUDENT_DATA_PROVIDERS = []

    def get(self):
        """Handles GET requests."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        track_labels = models.LabelDAO.get_all_of_type(
            models.LabelDTO.LABEL_TYPE_COURSE_TRACK)

        course = self.get_course()
        units = []
        for unit in course.get_units():
            # Don't show assessments that are part of units.
            if course.get_parent_unit(unit.unit_id):
                continue
            units.append({
                'unit_id': unit.unit_id,
                'title': unit.title,
                'labels': list(course.get_unit_track_labels(unit)),
                })

        # Prefer the shared-profile nick name over the course-local name.
        name = student.name
        profile = student.profile
        if profile:
            name = profile.nick_name
        student_labels = student.get_labels_of_type(
            models.LabelDTO.LABEL_TYPE_COURSE_TRACK)
        self.template_value['navbar'] = {'progress': True}
        self.template_value['student'] = student
        self.template_value['student_name'] = name
        self.template_value['date_enrolled'] = student.enrolled_on.strftime(
            HUMAN_READABLE_DATE_FORMAT)
        self.template_value['score_list'] = course.get_all_scores(student)
        self.template_value['overall_score'] = course.get_overall_score(student)
        self.template_value['student_edit_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('student-edit'))
        # Name editing is disabled when the shared (cross-course) profile
        # owns the student's display name.
        self.template_value['can_edit_name'] = (
            not models.CAN_SHARE_STUDENT_PROFILE.value)
        self.template_value['track_labels'] = track_labels
        self.template_value['student_labels'] = student_labels
        self.template_value['units'] = units
        self.template_value['track_env'] = transforms.dumps({
            'label_ids': [label.id for label in track_labels],
            'units': units
            })

        # Append any extra data which is provided by modules
        extra_student_data = []
        for data_provider in self.EXTRA_STUDENT_DATA_PROVIDERS:
            extra_student_data.append(data_provider(self, student, course))
        self.template_value['extra_student_data'] = extra_student_data

        self.render('student_profile.html')
class StudentEditStudentHandler(BaseHandler):
    """Handles edits to student records by students."""

    def post(self):
        """Apply a student's name change, then bounce back to the profile."""
        student = self.personalize_page_and_get_enrolled()
        allowed = student and self.assert_xsrf_token_or_fail(
            self.request, 'student-edit')
        if not allowed:
            return
        Student.rename_current(self.request.get('name'))
        self.redirect('/student/home')
class StudentSetTracksHandler(BaseHandler):
    """Handles submission of student tracks selections."""

    def post(self):
        """Replace the student's course-track labels with the form's set."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return
        if not self.assert_xsrf_token_or_fail(self.request, 'student-edit'):
            return

        track_ids = models.LabelDAO.get_set_of_ids_of_type(
            models.LabelDTO.LABEL_TYPE_COURSE_TRACK)
        # Only accept submitted labels that really are course-track labels.
        chosen_ids = set(
            int(label_id) for label_id in self.request.get_all('labels')
            if label_id and int(label_id) in track_ids)
        current_ids = set(
            int(label_id)
            for label_id in common_utils.text_to_list(student.labels)
            if label_id)

        # Keep every non-track label, then merge in the freshly chosen tracks.
        updated_ids = (current_ids - track_ids) | chosen_ids
        models.Student.set_labels_for_current(
            common_utils.list_to_text(list(updated_ids)))
        self.redirect('/student/home')
class StudentUnenrollHandler(BaseHandler):
    """Handler for students to unenroll themselves."""

    def get(self):
        """Show the 'are you sure?' unenrollment confirmation page."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return
        self.template_value['student'] = student
        self.template_value['navbar'] = {}
        self.template_value['student_unenroll_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('student-unenroll'))
        self.render('unenroll_confirmation_check.html')

    def post(self):
        """Mark the current student unenrolled and show the confirmation."""
        student = self.personalize_page_and_get_enrolled()
        ok = student and self.assert_xsrf_token_or_fail(
            self.request, 'student-unenroll')
        if not ok:
            return
        Student.set_enrollment_status_for_current(False)
        self.template_value['navbar'] = {}
        self.template_value['transient_student'] = True
        self.render('unenroll_confirmation.html')
class StudentLocaleRESTHandler(BaseRESTHandler):
    """REST handler to manage student setting their preferred locale."""

    XSRF_TOKEN_NAME = 'locales'

    def post(self):
        """Persist the chosen locale in prefs, or a cookie for guests."""
        request = transforms.loads(self.request.get('request'))
        if not self.assert_xsrf_token_or_fail(
                request, self.XSRF_TOKEN_NAME, {}):
            return

        selected = request['payload']['selected']
        if selected not in self.app_context.get_allowed_locales():
            transforms.send_json_response(self, 401, 'Bad locale')
            return

        prefs = models.StudentPreferencesDAO.load_or_create()
        if not prefs:
            # Store locale in cookie for out-of-session users
            self.response.set_cookie(
                GUEST_LOCALE_COOKIE, selected,
                max_age=GUEST_LOCALE_COOKIE_MAX_AGE_SEC)
        else:
            # Store locale in StudentPreferences for logged-in users
            prefs.locale = selected
            models.StudentPreferencesDAO.save(prefs)

        transforms.send_json_response(self, 200, 'OK')
| |
from jsonrpc import ServiceProxy
import sys
import string

# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======

# Connect to the local Vaultcoin daemon's JSON-RPC port; credentials are
# embedded in the URL only when a password was configured above.
if rpcpass == "":
    access = ServiceProxy("http://127.0.0.1:9332")
else:
    access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9332")

# The first CLI argument names the RPC command to run (case-insensitive).
cmd = sys.argv[1].lower()

# Each branch prompts for the command's arguments interactively, forwards the
# call to the daemon, and prints the raw reply.  Commands with optional
# arguments try the full-argument form first and fall back to the bare call.
# NOTE(review): the bare "except:" clauses swallow every error and print a
# generic message -- presumably deliberate best-effort CLI behavior.
if cmd == "backupwallet":
    try:
        path = raw_input("Enter destination path/filename: ")
        print access.backupwallet(path)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getaccount":
    try:
        addr = raw_input("Enter a Vaultcoin address: ")
        print access.getaccount(addr)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getaccountaddress":
    try:
        acct = raw_input("Enter an account name: ")
        print access.getaccountaddress(acct)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getaddressesbyaccount":
    try:
        acct = raw_input("Enter an account name: ")
        print access.getaddressesbyaccount(acct)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getbalance":
    try:
        acct = raw_input("Enter an account (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getbalance(acct, mc)
        except:
            print access.getbalance()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getblockbycount":
    try:
        height = raw_input("Height: ")
        print access.getblockbycount(height)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getblockcount":
    try:
        print access.getblockcount()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getblocknumber":
    try:
        print access.getblocknumber()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getconnectioncount":
    try:
        print access.getconnectioncount()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getdifficulty":
    try:
        print access.getdifficulty()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getgenerate":
    try:
        print access.getgenerate()
    except:
        print "\n---An error occurred---\n"

elif cmd == "gethashespersec":
    try:
        print access.gethashespersec()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getinfo":
    try:
        print access.getinfo()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getnewaddress":
    try:
        acct = raw_input("Enter an account name: ")
        try:
            print access.getnewaddress(acct)
        except:
            print access.getnewaddress()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getreceivedbyaccount":
    try:
        acct = raw_input("Enter an account (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getreceivedbyaccount(acct, mc)
        except:
            print access.getreceivedbyaccount()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getreceivedbyaddress":
    try:
        addr = raw_input("Enter a Vaultcoin address (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getreceivedbyaddress(addr, mc)
        except:
            print access.getreceivedbyaddress()
    except:
        print "\n---An error occurred---\n"

elif cmd == "gettransaction":
    try:
        txid = raw_input("Enter a transaction ID: ")
        print access.gettransaction(txid)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
# Informational and transfer commands; same prompt-call-print pattern as above.
elif cmd == "help":
    try:
        cmd = raw_input("Command (optional): ")
        try:
            print access.help(cmd)
        except:
            print access.help()
    except:
        print "\n---An error occurred---\n"

elif cmd == "listaccounts":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.listaccounts(mc)
        except:
            print access.listaccounts()
    except:
        print "\n---An error occurred---\n"

elif cmd == "listreceivedbyaccount":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        incemp = raw_input("Include empty? (true/false, optional): ")
        try:
            print access.listreceivedbyaccount(mc, incemp)
        except:
            print access.listreceivedbyaccount()
    except:
        print "\n---An error occurred---\n"

elif cmd == "listreceivedbyaddress":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        incemp = raw_input("Include empty? (true/false, optional): ")
        try:
            print access.listreceivedbyaddress(mc, incemp)
        except:
            print access.listreceivedbyaddress()
    except:
        print "\n---An error occurred---\n"

elif cmd == "listtransactions":
    try:
        acct = raw_input("Account (optional): ")
        count = raw_input("Number of transactions (optional): ")
        frm = raw_input("Skip (optional):")
        try:
            print access.listtransactions(acct, count, frm)
        except:
            print access.listtransactions()
    except:
        print "\n---An error occurred---\n"

# "move" shifts balance between local accounts; "sendfrom"/"sendmany" create
# real on-chain transactions.
elif cmd == "move":
    try:
        frm = raw_input("From: ")
        to = raw_input("To: ")
        amt = raw_input("Amount:")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        try:
            print access.move(frm, to, amt, mc, comment)
        except:
            print access.move(frm, to, amt)
    except:
        print "\n---An error occurred---\n"

elif cmd == "sendfrom":
    try:
        frm = raw_input("From: ")
        to = raw_input("To: ")
        amt = raw_input("Amount:")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        commentto = raw_input("Comment-to (optional): ")
        try:
            print access.sendfrom(frm, to, amt, mc, comment, commentto)
        except:
            print access.sendfrom(frm, to, amt)
    except:
        print "\n---An error occurred---\n"

elif cmd == "sendmany":
    try:
        frm = raw_input("From: ")
        to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        try:
            print access.sendmany(frm,to,mc,comment)
        except:
            print access.sendmany(frm,to)
    except:
        print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
# Wallet-administration commands: account naming, mining control, fees,
# daemon shutdown and wallet encryption handling.
elif cmd == "setaccount":
    try:
        addr = raw_input("Address: ")
        acct = raw_input("Account:")
        print access.setaccount(addr,acct)
    except:
        print "\n---An error occurred---\n"

elif cmd == "setgenerate":
    try:
        gen= raw_input("Generate? (true/false): ")
        cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
        try:
            print access.setgenerate(gen, cpus)
        except:
            print access.setgenerate(gen)
    except:
        print "\n---An error occurred---\n"

elif cmd == "settxfee":
    try:
        amt = raw_input("Amount:")
        print access.settxfee(amt)
    except:
        print "\n---An error occurred---\n"

elif cmd == "stop":
    try:
        print access.stop()
    except:
        print "\n---An error occurred---\n"

elif cmd == "validateaddress":
    try:
        addr = raw_input("Address: ")
        print access.validateaddress(addr)
    except:
        print "\n---An error occurred---\n"

elif cmd == "walletpassphrase":
    try:
        # Unlock the wallet for 60 seconds.
        pwd = raw_input("Enter wallet passphrase: ")
        access.walletpassphrase(pwd, 60)
        print "\n---Wallet unlocked---\n"
    except:
        print "\n---An error occurred---\n"

elif cmd == "walletpassphrasechange":
    try:
        pwd = raw_input("Enter old wallet passphrase: ")
        pwd2 = raw_input("Enter new wallet passphrase: ")
        access.walletpassphrasechange(pwd, pwd2)
        print
        print "\n---Passphrase changed---\n"
    except:
        print
        print "\n---An error occurred---\n"
        print

else:
    print "Command not found or not supported"
| |
def pr_iterator(array):
    """Yield (item, previous_item) pairs; previous is None for the first."""
    previous = None
    for current in array:
        pair = (current, previous)
        previous = current
        yield pair
def ex(ex):
    # Helper to raise an exception from within a lambda expression
    # (``raise`` is a statement and cannot appear inside a lambda directly).
    raise ex
import math
class Calculator():
    """Evaluates infix arithmetic expressions via the shunting-yard algorithm.

    Operators live in ``operations``: token -> (precedence, callable).  Keys
    prefixed with "unary " apply to a single operand; the "," operator packs
    two operands into a tuple so two-argument functions (pow, log) work.
    """

    __slots__ = ("operations", "special")

    default_operations = {
        ",": (1, lambda x, y: (x, y)),
        "+": (3, lambda x, y: x + y),
        "-": (3, lambda x, y: x - y),
        "unary +": (3, lambda x: x),
        "unary -": (3, lambda x: -x),
        "*": (6, lambda x, y: x * y),
        # Division by zero yields +inf instead of raising ZeroDivisionError.
        "/": (6, lambda x, y: x / y if y != 0 else float("inf")),
        "^": (9, lambda x, y: x ** y),
        "unary sqrt": (9, lambda x: x ** 0.5 if x >= 0 else ex(ValueError("Square root of a negative number."))),
        "unary pow": (9, lambda x: x[0]**x[1]),
        "unary fact": (9, math.factorial),
        "unary sin": (9, math.sin),
        "unary cos": (9, math.cos),
        # Bug fix: check isinstance() BEFORE len(x) -- len() raises TypeError
        # for the common scalar case (e.g. "log 10"), which previously made
        # every single-argument log call fail instead of computing ln(x).
        "unary log": (9, lambda x: (math.log(x) if x > 0 else ex(ValueError("Value x can't be zero for log!")))
                      if isinstance(x, (int, float)) or len(x) == 1
                      else math.log(x[0], x[1]) if x[0] > 0 and x[1] > 0 and x[1] != 1 else
                      ex(ValueError("Values for can't be zero (and one for base) for log!"))),
    }
    default_variables = {"pi": 3.14159265359, "e": 2.71828182846}

    def __init__(self, operations=None):
        """Plugin for calculating user provided expressions.

        Args:
            operations: optional operator table.  A ``None`` key acts as a
                sentinel meaning "merge the defaults into this table";
                NOTE(review): the merge mutates the caller's dict and leaves
                the sentinel key in place -- confirm that is intended.
        """
        if operations is None:
            operations = self.default_operations
        if None in operations:
            operations.update(self.default_operations)
        self.operations = operations
        self.special = ["(", ")", ","]

    def unary_place(self, pr_token):
        """True when the previous token allows a unary operator here."""
        return pr_token is None or pr_token == "(" or pr_token in self.operations

    @staticmethod
    def isfloat(v):
        """Return True when *v* parses as a float literal."""
        try:
            float(str(v))
            return True
        except ValueError:
            return False

    def tokenized(self, expr, **variables):
        """Eager (list) wrapper around :meth:`tokenize`."""
        return list(self.tokenize(expr, **variables))

    def tokenize(self, expr, **variables):
        """Yield tokens (numbers, operators, names) from the raw string."""
        token = ""
        token_pr = None  # previously emitted token, for unary detection
        for s in expr:
            if s in (" ", "\n"):
                if token:
                    yield token
                    token_pr = token
                    token = ""
                continue
            # A number token ends as soon as appending the next char would
            # stop it parsing as a float (e.g. "12+" splits into "12", "+").
            if token and self.isfloat(token) and not self.isfloat(token + s):
                yield token
                token_pr = token
                token = ""
            token += s
            if token in self.operations or token in self.special or token in variables \
                    or (self.unary_place(token_pr) and f"unary {token}" in self.operations):
                yield token
                token_pr = token
                token = ""
        if token:
            yield token

    def infix_to_postfixed(self, expr, **variables):
        """Eager (list) wrapper around :meth:`infix_to_postfix`."""
        return list(self.infix_to_postfix(expr, **variables))

    def infix_to_postfix(self, expr, **variables):
        """Convert infix tokens to postfix (RPN) order via shunting-yard."""
        if isinstance(expr, (str, )):
            expr = self.tokenize(expr, **variables)
        p = lambda x: self.operations.get(x, (0, 0))[0]  # precedence lookup
        stack = []
        top = lambda: stack[-1]
        for token, pr_token in pr_iterator(expr):
            if token == ")":
                # Pop operators back to the matching "("; the while-else
                # fires only when the stack empties without finding one.
                while stack:
                    current_top = stack.pop(-1)
                    if current_top == "(": break
                    yield current_top
                else:
                    raise ValueError("Unbalanced brackets")
                continue
            if token == "(":
                stack.append(token)
                continue
            if self.unary_place(pr_token) and f"unary {token}" in self.operations:
                token = f"unary {token}"
            elif token not in self.operations:
                yield token
                continue
            while stack and p(top()) >= p(token):
                yield stack.pop(-1)
            stack.append(token)
        while stack:
            yield stack.pop(-1)

    def calculate_safe(self, expr, **variables):
        """Like :meth:`calculate`, returning (ok, value) instead of raising."""
        try:
            return True, self.calculate(expr, **variables)
        except ValueError:
            return False, 0

    @staticmethod
    def prepare_token(token):
        """Coerce an operand to float; tuples pass through for 2-arg ops."""
        # You can just return `token` it will turn calculator to small programming language
        if isinstance(token, (int, float)):
            return token
        if isinstance(token, str):
            return float(token)
        if isinstance(token, (list, tuple)):
            return token
        return float(str(token))

    def calculate(self, expr, **variables):
        """Evaluate *expr* and return the result (floats rounded to 10 dp)."""
        variables.update(self.default_variables)
        postfix = self.infix_to_postfix(expr, **variables)
        stack = []
        for token in postfix:
            if token not in self.operations:
                if token in variables:
                    # Callable variables are invoked lazily at use time.
                    if callable(variables[token]):
                        stack.append(variables[token]())
                    else:
                        stack.append(variables[token])
                else:
                    stack.append(token)
                continue
            if token.startswith("unary "):
                r = self.operations[token][1](self.prepare_token(stack.pop(-1)))
                # Rounding hides float noise (e.g. 0.1 + 0.2).
                if isinstance(r, float):
                    stack.append(round(r, 10))
                else:
                    stack.append(r)
            else:
                a = self.prepare_token(stack.pop(-1))
                b = self.prepare_token(stack.pop(-1))
                r = self.operations[token][1](b, a)
                if isinstance(r, float):
                    stack.append(round(r, 10))
                else:
                    stack.append(r)
        if len(stack) > 1:
            raise ValueError("Unbalanced expression")
        res = stack.pop(0)
        if isinstance(res, float):
            return round(res, 10)
        return res
| |
import os
from mock import Mock
from conans.client.generators import MakeGenerator
from conans.model.build_info import CppInfo
from conans.model.conan_file import ConanFile
from conans.model.env_info import EnvValues
from conans.model.ref import ConanFileReference
from conans.model.settings import Settings
from conans.test.utils.test_files import temp_folder
from conans.util.files import save
def test_make_generator():
    """End-to-end check of MakeGenerator output for two dependencies.

    Builds two fake package folders with include/lib/bin subdirs, registers
    them as MyPkg1 and MyPkg2 in a ConanFile's deps_cpp_info, and compares
    the generated makefile variables against an expected template.
    """
    tmp_folder1 = temp_folder()
    tmp_folder2 = temp_folder()
    save(os.path.join(tmp_folder1, "include1", "file.h"), "")
    save(os.path.join(tmp_folder2, "include2", "file.h"), "")
    save(os.path.join(tmp_folder1, "lib1", "file.a"), "")
    save(os.path.join(tmp_folder2, "lib2", "file.a"), "")
    save(os.path.join(tmp_folder1, "bin1", "file.bin"), "")
    save(os.path.join(tmp_folder2, "bin2", "file.bin"), "")
    save(os.path.join(tmp_folder1, "SystemFrameworks", "file.bin"), "")
    conanfile = ConanFile(Mock(), None)
    conanfile.initialize(Settings({}), EnvValues())
    # First dependency: MyPkg1, with flags, frameworks and a system lib.
    ref = ConanFileReference.loads("MyPkg1/0.1@lasote/stables")
    cpp_info = CppInfo(ref.name, tmp_folder1)
    cpp_info.defines = ["MYDEFINE1"]
    cpp_info.includedirs = ['include1']
    cpp_info.libdirs = ['lib1']
    cpp_info.libs = ['libfoo']
    cpp_info.bindirs = ['bin1']
    cpp_info.version = "0.1"
    cpp_info.cflags = ['-fgimple']
    cpp_info.cxxflags = ['-fdollars-in-identifiers']
    cpp_info.sharedlinkflags = ['-framework Cocoa']
    cpp_info.exelinkflags = ['-framework QuartzCore']
    cpp_info.frameworks = ['AudioUnit']
    cpp_info.frameworkdirs = ['SystemFrameworks']
    cpp_info.system_libs = ["system_lib1"]
    conanfile.deps_cpp_info.add(ref.name, cpp_info)
    # Second dependency: MyPkg2, without frameworks/framework dirs.
    ref = ConanFileReference.loads("MyPkg2/3.2.3@lasote/stables")
    cpp_info = CppInfo(ref.name, tmp_folder2)
    cpp_info.defines = ["MYDEFINE2"]
    cpp_info.includedirs = ['include2']
    cpp_info.libdirs = ['lib2']
    cpp_info.libs = ['libbar']
    cpp_info.bindirs = ['bin2']
    cpp_info.version = "3.2.3"
    cpp_info.cflags = ['-fno-asm']
    cpp_info.cxxflags = ['-pthread']
    cpp_info.sharedlinkflags = ['-framework AudioFoundation']
    cpp_info.exelinkflags = ['-framework VideoToolbox']
    cpp_info.system_libs = ["system_lib2"]
    conanfile.deps_cpp_info.add(ref.name, cpp_info)
    generator = MakeGenerator(conanfile)
    content = generator.content
    # Expected variables: per-package blocks first, then the aggregated ones.
    content_template = """
CONAN_ROOT_MYPKG1 ?= \\
{conan_root_mypkg1}

CONAN_SYSROOT_MYPKG1 ?= \\

CONAN_INCLUDE_DIRS_MYPKG1 += \\
{conan_include_dirs_mypkg1}

CONAN_LIB_DIRS_MYPKG1 += \\
{conan_lib_dirs_mypkg1}

CONAN_BIN_DIRS_MYPKG1 += \\
{conan_bin_dirs_mypkg1}

CONAN_BUILD_DIRS_MYPKG1 += \\
{conan_build_dirs_mypkg1}/

CONAN_RES_DIRS_MYPKG1 +=

CONAN_LIBS_MYPKG1 += \\
libfoo

CONAN_SYSTEM_LIBS_MYPKG1 += \\
system_lib1

CONAN_DEFINES_MYPKG1 += \\
MYDEFINE1

CONAN_CFLAGS_MYPKG1 += \\
-fgimple

CONAN_CXXFLAGS_MYPKG1 += \\
-fdollars-in-identifiers

CONAN_SHAREDLINKFLAGS_MYPKG1 += \\
-framework Cocoa

CONAN_EXELINKFLAGS_MYPKG1 += \\
-framework QuartzCore

CONAN_FRAMEWORKS_MYPKG1 += \\
AudioUnit

CONAN_FRAMEWORK_PATHS_MYPKG1 += \\
{conan_framework_dirs_mypkg1}/SystemFrameworks

CONAN_ROOT_MYPKG2 ?= \\
{conan_root_mypkg2}

CONAN_SYSROOT_MYPKG2 ?= \\

CONAN_INCLUDE_DIRS_MYPKG2 += \\
{conan_include_dirs_mypkg2}

CONAN_LIB_DIRS_MYPKG2 += \\
{conan_lib_dirs_mypkg2}

CONAN_BIN_DIRS_MYPKG2 += \\
{conan_bin_dirs_mypkg2}

CONAN_BUILD_DIRS_MYPKG2 += \\
{conan_build_dirs_mypkg2}/

CONAN_RES_DIRS_MYPKG2 +=

CONAN_LIBS_MYPKG2 += \\
libbar

CONAN_SYSTEM_LIBS_MYPKG2 += \\
system_lib2

CONAN_DEFINES_MYPKG2 += \\
MYDEFINE2

CONAN_CFLAGS_MYPKG2 += \\
-fno-asm

CONAN_CXXFLAGS_MYPKG2 += \\
-pthread

CONAN_SHAREDLINKFLAGS_MYPKG2 += \\
-framework AudioFoundation

CONAN_EXELINKFLAGS_MYPKG2 += \\
-framework VideoToolbox

CONAN_FRAMEWORKS_MYPKG2 +=

CONAN_FRAMEWORK_PATHS_MYPKG2 +=

CONAN_ROOT += \\
$(CONAN_ROOT_MYPKG1) \\
$(CONAN_ROOT_MYPKG2)

CONAN_SYSROOT += \\
$(CONAN_SYSROOT_MYPKG1) \\
$(CONAN_SYSROOT_MYPKG2)

CONAN_INCLUDE_DIRS += \\
$(CONAN_INCLUDE_DIRS_MYPKG1) \\
$(CONAN_INCLUDE_DIRS_MYPKG2)

CONAN_LIB_DIRS += \\
$(CONAN_LIB_DIRS_MYPKG1) \\
$(CONAN_LIB_DIRS_MYPKG2)

CONAN_BIN_DIRS += \\
$(CONAN_BIN_DIRS_MYPKG1) \\
$(CONAN_BIN_DIRS_MYPKG2)

CONAN_BUILD_DIRS += \\
$(CONAN_BUILD_DIRS_MYPKG1) \\
$(CONAN_BUILD_DIRS_MYPKG2)

CONAN_RES_DIRS += \\
$(CONAN_RES_DIRS_MYPKG1) \\
$(CONAN_RES_DIRS_MYPKG2)

CONAN_LIBS += \\
$(CONAN_LIBS_MYPKG1) \\
$(CONAN_LIBS_MYPKG2)

CONAN_DEFINES += \\
$(CONAN_DEFINES_MYPKG1) \\
$(CONAN_DEFINES_MYPKG2)

CONAN_CFLAGS += \\
$(CONAN_CFLAGS_MYPKG1) \\
$(CONAN_CFLAGS_MYPKG2)

CONAN_CXXFLAGS += \\
$(CONAN_CXXFLAGS_MYPKG1) \\
$(CONAN_CXXFLAGS_MYPKG2)

CONAN_SHAREDLINKFLAGS += \\
$(CONAN_SHAREDLINKFLAGS_MYPKG1) \\
$(CONAN_SHAREDLINKFLAGS_MYPKG2)

CONAN_EXELINKFLAGS += \\
$(CONAN_EXELINKFLAGS_MYPKG1) \\
$(CONAN_EXELINKFLAGS_MYPKG2)

CONAN_FRAMEWORKS += \\
$(CONAN_FRAMEWORKS_MYPKG1) \\
$(CONAN_FRAMEWORKS_MYPKG2)

CONAN_FRAMEWORK_PATHS += \\
$(CONAN_FRAMEWORK_PATHS_MYPKG1) \\
$(CONAN_FRAMEWORK_PATHS_MYPKG2)
"""
    # Normalize path separators so the comparison also works on Windows.
    root1 = tmp_folder1.replace('\\', '/')
    root2 = tmp_folder2.replace('\\', '/')
    inc1 = os.path.join(tmp_folder1, 'include1').replace('\\', '/')
    inc2 = os.path.join(tmp_folder2, 'include2').replace('\\', '/')
    lib1 = os.path.join(tmp_folder1, 'lib1').replace('\\', '/')
    lib2 = os.path.join(tmp_folder2, 'lib2').replace('\\', '/')
    bin1 = os.path.join(tmp_folder1, 'bin1').replace('\\', '/')
    bin2 = os.path.join(tmp_folder2, 'bin2').replace('\\', '/')
    expected_content = content_template.format(conan_root_mypkg1=root1,
                                               conan_include_dirs_mypkg1=inc1,
                                               conan_lib_dirs_mypkg1=lib1,
                                               conan_bin_dirs_mypkg1=bin1,
                                               conan_build_dirs_mypkg1=root1,
                                               conan_root_mypkg2=root2,
                                               conan_include_dirs_mypkg2=inc2,
                                               conan_lib_dirs_mypkg2=lib2,
                                               conan_bin_dirs_mypkg2=bin2,
                                               conan_build_dirs_mypkg2=root2,
                                               conan_framework_dirs_mypkg1=root1)
    content = "\n".join(line.strip() for line in content.splitlines())  # Trailing spaces
    assert expected_content in content
| |
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Watcher test utilities."""
from oslo_utils import timeutils
from watcher.db import api as db_api
from watcher.db.sqlalchemy import models
from watcher import objects
def id_generator():
    """Yield sequential integer ids starting at 1."""
    next_id = 0
    while True:
        next_id += 1
        yield next_id
def _load_relationships(model, db_data):
    """Instantiate related models for each relationship of `model`.

    For every relationship declared on `model`, build an instance of the
    related model from the matching entry in `db_data`, or map it to None
    when the entry is absent/falsy.
    """
    relationships = db_api.get_instance()._get_relationships(model)
    return {
        name: (relationship.argument(**db_data.get(name))
               if db_data.get(name) else None)
        for name, relationship in relationships.items()
    }
def get_test_audit_template(**kwargs):
    """Return a dict of audit-template field values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'uuid': 'e74c40e0-d825-11e2-a28f-0800200c9a66',
        'goal_id': 1,
        'strategy_id': None,
        'name': 'My Audit Template',
        'description': 'Desc. Of My Audit Template',
        'scope': [],
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
    }
    data = {field: kwargs.get(field, value)
            for field, value in defaults.items()}
    # ObjectField doesn't allow None nor dict, so if we want to simulate a
    # non-eager object loading, the field should not be referenced at all.
    data.update(_load_relationships(models.AuditTemplate, kwargs))
    return data
def create_test_audit_template(**kwargs):
    """Create a test audit template entry in the DB and return it.

    :param kwargs: overriding values for the audit template's attributes.
    :returns: Test AuditTemplate DB object.
    """
    values = get_test_audit_template(**kwargs)
    # Let the DB generate the id unless one was explicitly requested.
    if 'id' not in kwargs:
        values.pop('id')
    return db_api.get_instance().create_audit_template(values)
def get_test_audit(**kwargs):
    """Return a dict of audit field values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
        'name': 'My Audit',
        'audit_type': 'ONESHOT',
        'state': objects.audit.State.PENDING,
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
        'parameters': {},
        'interval': '3600',
        'goal_id': 1,
        'strategy_id': None,
        'scope': [],
        'auto_trigger': False,
        'next_run_time': None,
        'hostname': 'host_1',
        'start_time': None,
        'end_time': None,
        'force': False,
    }
    data = {field: kwargs.get(field, value)
            for field, value in defaults.items()}
    # ObjectField doesn't allow None nor dict, so if we want to simulate a
    # non-eager object loading, the field should not be referenced at all.
    data.update(_load_relationships(models.Audit, kwargs))
    return data
def create_test_audit(**kwargs):
    """Create a test audit entry in the DB and return the Audit DB object.

    :param kwargs: overriding values for the audit's attributes.
    :returns: Test Audit DB object.
    """
    values = get_test_audit(**kwargs)
    # Let the DB generate the id unless one was explicitly requested.
    if 'id' not in kwargs:
        values.pop('id')
    return db_api.get_instance().create_audit(values)
def get_test_action(**kwargs):
    """Return a dict of action field values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
        'action_plan_id': 1,
        'action_type': 'nop',
        'input_parameters': {
            'key1': 'val1',
            'key2': 'val2',
            'resource_id': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
        },
        # NOTE(review): default state comes from action_plan.State, not
        # action.State — preserved as-is from the original.
        'state': objects.action_plan.State.PENDING,
        'parents': [],
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
    }
    data = {field: kwargs.get(field, value)
            for field, value in defaults.items()}
    # ObjectField doesn't allow None nor dict, so if we want to simulate a
    # non-eager object loading, the field should not be referenced at all.
    data.update(_load_relationships(models.Action, kwargs))
    return data
def create_test_action(**kwargs):
    """Create a test action entry in the DB and return the Action DB object.

    :param kwargs: overriding values for the action's attributes.
    :returns: Test Action DB object.
    """
    values = get_test_action(**kwargs)
    # Let the DB generate the id unless one was explicitly requested.
    if 'id' not in kwargs:
        values.pop('id')
    return db_api.get_instance().create_action(values)
def get_test_action_plan(**kwargs):
    """Return a dict of action-plan field values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'uuid': '76be87bd-3422-43f9-93a0-e85a577e3061',
        'state': objects.action_plan.State.ONGOING,
        'audit_id': 1,
        'strategy_id': 1,
        'global_efficacy': [],
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
        'hostname': 'host_1',
    }
    data = {field: kwargs.get(field, value)
            for field, value in defaults.items()}
    # ObjectField doesn't allow None nor dict, so if we want to simulate a
    # non-eager object loading, the field should not be referenced at all.
    data.update(_load_relationships(models.ActionPlan, kwargs))
    return data
def create_test_action_plan(**kwargs):
    """Create test action plan entry in DB and return ActionPlan DB object.

    Function to be used to create test ActionPlan objects in the database.
    :param kwargs: kwargs with overriding values for the action plan's
                   attributes.
    :returns: Test ActionPlan DB object.
    """
    # Fix: docstring and local name previously said "Action" — copy/paste
    # from create_test_action; this function creates an *action plan*.
    action_plan = get_test_action_plan(**kwargs)
    # Let DB generate ID if it isn't specified explicitly
    if 'id' not in kwargs:
        del action_plan['id']
    dbapi = db_api.get_instance()
    return dbapi.create_action_plan(action_plan)
def get_test_goal(**kwargs):
    """Return a dict of goal field values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'uuid': 'f7ad87ae-4298-91cf-93a0-f35a852e3652',
        'name': 'TEST',
        'display_name': 'test goal',
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
        'efficacy_specification': [],
    }
    return {field: kwargs.get(field, value)
            for field, value in defaults.items()}
def create_test_goal(**kwargs):
    """Create a test goal entry in the DB and return the Goal DB object.

    :param kwargs: overriding values for the goal's attributes.
    :returns: Test Goal DB object.
    """
    return db_api.get_instance().create_goal(get_test_goal(**kwargs))
def get_test_scoring_engine(**kwargs):
    """Return a dict of scoring-engine field values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'uuid': 'e8370ede-4f39-11e6-9ffa-08002722cb21',
        'name': 'test-se-01',
        'description': 'test scoring engine 01',
        'metainfo': 'test_attr=test_val',
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
    }
    return {field: kwargs.get(field, value)
            for field, value in defaults.items()}
def create_test_scoring_engine(**kwargs):
    """Create a test scoring engine in the DB and return the DB object.

    :param kwargs: overriding values for the scoring engine's attributes.
    :returns: Test ScoringEngine DB object.
    """
    return db_api.get_instance().create_scoring_engine(
        get_test_scoring_engine(**kwargs))
def get_test_strategy(**kwargs):
    """Return a dict of strategy field values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'uuid': 'cb3d0b58-4415-4d90-b75b-1e96878730e3',
        'name': 'TEST',
        'display_name': 'test strategy',
        'goal_id': 1,
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
        'parameters_spec': {},
    }
    data = {field: kwargs.get(field, value)
            for field, value in defaults.items()}
    # ObjectField doesn't allow None nor dict, so if we want to simulate a
    # non-eager object loading, the field should not be referenced at all.
    data.update(_load_relationships(models.Strategy, kwargs))
    return data
def get_test_service(**kwargs):
    """Return a dict of service field values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'name': 'watcher-service',
        'host': 'controller',
        'last_seen_up': timeutils.parse_isotime(
            '2016-09-22T08:32:06').replace(tzinfo=None),
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
    }
    return {field: kwargs.get(field, value)
            for field, value in defaults.items()}
def create_test_service(**kwargs):
    """Create a test service entry in the DB and return the Service DB object.

    :param kwargs: overriding values for the service's attributes.
    :returns: Test Service DB object.
    """
    return db_api.get_instance().create_service(get_test_service(**kwargs))
def create_test_strategy(**kwargs):
    """Create a test strategy entry in the DB and return the Strategy object.

    :param kwargs: overriding values for the strategy's attributes.
    :returns: Test Strategy DB object.
    """
    return db_api.get_instance().create_strategy(get_test_strategy(**kwargs))
def get_test_efficacy_indicator(**kwargs):
    """Return a dict of efficacy-indicator values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'uuid': '202cfcf9-811c-411a-8a35-d8351f64eb24',
        'name': 'test_indicator',
        'description': 'Test indicator',
        'unit': '%',
        'value': 0,
        'action_plan_id': 1,
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
    }
    return {field: kwargs.get(field, value)
            for field, value in defaults.items()}
def create_test_efficacy_indicator(**kwargs):
    """Create and return a test efficacy indicator entry in the DB.

    :param kwargs: overriding values for the indicator's attributes.
    :returns: Test EfficacyIndicator DB object.
    """
    values = get_test_efficacy_indicator(**kwargs)
    # Let the DB generate the id unless one was explicitly requested.
    if 'id' not in kwargs:
        values.pop('id')
    return db_api.get_instance().create_efficacy_indicator(values)
def get_test_action_desc(**kwargs):
    """Return a dict of action-description values, overridable via kwargs."""
    defaults = {
        'id': 1,
        'action_type': 'nop',
        'description': 'Logging a NOP message',
        'created_at': None,
        'updated_at': None,
        'deleted_at': None,
    }
    return {field: kwargs.get(field, value)
            for field, value in defaults.items()}
def create_test_action_desc(**kwargs):
    """Create a test action description entry in the DB and return it.

    :param kwargs: overriding values for the description's attributes.
    :returns: Test ActionDescription DB object.
    """
    return db_api.get_instance().create_action_description(
        get_test_action_desc(**kwargs))
| |
import collections
import contextlib
import errno
import hashlib
import json
import os
import io
import re
import shutil
import signal
import stat
import struct
import tarfile
import tempfile
import zipfile
import fcntl
from django import forms
from django.conf import settings
from django.core.files.storage import File as DjangoFile, default_storage as storage
from django.template.defaultfilters import filesizeformat
from django.utils.encoding import force_str
from django.utils.jslex import JsLexer
from django.utils.translation import gettext
import olympia.core.logger
from olympia import amo
from olympia.access import acl
from olympia.addons.utils import verify_mozilla_trademark
from olympia.amo.utils import decode_json, find_language, rm_local_tmp_dir
from olympia.applications.models import AppVersion
from olympia.lib.crypto.signing import get_signer_organizational_unit_name
from olympia.lib import unicodehelper
from olympia.versions.compare import VersionString
# Module-level logger for file-handling utilities.
log = olympia.core.logger.getLogger('z.files.utils')
class ParseError(forms.ValidationError):
    """Raised when an add-on's metadata cannot be parsed."""
    pass
# Add-on version strings: up to 32 chars drawn from word chars, '-', '+',
# '*' and '.'.
VERSION_RE = re.compile(r'^[-+*.\w]{,32}$')
# Signature files inside META-INF/, e.g. META-INF/mozilla.rsa or *.sf.
SIGNED_RE = re.compile(r'^META\-INF/(\w+)\.(rsa|sf)$')
# This is essentially what Firefox matches
# (see toolkit/components/extensions/ExtensionUtils.jsm)
MSG_RE = re.compile(r'__MSG_(?P<msgid>[a-zA-Z0-9@_]+?)__')
# The default update URL.
default = (
    'https://versioncheck.addons.mozilla.org/update/VersionCheck.php?'
    'reqVersion=%REQ_VERSION%&id=%ITEM_ID%&version=%ITEM_VERSION%&'
    'maxAppVersion=%ITEM_MAXAPPVERSION%&status=%ITEM_STATUS%&appID=%APP_ID%&'
    'appVersion=%APP_VERSION%&appOS=%APP_OS%&appABI=%APP_ABI%&'
    # Fix: '&curren' had been mangled into the HTML entity '¤' (U+00A4);
    # restore the literal '&currentAppVersion' query parameter.
    'locale=%APP_LOCALE%&currentAppVersion=%CURRENT_APP_VERSION%&'
    'updateType=%UPDATE_TYPE%'
)
def get_filepath(fileorpath):
    """Resolve the actual file path of `fileorpath`.

    This supports various input formats, a path, a django `File` object,
    `olympia.files.File`, a `FileUpload` or just a regular file-like object.
    """
    # Strings and django File objects are returned as-is.
    if isinstance(fileorpath, (str, DjangoFile)):
        return fileorpath
    # olympia.files.File, FileUpload and plain file-like objects expose
    # their location through different attributes — check in that order.
    for attribute in ('file_path', 'path', 'name'):
        if hasattr(fileorpath, attribute):
            return getattr(fileorpath, attribute)
    return fileorpath
def id_to_path(pk):
    """
    Generate a path from an id, to distribute folders in the file system.

    1      => 1/1/1
    12     => 2/12/12
    123456 => 6/56/123456
    """
    pk = str(pk)
    # Middle component is the last two digits, or the whole id when it is
    # a single digit.
    middle = pk[-2:] if len(pk) >= 2 else pk
    return os.path.join(pk[-1], middle, pk)
def get_file(fileorpath):
    """Get a file-like object, whether given a FileUpload object or a path."""
    if hasattr(fileorpath, 'path'):
        # FileUpload: open the file it points at.
        return storage.open(fileorpath.path, 'rb')
    if hasattr(fileorpath, 'name'):
        # Already a file-like object.
        return fileorpath
    # Plain path string.
    return storage.open(fileorpath, 'rb')
def make_xpi(files):
    """Build an in-memory XPI (zip) from a mapping of path -> contents.

    Returns a BytesIO positioned at the start of the archive.
    """
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as archive:
        for name, contents in files.items():
            archive.writestr(name, contents)
    buf.seek(0)
    return buf
class UnsupportedFileType(forms.ValidationError):
    """Raised when the uploaded file is not a supported add-on format."""
    pass
class NoManifestFound(forms.ValidationError):
    """Raised when no manifest.json is present in the archive."""
    pass
class InvalidManifest(forms.ValidationError):
    """Raised when manifest.json exists but cannot be parsed as JSON."""
    pass
class InvalidZipFile(forms.ValidationError):
    """Raised when we attempt to open an invalid archive with SafeZip."""
    pass
class Extractor:
    """Extract add-on info from a manifest file."""

    # One (application, min/max AppVersion) compatibility entry.
    App = collections.namedtuple('App', 'appdata id min max')

    @classmethod
    def parse(cls, xpi_fobj, minimal=False):
        """Parse `xpi_fobj` and return the extracted metadata dict.

        Raises NoManifestFound when the archive lacks a manifest.json.
        """
        safe_zip = SafeZip(xpi_fobj)
        cert_path = os.path.join('META-INF', 'mozilla.rsa')
        # Pick up signing information when the archive is already signed.
        certinfo = (
            SigningCertificateInformation(safe_zip.read(cert_path))
            if safe_zip.exists(cert_path)
            else None
        )
        if not safe_zip.exists('manifest.json'):
            raise NoManifestFound('No manifest.json found')
        return ManifestJSONExtractor(
            safe_zip, certinfo=certinfo).parse(minimal=minimal)
def get_appversions(app, min_version, max_version):
    """Return the (min, max) `AppVersion` pair for the given versions."""
    versions = AppVersion.objects.filter(application=app.id)
    return (versions.get(version=min_version),
            versions.get(version=max_version))
def get_simple_version(version_string):
    """Extract the version number without the ><= requirements, returning a
    VersionString instance.

    This simply strips any '>', '<' and '=' characters, so it will not be
    accurate for version requirements that are not >=, <= or = to a version.

    >>> get_simple_version('>=33.0a1')
    VersionString('33.0a1')
    """
    return VersionString(re.sub('[<=>]', '', version_string or ''))
class ManifestJSONExtractor:
    """Extract add-on metadata from a WebExtension ``manifest.json``.

    Wraps a zip file (usually a ``SafeZip``) and exposes pieces of the
    parsed manifest through properties; ``parse()`` returns the dict of
    metadata used by the rest of the upload pipeline.
    """
    def __init__(self, zip_file, data='', certinfo=None):
        self.zip_file = zip_file
        self.certinfo = certinfo
        if not data:
            data = zip_file.read('manifest.json')
        # Remove BOM if present.
        data = unicodehelper.decode(data)
        # Run through the JSON and remove all comments, then try to read
        # the manifest file.
        # Note that Firefox and the WebExtension spec only allow for
        # line comments (starting with `//`), not block comments (starting with
        # `/*`). We strip out both in AMO because the linter will flag the
        # block-level comments explicitly as an error (so the developer can
        # change them to line-level comments).
        #
        # But block level comments are not allowed. We just flag them elsewhere
        # (in the linter).
        json_string = ''
        lexer = JsLexer()
        for name, token in lexer.lex(data):
            if name not in ('blockcomment', 'linecomment'):
                json_string += token
        try:
            # self.data holds the decoded manifest dict.
            self.data = json.loads(json_string)
        except Exception:
            raise InvalidManifest(gettext('Could not parse the manifest file.'))
    def get(self, key, default=None):
        """Return a top-level manifest value, or `default` when absent."""
        return self.data.get(key, default)
    @property
    def homepage(self):
        homepage_url = self.get('homepage_url')
        # `developer.url` in the manifest overrides `homepage_url`.
        return self.get('developer', {}).get('url', homepage_url)
    @property
    def is_experiment(self):
        """Return whether or not the webextension uses
        experiments or theme experiments API.
        In legacy extensions this is a different type, but for webextensions
        we just look at the manifest."""
        experiment_keys = ('experiment_apis', 'theme_experiment')
        return any(bool(self.get(key)) for key in experiment_keys)
    @property
    def gecko(self):
        """Return the "applications|browser_specific_settings["gecko"]" part
        of the manifest."""
        # browser_specific_settings wins over the legacy applications key.
        parent_block = self.get(
            'browser_specific_settings', self.get('applications', {})
        )
        return parent_block.get('gecko', {})
    @property
    def guid(self):
        # Normalize empty/missing gecko ids to None.
        return str(self.gecko.get('id', None) or '') or None
    @property
    def type(self):
        # Add-on type is inferred from which top-level manifest key is
        # present, falling back to a regular extension.
        return (
            amo.ADDON_LPAPP
            if 'langpack_id' in self.data
            else amo.ADDON_STATICTHEME
            if 'theme' in self.data
            else amo.ADDON_DICT
            if 'dictionaries' in self.data
            else amo.ADDON_SITE_PERMISSION
            if 'site_permissions' in self.data
            else amo.ADDON_EXTENSION
        )
    @property
    def strict_max_version(self):
        return get_simple_version(self.gecko.get('strict_max_version'))
    @property
    def strict_min_version(self):
        return get_simple_version(self.gecko.get('strict_min_version'))
    @property
    def install_origins(self):
        value = self.get('install_origins', [])
        # We will be processing install_origins regardless of validation
        # status, so it can be invalid. We need to ensure it's a list of
        # strings at least, to prevent exceptions later.
        return (
            list(filter(lambda item: isinstance(item, str), value))
            if isinstance(value, list)
            else []
        )
    def apps(self):
        """Get `AppVersion`s for the application.

        Generator yielding one Extractor.App per compatible application.
        Raises forms.ValidationError for incompatible strict_min_version /
        missing add-on id combinations, or unknown AppVersions.
        """
        type_ = self.type
        if type_ == amo.ADDON_LPAPP:
            # Langpack are only compatible with Firefox desktop at the moment.
            # https://github.com/mozilla/addons-server/issues/8381
            # They are all strictly compatible with a specific version, so
            # the default min version here doesn't matter much.
            apps = ((amo.FIREFOX, amo.DEFAULT_WEBEXT_MIN_VERSION),)
        elif type_ == amo.ADDON_STATICTHEME:
            # Static themes are only compatible with Firefox desktop >= 53
            # and Firefox for Android >=65.
            apps = (
                (amo.FIREFOX, amo.DEFAULT_STATIC_THEME_MIN_VERSION_FIREFOX),
                (amo.ANDROID, amo.DEFAULT_STATIC_THEME_MIN_VERSION_ANDROID),
            )
        elif type_ == amo.ADDON_DICT:
            # WebExt dicts are only compatible with Firefox desktop >= 61.
            apps = ((amo.FIREFOX, amo.DEFAULT_WEBEXT_DICT_MIN_VERSION_FIREFOX),)
        else:
            webext_min = (
                amo.DEFAULT_WEBEXT_MIN_VERSION
                if self.get('browser_specific_settings', None) is None
                else amo.DEFAULT_WEBEXT_MIN_VERSION_BROWSER_SPECIFIC
            )
            # amo.DEFAULT_WEBEXT_MIN_VERSION_BROWSER_SPECIFIC should be 48.0,
            # which is the same as amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID, so
            # no specific treatment for Android.
            apps = (
                (amo.FIREFOX, webext_min),
                (amo.ANDROID, amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID),
            )
        if self.get('manifest_version') == 3:
            # Update minimum supported versions if it's an mv3 addon.
            mv3_mins = {
                amo.FIREFOX: amo.DEFAULT_WEBEXT_MIN_VERSION_MV3_FIREFOX,
                amo.ANDROID: amo.DEFAULT_WEBEXT_MIN_VERSION_MV3_ANDROID,
            }
            apps = (
                (app, max(VersionString(ver), mv3_mins.get(app, mv3_mins[amo.FIREFOX])))
                for app, ver in apps
            )
        doesnt_support_no_id = (
            self.strict_min_version
            and self.strict_min_version
            < VersionString(amo.DEFAULT_WEBEXT_MIN_VERSION_NO_ID)
        )
        if self.guid is None and doesnt_support_no_id:
            raise forms.ValidationError(
                gettext('Add-on ID is required for Firefox 47 and below.')
            )
        # If a minimum strict version is specified, it needs to be higher
        # than the version when Firefox started supporting WebExtensions.
        unsupported_no_matter_what = (
            self.strict_min_version
            and self.strict_min_version < VersionString(amo.DEFAULT_WEBEXT_MIN_VERSION)
        )
        if unsupported_no_matter_what:
            msg = gettext('Lowest supported "strict_min_version" is 42.0.')
            raise forms.ValidationError(msg)
        for app, default_min_version in apps:
            if self.guid is None and not self.strict_min_version:
                strict_min_version = max(
                    VersionString(amo.DEFAULT_WEBEXT_MIN_VERSION_NO_ID),
                    VersionString(default_min_version),
                )
            else:
                # strict_min_version for this app shouldn't be lower than the
                # default min version for this app.
                strict_min_version = max(
                    self.strict_min_version, VersionString(default_min_version)
                )
            strict_max_version = self.strict_max_version or VersionString(
                amo.DEFAULT_WEBEXT_MAX_VERSION
            )
            if strict_max_version < strict_min_version:
                strict_max_version = strict_min_version
            qs = AppVersion.objects.filter(application=app.id)
            try:
                min_appver = qs.get(version=strict_min_version)
            except AppVersion.DoesNotExist:
                msg = gettext(
                    'Unknown "strict_min_version" {appver} for {app}'.format(
                        app=app.pretty, appver=strict_min_version
                    )
                )
                raise forms.ValidationError(msg)
            try:
                max_appver = qs.get(version=strict_max_version)
            except AppVersion.DoesNotExist:
                # If the specified strict_max_version can't be found, raise an
                # error: we used to use '*' instead but this caused more
                # problems, especially with langpacks that are really specific
                # to a given Firefox version.
                msg = gettext(
                    'Unknown "strict_max_version" {appver} for {app}'.format(
                        app=app.pretty, appver=strict_max_version
                    )
                )
                raise forms.ValidationError(msg)
            yield Extractor.App(appdata=app, id=app.id, min=min_appver, max=max_appver)
    def target_locale(self):
        """Guess target_locale for a dictionary from manifest contents."""
        try:
            dictionaries = self.get('dictionaries', {})
            key = force_str(list(dictionaries.keys())[0])
            return key[:255]
        except (IndexError, UnicodeDecodeError):
            # This shouldn't happen: the linter should prevent it, but
            # just in case, handle the error (without bothering with
            # translations as users should never see this).
            raise forms.ValidationError('Invalid dictionaries object.')
    def parse(self, minimal=False):
        """Return the metadata dict extracted from the manifest.

        With minimal=True, skip compatibility/permission extraction (and
        the validation it performs) and only return the basic fields.
        """
        data = {
            'guid': self.guid,
            'type': self.type,
            'version': str(self.get('version', '')),
            'name': self.get('name'),
            'summary': self.get('description'),
            'homepage': self.homepage,
            'default_locale': self.get('default_locale'),
            'manifest_version': self.get('manifest_version'),
            'install_origins': self.install_origins,
        }
        # Populate certificate information (e.g signed by mozilla or not)
        # early on to be able to verify compatibility based on it
        if self.certinfo is not None:
            data.update(self.certinfo.parse())
        if self.type == amo.ADDON_STATICTHEME:
            data['theme'] = self.get('theme', {})
        if not minimal:
            data.update(
                {
                    'apps': list(self.apps()),
                    # Langpacks have strict compatibility enabled, rest of
                    # webextensions don't.
                    'strict_compatibility': data['type'] == amo.ADDON_LPAPP,
                    'is_experiment': self.is_experiment,
                }
            )
            if self.type == amo.ADDON_EXTENSION:
                # Only extensions have permissions and content scripts
                data.update(
                    {
                        'optional_permissions': self.get('optional_permissions', []),
                        'permissions': self.get('permissions', []),
                        'content_scripts': self.get('content_scripts', []),
                    }
                )
                if self.get('devtools_page'):
                    data.update({'devtools_page': self.get('devtools_page')})
            elif self.type == amo.ADDON_DICT:
                data['target_locale'] = self.target_locale()
            elif self.type == amo.ADDON_SITE_PERMISSION:
                data['site_permissions'] = self.get('site_permissions', [])
        return data
class SigningCertificateInformation:
    """Process the signature to determine the addon is a Mozilla Signed
    extension, so is signed already with a special certificate. We want to
    know this so we don't write over it later, and stop unauthorised people
    from submitting them to AMO."""

    def __init__(self, certificate_data):
        # Organizational unit of the certificate that signed the add-on.
        self.cert_ou = get_signer_organizational_unit_name(certificate_data)

    @property
    def is_mozilla_signed_ou(self):
        return self.cert_ou == 'Mozilla Extensions'

    def parse(self):
        """Return the metadata dict contributed by the certificate."""
        return {'is_mozilla_signed_extension': self.is_mozilla_signed_ou}
class FSyncMixin:
    """Mixin that implements fsync for file extractions.

    This mixin uses the `_extract_member` interface used by `zipfile` and
    `tarfile` so it's somewhat universal.

    We need this to make sure that on EFS / NFS all data is immediately
    written to avoid any data loss on the way.
    """

    def _fsync_dir(self, path):
        # fsync the directory so the new entry itself is durable.
        descriptor = os.open(path, os.O_DIRECTORY)
        try:
            os.fsync(descriptor)
        except OSError as exc:
            # On some filesystem doing a fsync on a directory
            # raises an EINVAL error. Ignoring it is usually safe.
            if exc.errno != errno.EINVAL:
                raise
        finally:
            # Fix: close in `finally` — previously the descriptor leaked
            # when a non-EINVAL OSError was re-raised.
            os.close(descriptor)

    def _fsync_file(self, path):
        descriptor = os.open(path, os.O_RDONLY)
        try:
            os.fsync(descriptor)
        finally:
            # Fix: guarantee the descriptor is closed even if fsync fails.
            os.close(descriptor)

    def _extract_member(self, member, targetpath, *args, **kwargs):
        """Extends `ZipFile._extract_member` to call fsync().

        For every extracted file we are ensuring that its data has been
        written to disk. We are doing this to avoid any data inconsistencies
        that we have seen in the past.

        To do this correctly we are fsync()ing all directories as well —
        only that will ensure we have a durable write for that specific file.

        This is inspired by https://github.com/2ndquadrant-it/barman/
        (see backup.py -> backup_fsync_and_set_sizes and utils.py)
        """
        super()._extract_member(member, targetpath, *args, **kwargs)
        parent_dir = os.path.dirname(os.path.normpath(targetpath))
        if parent_dir:
            self._fsync_dir(parent_dir)
        self._fsync_file(targetpath)
class FSyncedZipFile(FSyncMixin, zipfile.ZipFile):
    """Subclass of ZipFile that calls `fsync` for file extractions."""

    pass
class FSyncedTarFile(FSyncMixin, tarfile.TarFile):
    """Subclass of TarFile that calls `fsync` for file extractions."""

    pass
def archive_member_validator(archive, member, ignore_filename_errors=False):
    """Validate a member of an archive (TarInfo or ZipInfo)."""
    # ZipInfo exposes filename/file_size; TarInfo exposes name/size.
    name = getattr(member, 'filename', getattr(member, 'name', None))
    size = getattr(member, 'file_size', getattr(member, 'size', None))
    _validate_archive_member_name_and_size(name, size, ignore_filename_errors)
def _validate_archive_member_name_and_size(
    filename, filesize, ignore_filename_errors=False
):
    """Validate a single archive member's name and size.

    Raises InvalidZipFile for unsupported archives, badly-encoded or
    path-traversing filenames (unless `ignore_filename_errors`), and files
    exceeding `settings.FILE_UNZIP_SIZE_LIMIT`.
    """
    if filename is None or filesize is None:
        raise InvalidZipFile(gettext('Unsupported archive type.'))
    try:
        force_str(filename)
    except UnicodeDecodeError:
        # We can't log the filename unfortunately since it's encoding
        # is obviously broken :-/
        log.error('Extraction error, invalid file name encoding')
        msg = gettext(
            'Invalid file name in archive. Please make sure '
            'all filenames are utf-8 or latin1 encoded.'
        )
        raise InvalidZipFile(msg)
    if not ignore_filename_errors:
        # Reject backslashes, parent-directory traversal and absolute paths.
        if (
            '\\' in filename
            or '../' in filename
            or '..' == filename
            or filename.startswith('/')
        ):
            log.error('Extraction error, invalid file name: %s' % (filename))
            # L10n: {0} is the name of the invalid file.
            msg = gettext('Invalid file name in archive: {0}')
            raise InvalidZipFile(msg.format(filename))
    if filesize > settings.FILE_UNZIP_SIZE_LIMIT:
        # Fix: the log message printed a literal "(unknown)" placeholder;
        # include the offending filename, which is available here.
        log.error(f'Extraction error, file too big for file ({filename}): {filesize}')
        # L10n: {0} is the name of the invalid file.
        msg = gettext('File exceeding size limit in archive: {0}')
        raise InvalidZipFile(msg.format(filename))
class SafeZip:
    """Wrapper around ZipFile that validates members before any use.

    Validation (run on construction) rejects dangerous filenames, oversized
    members and archives whose total uncompressed size exceeds
    `settings.MAX_ZIP_UNCOMPRESSED_SIZE`.
    """
    def __init__(
        self, source, mode='r', force_fsync=False, ignore_filename_errors=False
    ):
        self.source = source
        self.info_list = None
        self.mode = mode
        # When True, use FSyncedZipFile so extractions are fsync()ed.
        self.force_fsync = force_fsync
        self.ignore_filename_errors = ignore_filename_errors
        self.initialize_and_validate()
    def initialize_and_validate(self):
        """
        Runs some overall archive checks.
        """
        if self.force_fsync:
            zip_file = FSyncedZipFile(self.source, self.mode)
        else:
            zip_file = zipfile.ZipFile(self.source, self.mode)
        info_list = zip_file.infolist()
        total_file_size = 0
        for info in info_list:
            total_file_size += info.file_size
            # Raises InvalidZipFile on a bad member name or size.
            archive_member_validator(self.source, info, self.ignore_filename_errors)
        if total_file_size >= settings.MAX_ZIP_UNCOMPRESSED_SIZE:
            raise InvalidZipFile(gettext('Uncompressed size is too large'))
        self.info_list = info_list
        self.zip_file = zip_file
    def is_signed(self):
        """Tells us if an addon is signed.

        Returns True when both the .rsa and .sf signature files are present
        under META-INF/ with the same base name; otherwise returns None
        (falsy) — callers rely on truthiness only.
        """
        finds = []
        for info in self.info_list:
            match = SIGNED_RE.match(info.filename)
            if match:
                name, ext = match.groups()
                # If it's rsa or sf, just look for the opposite.
                if (name, {'rsa': 'sf', 'sf': 'rsa'}[ext]) in finds:
                    return True
                finds.append((name, ext))
    def extract_from_manifest(self, manifest):
        """
        Extracts a file given a manifest such as:
            jar:chrome/de.jar!/locale/de/browser/
        or
            locale/de/browser
        """
        type, path = manifest.split(':')
        jar = self
        if type == 'jar':
            # Descend through nested jars, the last part is the file path.
            parts = path.split('!')
            for part in parts[:-1]:
                jar = self.__class__(io.BytesIO(jar.zip_file.read(part)))
            path = parts[-1]
        return jar.read(path[1:] if path.startswith('/') else path)
    def extract_info_to_dest(self, info, dest):
        """Extracts the given info to a directory and checks the file size."""
        self.zip_file.extract(info, dest)
        dest = os.path.join(dest, info.filename)
        if not os.path.isdir(dest):
            # Directories consistently report their size incorrectly.
            size = os.stat(dest)[stat.ST_SIZE]
            if size != info.file_size:
                log.error(
                    'Extraction error, uncompressed size: %s, %s not %s'
                    % (self.source, size, info.file_size)
                )
                raise forms.ValidationError(gettext('Invalid archive.'))
    def extract_to_dest(self, dest):
        """Extracts the zip file to a directory."""
        for info in self.info_list:
            self.extract_info_to_dest(info, dest)
    def close(self):
        """Close the underlying zip file."""
        self.zip_file.close()
    @property
    def filelist(self):
        # Delegates to the underlying ZipFile.
        return self.zip_file.filelist
    @property
    def namelist(self):
        # NOTE: returns the bound `namelist` *method*, not its result —
        # callers must invoke it.
        return self.zip_file.namelist
    def exists(self, path):
        """Return the member's ZipInfo (truthy) if `path` exists, else False."""
        try:
            return self.zip_file.getinfo(path)
        except KeyError:
            return False
    def read(self, path):
        """Return the bytes of the member at `path`."""
        return self.zip_file.read(path)
def extract_zip(source, remove=False, force_fsync=False, tempdir=None):
    """Extract the zip file into `tempdir` and return that directory.

    Creates a temporary directory under settings.TMP_PATH when `tempdir`
    is not given; with `remove`, deletes `source` after extraction.
    """
    if tempdir is None:
        tempdir = tempfile.mkdtemp(dir=settings.TMP_PATH)
    try:
        SafeZip(source, force_fsync=force_fsync).extract_to_dest(tempdir)
    except Exception:
        # Don't leave a half-extracted directory behind.
        rm_local_tmp_dir(tempdir)
        raise
    if remove:
        os.remove(source)
    return tempdir
def extract_extension_to_dest(source, dest=None, force_fsync=False):
    """Extract `source` to `dest`.

    `source` is the path to an extension or extension source, which can be a
    zip or a compressed tar (gzip, bzip)

    Note that this doesn't verify the contents of `source` except for
    that it requires something valid to be extracted.

    :returns: Extraction target directory, if `dest` is `None` it'll be a
              temporary directory.
    :raises FileNotFoundError: if the source file is not found on the filestem
    :raises forms.ValidationError: if the zip is invalid
    """
    target, tempdir = None, None
    if dest is None:
        # No destination given: extract into a fresh temp dir we own (and
        # must clean up on failure).
        target = tempdir = tempfile.mkdtemp(dir=settings.TMP_PATH)
    else:
        target = dest
    try:
        source = force_str(source)
        # Dispatch on file extension: zip/xpi vs compressed tarball.
        if source.endswith(('.zip', '.xpi')):
            with open(source, 'rb') as source_file:
                zip_file = SafeZip(source_file, force_fsync=force_fsync)
                zip_file.extract_to_dest(target)
        elif source.endswith(('.tar.gz', '.tar.bz2', '.tgz')):
            tarfile_class = tarfile.TarFile if not force_fsync else FSyncedTarFile
            with tarfile_class.open(source) as archive:
                archive.extractall(target)
        else:
            raise FileNotFoundError  # Unsupported file, shouldn't be reached
    except (zipfile.BadZipFile, tarfile.ReadError, OSError, forms.ValidationError) as e:
        if tempdir is not None:
            # Only remove the directory when we created it ourselves.
            rm_local_tmp_dir(tempdir)
        if isinstance(e, (FileNotFoundError, forms.ValidationError)):
            # We let FileNotFoundError (which are a subclass of IOError, or
            # rather OSError but that's an alias) and ValidationError be
            # raised, the caller will have to deal with it.
            raise
        # Any other exceptions we caught, we raise a generic ValidationError
        # instead.
        raise forms.ValidationError(gettext('Invalid or broken archive.'))
    return target
def copy_over(source, dest):
    """Copy the `source` tree onto `dest`, then remove `source`.

    An existing `dest` directory is wiped first so the copy fully
    replaces it. `dest` ends up with 755 permissions.
    """
    if os.path.isdir(dest):
        shutil.rmtree(dest)
    shutil.copytree(source, dest)
    # mkdtemp creates directories as 700; the webserver needs to be able
    # to read them, so open the copy up to 755.
    mode_755 = (
        stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH
    )
    os.chmod(dest, mode_755)
    shutil.rmtree(source)
def get_all_files(folder, strip_prefix='', prefix=None):
    """Return every file and directory below `folder`, depth-first.

    :param folder: The folder of which to return the file-tree.
    :param strip_prefix str: A string to strip in case we're adding a custom
                             `prefix`. Doesn't have any implications if
                             `prefix` isn't given.
    :param prefix: A custom prefix to add to all files and folders.
    """
    collected = []

    # Not using os.path.walk so we get just the right order: directories
    # first (and recursed into), then files, each sorted.
    def walk(current):
        dirs, files = storage.listdir(current)
        for name in sorted(dirs):
            entry = os.path.join(current, force_str(name))
            collected.append(entry)
            walk(entry)
        for name in sorted(files):
            collected.append(os.path.join(current, force_str(name)))

    walk(folder)

    if prefix is not None:
        # This is magic: strip the prefix (e.g. /tmp/) plus the path
        # separator, and re-root each path under `prefix`.
        cut = len(strip_prefix) + 1
        collected = [os.path.join(prefix, path[cut:]) for path in collected]
    return collected
def extract_xpi(xpi, path):
    """Extract every file from `xpi` into `path` and return the file list.

    This can be removed in favour of our already extracted
    git-repositories once we land and tested them in production.
    """
    extraction_dir = extract_zip(xpi)
    files = get_all_files(extraction_dir)
    copy_over(extraction_dir, path)
    return files
def parse_xpi(xpi, addon=None, minimal=False, user=None):
    """Extract and parse an XPI, returning a dict describing it.

    Will raise ValidationError if something went wrong while parsing.

    When `minimal` is True, validation is avoided as much as possible
    (hard errors such as I/O problems or invalid json still raise
    ValidationError) and only the minimal set of properties needed to
    decide what to do with the add-on is returned: guid and version.
    """
    try:
        xpi = get_file(xpi)
        xpi_info = Extractor.parse(xpi, minimal=minimal)
    except forms.ValidationError:
        raise
    except OSError as exc:
        if len(exc.args) < 2:
            errno_, message = None, exc.args[0]
        else:
            errno_, message = exc.args
        log.error(f'I/O error({errno_}): {message}')
        # We don't really know what happened, so even though we return a
        # generic message about the manifest, don't raise InvalidManifest.
        # We want the validation to stop there.
        raise forms.ValidationError(gettext('Could not parse the manifest file.'))
    except Exception:
        # As above, don't raise InvalidManifest here.
        log.error('XPI parse error', exc_info=True)
        raise forms.ValidationError(gettext('Could not parse the manifest file.'))

    return xpi_info if minimal else check_xpi_info(xpi_info, addon, xpi, user=user)
def check_xpi_info(xpi_info, addon=None, xpi_file=None, user=None):
    """Validate parsed xpi data against add-on business rules.

    Checks the guid (ownership/uniqueness), the version string (length and
    allowed characters), static theme size, trademark usage in the name,
    and the submission permissions tied to `user`.

    :param xpi_info: dict of parsed properties (as returned by parse_xpi).
    :param addon: existing Addon instance this upload targets, if any.
    :param xpi_file: open file object for the xpi; used for the size and
        translation checks when provided.
    :param user: UserProfile of the uploader, used for permission checks.
    :returns: `xpi_info`, possibly with 'guid' filled in from `addon`.
    :raises forms.ValidationError: on any failed check.
    """
    # Local import; presumably avoids a circular import at module load
    # time — confirm against olympia.addons.models.
    from olympia.addons.models import Addon, DeniedGuid

    guid = xpi_info['guid']
    # If we allow the guid to be omitted we assume that one was generated
    # or existed before and use that one.
    # An example are WebExtensions that don't require a guid but we generate
    # one once they're uploaded. Now, if you update that WebExtension we
    # just use the original guid.
    if addon and not guid:
        xpi_info['guid'] = guid = addon.guid
    if guid:
        if user:
            # Deleted add-ons with this guid that the uploader is NOT an
            # author of (the uploader may resubmit their own deletions).
            deleted_guid_clashes = Addon.unfiltered.exclude(authors__id=user.id).filter(
                guid=guid
            )
        else:
            deleted_guid_clashes = Addon.unfiltered.filter(guid=guid)
        if addon and addon.guid != guid:
            msg = gettext(
                'The add-on ID in your manifest.json (%s) '
                'does not match the ID of your add-on on AMO (%s)'
            )
            raise forms.ValidationError(msg % (guid, addon.guid))
        if (
            not addon
            # Non-deleted add-ons.
            and (
                Addon.objects.filter(guid=guid).exists()
                # DeniedGuid objects for deletions for Mozilla disabled add-ons
                or DeniedGuid.objects.filter(guid=guid).exists()
                # Deleted add-ons that don't belong to the uploader.
                or deleted_guid_clashes.exists()
            )
        ):
            raise forms.ValidationError(gettext('Duplicate add-on ID found.'))
    # Version string constraints: at most 32 characters, restricted charset.
    if len(xpi_info['version']) > 32:
        raise forms.ValidationError(
            gettext('Version numbers should have fewer than 32 characters.')
        )
    if not VERSION_RE.match(xpi_info['version']):
        raise forms.ValidationError(
            gettext(
                'Version numbers should only contain letters, numbers, '
                'and these punctuation characters: +*.-_.'
            )
        )
    # Static themes get an extra file-size cap.
    if xpi_info.get('type') == amo.ADDON_STATICTHEME:
        max_size = settings.MAX_STATICTHEME_SIZE
        if xpi_file and xpi_file.size > max_size:
            raise forms.ValidationError(
                gettext('Maximum size for WebExtension themes is {0}.').format(
                    filesizeformat(max_size)
                )
            )
    if xpi_file:
        # Make sure we pass in a copy of `xpi_info` since
        # `resolve_webext_translations` modifies data in-place
        translations = Addon.resolve_webext_translations(xpi_info.copy(), xpi_file)
        verify_mozilla_trademark(translations['name'], user)
    # Permission checks: who may submit what kind of add-on.
    if not acl.experiments_submission_allowed(user, xpi_info):
        raise forms.ValidationError(gettext('You cannot submit this type of add-on'))
    if not addon and not acl.reserved_guid_addon_submission_allowed(user, xpi_info):
        raise forms.ValidationError(
            gettext('You cannot submit an add-on using an ID ending with this suffix')
        )
    if not acl.mozilla_signed_extension_submission_allowed(user, xpi_info):
        raise forms.ValidationError(
            gettext('You cannot submit a Mozilla Signed Extension')
        )
    if (
        not addon
        and guid
        and guid.lower().endswith(amo.RESERVED_ADDON_GUIDS)
        and not xpi_info.get('is_mozilla_signed_extension')
    ):
        raise forms.ValidationError(
            gettext(
                'Add-ons using an ID ending with this suffix need to be signed with '
                'privileged certificate before being submitted'
            )
        )
    if not acl.langpack_submission_allowed(user, xpi_info):
        raise forms.ValidationError(gettext('You cannot submit a language pack'))
    if not acl.site_permission_addons_submission_allowed(user, xpi_info):
        raise forms.ValidationError(gettext('You cannot submit this type of add-on'))
    return xpi_info
def parse_addon(pkg, addon=None, user=None, minimal=False):
    """
    Extract and parse a file path, UploadedFile or FileUpload. Returns a dict
    with various properties describing the add-on.

    Will raise ValidationError if something went wrong while parsing.

    `addon` parameter is mandatory if the file being parsed is going to be
    attached to an existing Addon instance.

    `user` parameter is mandatory unless `minimal` parameter is True. It
    should point to the UserProfile responsible for the upload.

    If `minimal` parameter is True, it avoids validation as much as possible
    (still raising ValidationError for hard errors like I/O or invalid
    json) and returns only the minimal set of properties needed to decide
    what to do with the add-on (the exact set depends on the add-on type,
    but it should always contain at least guid, type and version).
    """
    name = getattr(pkg, 'name', pkg)
    if name.endswith(amo.VALID_ADDON_FILE_EXTENSIONS):
        parsed = parse_xpi(pkg, addon, minimal=minimal, user=user)
    else:
        valid_extensions_string = '(%s)' % ', '.join(amo.VALID_ADDON_FILE_EXTENSIONS)
        raise UnsupportedFileType(
            # Translate the raw template first, THEN interpolate. The
            # previous code called .format() on the literal before passing
            # it to gettext(), so the msgid contained the extension list
            # and could never match the translation catalog entry.
            gettext(
                'Unsupported file type, please upload a supported '
                'file {extensions}.'
            ).format(extensions=valid_extensions_string)
        )
    if not minimal:
        if user is None:
            # This should never happen and means there is a bug in
            # addons-server itself.
            raise forms.ValidationError(gettext('Unexpected error.'))
        # FIXME: do the checks depending on user here.
        if addon and addon.type != parsed['type']:
            msg = gettext(
                'The type (%s) does not match the type of your add-on on AMO (%s)'
            )
            raise forms.ValidationError(msg % (parsed['type'], addon.type))
    return parsed
def get_sha256(file_obj, block_size=io.DEFAULT_BUFFER_SIZE):
    """Return the hex sha256 digest of the contents of `file_obj`.

    `file_obj` must be an open file descriptor. The caller needs to take
    care of closing it properly.
    """
    digest = hashlib.sha256()
    while True:
        chunk = file_obj.read(block_size)
        if not chunk:
            break
        digest.update(chunk)
    return digest.hexdigest()
def update_version_number(file_obj, new_version_number):
    """Rewrite `file_obj`'s xpi so manifest.json carries the new version."""
    # Build a sibling zip with the updated manifest.json...
    tmp_path = f'{file_obj.file_path}.updated_version_number'
    with zipfile.ZipFile(file_obj.file_path, 'r') as source:
        entries = source.infolist()
        with zipfile.ZipFile(tmp_path, 'w', zipfile.ZIP_DEFLATED) as dest:
            for entry in entries:
                payload = source.read(entry.filename)
                if entry.filename == 'manifest.json':
                    payload = _update_version_in_json_manifest(
                        payload, new_version_number
                    )
                dest.writestr(entry, payload)
    # ...then swap it in, replacing the original file.
    shutil.move(tmp_path, file_obj.file_path)
class InvalidOrUnsupportedCrx(Exception):
    """Raised by write_crx_as_xpi() when the data isn't a CRX, is corrupt,
    or uses a CRX version we don't support."""

    pass
def write_crx_as_xpi(chunks, target):
    """Extract and strip the header from the CRX, convert it to a regular ZIP
    archive, then write it to `target`.

    :param chunks: iterable of bytes chunks making up the uploaded CRX.
    :param target: storage path to write the resulting XPI to.
    :returns: the `hashlib.sha256()` hash object updated with the bytes
        written (note: the hash object itself, not its hex digest — the
        caller obtains that via `.hexdigest()`).
    :raises InvalidOrUnsupportedCrx: if the data doesn't look like a CRX,
        is truncated/corrupt, or uses an unsupported CRX version.

    Read more about the CRX file format:
    https://developer.chrome.com/extensions/crx
    """
    # First we open the uploaded CRX so we can see how much we need
    # to trim from the header of the file to make it a valid ZIP.
    with tempfile.NamedTemporaryFile('w+b', dir=settings.TMP_PATH) as tmp:
        for chunk in chunks:
            tmp.write(chunk)
        tmp.seek(0)
        # Where we have to start to find the zip depends on the version of the
        # crx format. First let's confirm it's a crx by looking at the first
        # 4 bytes (32 bits)
        if tmp.read(4) != b'Cr24':
            raise InvalidOrUnsupportedCrx('CRX file does not start with Cr24')
        try:
            # Then read the version, which is in the next 4 bytes
            version = struct.unpack('<I', tmp.read(4))[0]
            # Then find out where we need to start from to find the zip inside.
            if version == 2:
                header_info = struct.unpack('<II', tmp.read(8))
                public_key_length = header_info[0]
                signature_length = header_info[1]
                # Start position is where we are so far (4 + 4 + 8 = 16) + the
                # two length values we extracted.
                start_position = 16 + public_key_length + signature_length
            elif version == 3:
                # Start position is where we are so far (4 + 4 + 4 = 12) + the
                # single header length value we extracted.
                header_length = struct.unpack('<I', tmp.read(4))[0]
                start_position = 12 + header_length
            else:
                raise InvalidOrUnsupportedCrx('Unsupported CRX version')
        except struct.error:
            # struct.unpack() ran out of bytes: the header is truncated.
            raise InvalidOrUnsupportedCrx('Invalid or corrupt CRX file')
        # We can start reading the zip to write it where it needs to live on
        # the filesystem and then return the hash to the caller. If we somehow
        # don't end up with a valid xpi file, validation will raise an error
        # later.
        tmp.seek(start_position)
        hash_value = hashlib.sha256()
        # Now we open the Django storage and write our real XPI file.
        with storage.open(target, 'wb') as file_destination:
            data = tmp.read(65536)
            # Keep reading bytes and writing them to the XPI.
            while data:
                hash_value.update(data)
                file_destination.write(data)
                data = tmp.read(65536)
    return hash_value
def _update_version_in_json_manifest(content, new_version_number):
"""Change the version number in the json manifest file provided."""
updated = json.loads(content)
if 'version' in updated:
updated['version'] = new_version_number
return json.dumps(updated)
def extract_translations(file_obj):
    """Return all translation messages bundled inside `file_obj`.

    The result maps each supported locale to its decoded messages.json
    content. Unreadable archives yield an empty dict.
    """
    xpi = get_filepath(file_obj)
    messages = {}
    try:
        with zipfile.ZipFile(xpi, 'r') as source:
            names = source.namelist()
            # All locales the add-on ships, following the chrome i18n
            # layout (_locales/<locale>/messages.json); see
            # https://developer.chrome.com/extensions/i18n#overview-locales
            # for more details on the format.
            locales = {
                name.split('/')[1]
                for name in names
                if name.startswith('_locales/') and name.endswith('/messages.json')
            }
            for locale in locales:
                language = find_language(locale)
                if not language:
                    # Filter out languages we don't support.
                    continue
                fname = f'_locales/{locale}/messages.json'
                try:
                    messages[language] = decode_json(source.read(fname))
                except (ValueError, KeyError):
                    # `ValueError` thrown by `decode_json` if the json is
                    # invalid and `KeyError` thrown by `source.read`
                    # usually means the file doesn't exist for some reason;
                    # we fail silently.
                    continue
    except OSError:
        pass
    return messages
def resolve_i18n_message(message, messages, locale, default_locale=None):
    """Resolve a translatable string in an add-on.

    Matches ``__MSG_extensionName__``-like names and returns the correct
    translation for `locale`.

    :param message: The raw (possibly ``__MSG_…__``) string.
    :param messages: A dictionary of messages, e.g the return value
                     of `extract_translations`.
    :param locale: The locale to fetch the translation for. If ``None``
                   (default) ``settings.LANGUAGE_CODE`` is used.
    :param default_locale: Fallback locale tried when `locale` has no
                           messages.
    """
    if not isinstance(message, str) or not message:
        # Don't even attempt to extract invalid data.
        # See https://github.com/mozilla/addons-server/issues/3067
        # for more details
        return message

    match = MSG_RE.match(message)
    if not match:
        return message

    locale = find_language(locale)
    if default_locale:
        default_locale = find_language(default_locale)

    msgid = match.group('msgid')
    fallback = {'message': message}
    if locale in messages:
        message = messages[locale].get(msgid, fallback)
    elif default_locale in messages:
        message = messages[default_locale].get(msgid, fallback)

    if not isinstance(message, dict):
        # Fallback for invalid message format, should be caught by
        # addons-linter in the future but we'll have to handle it.
        # See https://github.com/mozilla/addons-server/issues/3485
        return fallback['message']

    return message['message']
def get_background_images(file_obj, theme_data, header_only=False):
    """Extract static theme background images from `file_obj`.

    Returns a dict mapping each manifest image reference to its raw bytes.
    """
    xpi = get_filepath(file_obj)
    if not theme_data:
        # We might already have theme_data, but otherwise get it from the
        # xpi itself.
        try:
            theme_data = parse_xpi(xpi, minimal=True).get('theme', {})
        except forms.ValidationError:
            # If we can't parse the existing manifest safely return.
            return {}

    images_dict = theme_data.get('images', {})
    # Get the reference in the manifest. headerURL is the deprecated variant.
    header_url = images_dict.get('theme_frame', images_dict.get('headerURL'))
    # And any additional backgrounds too, unless only the header was asked for.
    extra_urls = [] if header_only else images_dict.get('additional_backgrounds', [])
    wanted_urls = [header_url] + extra_urls

    images = {}
    try:
        with zipfile.ZipFile(xpi, 'r') as source:
            for url in wanted_urls:
                file_ext = os.path.splitext(str(url).lower())[1]
                if file_ext not in amo.THEME_BACKGROUND_EXTS:
                    # Just extract image files.
                    continue
                try:
                    images[url] = source.read(url)
                except KeyError:
                    pass
    except OSError as ioerror:
        log.info(ioerror)
    return images
@contextlib.contextmanager
def run_with_timeout(seconds):
    """Raise TimeoutError in the calling thread after `seconds` seconds.

    Implemented via `signal`; used for timeout handling when acquiring
    locks.
    """

    def _on_alarm(signum, frame):
        """
        Since Python 3.5 `fcntl` is retried automatically when interrupted,
        so we need an exception to actually stop it. The exception will
        propagate on to the main thread — make sure `flock` is called there.
        """
        raise TimeoutError

    previous_handler = signal.signal(signal.SIGALRM, _on_alarm)
    try:
        signal.alarm(seconds)
        yield
    finally:
        # Cancel any pending alarm, then restore whatever handler was
        # installed before us.
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous_handler)
@contextlib.contextmanager
def lock(lock_dir, lock_name, timeout=6):
    """A wrapper around fcntl to be used as a context manager.

    Additionally this helper allows the caller to wait for a lock for a
    certain amount of time.

    Example::

        with lock(settings.TMP_PATH, 'extraction-1234') as got_lock:
            if got_lock:
                extract_xpi(...)

    The lock is properly released at the end of the context block.

    This locking mechanism should work perfectly fine with NFS v4 and EFS
    (which uses the NFS v4.1 protocol).

    :param timeout: Timeout for how long we expect to wait for a lock in
                    seconds. If 0 the function returns immediately,
                    otherwise it blocks the execution.
    :yields: `True` if the lock was attained and we own it, `False` if
             another process already holds it.
    """
    lock_name = f'{lock_name}.lock'
    log.info(f'Acquiring lock {lock_name}.')

    lock_path = os.path.join(lock_dir, lock_name)

    with open(lock_path, 'w') as lockfd:
        # Record the owning pid for debugging; the lock itself is flock-based.
        lockfd.write(f'{os.getpid()}')
        fileno = lockfd.fileno()
        try:
            # Blocking flock, bounded by the SIGALRM-based timeout.
            with run_with_timeout(timeout):
                fcntl.flock(fileno, fcntl.LOCK_EX)
        except (BlockingIOError, TimeoutError):
            # Another process already holds the lock.
            # In theory, in this case we'd always catch
            # `TimeoutError` but for the sake of completeness let's
            # catch `BlockingIOError` too to be on the safe side.
            yield False
        else:
            # We successfully acquired the lock.
            yield True
        finally:
            # Always release the lock after the parent context
            # block has finished.
            log.info(f'Releasing lock {lock_name}.')
            fcntl.flock(fileno, fcntl.LOCK_UN)
            lockfd.close()

            try:
                os.unlink(lock_path)
            except FileNotFoundError:
                pass
| |
import argparse
import os
import sys
import numpy as np
import matplotlib.pyplot as plt
from perf.errsim import *
def plot_x_vs_pmf(params, show=True, fpath=None):
    """Plot the symbol error PMF for each parameter set in `params`."""

    def draw(ax, xs, param, **plotargs):
        # A missing pb means a plain BSC channel; GBMM otherwise.
        if param['pb'] is None:
            param['pb'] = param['pe']
            curve_label = 'BSC pe={pe} m={m} n={n}'.format(**param)
        else:
            curve_label = 'GBMM pe={pe} pb={pb} m={m} n={n}'.format(**param)
        plotargs.setdefault('label', curve_label)
        ax.plot(xs, errpmf(**param)[xs], **plotargs)

    plt.close('all')
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=plt.figaspect(1/2))
    xs = np.arange(11)
    for param in params:
        draw(ax, xs, param.copy(), lw=1.5)
    ax.axhline(1e-15, color='black', linestyle='dashed')
    ax.set_ylim(1e-25, 1e-1)
    ax.set_ylabel('PMF, $p_X(x)$')
    ax.set_yscale('log')
    ax.grid(True)
    ax.set_xticks(xs)
    ax.set_xlabel('Number of Symbols, $x$')
    ax.set_title('Symbol Error PMF (Prob. of x errors in n digits)')
    ax.legend(fontsize=12)
    if fpath:
        fig.savefig(fpath)
    if show:
        plt.show()
def plot_x_vs_pndc(params, show=True, fpath=None):
    """Plot the probability of not-decoding-correctly for each parameter
    set in `params`."""

    def draw(ax, xs, param, **plotargs):
        # A missing pb means a plain BSC channel; GBMM otherwise.
        if param['pb'] is None:
            param['pb'] = param['pe']
            curve_label = 'BSC pe={pe} m={m} n={n}'.format(**param)
        else:
            curve_label = 'GBMM pe={pe} pb={pb} m={m} n={n}'.format(**param)
        pndc = prob_ndc(errpmf(**param))
        plotargs.setdefault('label', curve_label)
        ax.plot(xs, pndc[xs], **plotargs)

    plt.close('all')
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=plt.figaspect(1/2))
    xs = np.arange(11)
    for param in params:
        draw(ax, xs, param.copy(), lw=1.5)
    ax.axhline(1e-15, color='black', linestyle='dashed')
    ax.set_ylim(1e-25, 1e-1)
    ax.set_ylabel('$P_{ndc}(t)$')
    ax.set_yscale('log')
    ax.grid(True)
    ax.set_xticks(xs)
    ax.set_xlabel('Number of Symbols, $x$')
    ax.set_title('Probability of not-decoding-correctly')
    ax.legend(fontsize=12)
    if fpath:
        fig.savefig(fpath)
    if show:
        plt.show()
def plot_t_vs_ober(params, show=True, fpath=None):
    """Plot output BER against the number of corrected symbols for each
    parameter set in `params`."""

    def draw(ax, ts, param, **plotargs):
        # A missing pb means a plain BSC channel; GBMM otherwise.
        if param['pb'] is None:
            param['pb'] = param['pe']
            curve_label = 'BSC pe={pe} m={m} n={n}'.format(**param)
        else:
            curve_label = 'GBMM pe={pe} pb={pb} m={m} n={n}'.format(**param)
        out_ber = ber_out(param['pe'], param['pb'], errpmf(**param))
        plotargs.setdefault('label', curve_label)
        ax.plot(ts, out_ber[ts], **plotargs)

    plt.close('all')
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=plt.figaspect(1/2))
    ts = np.arange(11)
    for param in params:
        draw(ax, ts, param.copy(), lw=1.5)
    ax.axhline(1e-15, color='black', linestyle='dashed')
    ax.set_ylim(1e-25, 1e-5)
    ax.set_ylabel('Output BER, $BER_o$')
    ax.set_yscale('log')
    ax.grid(True)
    ax.set_xticks(ts)
    ax.set_xlabel('Number of Symbols corrected, $t$')
    ax.set_title('Number of Symbols Corrected vs. Output BER')
    ax.legend(fontsize=12)
    if fpath:
        fig.savefig(fpath)
    if show:
        plt.show()
def plot_r_vs_ober(params, show=True, fpath=None):
    """Plot output BER against correction fraction (left) and coding rate
    (right) for each parameter set in `params`."""

    def draw(axes, ts, param, **plotargs):
        # A missing pb means a plain BSC channel; GBMM otherwise.
        if param['pb'] is None:
            param['pb'] = param['pe']
            curve_label = 'BSC pe={pe} m={m} n={n}'.format(**param)
        else:
            curve_label = 'GBMM pe={pe} pb={pb} m={m} n={n}'.format(**param)
        out_ber = ber_out(param['pe'], param['pb'], errpmf(**param))
        plotargs.setdefault('label', curve_label)
        n = param['n']
        # Left axis: percentage of symbols corrected; right: coding rate.
        frac_t = 100 * ts / n
        rate = (n - 2 * ts) / n
        axes[0].plot(frac_t, out_ber[ts], **plotargs)
        axes[1].plot(rate, out_ber[ts], **plotargs)

    plt.close('all')
    fig, axes = plt.subplots(nrows=1, ncols=2, figsize=plt.figaspect(1/2))
    ts = np.arange(16)
    for param in params:
        draw(axes, ts, param.copy(), lw=1.5)
    for ax in axes:
        ax.axhline(1e-15, color='black', linestyle='dashed')
        ax.set_ylim(1e-25, 1e-5)
        ax.set_ylabel('Output BER, $BER_o$')
        ax.set_yscale('log')
        ax.grid(True)
    axes[0].set_xlim(0, 10)
    axes[0].set_xlabel('Fraction of Symbols corrected, $t/n$ [%]')
    axes[0].set_title('Fraction of Symbols corrected vs. Output BER')
    axes[0].legend(loc='upper right', fontsize=12)
    axes[1].set_xlim(0.8, 1.0)
    axes[1].set_xlabel('Coding Rate, $R = k/n = (n - 2t)/n$')
    axes[1].set_title('Coding Rate vs. Output BER')
    axes[1].legend(loc='upper left', fontsize=12)
    plt.tight_layout()
    if fpath:
        fig.savefig(fpath)
    if show:
        plt.show()
def plot_pe_vs_ober(params, show=True, fpath=None):
    """Plot input BER vs. output BER for each parameter set in `params`."""

    def draw(ax, in_ber, param, **plotargs):
        # A missing pb labels the curve as a plain BSC channel.
        if param['pb'] is None:
            curve_label = 'BSC m={m} n={n} t={t}'.format(**param)
        else:
            curve_label = 'GBMM pb={pb} m={m} n={n} t={t}'.format(**param)
        plotargs.setdefault('label', curve_label)
        ax.plot(in_ber, pe_vs_ober(in_ber, **param), **plotargs)

    plt.close('all')
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=plt.figaspect(1/2))
    # Sweep the input error probability over half-decade steps.
    in_ber = 10.0 ** np.arange(-15, -0.5, 0.5)
    for param in params:
        draw(ax, in_ber, param.copy(), lw=1.5)
    ax.axhline(1e-15, color='black', linestyle='dashed')
    ax.set_xscale('log')
    ax.set_yscale('log')
    ax.set_xlim(in_ber[0], in_ber[-1])
    ax.set_ylim(1e-25, 1e-1)
    ax.set_xlabel('Input BER, $BER_i$')
    ax.set_ylabel('Output BER, $BER_o$')
    ax.set_title('Input vs. Output BER')
    ax.legend(loc='upper left', fontsize=12)
    ax.grid(True)
    if fpath:
        fig.savefig(fpath)
    if show:
        plt.show()
def plot_ebn0_vs_ober(params, show=True, fpath=None):
    """Plot Eb/N0 vs. output BER for each parameter set in `params`,
    with uncoded reference curves."""

    def draw(ax, ebn0, param, **plotargs):
        # A missing pb labels the curve as a plain BSC channel.
        if param['pb'] is None:
            curve_label = 'BSC m={m} n={n} t={t}'.format(**param)
        else:
            curve_label = 'GBMM pb={pb} m={m} n={n} t={t}'.format(**param)
        n, t = param['n'], param['t']
        # Convert Eb/N0 to an input symbol error probability via the
        # coding rate R = (n - 2t)/n.
        rate = (n - 2 * t) / n
        esn0 = ebn0 + dB(rate)
        in_pe = esn02pe(esn0)
        plotargs.setdefault('label', curve_label)
        ax.plot(ebn0, pe_vs_ober(in_pe, **param), **plotargs)

    plt.close('all')
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=plt.figaspect(1/2))
    ebn0 = np.arange(5, 20.5, 0.5)
    # Uncoded (FEC input) for reference
    in_pe = esn02pe(ebn0)
    in_ber = ber_in(pe=in_pe, pb=0.5)
    ax.plot(ebn0, in_pe, lw=1.5, color='black', label='Uncoded BSC')
    ax.plot(ebn0, in_ber, lw=1.5, color='black', linestyle='dashed',
            label='Uncoded GBMM(pb=0.5)')
    for param in params:
        draw(ax, ebn0, param.copy(), lw=1.5)
    ax.axhline(1e-15, color='black', linestyle='dashed')
    ax.set_yscale('log')
    ax.set_xlim(ebn0[0], ebn0[-1])
    ax.set_xticks(ebn0[::2])
    ax.set_ylim(1e-25, 1e-1)
    ax.set_xlabel('$E_b/N_0 [dB]$')
    ax.set_ylabel('Output BER, $BER_o$')
    ax.set_title('Eb/N0 vs. Output BER')
    ax.legend(fontsize=10)
    ax.grid(True)
    if fpath:
        fig.savefig(fpath)
    if show:
        plt.show()
if __name__ == '__main__':
    argp = argparse.ArgumentParser(description='Create code performance plots.')
    argp.add_argument('dir', metavar='DIR', help='plots directory')
    argp.add_argument('--no-show', dest='show', action='store_false',
                      help='Don\'t show, just save to file.')
    args = argp.parse_args()

    plots_dir = os.path.abspath(args.dir)
    os.makedirs(plots_dir, exist_ok=True)

    def out(name):
        return os.path.join(plots_dir, name)

    # Input-BER / Eb/N0 sweeps: (filename prefix, parameter sets with a
    # fixed correction capability t).
    ber_sweeps = [
        ('', [
            # GBMM
            dict(pb=0.5, m=8, n=124, t=4),
            dict(pb=0.5, m=8, n=124, t=6),
            dict(pb=0.5, m=8, n=124, t=8),
            dict(pb=0.5, m=8, n=248, t=4),
            dict(pb=0.5, m=8, n=248, t=6),
            dict(pb=0.5, m=8, n=248, t=8),
            dict(pb=0.5, m=10, n=528, t=7),
            # BSC
            dict(pb=None, m=8, n=124, t=4),
            dict(pb=None, m=8, n=248, t=4),
        ]),
        # 240-bit codes: GBMM then BSC, t = 1..3 each.
        ('240bits-',
         [dict(pb=0.5, m=8, n=240 // 8, t=t) for t in (1, 2, 3)] +
         [dict(pb=None, m=8, n=240 // 8, t=t) for t in (1, 2, 3)]),
        # 120-bit codes: GBMM then BSC, t = 1..3 each.
        ('120bits-',
         [dict(pb=0.5, m=8, n=120 // 8, t=t) for t in (1, 2, 3)] +
         [dict(pb=None, m=8, n=120 // 8, t=t) for t in (1, 2, 3)]),
    ]
    for prefix, params in ber_sweeps:
        plot_pe_vs_ober(params, args.show, out(prefix + 'pe-vs-ober.png'))
        plot_ebn0_vs_ober(params, args.show, out(prefix + 'ebn0-vs-ober.png'))

    # Per-code error statistics at a fixed input error probability pe:
    # (filename prefix, parameter sets).
    pmf_sweeps = [
        # Short codes
        ('240bits-', [
            # GBMM
            dict(pe=1e-12, pb=0.5, m=5, n=240 // 5),
            dict(pe=1e-12, pb=0.5, m=8, n=240 // 8),
            # BSC
            dict(pe=1e-12, pb=None, m=5, n=240 // 5),
            dict(pe=1e-12, pb=None, m=8, n=240 // 8),
        ]),
        # Very short codes
        ('120bits-', [
            # GBMM
            dict(pe=1e-12, pb=0.5, m=5, n=120 // 5),
            dict(pe=1e-12, pb=0.5, m=8, n=120 // 8),
            # BSC
            dict(pe=1e-12, pb=None, m=5, n=120 // 5),
            dict(pe=1e-12, pb=None, m=8, n=120 // 8),
        ]),
        # Practical codes
        ('', [
            # GBMM
            dict(pe=1e-6, pb=0.5, m=8, n=124),
            dict(pe=1e-6, pb=0.5, m=8, n=248),
            dict(pe=1e-6, pb=0.5, m=10, n=264),
            dict(pe=1e-6, pb=0.5, m=10, n=528),
            # BSC
            dict(pe=1e-6, pb=None, m=8, n=124),
            dict(pe=1e-6, pb=None, m=8, n=248),
        ]),
    ]
    for prefix, params in pmf_sweeps:
        plot_x_vs_pmf(params, args.show, out(prefix + 'x-vs-pmf.png'))
        plot_x_vs_pndc(params, args.show, out(prefix + 'x-vs-pndc.png'))
        plot_t_vs_ober(params, args.show, out(prefix + 't-vs-ober.png'))
        plot_r_vs_ober(params, args.show, out(prefix + 'r-vs-ober.png'))
| |
# Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import socket
import string
import urllib2
import urlparse
from command import Command
import utils
LOGGER = logging.getLogger(__name__)
class Request(urllib2.Request):
    """
    Extends the urllib2.Request to support all HTTP request types.
    """

    def __init__(self, url, data=None, method=None):
        """
        Initialise a new HTTP request.

        :Args:
         - url - String for the URL to send the request to.
         - data - Data to send with the request.
         - method - HTTP method name; when omitted it is inferred from
           `data` (a body implies POST, otherwise GET).
        """
        if method is None:
            method = 'POST' if data is not None else 'GET'
        elif method not in ('POST', 'PUT'):
            # Only POST and PUT requests may carry a body.
            data = None
        self._method = method
        urllib2.Request.__init__(self, url, data=data)

    def get_method(self):
        """
        Returns the HTTP method used by this request.
        """
        return self._method
class Response(object):
    """
    Represents an HTTP response.
    """

    def __init__(self, fp, code, headers, url):
        """
        Initialise a new Response.

        :Args:
         - fp - The response body file object.
         - code - The HTTP status code returned by the server.
         - headers - A dictionary of headers returned by the server.
         - url - URL of the retrieved resource represented by this Response.
        """
        self.fp, self.code, self.headers, self.url = fp, code, headers, url
        # Expose the body's read() directly, file-object style.
        self.read = fp.read

    def close(self):
        """
        Close the response body file object.
        """
        # Drop the references instead of calling fp.close().
        self.read = None
        self.fp = None

    def info(self):
        """
        Returns the response headers.
        """
        return self.headers

    def geturl(self):
        """
        Returns the URL for the resource returned in this response.
        """
        return self.url
class HttpErrorHandler(urllib2.HTTPDefaultErrorHandler):
    """
    A custom HTTP error handler.

    Used to return Response objects instead of raising an HTTPError exception.
    """

    def http_error_default(self, req, fp, code, msg, headers):
        """
        Default HTTP error handler.

        :Args:
         - req - The original Request object.
         - fp - The response body file object.
         - code - The HTTP status code returned by the server.
         - msg - The HTTP status message returned by the server.
         - headers - The response headers.

        :Returns:
          A new Response object.
        """
        # Wrap the error response in a Response instead of letting urllib2
        # raise HTTPError, so callers can inspect Response.code themselves.
        return Response(fp, code, headers, req.get_full_url())
class RemoteConnection(object):
    """
    A connection with the Remote WebDriver server.
    Communicates with the server using the WebDriver wire protocol:
    http://code.google.com/p/selenium/wiki/JsonWireProtocol
    """
    def __init__(self, remote_server_addr):
        """
        Initialise the connection.
        :Args:
         - remote_server_addr - URL of the remote WebDriver server.
        """
        # Attempt to resolve the hostname and get an IP address.
        parsed_url = urlparse.urlparse(remote_server_addr)
        if parsed_url.hostname:
            try:
                netloc = socket.gethostbyname(parsed_url.hostname)
                if parsed_url.port:
                    netloc += ':%d' % parsed_url.port
                if parsed_url.username:
                    auth = parsed_url.username
                    if parsed_url.password:
                        auth += ':%s' % parsed_url.password
                    netloc = '%s@%s' % (auth, netloc)
                remote_server_addr = urlparse.urlunparse(
                    (parsed_url.scheme, netloc, parsed_url.path,
                     parsed_url.params, parsed_url.query, parsed_url.fragment))
            except socket.gaierror:
                # Resolution failure is non-fatal; keep the original URL.
                LOGGER.info('Could not get IP address for host: %s' %
                            parsed_url.hostname)
        self._url = remote_server_addr
        # Maps each Command constant to its (HTTP method, URI template)
        # pair; $-placeholders are filled in by execute().
        self._commands = {
            Command.NEW_SESSION: ('POST', '/session'),
            Command.QUIT: ('DELETE', '/session/$sessionId'),
            Command.GET_CURRENT_WINDOW_HANDLE:
                ('GET', '/session/$sessionId/window_handle'),
            Command.GET_WINDOW_HANDLES:
                ('GET', '/session/$sessionId/window_handles'),
            Command.GET: ('POST', '/session/$sessionId/url'),
            Command.GO_FORWARD: ('POST', '/session/$sessionId/forward'),
            Command.GO_BACK: ('POST', '/session/$sessionId/back'),
            Command.REFRESH: ('POST', '/session/$sessionId/refresh'),
            Command.EXECUTE_SCRIPT: ('POST', '/session/$sessionId/execute'),
            Command.GET_CURRENT_URL: ('GET', '/session/$sessionId/url'),
            Command.GET_TITLE: ('GET', '/session/$sessionId/title'),
            Command.GET_PAGE_SOURCE: ('GET', '/session/$sessionId/source'),
            Command.SCREENSHOT: ('GET', '/session/$sessionId/screenshot'),
            Command.SET_BROWSER_VISIBLE:
                ('POST', '/session/$sessionId/visible'),
            Command.IS_BROWSER_VISIBLE: ('GET', '/session/$sessionId/visible'),
            Command.FIND_ELEMENT: ('POST', '/session/$sessionId/element'),
            Command.FIND_ELEMENTS: ('POST', '/session/$sessionId/elements'),
            Command.GET_ACTIVE_ELEMENT:
                ('POST', '/session/$sessionId/element/active'),
            Command.FIND_CHILD_ELEMENT:
                ('POST', '/session/$sessionId/element/$id/element'),
            Command.FIND_CHILD_ELEMENTS:
                ('POST', '/session/$sessionId/element/$id/elements'),
            Command.CLICK_ELEMENT: ('POST', '/session/$sessionId/element/$id/click'),
            Command.CLEAR_ELEMENT: ('POST', '/session/$sessionId/element/$id/clear'),
            Command.SUBMIT_ELEMENT: ('POST', '/session/$sessionId/element/$id/submit'),
            Command.GET_ELEMENT_TEXT: ('GET', '/session/$sessionId/element/$id/text'),
            Command.SEND_KEYS_TO_ELEMENT:
                ('POST', '/session/$sessionId/element/$id/value'),
            Command.SEND_KEYS_TO_ACTIVE_ELEMENT:
                ('POST', '/session/$sessionId/keys'),
            Command.UPLOAD_FILE: ('POST', "/session/$sessionId/file"),
            Command.GET_ELEMENT_VALUE:
                ('GET', '/session/$sessionId/element/$id/value'),
            Command.GET_ELEMENT_TAG_NAME:
                ('GET', '/session/$sessionId/element/$id/name'),
            Command.IS_ELEMENT_SELECTED:
                ('GET', '/session/$sessionId/element/$id/selected'),
            Command.SET_ELEMENT_SELECTED:
                ('POST', '/session/$sessionId/element/$id/selected'),
            Command.TOGGLE_ELEMENT:
                ('POST', '/session/$sessionId/element/$id/toggle'),
            Command.IS_ELEMENT_ENABLED:
                ('GET', '/session/$sessionId/element/$id/enabled'),
            Command.IS_ELEMENT_DISPLAYED:
                ('GET', '/session/$sessionId/element/$id/displayed'),
            Command.HOVER_OVER_ELEMENT:
                ('POST', '/session/$sessionId/element/$id/hover'),
            Command.GET_ELEMENT_LOCATION:
                ('GET', '/session/$sessionId/element/$id/location'),
            Command.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW:
                ('GET', '/session/$sessionId/element/$id/location_in_view'),
            Command.GET_ELEMENT_SIZE:
                ('GET', '/session/$sessionId/element/$id/size'),
            Command.GET_ELEMENT_ATTRIBUTE:
                ('GET', '/session/$sessionId/element/$id/attribute/$name'),
            Command.ELEMENT_EQUALS:
                ('GET', '/session/$sessionId/element/$id/equals/$other'),
            Command.GET_ALL_COOKIES: ('GET', '/session/$sessionId/cookie'),
            Command.ADD_COOKIE: ('POST', '/session/$sessionId/cookie'),
            Command.DELETE_ALL_COOKIES:
                ('DELETE', '/session/$sessionId/cookie'),
            Command.DELETE_COOKIE:
                ('DELETE', '/session/$sessionId/cookie/$name'),
            Command.SWITCH_TO_FRAME: ('POST', '/session/$sessionId/frame'),
            Command.SWITCH_TO_WINDOW: ('POST', '/session/$sessionId/window'),
            Command.CLOSE: ('DELETE', '/session/$sessionId/window'),
            Command.DRAG_ELEMENT:
                ('POST', '/session/$sessionId/element/$id/drag'),
            Command.GET_SPEED: ('GET', '/session/$sessionId/speed'),
            Command.SET_SPEED: ('POST', '/session/$sessionId/speed'),
            # NOTE: this key appeared twice in the original dict literal
            # (with an identical value); the duplicate has been removed.
            Command.GET_ELEMENT_VALUE_OF_CSS_PROPERTY:
                ('GET', '/session/$sessionId/element/$id/css/$propertyName'),
            Command.IMPLICIT_WAIT:
                ('POST', '/session/$sessionId/timeouts/implicit_wait'),
            Command.EXECUTE_ASYNC_SCRIPT: ('POST', '/session/$sessionId/execute_async'),
            Command.SET_SCRIPT_TIMEOUT:
                ('POST', '/session/$sessionId/timeouts/async_script'),
            Command.DISMISS_ALERT:
                ('POST', '/session/$sessionId/dismiss_alert'),
            Command.ACCEPT_ALERT:
                ('POST', '/session/$sessionId/accept_alert'),
            Command.SET_ALERT_VALUE:
                ('POST', '/session/$sessionId/alert_text'),
            Command.GET_ALERT_TEXT:
                ('GET', '/session/$sessionId/alert_text'),
            Command.CLICK:
                ('POST', '/session/$sessionId/click'),
            Command.DOUBLE_CLICK:
                ('POST', '/session/$sessionId/doubleclick'),
            Command.MOUSE_DOWN:
                ('POST', '/session/$sessionId/buttondown'),
            Command.MOUSE_UP:
                ('POST', '/session/$sessionId/buttonup'),
            Command.MOVE_TO:
                ('POST', '/session/$sessionId/moveto'),
            Command.GET_WINDOW_SIZE:
                ('GET', '/session/$sessionId/window/$windowHandle/size'),
            Command.SET_WINDOW_SIZE:
                ('POST', '/session/$sessionId/window/$windowHandle/size'),
            Command.GET_WINDOW_POSITION:
                ('GET', '/session/$sessionId/window/$windowHandle/position'),
            Command.SET_WINDOW_POSITION:
                ('POST', '/session/$sessionId/window/$windowHandle/position'),
            Command.MAXIMIZE_WINDOW:
                ('POST', '/session/$sessionId/window/$windowHandle/maximize'),
            Command.SET_SCREEN_ORIENTATION:
                ('POST', '/session/$sessionId/orientation'),
            Command.GET_SCREEN_ORIENTATION:
                ('GET', '/session/$sessionId/orientation'),
        }
    def execute(self, command, params):
        """
        Send a command to the remote server.
        Any path substitutions required for the URL mapped to the command should be
        included in the command parameters.
        :Args:
         - command - A string specifying the command to execute.
         - params - A dictionary of named parameters to send with the command as
           its JSON payload.
        """
        # Use .get() so an unknown command trips the assertion below with
        # its explanatory message instead of raising a bare KeyError first
        # (the original subscript made the assert unreachable).
        command_info = self._commands.get(command)
        assert command_info is not None, 'Unrecognised command %s' % command
        data = utils.dump_json(params)
        path = string.Template(command_info[1]).substitute(params)
        url = '%s%s' % (self._url, path)
        return self._request(url, method=command_info[0], data=data)
    def _request(self, url, data=None, method=None):
        """
        Send an HTTP request to the remote server.
        :Args:
         - url - The URL to send the request to.
         - data - The message body to send.
         - method - A string for the HTTP method to send the request with.
        :Returns:
        A dictionary with the server's parsed JSON response.
        """
        LOGGER.debug('%s %s %s' % (method, url, data))
        parsed_url = urlparse.urlparse(url)
        auth = None
        password_manager = None
        if parsed_url.username:
            # Strip the credentials out of the URL and hand them to a
            # password manager so basic auth can be performed properly.
            netloc = parsed_url.hostname
            if parsed_url.port:
                netloc += ":%s" % parsed_url.port
            cleaned_url = urlparse.urlunparse((parsed_url.scheme, netloc, parsed_url.path,
                parsed_url.params, parsed_url.query, parsed_url.fragment))
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(None, "%s://%s" % (parsed_url.scheme, netloc), parsed_url.username, parsed_url.password)
            request = Request(cleaned_url, data=data, method=method)
        else:
            request = Request(url, data=data, method=method)
        request.add_header('Accept', 'application/json')
        if password_manager:
            opener = urllib2.build_opener(urllib2.HTTPRedirectHandler(),
                                          HttpErrorHandler(),
                                          urllib2.HTTPBasicAuthHandler(password_manager))
        else:
            opener = urllib2.build_opener(urllib2.HTTPRedirectHandler(),
                                          HttpErrorHandler())
        response = opener.open(request)
        try:
            if response.code > 399 and response.code < 500:
                # 4xx: surface the raw body alongside the status code.
                return {'status': response.code, 'value': response.read()}
            body = response.read().replace('\x00', '').strip()
            content_type = response.info().getheader('Content-Type') or []
            if 'application/json' in content_type:
                data = utils.load_json(body.strip())
                assert type(data) is dict, (
                    'Invalid server response body: %s' % body)
                assert 'status' in data, (
                    'Invalid server response; no status: %s' % body)
                # Some of the drivers incorrectly return a response
                # with no 'value' field when they should return null.
                if 'value' not in data:
                    data['value'] = None
                return data
            elif 'image/png' in content_type:
                data = {'status': 0, 'value': body.strip()}
                return data
            # Any other content type falls through and returns None.
        finally:
            response.close()
| |
# Copyright (C) 2015, Red Hat, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import sys
import unittest
from base64 import b64decode
try:
from unittest import mock
except ImportError: # pragma: no cover
import mock
from dns.rdataclass import IN as RDCLASS_IN
from dns.rdatatype import SRV as RDTYPE_SRV
from dns.rdtypes.IN.SRV import SRV
try:
from webtest import TestApp as WebTestApp
except ImportError:
print("webtest not installed! Tests will be skipped")
WebTestApp = "skip"
import kdcproxy
from kdcproxy import codec
from kdcproxy import config
from kdcproxy.config import mit
# Directory containing this test module; used to locate fixture files.
HERE = os.path.dirname(os.path.abspath(__file__))
# Kerberos configuration fixture consumed by the MITConfig tests below.
KRB5_CONFIG = os.path.join(HERE, 'tests.krb5.conf')
@unittest.skipIf(WebTestApp == "skip", "webtest not installed")
class KDCProxyWSGITests(unittest.TestCase):
    """End-to-end WSGI tests driving kdcproxy.Application via webtest."""
    # Canned socket.getaddrinfo() result for the fake KDC: TCP (proto 6),
    # UDP (proto 17) and raw (proto 0) entries for the same address.
    addrinfo = [
        (2, 1, 6, '', ('128.66.0.2', 88)),
        (2, 2, 17, '', ('128.66.0.2', 88)),
        (2, 3, 0, '', ('128.66.0.2', 88))
    ]
    def setUp(self): # noqa
        """Build the app with a mocked KDC reply and DNS resolver."""
        self.app = kdcproxy.Application()
        # Patch the name-mangled private attributes directly.
        self.await_reply = self.app._Application__await_reply = mock.Mock()
        self.await_reply.return_value = b'RESPONSE'
        self.resolver = self.app._Application__resolver = mock.Mock()
        self.resolver.lookup.return_value = ["kerberos://k1.kdcproxy.test.:88"]
        self.tapp = WebTestApp(self.app)
    def post(self, body, expect_errors=False):
        """POST *body* to the proxy using the Kerberos content type."""
        return self.tapp.post(
            '/', body, [("Content-Type", "application/kerberos")],
            expect_errors=expect_errors
        )
    def assert_response(self, response):
        """Assert the proxy wrapped the mocked b'RESPONSE' correctly."""
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content_type, 'application/kerberos')
        self.assertEqual(response.body, b'0\x0c\xa0\n\x04\x08RESPONSE')
    def test_get(self):
        """GET is rejected: the proxy only accepts POST."""
        r = self.tapp.get('/', expect_errors=True)
        self.assertEqual(r.status_code, 405)
        self.assertEqual(r.status, '405 Method Not Allowed')
        self.assertEqual(r.text, 'Method not allowed (GET).')
    @mock.patch('socket.getaddrinfo', return_value=addrinfo)
    @mock.patch('socket.socket')
    def test_post_asreq(self, m_socket, m_getaddrinfo):
        """An AS-REQ is resolved (kpasswd=False) and sent over TCP."""
        response = self.post(KDCProxyCodecTests.asreq1)
        self.assert_response(response)
        self.resolver.lookup.assert_called_once_with('FREEIPA.LOCAL',
                                                     kpasswd=False)
        m_getaddrinfo.assert_called_once_with('k1.kdcproxy.test.', 88)
        # socket.socket(AF_INET, SOCK_STREAM, IPPROTO_TCP)
        m_socket.assert_called_once_with(2, 1, 6)
        m_socket.return_value.connect.assert_called_once_with(
            ('128.66.0.2', 88)
        )
    @mock.patch('socket.getaddrinfo', return_value=addrinfo)
    @mock.patch('socket.socket')
    def test_post_kpasswd(self, m_socket, m_getaddrinfo):
        """A KPASSWD request is resolved with kpasswd=True."""
        response = self.post(KDCProxyCodecTests.kpasswdreq)
        self.assert_response(response)
        self.resolver.lookup.assert_called_once_with('FREEIPA.LOCAL',
                                                     kpasswd=True)
        m_getaddrinfo.assert_called_once_with('k1.kdcproxy.test.', 88)
        m_socket.assert_called_once_with(2, 1, 6)
        m_socket.return_value.connect.assert_called_once_with(
            ('128.66.0.2', 88)
        )
    def test_no_server(self):
        """An empty resolver result yields 503 for both request kinds."""
        self.resolver.lookup.reset_mock()
        self.resolver.lookup.return_value = []
        response = self.post(KDCProxyCodecTests.asreq1, True)
        self.resolver.lookup.assert_called_once_with('FREEIPA.LOCAL',
                                                     kpasswd=False)
        self.assertEqual(response.status_code, 503)
        self.resolver.lookup.reset_mock()
        self.resolver.lookup.return_value = []
        response = self.post(KDCProxyCodecTests.kpasswdreq, True)
        self.resolver.lookup.assert_called_once_with('FREEIPA.LOCAL',
                                                     kpasswd=True)
        self.assertEqual(response.status_code, 503)
def decode(data):
    """Strip escaped-newline markers and blanks, then base64-decode *data*."""
    cleaned = data.replace(b'\\n', b'').replace(b' ', b'')
    return b64decode(cleaned)
class KDCProxyCodecTests(unittest.TestCase):
    """Round-trip tests for the kdcproxy ASN.1 codec fixtures."""
    # Realm shared by all fixture messages below.
    realm = 'FREEIPA.LOCAL'
    # Captured KDC-PROXY-MESSAGE blobs (base64, whitespace-tolerant).
    asreq1 = decode(b"""
    MIHEoIGwBIGtAAAAqWqBpjCBo6EDAgEFogMCAQqjDjAMMAqhBAICAJWiAgQApIGGMIGDo
    AcDBQBAAAAQoRIwEKADAgEBoQkwBxsFYWRtaW6iDxsNRlJFRUlQQS5MT0NBTKMiMCCgAw
    IBAqEZMBcbBmtyYnRndBsNRlJFRUlQQS5MT0NBTKURGA8yMDE1MDUxNDEwNDIzOFqnBgI
    EEchjtagUMBICARICARECARACARcCARkCARqhDxsNRlJFRUlQQS5MT0NBTA==
    """)
    asreq2 = decode(b"""
    MIIBJaCCARAEggEMAAABCGqCAQQwggEAoQMCAQWiAwIBCqNrMGkwDaEEAgIAhaIFBANNS
    VQwTKEDAgECokUEQzBBoAMCARKiOgQ48A25MkXWM1ZrTvaYMJcbFX7Hp7JW11omIwqOQd
    SSGKVZ9mzYLuL19RRhX9xrXbQS0klXRVgRWHMwCqEEAgIAlaICBACkgYYwgYOgBwMFAEA
    AABChEjAQoAMCAQGhCTAHGwVhZG1pbqIPGw1GUkVFSVBBLkxPQ0FMoyIwIKADAgECoRkw
    FxsGa3JidGd0Gw1GUkVFSVBBLkxPQ0FMpREYDzIwMTUwNTE0MTA0MjM4WqcGAgRXSy38q
    BQwEgIBEgIBEQIBEAIBFwIBGQIBGqEPGw1GUkVFSVBBLkxPQ0FM
    """)
    tgsreq = decode(b"""
    MIIDxaCCA7AEggOsAAADqGyCA6QwggOgoQMCAQWiAwIBDKOCAxowggMWMIICL6EDAgEBo
    oICJgSCAiJuggIeMIICGqADAgEFoQMCAQ6iBwMFAAAAAACjggFGYYIBQjCCAT6gAwIBBa
    EPGw1GUkVFSVBBLkxPQ0FMoiIwIKADAgECoRkwFxsGa3JidGd0Gw1GUkVFSVBBLkxPQ0F
    Mo4IBADCB/aADAgESoQMCAQGigfAEge3ODJahLoTF0Xl+DeWdBqy79TSJv6+L23WEuBQi
    CnvmiLGxFhe/zuW6LN9O0Ekb3moX4qFKW7bF/gw0GuuMemkIjLaZ2M5mZiaQQ456fU5dA
    +ntLs8C407x3TVu68TM1aDvQgyKVpQgTdjxTZVmdinueIxOQ5z2nTIyjA9W94umGrPIcc
    sOfwvTEqyVpXrQcXr2tj/o/WcDLh/hHMhlHRBr9uLBLdVh2xR1yRbwe/n1UsXckxRi/A/
    +YgGSW7YDFBXij9RpGaE0bpa8e4u/EkcQEgu66nwVrfNs/TvsTJ1VnL5LpicDZvXzm0gO
    y3OkgbowgbegAwIBEqKBrwSBrIWE4ylyvY7JpiGCJQJKpv8sd3tFK054UTDvs1UuBAiWz
    IwNOddrdb4YKKGC/ce3e/sX+CBvISNPsOqX4skXK0gnMCJaCU6H1QKNeJu1TJm8GxPQ28
    1B8ZrCnv9Vzput0YIXAFK1eoAfe9qnJVktLL9uwYfV7D4GDU634KtEvPeDTBVMmTVXpUR
    5HIXiE4Qw6bON74Ssg4n8YDoO0ZXdOIOOUh1+soMoUzjg2XIwgeChBAICAIiigdcEgdSg
    gdEwgc6hFzAVoAMCARChDgQMmmZqel1e6bYuSZBxooGyMIGvoAMCARKigacEgaQwxX40v
    E6S6aNej2Siwkr/JA/70sbSoR8JrET9q6DW0rtawnOzKGYYSNEs8GLWgeSQaqIKuWXDuT
    R898vv3RYY4nn1wSNQFFSOHxaVqdRzY55Z7HbO7OPTyQhPI31f1m8Tuxl7kpMM74Yhypj
    iQCe8RHrJUyCQay8AonQY11pRvRlwzcnbrB5GhegVmtp1Qhtv0Lj//yLHZ4MdVh5FV2N2
    8odz7KR2MHSgBwMFAEABAACiDxsNRlJFRUlQQS5MT0NBTKMnMCWgAwIBAaEeMBwbBGh0d
    HAbFGlwYXNydi5mcmVlaXBhLmxvY2FspREYDzIwMTUwNTE0MTA0MjM4WqcGAgRVUzCzqB
    QwEgIBEgIBEQIBEAIBFwIBGQIBGqEPGw1GUkVFSVBBLkxPQ0FM
    """)
    kpasswdreq = decode(b"""
    MIICeKCCAmMEggJfAAACWwJbAAECAm6CAf4wggH6oAMCAQWhAwIBDqIHAwUAAAAAAKOCA
    UFhggE9MIIBOaADAgEFoQ8bDUZSRUVJUEEuTE9DQUyiHTAboAMCAQGhFDASGwZrYWRtaW
    4bCGNoYW5nZXB3o4IBADCB/aADAgESoQMCAQGigfAEge3swqU5Z7QS15Hf8+o9UPdl3H7
    Xx+ZpEsg2Fj9b0KB/xnnkbTbJs4oic8h30jOtVfq589lWN/jx3CIRdyPndTfJLZCQZN4Q
    sm6Gye/czzfMFtIOdYSdDL0EpW5/adRsbX253dxqy7431s9Jxsx4xXIowOkD/cCHcrAw3
    SLchLXVXGbgcnnphAo+po8cJ7omMF0c0F0eOplKQkbbjoNJSO/TeIQJdgmUrxpy9c8Uhc
    ScdkajtyxGD9YvXDc8Ik7OCFn03e9bd791qasiBSTgCjWjV3IvcDohjF/RpxftA5LxmGS
    /C1KSG1AZBqivSMOkgZ8wgZygAwIBEqKBlASBkerR33SV6Gv+yTLbqByadkgmCAu4w1ms
    NifEss5TAhcEJEnpyqPbZgMfvksc+ULsnsdzovskhd1NbhJx+f9B0mxUzpNw1uRXMVbNw
    FGUSlYwVr+h1Hzs7/PLSsRV/jPNA+kbqbTcIkPOWe8OGGWuvbp24w6yrY3rcUCbEfhs+m
    xuSIJwMDwEUb2GqRwTkBhCGgd1UTBPoAMCAQWhAwIBFaNDMEGgAwIBEqI6BDh433pZMyL
    WiOUtyZnqOyiMoCe7ulv7TVyE5PGccaA3vXPzzBwh5P9wEFDl0alUBuHOKgBbtzOAgKEP
    Gw1GUkVFSVBBLkxPQ0FM
    """)
    def assert_decode(self, data, cls):
        """Decode *data* manually and via codec; check both yield *cls*."""
        # manual decode
        request, realm, _ = codec.asn1mod.decode_proxymessage(data)
        self.assertEqual(realm, self.realm)
        inst = cls.parse_request(realm, request)
        self.assertIsInstance(inst, cls)
        self.assertEqual(inst.realm, self.realm)
        self.assertEqual(inst.request, request)
        if cls is codec.KPASSWDProxyRequest:
            self.assertEqual(inst.version, 1)
        # codec decode
        outer = codec.decode(data)
        self.assertEqual(outer.realm, self.realm)
        self.assertIsInstance(outer, cls)
        # re-decode
        der = codec.encode(outer.request)
        self.assertIsInstance(der, bytes)
        decoded = codec.decode(der)
        self.assertIsInstance(decoded, cls)
        return outer
    def test_asreq(self):
        """Both AS-REQ fixtures decode as ASProxyRequest."""
        outer = self.assert_decode(self.asreq1, codec.ASProxyRequest)
        self.assertEqual(str(outer), 'FREEIPA.LOCAL AS-REQ (169 bytes)')
        outer = self.assert_decode(self.asreq2, codec.ASProxyRequest)
        self.assertEqual(str(outer), 'FREEIPA.LOCAL AS-REQ (264 bytes)')
    def test_tgsreq(self):
        """The TGS-REQ fixture decodes as TGSProxyRequest."""
        outer = self.assert_decode(self.tgsreq, codec.TGSProxyRequest)
        self.assertEqual(str(outer), 'FREEIPA.LOCAL TGS-REQ (936 bytes)')
    def test_kpasswdreq(self):
        """The KPASSWD fixture decodes as KPASSWDProxyRequest."""
        outer = self.assert_decode(self.kpasswdreq,
                                   codec.KPASSWDProxyRequest)
        self.assertEqual(
            str(outer),
            'FREEIPA.LOCAL KPASSWD-REQ (603 bytes) (version 0x0001)'
        )
    def test_asn1mod(self):
        """Exactly the ASN.1 backend selected by the env var is loaded."""
        modmap = {
            'asn1crypto': (
                'kdcproxy.parse_asn1crypto', 'kdcproxy.parse_pyasn1'),
            'pyasn1': (
                'kdcproxy.parse_pyasn1', 'kdcproxy.parse_asn1crypto'),
        }
        asn1mod = os.environ.get('KDCPROXY_ASN1MOD', None)
        if asn1mod is None:
            self.fail("Tests require KDCPROXY_ASN1MOD env var.")
        self.assertIn(asn1mod, modmap)
        mod, opposite = modmap[asn1mod]
        # The chosen backend must be imported, its alternative must not.
        self.assertIn(mod, set(sys.modules))
        self.assertNotIn(opposite, set(sys.modules))
class KDCProxyConfigTests(unittest.TestCase):
    """Tests for the MIT krb5.conf and DNS SRV resolver backends."""
    def test_mit_config(self):
        """MITConfig reads KDC/kpasswd servers from the fixture file."""
        with mock.patch.dict('os.environ', {'KRB5_CONFIG': KRB5_CONFIG}):
            cfg = mit.MITConfig()
        self.assertIs(cfg.use_dns(), False)
        self.assertEqual(
            cfg.lookup('KDCPROXY.TEST'),
            (
                'kerberos://k1.kdcproxy.test.:88',
                'kerberos://k2.kdcproxy.test.:1088'
            )
        )
        # wrong? man page says port 464 on admin server
        self.assertEqual(
            cfg.lookup('KDCPROXY.TEST', kpasswd=True),
            (
                'kpasswd://adm.kdcproxy.test.:1749',
                'kpasswd://adm.kdcproxy.test.'
            )
        )
        # Positional and keyword forms of the kpasswd flag agree.
        self.assertEqual(
            cfg.lookup('KDCPROXY.TEST', kpasswd=True),
            cfg.lookup('KDCPROXY.TEST', True)
        )
        # Unknown realms yield empty tuples.
        self.assertEqual(cfg.lookup('KDCPROXY.MISSING'), ())
        self.assertEqual(cfg.lookup('KDCPROXY.MISSING', True), ())
    def mksrv(self, txt):
        """Build a dnspython SRV record from 'prio weight port target'."""
        priority, weight, port, target = txt.split(' ')
        return SRV(
            rdclass=RDCLASS_IN, # Internet
            rdtype=RDTYPE_SRV, # Server Selector
            priority=int(priority),
            weight=int(weight),
            port=int(port),
            target=target
        )
    @mock.patch('dns.resolver.query')
    def test_dns_config(self, m_query):
        """DNSResolver orders servers by priority, TCP before UDP."""
        cfg = config.DNSResolver()
        tcp = [
            self.mksrv('30 100 88 k1_tcp.kdcproxy.test.'),
            self.mksrv('10 100 1088 k2_tcp.kdcproxy.test.'),
        ]
        udp = [
            self.mksrv('0 100 88 k1_udp.kdcproxy.test.'),
            self.mksrv('10 100 1088 k2_udp.kdcproxy.test.'),
            self.mksrv('0 100 88 k3_udp.kdcproxy.test.'),
        ]
        # side_effect order must match the resolver's query order below.
        m_query.side_effect = [tcp, udp]
        self.assertEqual(
            tuple(cfg.lookup('KDCPROXY.TEST')),
            (
                'kerberos://k2_tcp.kdcproxy.test:1088',
                'kerberos://k1_tcp.kdcproxy.test:88',
                'kerberos://k1_udp.kdcproxy.test:88',
                'kerberos://k3_udp.kdcproxy.test:88',
                'kerberos://k2_udp.kdcproxy.test:1088'
            )
        )
        self.assertEqual(m_query.call_count, 2)
        m_query.assert_any_call('_kerberos._tcp.KDCPROXY.TEST', RDTYPE_SRV)
        m_query.assert_any_call('_kerberos._udp.KDCPROXY.TEST', RDTYPE_SRV)
        m_query.reset_mock()
        adm = [
            self.mksrv('0 0 749 adm.kdcproxy.test.'),
        ]
        empty = []
        # kpasswd lookups fall back to _kerberos-adm when _kpasswd is empty.
        m_query.side_effect = (empty, adm, empty, empty)
        self.assertEqual(
            tuple(cfg.lookup('KDCPROXY.TEST', kpasswd=True)),
            (
                'kpasswd://adm.kdcproxy.test:749',
            )
        )
        self.assertEqual(m_query.call_count, 4)
        m_query.assert_any_call('_kpasswd._tcp.KDCPROXY.TEST', RDTYPE_SRV)
        m_query.assert_any_call('_kerberos-adm._tcp.KDCPROXY.TEST', RDTYPE_SRV)
        m_query.assert_any_call('_kpasswd._udp.KDCPROXY.TEST', RDTYPE_SRV)
        m_query.assert_any_call('_kerberos-adm._udp.KDCPROXY.TEST', RDTYPE_SRV)
# Allow running this module directly as a test script.
if __name__ == "__main__":
    unittest.main()
| |
import json
import re
from django.core.cache import cache
from django.test import SimpleTestCase
from corehq.util.es.elasticsearch import ConnectionError
from corehq.apps.es.tests.utils import es_test
from unittest.mock import patch
from openpyxl import load_workbook
from couchexport.export import get_writer
from couchexport.models import Format
from couchexport.transforms import couch_to_excel_datetime
from pillowtop.es_utils import initialize_index_and_mapping
from corehq.apps.export.const import (
CASE_NAME_TRANSFORM,
DEID_DATE_TRANSFORM,
EMPTY_VALUE,
MISSING_VALUE,
)
from corehq.apps.export.export import (
ExportFile,
_ExportWriter,
get_export_file,
get_export_writer,
write_export_instance,
)
from corehq.apps.export.models import (
MAIN_TABLE,
CaseExportInstance,
ExportColumn,
ExportItem,
FormExportInstance,
MultipleChoiceItem,
Option,
PathNode,
ScalarItem,
SplitExportColumn,
StockFormExportColumn,
StockItem,
TableConfiguration,
)
from corehq.apps.export.tests.util import (
DEFAULT_CASE_TYPE,
DOMAIN,
get_export_json,
new_case,
)
from corehq.elastic import get_es_new, send_to_elasticsearch
from corehq.pillows.mappings.case_mapping import CASE_INDEX_INFO
from corehq.util.elastic import ensure_index_deleted
from corehq.util.files import TransientTempfile
from corehq.util.test_utils import flag_enabled, trap_extra_setup
def assert_instance_gives_results(docs, export_instance, expected_result):
    """Export *docs* through *export_instance* and compare the JSON output.

    The export is written to a throwaway temp file, read back, parsed as
    JSON and asserted equal to *expected_result*.
    """
    with TransientTempfile() as scratch_path:
        export_writer = get_export_writer([export_instance], scratch_path)
        with export_writer.open([export_instance]):
            write_export_instance(export_writer, export_instance, docs)
        with ExportFile(export_writer.path, export_writer.format) as result_file:
            actual = json.loads(result_file.read())
            assert actual == expected_result
class WriterTest(SimpleTestCase):
docs = [
{
'domain': 'my-domain',
'_id': '1234',
"form": {
"q1": "foo",
"q2": {
"q4": "bar",
},
"q3": "baz",
"mc": "two extra"
}
},
{
'domain': 'my-domain',
'_id': '12345',
"form": {
"q1": "bip",
"q2": {
"q4": "boop",
},
"q3": "bop",
"mc": "one two",
"date": "2015-07-22T14:16:49.584880Z",
}
},
]
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_simple_table(self, export_save):
"""
Confirm that some simple documents and a simple FormExportInstance
are writtern with _write_export_file() correctly
"""
export_instance = FormExportInstance(
export_format=Format.JSON,
tables=[
TableConfiguration(
label="My table",
selected=True,
columns=[
ExportColumn(
label="Q3",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q3')],
),
selected=True
),
ExportColumn(
label="Q1",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q1')],
),
selected=True
),
]
)
]
)
assert_instance_gives_results(self.docs, export_instance, {
'My table': {
'headers': ['Q3', 'Q1'],
'rows': [['baz', 'foo'], ['bop', 'bip']],
}
})
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
@patch('corehq.apps.export.export.MAX_EXPORTABLE_ROWS', 2)
@flag_enabled('PAGINATED_EXPORTS')
def test_paginated_table(self, export_save):
export_instance = FormExportInstance(
export_format=Format.JSON,
tables=[
TableConfiguration(
label="My table",
selected=True,
columns=[
ExportColumn(
label="Q3",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q3')],
),
selected=True
),
ExportColumn(
label="Q1",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q1')],
),
selected=True
),
]
)
]
)
assert_instance_gives_results(self.docs + self.docs, export_instance, {
'My table_000': {
'headers': ['Q3', 'Q1'],
'rows': [['baz', 'foo'], ['bop', 'bip']],
},
'My table_001': {
'headers': ['Q3', 'Q1'],
'rows': [['baz', 'foo'], ['bop', 'bip']],
}
})
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_split_questions(self, export_save):
"""Ensure columns are split when `split_multiselects` is set to True"""
export_instance = FormExportInstance(
export_format=Format.JSON,
domain=DOMAIN,
case_type=DEFAULT_CASE_TYPE,
split_multiselects=True,
tables=[TableConfiguration(
label="My table",
selected=True,
path=[],
columns=[
SplitExportColumn(
label="MC",
item=MultipleChoiceItem(
path=[PathNode(name='form'), PathNode(name='mc')],
options=[
Option(value='one'),
Option(value='two'),
]
),
selected=True,
)
]
)]
)
assert_instance_gives_results(self.docs, export_instance, {
'My table': {
'headers': ['MC | one', 'MC | two', 'MC | extra'],
'rows': [[EMPTY_VALUE, 1, 'extra'], [1, 1, '']],
}
})
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_array_data_in_scalar_question(self, export_save):
'''
This test ensures that when a question id has array data
that we return still return a string for scalar data.
This happens rarely
'''
doc = {
'domain': 'my-domain',
'_id': '12345',
"form": {
"array": ["one", "two"],
}
}
export_instance = FormExportInstance(
export_format=Format.JSON,
domain=DOMAIN,
xmlns='xmlns',
tables=[TableConfiguration(
label="My table",
selected=True,
path=[],
columns=[
ExportColumn(
label="Scalar Array",
item=ScalarItem(path=[PathNode(name='form'), PathNode(name='array')]),
selected=True,
)
]
)]
)
assert_instance_gives_results([doc], export_instance, {
'My table': {
'headers': ['Scalar Array'],
'rows': [['one two']],
}
})
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_form_stock_columns(self, export_save):
"""Ensure that we can export stock properties in a form export"""
docs = [{
'_id': 'simone-biles',
'domain': DOMAIN,
'form': {
'balance': [
{
'@type': 'question-id',
'entry': {
'@quantity': '2',
}
}, {
'@type': 'other-question-id',
'entry': {
'@quantity': '3',
}
}]
},
}, {
'_id': 'sam-mikulak',
'domain': DOMAIN,
'form': {
'balance': {
'@type': 'question-id',
'entry': {
'@quantity': '2',
}
},
},
}, {
'_id': 'kerri-walsh',
'domain': DOMAIN,
'form': {
'balance': {
'@type': 'other-question-id',
'entry': {
'@quantity': '2',
}
},
},
}, {
'_id': 'april-ross',
'domain': DOMAIN,
'form': {},
}]
export_instance = FormExportInstance(
export_format=Format.JSON,
domain=DOMAIN,
tables=[TableConfiguration(
label="My table",
selected=True,
path=[],
columns=[
StockFormExportColumn(
label="StockItem @type",
item=StockItem(
path=[
PathNode(name='form'),
PathNode(name='balance:question-id'),
PathNode(name='@type'),
],
),
selected=True,
),
StockFormExportColumn(
label="StockItem @quantity",
item=StockItem(
path=[
PathNode(name='form'),
PathNode(name='balance:question-id'),
PathNode(name='entry'),
PathNode(name='@quantity'),
],
),
selected=True,
),
]
)]
)
assert_instance_gives_results(docs, export_instance, {
'My table': {
'headers': ['StockItem @type', 'StockItem @quantity'],
'rows': [
['question-id', '2'],
['question-id', '2'],
[MISSING_VALUE, MISSING_VALUE],
[MISSING_VALUE, MISSING_VALUE],
],
}
})
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_transform_dates(self, export_save):
"""Ensure dates are transformed for excel when `transform_dates` is set to True"""
export_instance = FormExportInstance(
export_format=Format.JSON,
domain=DOMAIN,
case_type=DEFAULT_CASE_TYPE,
transform_dates=True,
tables=[TableConfiguration(
label="My table",
selected=True,
path=[],
columns=[
ExportColumn(
label="Date",
item=MultipleChoiceItem(
path=[PathNode(name='form'), PathNode(name='date')],
),
selected=True,
)
]
)]
)
assert_instance_gives_results(self.docs, export_instance, {
'My table': {
'headers': ['Date'],
'rows': [[MISSING_VALUE], [couch_to_excel_datetime('2015-07-22T14:16:49.584880Z', None)]],
}
})
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_split_questions_false(self, export_save):
"""Ensure multiselects are not split when `split_multiselects` is set to False"""
export_instance = FormExportInstance(
export_format=Format.JSON,
domain=DOMAIN,
case_type=DEFAULT_CASE_TYPE,
split_multiselects=False,
tables=[TableConfiguration(
label="My table",
selected=True,
path=[],
columns=[
SplitExportColumn(
label="MC",
item=MultipleChoiceItem(
path=[PathNode(name='form'), PathNode(name='mc')],
options=[
Option(value='one'),
Option(value='two'),
]
),
selected=True,
)
]
)]
)
assert_instance_gives_results(self.docs, export_instance, {
'My table': {
'headers': ['MC'],
'rows': [['two extra'], ['one two']],
}
})
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_multi_table(self, export_save):
export_instance = FormExportInstance(
export_format=Format.JSON,
tables=[
TableConfiguration(
label="My table",
selected=True,
path=[],
columns=[
ExportColumn(
label="Q3",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q3')],
),
selected=True,
),
]
),
TableConfiguration(
label="My other table",
selected=True,
path=[PathNode(name='form', is_repeat=False), PathNode(name="q2", is_repeat=False)],
columns=[
ExportColumn(
label="Q4",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q2'), PathNode(name='q4')],
),
selected=True,
),
]
)
]
)
assert_instance_gives_results(self.docs, export_instance, {
'My table': {
'headers': ['Q3'],
'rows': [['baz'], ['bop']],
},
'My other table': {
'headers': ['Q4'],
'rows': [['bar'], ['boop']],
}
})
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_multi_table_order(self, export_save):
tables = [
TableConfiguration(
label="My table {}".format(i),
selected=True,
path=[],
columns=[
ExportColumn(
label="Q{}".format(i),
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q{}'.format(i))],
),
selected=True,
),
]
)
for i in range(10)
]
export_instance = FormExportInstance(
export_format=Format.HTML,
tables=tables
)
docs = [
{
'domain': 'my-domain',
'_id': '1234',
"form": {'q{}'.format(i): 'value {}'.format(i) for i in range(10)}
}
]
with TransientTempfile() as temp_path:
writer = get_export_writer([export_instance], temp_path)
with writer.open([export_instance]):
write_export_instance(writer, export_instance, docs)
with ExportFile(writer.path, writer.format) as export:
exported_tables = [table for table in re.findall(b'<table>', export.read())]
expected_tables = [t.label for t in tables]
self.assertEqual(len(expected_tables), len(exported_tables))
self.assertTrue(export_save.called)
@patch('corehq.apps.export.models.FormExportInstance.save')
def test_multiple_write_export_instance_calls(self, export_save):
"""
Confirm that calling _write_export_instance() multiple times
(as part of a bulk export) works as expected.
"""
export_instances = [
FormExportInstance(
tables=[
TableConfiguration(
label="My table",
selected=True,
path=[],
columns=[
ExportColumn(
label="Q3",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q3')],
),
selected=True,
),
]
),
]
),
FormExportInstance(
tables=[
TableConfiguration(
label="My other table",
selected=True,
path=[PathNode(name="form", is_repeat=False), PathNode(name="q2", is_repeat=False)],
columns=[
ExportColumn(
label="Q4",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q2'), PathNode(name='q4')],
),
selected=True,
),
]
)
]
),
FormExportInstance(
tables=[
TableConfiguration(
label="My other table",
selected=True,
path=[PathNode(name="form", is_repeat=False), PathNode(name="q2", is_repeat=False)],
columns=[
ExportColumn(
label="Q4",
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q2'), PathNode(name='q4')],
),
selected=True,
),
]
)
]
)
]
with TransientTempfile() as temp_path:
writer = _ExportWriter(get_writer(Format.JSON), temp_path)
with writer.open(export_instances):
write_export_instance(writer, export_instances[0], self.docs)
write_export_instance(writer, export_instances[1], self.docs)
write_export_instance(writer, export_instances[2], self.docs)
with ExportFile(writer.path, writer.format) as export:
self.assertEqual(
json.loads(export.read()),
{
'My table': {
'headers': ['Q3'],
'rows': [['baz'], ['bop']],
},
'Export2-My other table': {
'headers': ['Q4'],
'rows': [['bar'], ['boop']],
},
'Export3-My other table': {
'headers': ['Q4'],
'rows': [['bar'], ['boop']],
},
}
)
self.assertTrue(export_save.called)
    @patch('corehq.apps.export.models.FormExportInstance.save')
    def test_empty_location(self, export_save):
        """A scalar column whose value resolves to a dict (not a scalar)
        should export EMPTY_VALUE rather than the raw dict."""
        export_instance = FormExportInstance(
            export_format=Format.JSON,
            tables=[
                TableConfiguration(
                    label="My table",
                    selected=True,
                    columns=[
                        ExportColumn(
                            label="location",
                            item=ScalarItem(
                                path=[PathNode(name='form'), PathNode(name='meta'), PathNode(name='location')],
                            ),
                            selected=True
                        ),
                    ]
                )
            ]
        )
        # The location question carries only attributes ({'xmlns': 'abc'}),
        # i.e. no scalar text value.
        docs = [
            {
                'domain': 'my-domain',
                '_id': '1234',
                'form': {
                    'meta': {
                        'location': {'xmlns': 'abc'},
                    }
                }
            }
        ]
        assert_instance_gives_results(docs, export_instance, {
            'My table': {
                'headers': ['location'],
                'rows': [[EMPTY_VALUE]],
            }
        })
        self.assertTrue(export_save.called)
    @patch('corehq.apps.export.models.FormExportInstance.save')
    def test_empty_table_label(self, export_save):
        """A blank table label should fall back to the default sheet name
        'Sheet1' in the generated export."""
        export_instance = FormExportInstance(
            export_format=Format.JSON,
            domain=DOMAIN,
            case_type=DEFAULT_CASE_TYPE,
            split_multiselects=True,
            tables=[TableConfiguration(
                label="",
                selected=True,
                path=[],
                columns=[
                    ExportColumn(
                        label="Q1",
                        item=ScalarItem(
                            path=[PathNode(name='form'), PathNode(name='q1')],
                        ),
                        selected=True
                    ),
                ]
            )]
        )
        assert_instance_gives_results(self.docs, export_instance, {
            'Sheet1': {
                'headers': ['Q1'],
                'rows': [['foo'], ['bip']],
            }
        })
        self.assertTrue(export_save.called)
@es_test
class ExportTest(SimpleTestCase):
    """Case-export tests that run against a real Elasticsearch index.
    setUpClass indexes four cases; the tests below assert on the rows
    exported from those documents.
    """
    @classmethod
    def setUpClass(cls):
        super(ExportTest, cls).setUpClass()
        with trap_extra_setup(ConnectionError, msg="cannot connect to elasicsearch"):
            cls.es = get_es_new()
            initialize_index_and_mapping(cls.es, CASE_INDEX_INFO)
        # Case with a known id/name, used by test_case_name_transform.
        case = new_case(
            case_id='robin',
            name='batman',
            case_json={"foo": "apple", "bar": "banana", "date": '2016-4-24'},
        )
        send_to_elasticsearch('cases', case.to_json())
        # Case with a different owner.
        case = new_case(
            owner_id="some_other_owner",
            case_json={"foo": "apple", "bar": "banana", "date": '2016-4-04'},
        )
        send_to_elasticsearch('cases', case.to_json())
        # Case with a different case type; the expected results below contain
        # three rows, so presumably this one is filtered out of the
        # DEFAULT_CASE_TYPE exports.
        case = new_case(type="some_other_type", case_json={"foo": "apple", "bar": "banana"})
        send_to_elasticsearch('cases', case.to_json())
        # A closed case.
        case = new_case(closed=True, case_json={"foo": "apple", "bar": "banana"})
        send_to_elasticsearch('cases', case.to_json())
        # Make the just-indexed documents visible to searches.
        cls.es.indices.refresh(CASE_INDEX_INFO.index)
        cache.clear()
    @classmethod
    def tearDownClass(cls):
        ensure_index_deleted(CASE_INDEX_INFO.index)
        cache.clear()
        super(ExportTest, cls).tearDownClass()
    @patch('corehq.apps.export.models.CaseExportInstance.save')
    def test_get_export_file(self, export_save):
        """Two scalar columns export one row per matching case."""
        export_json = get_export_json(
            CaseExportInstance(
                export_format=Format.JSON,
                domain=DOMAIN,
                case_type=DEFAULT_CASE_TYPE,
                tables=[TableConfiguration(
                    label="My table",
                    selected=True,
                    path=[],
                    columns=[
                        ExportColumn(
                            label="Foo column",
                            item=ExportItem(
                                path=[PathNode(name="foo")]
                            ),
                            selected=True,
                        ),
                        ExportColumn(
                            label="Bar column",
                            item=ExportItem(
                                path=[PathNode(name="bar")]
                            ),
                            selected=True,
                        )
                    ]
                )]
            ),
        )
        self.assertEqual(
            export_json,
            {
                'My table': {
                    'headers': [
                        'Foo column',
                        'Bar column'],
                    'rows': [
                        ['apple', 'banana'],
                        ['apple', 'banana'],
                        ['apple', 'banana'],
                    ],
                }
            }
        )
        self.assertTrue(export_save.called)
    @patch('corehq.apps.export.models.FormExportInstance.save')
    def test_case_name_transform(self, export_save):
        """CASE_NAME_TRANSFORM turns a case id into the case's name, and
        yields MISSING_VALUE for an id that matches no indexed case."""
        docs = [
            {
                'domain': 'my-domain',
                '_id': '1234',
                "form": {
                    "caseid": "robin",
                },
            },
            {
                'domain': 'my-domain',
                '_id': '1234',
                "form": {
                    "caseid": "i-do-not-exist",
                },
            }
        ]
        export_instance = FormExportInstance(
            export_format=Format.JSON,
            tables=[
                TableConfiguration(
                    label="My table",
                    selected=True,
                    columns=[
                        ExportColumn(
                            label="case_name",
                            item=ScalarItem(
                                path=[PathNode(name='form'), PathNode(name='caseid')],
                                transform=CASE_NAME_TRANSFORM,
                            ),
                            selected=True
                        ),
                    ]
                )
            ]
        )
        assert_instance_gives_results(docs, export_instance, {
            'My table': {
                'headers': ['case_name'],
                'rows': [['batman'], [MISSING_VALUE]],
            }
        })
        self.assertTrue(export_save.called)
    @patch('couchexport.deid.DeidGenerator.random_number', return_value=3)
    @patch('corehq.apps.export.models.CaseExportInstance.save')
    def test_export_transforms(self, export_save, _):
        """A deid date transform offsets dates by the (mocked) random number
        and marks the column header as sensitive."""
        export_json = get_export_json(
            CaseExportInstance(
                export_format=Format.JSON,
                domain=DOMAIN,
                case_type=DEFAULT_CASE_TYPE,
                tables=[TableConfiguration(
                    label="My table",
                    selected=True,
                    path=[],
                    columns=[
                        ExportColumn(
                            label="DEID Date Transform column",
                            item=ExportItem(
                                path=[PathNode(name="date")]
                            ),
                            selected=True,
                            deid_transform=DEID_DATE_TRANSFORM,
                        )
                    ]
                )]
            ),
        )
        # Sort rows before comparing so the assertion does not depend on the
        # order documents come back from ES.
        export_json['My table']['rows'].sort()
        self.assertEqual(
            export_json,
            {
                'My table': {
                    'headers': [
                        'DEID Date Transform column *sensitive*',
                    ],
                    'rows': [
                        [MISSING_VALUE],
                        ['2016-04-07'],
                        ['2016-04-27'],  # offset by 3 since that's the mocked random offset
                    ],
                }
            }
        )
        self.assertTrue(export_save.called)
    @patch('corehq.apps.export.models.CaseExportInstance.save')
    def test_selected_false(self, export_save):
        """An unselected table contributes nothing to the export."""
        export_json = get_export_json(
            CaseExportInstance(
                export_format=Format.JSON,
                domain=DOMAIN,
                case_type=DEFAULT_CASE_TYPE,
                tables=[TableConfiguration(
                    label="My table",
                    selected=False,
                    path=[],
                    columns=[]
                )]
            )
        )
        self.assertEqual(export_json, {})
        self.assertTrue(export_save.called)
    @patch('corehq.apps.export.models.CaseExportInstance.save')
    def test_simple_bulk_export(self, export_save):
        """Bulk-exporting two instances yields one sheet per instance, each
        prefixed with "Export<n>-" to keep sheet names unique."""
        with TransientTempfile() as temp_path:
            export_file = get_export_file(
                [
                    CaseExportInstance(
                        export_format=Format.JSON,
                        domain=DOMAIN,
                        case_type=DEFAULT_CASE_TYPE,
                        tables=[TableConfiguration(
                            selected=True,
                            label="My table",
                            path=MAIN_TABLE,
                            columns=[
                                ExportColumn(
                                    label="Foo column",
                                    item=ExportItem(
                                        path=[PathNode(name="foo")]
                                    ),
                                    selected=True,
                                ),
                            ]
                        )]
                    ),
                    CaseExportInstance(
                        export_format=Format.JSON,
                        domain=DOMAIN,
                        case_type=DEFAULT_CASE_TYPE,
                        tables=[TableConfiguration(
                            label="My table",
                            selected=True,
                            path=MAIN_TABLE,
                            columns=[
                                ExportColumn(
                                    label="Bar column",
                                    item=ExportItem(
                                        path=[PathNode(name="bar")]
                                    ),
                                    selected=True,
                                )
                            ]
                        )]
                    ),
                ],
                [],  # No filters
                temp_path,
            )
            # NOTE(review): the instances request Format.JSON but the output
            # is read back as an Excel workbook — get_export_file presumably
            # controls the bulk format; confirm before relying on this.
            expected = {
                'Export1-My table': {
                    "A1": "Foo column",
                    "A2": "apple",
                    "A3": "apple",
                    "A4": "apple",
                },
                "Export2-My table": {
                    "A1": "Bar column",
                    "A2": "banana",
                    "A3": "banana",
                    "A4": "banana",
                },
            }
            with export_file as export:
                wb = load_workbook(export)
                self.assertEqual(wb.get_sheet_names(), ["Export1-My table", "Export2-My table"])
                for sheet in expected.keys():
                    for cell in expected[sheet].keys():
                        self.assertEqual(
                            wb[sheet][cell].value,
                            expected[sheet][cell],
                            'AssertionError: Sheet "{}", cell "{}" expected: "{}", got "{}"'.format(
                                sheet, cell, expected[sheet][cell], wb[sheet][cell].value
                            )
                        )
        self.assertTrue(export_save.called)
class TableHeaderTest(SimpleTestCase):
    """Checks header rendering for columns that carry a deid transform."""

    def test_deid_column_headers(self):
        # A deid transform tags the column header with a "*sensitive*" suffix.
        column = ExportColumn(
            label="my column",
            deid_transform="deid_id",
        )
        self.assertEqual(["my column *sensitive*"], column.get_headers())
| |
"""
fs.multifs
==========
A MultiFS is a filesystem composed of a sequence of other filesystems, where
the directory structure of each filesystem is overlaid over the previous
filesystem. When you attempt to access a file from the MultiFS it will try
each 'child' FS in order, until it either finds a path that exists or raises a
ResourceNotFoundError.
One use for such a filesystem would be to selectively override a set of files,
to customize behavior. For example, to create a filesystem that could be used
to *theme* a web application. We start with the following directories::
`-- templates
|-- snippets
| `-- panel.html
|-- index.html
|-- profile.html
`-- base.html
`-- theme
|-- snippets
| |-- widget.html
| `-- extra.html
|-- index.html
`-- theme.html
And we want to create a single filesystem that looks for files in `templates` if
they don't exist in `theme`. We can do this with the following code::
from fs.osfs import OSFS
from fs.multifs import MultiFS
themed_template_fs = MultiFS()
themed_template_fs.addfs('templates', OSFS('templates'))
themed_template_fs.addfs('theme', OSFS('theme'))
Now we have a `themed_template_fs` FS object that presents a single view of
both directories::
|-- snippets
| |-- panel.html
| |-- widget.html
| `-- extra.html
|-- index.html
|-- profile.html
|-- base.html
`-- theme.html
A MultiFS is generally read-only, and any operation that may modify data
(including opening files for writing) will fail. However, you can set a
writeable fs with the `setwritefs` method -- which does not have to be
one of the FS objects set with `addfs`.
The reason that only one FS object is ever considered for write access is
that otherwise it would be ambiguous as to which filesystem you would want
to modify. If you need to be able to modify more than one FS in the MultiFS,
you can always access them directly.
"""
from fs.base import FS, synchronize
from fs.path import *
from fs.errors import *
from fs import _thread_synchronize_default
class MultiFS(FS):
    """A filesystem that delegates to a sequence of other filesystems.

    Operations on the MultiFS will try each 'child' filesystem in order, until
    it succeeds. In effect, creating a filesystem that combines the files and
    dirs of its children.
    """

    _meta = {'virtual': True,
             'read_only': False,
             'unicode_paths': True,
             'case_insensitive_paths': False
             }

    def __init__(self, auto_close=True):
        """
        :param auto_close: If True the child filesystems will be closed when the MultiFS is closed
        """
        super(MultiFS, self).__init__(thread_synchronize=_thread_synchronize_default)
        self.auto_close = auto_close
        # Child filesystems in search order (maintained by _priority_sort).
        self.fs_sequence = []
        # Maps the name given to addfs() to the FS object.
        self.fs_lookup = {}
        # Maps name to a (priority, insertion-index) tuple; see addfs().
        self.fs_priorities = {}
        # The single FS used for modifying operations, or None.
        self.writefs = None

    @synchronize
    def __str__(self):
        return "<MultiFS: %s>" % ", ".join(str(fs) for fs in self.fs_sequence)

    __repr__ = __str__

    @synchronize
    def __unicode__(self):
        return u"<MultiFS: %s>" % ", ".join(unicode(fs) for fs in self.fs_sequence)

    def _get_priority(self, name):
        """Return the (priority, insertion-index) sort key for `name`."""
        return self.fs_priorities[name]

    @synchronize
    def close(self):
        """Close this FS and, if auto_close is set, every child filesystem."""
        # Explicitly close if requested
        if self.auto_close:
            for fs in self.fs_sequence:
                fs.close()
            if self.writefs is not None:
                self.writefs.close()
        # Discard any references
        del self.fs_sequence[:]
        self.fs_lookup.clear()
        self.fs_priorities.clear()
        self.writefs = None
        super(MultiFS, self).close()

    def _priority_sort(self):
        """Sort filesystems by priority order"""
        # Highest priority first; the insertion index in the key makes equal
        # priorities resolve to most-recently-added first (reverse=True).
        priority_order = sorted(self.fs_lookup.keys(), key=lambda n: self.fs_priorities[n], reverse=True)
        self.fs_sequence = [self.fs_lookup[name] for name in priority_order]

    @synchronize
    def addfs(self, name, fs, write=False, priority=0):
        """Adds a filesystem to the MultiFS.

        :param name: A unique name to refer to the filesystem being added.
            The filesystem can later be retrieved by using this name as an index to the MultiFS, i.e. multifs['myfs']
        :param fs: The filesystem to add
        :param write: If this value is True, then the `fs` will be used as the writeable FS
        :param priority: A number that gives the priority of the filesystem being added.
            Filesystems will be searched in descending priority order and then by the reverse order they were added.
            So by default, the most recently added filesystem will be looked at first

        """
        if name in self.fs_lookup:
            raise ValueError("Name already exists.")
        # Store (priority, insertion index) so ties keep a deterministic,
        # recency-based order after _priority_sort().
        priority = (priority, len(self.fs_sequence))
        self.fs_priorities[name] = priority
        self.fs_sequence.append(fs)
        self.fs_lookup[name] = fs
        self._priority_sort()
        if write:
            self.setwritefs(fs)

    @synchronize
    def setwritefs(self, fs):
        """Sets the filesystem to use when write access is required. Without a writeable FS,
        any operations that could modify data (including opening files for writing / appending)
        will fail.

        :param fs: An FS object that will be used to open writeable files

        """
        self.writefs = fs

    @synchronize
    def clearwritefs(self):
        """Clears the writeable filesystem (operations that modify the multifs will fail)"""
        self.writefs = None

    @synchronize
    def removefs(self, name):
        """Removes a filesystem from the sequence.

        :param name: The name of the filesystem, as used in addfs

        """
        if name not in self.fs_lookup:
            raise ValueError("No filesystem called '%s'" % name)
        fs = self.fs_lookup[name]
        self.fs_sequence.remove(fs)
        del self.fs_lookup[name]
        self._priority_sort()

    @synchronize
    def __getitem__(self, name):
        return self.fs_lookup[name]

    @synchronize
    def __iter__(self):
        # Iterate over a copy so callers may add/remove during iteration.
        return iter(self.fs_sequence[:])

    def _delegate_search(self, path):
        """Return the first child FS (in priority order) where `path` exists, or None."""
        for fs in self:
            if fs.exists(path):
                return fs
        return None

    @synchronize
    def which(self, path, mode='r'):
        """Retrieves the filesystem that a given path would delegate to.
        Returns a tuple of the filesystem's name and the filesystem object itself.

        :param path: A path in MultiFS
        :param mode: An open mode; any write/append mode delegates to the write FS

        """
        if 'w' in mode or '+' in mode or 'a' in mode:
            # Bug fix: this previously returned the bare writefs object,
            # which violated the documented (name, fs) return contract and
            # broke callers that unpack the result (e.g. desc()). desc()
            # already treats a None name specially, so None labels the
            # write filesystem here.
            return None, self.writefs
        for fs in self:
            if fs.exists(path):
                for fs_name, fs_object in self.fs_lookup.iteritems():
                    if fs is fs_object:
                        return fs_name, fs
        raise ResourceNotFoundError(path, msg="Path does not map to any filesystem: %(path)s")

    @synchronize
    def getsyspath(self, path, allow_none=False):
        """Return the system path of `path` on the first child FS containing it."""
        fs = self._delegate_search(path)
        if fs is not None:
            return fs.getsyspath(path, allow_none=allow_none)
        if allow_none:
            return None
        raise ResourceNotFoundError(path)

    @synchronize
    def desc(self, path):
        """Return a human-readable description of `path`, naming the child FS it lives on."""
        if not self.exists(path):
            raise ResourceNotFoundError(path)
        name, fs = self.which(path)
        if name is None:
            return ""
        return "%s (in %s)" % (fs.desc(path), name)

    @synchronize
    def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        """Open `path`: write/append modes go to the write FS, read modes to
        the first child FS where the path exists."""
        if 'w' in mode or '+' in mode or 'a' in mode:
            if self.writefs is None:
                raise OperationFailedError('open', path=path, msg="No writeable FS set")
            return self.writefs.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
        for fs in self:
            if fs.exists(path):
                fs_file = fs.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
                return fs_file
        raise ResourceNotFoundError(path)

    @synchronize
    def exists(self, path):
        return self._delegate_search(path) is not None

    @synchronize
    def isdir(self, path):
        fs = self._delegate_search(path)
        if fs is not None:
            return fs.isdir(path)
        return False

    @synchronize
    def isfile(self, path):
        fs = self._delegate_search(path)
        if fs is not None:
            return fs.isfile(path)
        return False

    @synchronize
    def listdir(self, path="./", *args, **kwargs):
        """Return the de-duplicated union of every child's listing of `path`.
        Children that raise an FSError for the path are skipped."""
        paths = []
        for fs in self:
            try:
                paths += fs.listdir(path, *args, **kwargs)
            except FSError:
                pass
        return list(set(paths))

    @synchronize
    def makedir(self, path, recursive=False, allow_recreate=False):
        if self.writefs is None:
            raise OperationFailedError('makedir', path=path, msg="No writeable FS set")
        self.writefs.makedir(path, recursive=recursive, allow_recreate=allow_recreate)

    @synchronize
    def remove(self, path):
        if self.writefs is None:
            raise OperationFailedError('remove', path=path, msg="No writeable FS set")
        self.writefs.remove(path)

    @synchronize
    def removedir(self, path, recursive=False, force=False):
        if self.writefs is None:
            raise OperationFailedError('removedir', path=path, msg="No writeable FS set")
        if normpath(path) in ('', '/'):
            raise RemoveRootError(path)
        self.writefs.removedir(path, recursive=recursive, force=force)

    @synchronize
    def rename(self, src, dst):
        if self.writefs is None:
            raise OperationFailedError('rename', path=src, msg="No writeable FS set")
        self.writefs.rename(src, dst)

    @synchronize
    def settimes(self, path, accessed_time=None, modified_time=None):
        if self.writefs is None:
            raise OperationFailedError('settimes', path=path, msg="No writeable FS set")
        self.writefs.settimes(path, accessed_time, modified_time)

    @synchronize
    def getinfo(self, path):
        """Return the info dict for `path` from the first child FS containing it."""
        for fs in self:
            if fs.exists(path):
                return fs.getinfo(path)
        raise ResourceNotFoundError(path)
| |
#!/usr/bin/env python
"""
Copyright 2014, BlinkMobile
"""
import argparse
import time
import sys
import re
import os
import tempfile
import simplejson
import boto.s3
import boto.ses
def main():
    """
    This is designed to take MIME emails stored in S3,
    and sends a new email with a signed URL to that email file.
    We use this when we store emails that are over 10MB
    (that SES won't send).
    """
    args = argparse.ArgumentParser()
    args.add_argument('--bucket', '-b',
                      help='Bucket Name (e.g. "blinkmobile-logs")',
                      required=True)
    args.add_argument('--prefix', '-p', help='S3 prefix to emails that'
                      'should be processed (e.g. "emails")',
                      required=True)
    args.add_argument('--region', '-r', help='AWS region (e.g. "us-west-1")',
                      required=True)
    args.add_argument('--processedEmails', '-e',
                      help='S3 prefix to processed emails JSON file '
                      '(e.g. emails/_PROCESSING/processedEmails.json)',
                      required=True)
    args.add_argument('--adminEmail', '-a',
                      help='An SES Verified Email Address to send statistics '
                      'to after completion (e.g. admin@admin.com)',
                      required=False)
    options = args.parse_args()
    bucket_name = options.bucket
    bucket_prefix = options.prefix
    bucket_region = options.region
    bucket_processed_emails = options.processedEmails
    administrator_email = options.adminEmail
    if not administrator_email:
        print "WARNING: No Administrator Email given, " \
              "email summary of statistics disabled"
    email_to = ""
    email_from = ""
    email_cc = ""
    email_subject = ""
    email_date = ""
    email_rt = email_from
    emails_sent = 0
    todays_date = time.strftime("%d-%m-%Y")
    # Local scratch files for the processed-emails JSON and the downloaded
    # MIME message; delete=False because both are reused below and removed
    # explicitly by delete_temp_files().
    processed_emails_temp = tempfile.NamedTemporaryFile(delete=False)
    processed_emails_file_path = processed_emails_temp.name
    message_location_temp = tempfile.NamedTemporaryFile(delete=False)
    message_location = message_location_temp.name
    ####
    # Main function begins:
    ####
    print 'Connecting to S3 + SES via Boto...'
    conn = boto.connect_s3()
    ses_connection = boto.ses.connect_to_region(bucket_region)
    print 'Connected.'
    bucket = conn.get_bucket(bucket_name, validate=False)
    ####
    # Preparing Processed Emails JSON File (which returns a python dictionary)
    ####
    processed_emails = prepare_json(
        bucket, bucket_processed_emails, processed_emails_file_path)
    # prefix must end it in 'msg' if accessing subfolders
    for key in bucket.list(prefix=bucket_prefix):
        add_to_dict = False
        key = bucket.get_key(key.name)
        kn = key.name
        # print key.name
        # print key.size
        # Integer division: the size is rounded down to whole megabytes.
        key_in_mb = ((key.size / 1024) / 1024)
        # print "Size (in MB): %s" % key_in_mb
        if key_in_mb >= 10:
            print key.name
            print "Size (in MB): %s" % key_in_mb
            # check against python dict
            # if not in it, add and progress
            if key.name in processed_emails:
                add_to_dict = False
            else:
                add_to_dict = True
            if add_to_dict:
                ####
                # Generating Signed URL
                ####
                signed_url = get_signed_url(key)
                ####
                # Downloading Email to Disk (inside /tmp/)
                ####
                print "Downloading file"
                key.get_contents_to_filename(message_location)
                print "File downloaded. Opening"
                # clearing email_cc + email_rt as they're optional.
                email_to = ""
                email_from = ""
                email_cc = ""
                email_rt = ""
                email_subject = ""
                email_date = ""
                # Best-effort scrape of the raw MIME headers. Note that
                # key_file.next() both peeks at and consumes the following
                # line, so a header immediately after a wrapped To/Cc header
                # can only be caught by the inner checks below.
                with open(message_location) as key_file:
                    for line in key_file:
                        if re.match("(To):(.*)", line):
                            temp = line
                            temp_to = temp.replace("To: ", "").replace("\n", "").lstrip(' ')
                            email_to = temp_to.split(",")
                            # sometimes 'to' addresses are split over
                            # multiple lines.
                            next_line = key_file.next()
                            # print next_line
                            # and sometimes it doesn't, meaning the Subject
                            # is on the next line:
                            if re.match("(Subject):(.*)", next_line):
                                temp = next_line
                                email_subject = temp.replace(
                                    "Subject: ", "").replace("\n", "")
                            elif re.match("(.*)@(.*)", next_line):
                                temp2 = next_line.replace("\n", "").lstrip(' ')
                                temp2_to = temp2.split(",")
                                unfiltered_email_to = email_to + temp2_to
                                email_to = filter(None, unfiltered_email_to)
                        if re.match("(.*)(From):(.*)", line):
                            temp = line
                            email_from = temp.replace("From: ", "").replace(
                                "\n", "")
                        if re.match("(.*)(Cc):(.*)", line):
                            temp = line
                            temp_cc = temp.replace("Cc: ", "").replace(
                                "\n", "")
                            email_cc = temp_cc.split(",")
                            # sometimes 'cc' addresses are split over
                            # multiple lines.
                            next_line = key_file.next()
                            # and sometimes it isn't, meaning the Reply-To
                            # is on the next line
                            if re.match("(.*)(Reply-To):(.*)", next_line):
                                temp = next_line
                                temp_rt = temp.replace("Reply-To: ", "") \
                                    .replace("\n", "")
                                email_rt = temp_rt.split(",")
                            elif re.match("(.*)@(.*)", next_line):
                                temp2 = next_line.replace("\n", "").lstrip(
                                    ' ')
                                temp2_cc = temp2.split(",")
                                unfiltered_email_cc = email_cc + temp2_cc
                                email_cc = filter(None, unfiltered_email_cc)
                        if re.match("(.*)(Reply-To):(.*)", line):
                            temp = line
                            temp_rt = temp.replace("Reply-To: ", "")\
                                .replace("\n", "")
                            email_rt = temp_rt.split(",")
                        if re.match("(.*)(Subject):(.*)", line):
                            temp = line
                            email_subject = temp.replace(
                                "Subject: ", "").replace("\n", "")
                        if re.match("(.*)(Date):(.*)", line):
                            temp = line
                            email_date = temp.replace("Date: ", "").replace(
                                "\n", "")
                # if email isn't in dictionary, then it hasn't been sent yet
                # therefore add to dictionary and send it.
                print "adding to dictionary"
                processed_emails.update({
                    key.name: {
                        'date': email_date,
                        'emailSent': todays_date,
                        'email': [{
                            'from': email_from,
                            'to': email_to,
                            'subject': email_subject
                        }]
                    }})
                send_email(
                    ses_connection, signed_url, email_from, email_subject,
                    email_to, email_cc, email_rt, email_date)
                emails_sent += 1
            print "-----"
    ####
    # convert python dictionary back to JSON, upload JSON
    ####
    print "Processing Dictionary"
    process_dictionary(
        bucket,
        bucket_processed_emails,
        processed_emails_file_path,
        processed_emails)
    ####
    # delete temp files from machine
    #####
    print "Deleting Temporary Files"
    delete_temp_files(message_location, processed_emails_file_path)
    if administrator_email:
        print "Sending Statistics to: %s" % administrator_email
        send_stats(
            ses_connection, emails_sent, processed_emails, administrator_email)
def send_stats(ses_connection, emails_sent, processed_emails, email_address):
    """ As this is designed to be run automatically on a timer,
    this will compile an email with statistics (e.g. how many emails were
    sent, biggest size, etc) to be sent to someone once execution is complete.

    :param ses_connection: an open boto SES connection used to send the mail
    :param emails_sent: number of signed-URL emails sent during this run
    :param processed_emails: dict of every email processed so far (the
        contents of the processed-emails JSON file)
    :param email_address: a verified SES address, used as both the sender
        and the recipient of the statistics email
    :return: Null.
    """
    date = time.strftime("%d-%m-%Y")
    pe_counter = len(processed_emails)
    # Bug fix: the adjacent literals previously concatenated to
    # "...finished. Hereare some stats..."; a trailing space after "Here"
    # restores the intended wording.
    body = "Hi,\nSES Woomera just finished. Here " \
           "are some stats from the execution:"\
           "\n\n" \
           "Today's Date: %s\n"\
           "Emails sent: %s\n" \
           "Emails in 'Processed Emails' JSON File: %s\n " \
           % (date, emails_sent, pe_counter)
    ses_connection.send_email(
        source=email_address,
        subject="[SES Woomera] Email Statistics -- %s" % date,
        to_addresses=email_address,
        body=body,
        format="text")
def delete_temp_files(message_location, processed_emails_file_path):
    """
    Remove the temporary files accumulated during processing: the downloaded
    email message and the local copy of the processed-emails JSON file.
    :return: Null
    """
    for temp_path in (message_location, processed_emails_file_path):
        os.remove(temp_path)
def prepare_json(bucket, bucket_processed_emails, processed_emails_file_path):
    """
    downloads the processed emails JSON file (which keeps track of emails
    that have already been processed/sent).
    In addition, it converts the JSON file to a Python Dictionary, which
    makes processing easier.
    :param bucket: the boto S3 bucket holding the tracking file
    :param bucket_processed_emails: S3 key name of the tracking JSON file
    :param processed_emails_file_path: local path to download the file to
    :return: processed_emails_dict: a python dictionary of the
    JSON file downloaded from S3.
    """
    key = boto.s3.key.Key(bucket=bucket,
                          name=bucket_processed_emails)
    # if the key (json file) exists, download it from s3
    # if it doesn't, create a new file in preparation to upload to S3.
    if key.exists():
        key.get_contents_to_filename(processed_emails_file_path)
    else:
        # creating file
        open(processed_emails_file_path, 'a').close()
    # convert to python dict
    # print processed_emails_file_path
    # f = open(processed_emails_file_path, 'r')
    # print f.read()
    processed_emails_json = open(processed_emails_file_path).read()
    processed_emails_dict = {}
    try:
        processed_emails_dict = simplejson.loads(processed_emails_json)
        print "Processed Emails Dictionary: %s" % processed_emails_dict
    # this mostly occurs when the file is empty
    except simplejson.scanner.JSONDecodeError:
        # Deliberate best-effort: an empty or missing tracking file just
        # means nothing has been processed yet, so fall back to {}.
        print ""
    return processed_emails_dict
def get_signed_url(key):
    """
    Build a pre-signed, time-limited download URL for an S3 key.
    :param key: refers to the file inside S3 that requires a signed URL
    :return signedURL: a signed URL
    """
    two_weeks_in_seconds = 1209600  # 14 days
    return key.generate_url(
        two_weeks_in_seconds,
        method='GET',
        query_auth=True,
        force_http=False,
        response_headers={
            'response-content-type': 'binary/octet-stream'
        })
def send_email(ses_connection, signed_url, email_from, email_subject, email_to,
               email_cc, email_rt, email_date):
    """
    Function designed to send an email
    :param ses_connection: an open boto SES connection used to send the mail
    :param signed_url: The Signed URL
    :param email_from: The "from" email address
    :param email_subject: The Subject of the email
    :param email_to: The "to" email address
    :param email_cc: The "cc" email address/es
    :param email_rt: the "Reply-To" email addresses.
    :param email_date: The date that the original email was sent.
    :return: Null.
    """
    # print "Setting up email Body"
    # Replacement body: instead of the oversized original message, the
    # recipient gets download instructions plus the 14-day signed URL.
    email_body = "Hi,\n\nThe Blink Mobility Platform attempted to " \
                 "process this email (Sent: %s), however the email size " \
                 "(including attachments) " \
                 "exceeded the 10MB limit.\n\n" \
                 "To access the email, please click the following link " \
                 "(which will expire in 14 days): \n\n" \
                 "%s" \
                 "\n\n" \
                 "Opening Instructions:\n\n " \
                 "1. Download the file\n " \
                 "2. Open the file in your chosen mail client " \
                 "(e.g. Apple Mail, Microsoft Outlook, etc)\n " \
                 "3. The email should open displaying the " \
                 "email body and all attachments \n\n " \
                 "Thanks,\n " \
                 "BlinkMobile Interactive" % (email_date, signed_url)
    # print emailBody
    print "SENDING EMAIL -- From: %s To: %s CC: %s Subject: %s" \
        % (email_from, email_to, email_cc, email_subject)
    ses_connection.send_email(
        source=email_from,
        subject=email_subject,
        body=email_body,
        to_addresses=email_to,
        cc_addresses=email_cc,
        reply_addresses=email_rt
    )
def process_dictionary(bucket, bucket_processed_emails,
                       processed_emails_file_path, processed_emails_dict):
    """
    Once emails have been sent, the processed emails dictionary gets
    converted back into a JSON file and it gets uploaded to S3.
    :param bucket: the boto S3 bucket to upload the tracking file to
    :param bucket_processed_emails: S3 key name for the uploaded JSON file
    :param processed_emails_file_path: local path the JSON is written to
    :param processed_emails_dict: the Python dictionary containing all
    processed emails (that needs to be converted to a JSON file)
    :return: Null.
    """
    # converting from dict to json
    pe_json = simplejson.dumps(processed_emails_dict)
    # Write the JSON to the local temp file. Using `with` guarantees the
    # handle is flushed and closed before the upload, even if write() raises
    # (the original open/write/close leaked the handle on failure).
    with open(processed_emails_file_path, 'w') as pe_file:
        pe_file.write(pe_json)
    # Upload to S3
    # print "uploading to s3"
    k = bucket.new_key(bucket_processed_emails)
    k.set_contents_from_filename(processed_emails_file_path)
# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()
| |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import copy
from op_test import OpTest
def iou(box_a, box_b, norm):
    """Apply intersection-over-union overlap between box_a and box_b

    Boxes are [xmin, ymin, xmax, ymax]; either corner order is accepted.
    When `norm` is False each extent is widened by 1 (un-normalized,
    integer-pixel box convention).
    """
    # Normalize corner order.
    xmin_a = min(box_a[0], box_a[2])
    ymin_a = min(box_a[1], box_a[3])
    xmax_a = max(box_a[0], box_a[2])
    ymax_a = max(box_a[1], box_a[3])
    xmin_b = min(box_b[0], box_b[2])
    ymin_b = min(box_b[1], box_b[3])
    xmax_b = max(box_b[0], box_b[2])
    ymax_b = max(box_b[1], box_b[3])
    # +1 per extent for un-normalized boxes (equivalent to the original
    # `(norm == False)` arithmetic).
    offset = 0 if norm else 1
    area_a = (ymax_a - ymin_a + offset) * (xmax_a - xmin_a + offset)
    area_b = (ymax_b - ymin_b + offset) * (xmax_b - xmin_b + offset)
    if area_a <= 0 and area_b <= 0:
        # Both boxes degenerate: define the overlap as zero.
        return 0.0
    xa = max(xmin_a, xmin_b)
    ya = max(ymin_a, ymin_b)
    xb = min(xmax_a, xmax_b)
    yb = min(ymax_a, ymax_b)
    inter_area = max(xb - xa + offset, 0.0) * max(yb - ya + offset, 0.0)
    iou_ratio = inter_area / (area_a + area_b - inter_area)
    return iou_ratio


def nms(boxes,
        scores,
        score_threshold,
        nms_threshold,
        top_k=200,
        normalized=True,
        eta=1.0):
    """Apply non-maximum suppression at test time to avoid detecting too many
    overlapping bounding boxes for a given object.
    Args:
        boxes: (tensor) The location preds for the img, Shape: [num_priors,4].
        scores: (tensor) The class predscores for the img, Shape:[num_priors].
        score_threshold: (float) The confidence thresh for filtering low
            confidence boxes.
        nms_threshold: (float) The overlap thresh for suppressing unnecessary
            boxes.
        top_k: (int) The maximum number of box preds to consider.
        eta: (float) The parameter for adaptive NMS.
    Return:
        The indices of the kept boxes with respect to num_priors.
    """
    all_scores = copy.deepcopy(scores)
    all_scores = all_scores.flatten()
    # Indices (into the original flat score array) that pass the threshold.
    selected_indices = np.argwhere(all_scores > score_threshold)
    selected_indices = selected_indices.flatten()
    all_scores = all_scores[selected_indices]
    sorted_indices = np.argsort(-all_scores, axis=0, kind='mergesort')
    sorted_scores = all_scores[sorted_indices]
    # Bug fix: argsort positions index the *filtered* score array; map them
    # back to original box indices. Without this, every box removed by
    # score_threshold shifted all subsequent indices, selecting wrong boxes.
    sorted_indices = selected_indices[sorted_indices]
    if top_k > -1 and top_k < sorted_indices.shape[0]:
        sorted_indices = sorted_indices[:top_k]
        sorted_scores = sorted_scores[:top_k]
    selected_indices = []
    adaptive_threshold = nms_threshold
    for i in range(sorted_scores.shape[0]):
        idx = sorted_indices[i]
        keep = True
        for k in range(len(selected_indices)):
            if keep:
                kept_idx = selected_indices[k]
                overlap = iou(boxes[idx], boxes[kept_idx], normalized)
                keep = True if overlap <= adaptive_threshold else False
            else:
                break
        if keep:
            selected_indices.append(idx)
        if keep and eta < 1 and adaptive_threshold > 0.5:
            # Adaptive NMS: progressively tighten the overlap threshold.
            adaptive_threshold *= eta
    return selected_indices
def multiclass_nms(boxes, scores, background, score_threshold, nms_threshold,
                   nms_top_k, keep_top_k, normalized, shared):
    """Run per-class NMS, then cap the total detections at `keep_top_k`.

    Returns a dict mapping class id -> kept box indices, and the total
    number of detections kept. The `background` class is never detected.
    """
    if shared:
        class_num = scores.shape[0]
    else:
        class_num = scores.shape[1]
    selected_indices = {}
    num_det = 0
    for cls in range(class_num):
        if cls == background:
            continue
        if shared:
            kept = nms(boxes, scores[cls], score_threshold, nms_threshold,
                       nms_top_k, normalized)
        else:
            kept = nms(boxes[:, cls, :], scores[:, cls], score_threshold,
                       nms_threshold, nms_top_k, normalized)
        selected_indices[cls] = kept
        num_det += len(kept)
    if keep_top_k > -1 and num_det > keep_top_k:
        # Rank every surviving detection by score and truncate globally.
        score_index = []
        for cls, kept in selected_indices.items():
            for idx in kept:
                cls_score = scores[cls][idx] if shared else scores[idx][cls]
                score_index.append((cls_score, cls, idx))
        score_index.sort(key=lambda entry: entry[0], reverse=True)
        score_index = score_index[:keep_top_k]
        # Rebuild the per-class index map from the truncated ranking.
        selected_indices = {}
        for _, cls, _ in score_index:
            selected_indices[cls] = []
        for _, cls, idx in score_index:
            selected_indices[cls].append(idx)
        if not shared:
            for cls in selected_indices:
                selected_indices[cls].sort()
        num_det = keep_top_k
    return selected_indices, num_det
def lod_multiclass_nms(boxes, scores, background, score_threshold,
                       nms_threshold, nms_top_k, keep_top_k, box_lod,
                       normalized):
    """Run (non-shared) multiclass NMS over a LoD batch.

    `box_lod[0]` gives the number of boxes in each sequence. Detections are
    emitted as [class, score, xmin, ymin, xmax, ymax] rows plus a LoD level
    listing detections per sequence.
    """
    det_outs = []
    lod = []
    offset = 0
    for length in box_lod[0]:
        box = boxes[offset:offset + length]
        score = scores[offset:offset + length]
        offset += length
        nmsed_outs, nmsed_num = multiclass_nms(
            box,
            score,
            background,
            score_threshold,
            nms_threshold,
            nms_top_k,
            keep_top_k,
            normalized,
            shared=False)
        if nmsed_num == 0:
            continue
        lod.append(nmsed_num)
        sequence_dets = []
        for cls, indices in nmsed_outs.items():
            for idx in indices:
                xmin, ymin, xmax, ymax = box[idx, cls, :]
                sequence_dets.append(
                    [cls, score[idx][cls], xmin, ymin, xmax, ymax])
        # Stable sort by class id (per-class order is preserved).
        sequence_dets.sort(key=lambda tup: tup[0])
        det_outs.extend(sequence_dets)
    if not lod:
        lod.append(1)
    return det_outs, lod
def batched_multiclass_nms(boxes,
                           scores,
                           background,
                           score_threshold,
                           nms_threshold,
                           nms_top_k,
                           keep_top_k,
                           normalized=True):
    """Run (shared) multiclass NMS independently on each batch entry.

    Returns the flattened detection rows and a LoD level listing how many
    detections each batch entry contributed.
    """
    det_outs = []
    lod = []
    for sample_idx in range(scores.shape[0]):
        nmsed_outs, nmsed_num = multiclass_nms(
            boxes[sample_idx],
            scores[sample_idx],
            background,
            score_threshold,
            nms_threshold,
            nms_top_k,
            keep_top_k,
            normalized,
            shared=True)
        if nmsed_num == 0:
            continue
        lod.append(nmsed_num)
        sample_dets = []
        for cls, indices in nmsed_outs.items():
            for idx in indices:
                xmin, ymin, xmax, ymax = boxes[sample_idx][idx][:]
                sample_dets.append(
                    [cls, scores[sample_idx][cls][idx], xmin, ymin, xmax, ymax])
        # Stable sort by class id (per-class order is preserved).
        sample_dets.sort(key=lambda tup: tup[0])
        det_outs.extend(sample_dets)
    if not lod:
        lod.append(1)
    return det_outs, lod
class TestMulticlassNMSOp(OpTest):
    """Checks the multiclass_nms op against the python reference above."""
    def set_argument(self):
        # Overridden by subclasses to vary the score threshold.
        self.score_threshold = 0.01
    def setUp(self):
        self.set_argument()
        N = 7  # batch size
        M = 1200  # number of prior boxes
        C = 21  # number of classes (label 0 is the background class)
        BOX_SIZE = 4
        background = 0
        nms_threshold = 0.3
        nms_top_k = 400
        keep_top_k = 200
        score_threshold = self.score_threshold
        scores = np.random.random((N * M, C)).astype('float32')
        def softmax(x):
            # Max-shift softmax; the shift keeps exp() in a safe range.
            shiftx = x - np.max(x).clip(-64.)
            exps = np.exp(shiftx)
            return exps / np.sum(exps)
        scores = np.apply_along_axis(softmax, 1, scores)
        scores = np.reshape(scores, (N, M, C))
        # Transpose to the class-major [N, C, M] layout.
        scores = np.transpose(scores, (0, 2, 1))
        boxes = np.random.random((N, M, BOX_SIZE)).astype('float32')
        # Force xmin,ymin into [0, 0.5) and xmax,ymax into [0.5, 1.0) so
        # every generated box is non-degenerate.
        boxes[:, :, 0:2] = boxes[:, :, 0:2] * 0.5
        boxes[:, :, 2:4] = boxes[:, :, 2:4] * 0.5 + 0.5
        nmsed_outs, lod = batched_multiclass_nms(boxes, scores, background,
                                                 score_threshold, nms_threshold,
                                                 nms_top_k, keep_top_k)
        # A single -1 entry is the sentinel for "no detections at all".
        nmsed_outs = [-1] if not nmsed_outs else nmsed_outs
        nmsed_outs = np.array(nmsed_outs).astype('float32')
        self.op_type = 'multiclass_nms'
        self.inputs = {'BBoxes': boxes, 'Scores': scores}
        self.outputs = {'Out': (nmsed_outs, [lod])}
        self.attrs = {
            'background_label': 0,
            'nms_threshold': nms_threshold,
            'nms_top_k': nms_top_k,
            'keep_top_k': keep_top_k,
            'score_threshold': score_threshold,
            'nms_eta': 1.0,
            'normalized': True,
        }
    def test_check_output(self):
        self.check_output()
class TestMulticlassNMSOpNoOutput(TestMulticlassNMSOp):
    """Variant where the score threshold filters out every detection."""
    def set_argument(self):
        # Here set 2.0 to test the case there is no outputs.
        # In practical use, 0.0 < score_threshold < 1.0
        self.score_threshold = 2.0
class TestMulticlassNMSLoDInput(OpTest):
    """Check the multiclass_nms op on LoD input (per-class boxes,
    unnormalized coordinates) against lod_multiclass_nms."""
    def set_argument(self):
        self.score_threshold = 0.01
    def setUp(self):
        self.set_argument()
        M = 1200       # total number of boxes
        C = 21         # number of classes (label 0 is background)
        BOX_SIZE = 4
        box_lod = [[1200]]  # a single image holding all boxes
        background = 0
        nms_threshold = 0.3
        nms_top_k = 400
        keep_top_k = 200
        score_threshold = self.score_threshold
        normalized = False
        scores = np.random.random((M, C)).astype('float32')
        def softmax(x):
            # NOTE(review): this clips np.max(x) (not the shifted values) at
            # -64; for scores drawn from [0, 1) the clip is a no-op.
            shiftx = x - np.max(x).clip(-64.)
            exps = np.exp(shiftx)
            return exps / np.sum(exps)
        scores = np.apply_along_axis(softmax, 1, scores)
        boxes = np.random.random((M, C, BOX_SIZE)).astype('float32')
        # Unnormalized pixel-style coordinates: mins in [0, 10),
        # maxes in [10, 20), so every box is well-formed.
        boxes[:, :, 0] = boxes[:, :, 0] * 10
        boxes[:, :, 1] = boxes[:, :, 1] * 10
        boxes[:, :, 2] = boxes[:, :, 2] * 10 + 10
        boxes[:, :, 3] = boxes[:, :, 3] * 10 + 10
        nmsed_outs, lod = lod_multiclass_nms(
            boxes, scores, background, score_threshold, nms_threshold,
            nms_top_k, keep_top_k, box_lod, normalized)
        # An empty result is represented by a single -1 entry.
        nmsed_outs = [-1] if not nmsed_outs else nmsed_outs
        nmsed_outs = np.array(nmsed_outs).astype('float32')
        self.op_type = 'multiclass_nms'
        self.inputs = {
            'BBoxes': (boxes, box_lod),
            'Scores': (scores, box_lod),
        }
        self.outputs = {'Out': (nmsed_outs, [lod])}
        self.attrs = {
            'background_label': 0,
            'nms_threshold': nms_threshold,
            'nms_top_k': nms_top_k,
            'keep_top_k': keep_top_k,
            'score_threshold': score_threshold,
            'nms_eta': 1.0,
            'normalized': normalized,
        }
    def test_check_output(self):
        self.check_output()
class TestIOU(unittest.TestCase):
    """Sanity-check the NumPy ``iou`` helper on a hand-computed box pair."""
    def test_iou(self):
        # Intersection is the 2x1 rectangle [4,4]-[6,5] (area 2);
        # union is 6 + 12 - 2 = 16, so IoU = 2/16.
        first = np.array([4.0, 3.0, 7.0, 5.0], dtype='float32')
        second = np.array([3.0, 4.0, 6.0, 8.0], dtype='float32')
        expected = np.array([2.0 / 16.0], dtype='float32')
        actual = np.array([iou(first, second, True)], dtype='float32')
        self.assertTrue(np.allclose(actual, expected))
if __name__ == '__main__':
unittest.main()
| |
#!/usr/bin/env python
# __BEGIN_LICENSE__
# Copyright (c) 2009-2013, United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration. All
# rights reserved.
#
# The NGT platform is licensed under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# __END_LICENSE__
# For each DEM, blend it, within its footprint, with neighboring DEMs.
# That is to say, make several mosaics. First the DEM alone. Then
# blended with the one on the right. Then also with the one on the
# left. Then with second on the right. Then with second on the left.
# Keep the result with the lowest mean error to lidar.
# It creates files of the form:
# processed/batch_2489_2490_2/out-blend-DEM.tif
# processed/batch_2489_2490_2/out-blend-DEM-diff.csv
# Operate in the range [startFrame, stopFrame) so does not include the
# last one.
# See usage below.
# TODO: Find a better way of picking DEMs to blend. For example,
# include only those which decrease the mean error. Then compare
# if this approach gives lower error than the current way
# which keeps on adding DEMs left and right.
import os, sys, argparse, datetime, time, subprocess, logging, multiprocessing, re, glob
import traceback
import os.path as P
# The path to the ASP python files and tools
basepath = os.path.dirname(os.path.realpath(__file__)) # won't change, unlike syspath
pythonpath = os.path.abspath(basepath + '/../Python') # for dev ASP
libexecpath = os.path.abspath(basepath + '/../libexec') # for packaged ASP
binpath = os.path.abspath(basepath + '/../bin') # for packaged ASP
icebridgepath = os.path.abspath(basepath + '/../IceBridge') # IceBridge tools
toolspath = os.path.abspath(basepath + '/../Tools') # ASP Tools
# Prepend to Python path
sys.path.insert(0, basepath)
sys.path.insert(0, pythonpath)
sys.path.insert(0, libexecpath)
sys.path.insert(0, icebridgepath)
import icebridge_common
import asp_system_utils, asp_alg_utils, asp_geo_utils
asp_system_utils.verify_python_version_is_supported()
# Prepend to system PATH
os.environ["PATH"] = basepath + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = pythonpath + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = libexecpath + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = icebridgepath + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = toolspath + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = binpath + os.pathsep + os.environ["PATH"]
def getMeanDemDiff(dems, outputPrefix):
    '''Get the mean distance between the input DEMs to the main DEM.

    The first entry of dems is the reference; geodiff is run between it and
    every other DEM and the per-pair mean differences are averaged.  Pairs
    whose geodiff output cannot be read (typically no overlap) are skipped.
    Returns 0 when no pair could be compared.'''
    mainDem = dems[0]
    meanDiff = 0.0
    meanCount = 0.0
    for i in range(1, len(dems)):
        thisDem = dems[i]
        if not thisDem:
            continue
        diffPrefix = outputPrefix + '_' + str(i)
        diffPath = diffPrefix + '-diff.tif'
        cmd = ('geodiff --absolute %s %s -o %s' % (mainDem, thisDem, diffPrefix))
        print(cmd)
        asp_system_utils.executeCommand(cmd, diffPath, True, False)
        try:
            # Read in and examine the results
            results = icebridge_common.readGeodiffOutput(diffPath)
            print("Found inter-DEM diff " + str(i) + " = " + str(results['Mean']))
            meanDiff = meanDiff + results['Mean']
            meanCount = meanCount + 1.0
        except Exception:
            # Was a bare "except:", which would also swallow
            # KeyboardInterrupt/SystemExit; catch only ordinary errors
            # (missing/unparsable geodiff output when the DEMs don't overlap).
            print('No overlap with DEM ' + thisDem)
    if meanCount < 1:  # Handle degenerate cases
        return 0
    meanDiff = meanDiff / meanCount
    print('Mean of DEM diffs = ' + str(meanDiff))
    return meanDiff
def runBlend(frame, processFolder, lidarFile, fireballDEM, options,
             threadText, redo, suppressOutput):
    """Blend the aligned DEM of *frame* with neighboring DEMs and keep the
    combination with the lowest mean error against the lidar file.

    Writes out-blend-DEM.tif / out-blend-DEM-diff.csv into the frame's batch
    folder, and (when fireballDEM is given) an extra blend restricted to the
    Fireball DEM's footprint.  Meant to run inside a worker process, so all
    exceptions are caught and printed rather than propagated.
    """
    WEIGHT_EXP = 1.3
    # This will run as multiple processes. Hence have to catch all exceptions.
    try:
        demFile, batchFolder = icebridge_common.frameToFile(frame, icebridge_common.alignFileName(),
                                                            processFolder, options.bundleLength)
        lidarCsvFormatString = icebridge_common.getLidarCsvFormat(lidarFile)
        if demFile == "":
            print("Could not find DEM for frame: " + str(frame))
            return
        # The names for the final results
        finalOutputPrefix = os.path.join(batchFolder, 'out-blend-DEM')
        finalBlend = finalOutputPrefix + '.tif'
        finalDiff = finalOutputPrefix + "-diff.csv"
        fireballOutputPrefix = os.path.join(batchFolder, 'out-blend-fb-footprint')
        fireballBlendOutput = fireballOutputPrefix + '-tile-0.tif'
        finalFireballOutput = fireballOutputPrefix + '-DEM.tif'
        fireballDiffPath = fireballOutputPrefix + "-diff.csv"
        # This is turned off for now. Find the diff between neighboring
        # aligned DEMs before blending.
        prevDemFile, prevBatchFolder = \
            icebridge_common.frameToFile(frame-1,
                                         icebridge_common.alignFileName(),
                                         processFolder, options.bundleLength)
        prevDiffPrefix = os.path.join(batchFolder, 'out-prev')
        prevDiffFile = prevDiffPrefix + '-diff.tif'
        if options.computeDiffToPrev and redo and os.path.exists(prevDiffFile):
            os.system("rm -f " + prevDiffFile)
        if os.path.exists(prevDemFile) and os.path.exists(demFile):
            if os.path.exists(prevDiffFile):
                print("File exists: " + prevDiffFile)
            else:
                cmd = ('geodiff --absolute %s %s -o %s' %
                       (prevDemFile, demFile, prevDiffPrefix))
                print(cmd)
                asp_system_utils.executeCommand(cmd, prevDiffFile, suppressOutput, redo)
        if not redo:
            # Skip all the work when the expected outputs already exist.
            set1Exists = False
            if (os.path.exists(finalBlend) and os.path.exists(finalDiff)):
                print("Files exist: " + finalBlend + " " + finalDiff + ".")
                set1Exists = True
            set2Exists = True
            if fireballDEM != "":
                if (os.path.exists(finalFireballOutput) and os.path.exists(fireballDiffPath)):
                    print("Files exist: " + finalFireballOutput + " " + fireballDiffPath + ".")
                    set2Exists = True
                else:
                    set2Exists = False
            if set1Exists and set2Exists:
                return
        # We will blend the dems with frame offsets within frameOffsets[0:index]
        filesToWipe = []
        bestMean = 1.0e+100
        bestBlend = ''
        bestVals = ''
        bestDiff = ''
        # Look at frames with these offsets when blending
        frameOffsets = [0, 1, -1, 2, -2]
        for index in range(len(frameOffsets)):
            # Find all the DEMs up to the current index
            dems = []
            currDemFile = ""
            for val in range(0, index+1):
                offset = frameOffsets[val]
                currDemFile, currBatchFolder = \
                    icebridge_common.frameToFile(frame + offset,
                                                 icebridge_common.alignFileName(),
                                                 processFolder, options.bundleLength)
                if currDemFile == "":
                    continue
                dems.append(currDemFile)
            if currDemFile == "":
                # The last DEM was not present. Hence this iteration will add nothing new.
                continue
            # Compute the mean distance between the DEMs
            # TODO: Make sure this gets cleaned up!
            meanWorkPrefix = os.path.join(batchFolder, 'bd')
            meanDiff = getMeanDemDiff(dems, meanWorkPrefix)
            # If the mean error between DEMs is creater than this,
            # use a less aggressive blending method.
            MEAN_DIFF_BLEND_THRESHOLD = 1.0
            demString = " ".join(dems)
            outputPrefix = os.path.join(batchFolder, 'out-blend-' + str(index))
            # See if we have a pre-existing DEM to use as footprint
            footprintDEM = os.path.join(batchFolder, 'out-trans-footprint-DEM.tif')
            blendOutput = outputPrefix + '-tile-0.tif'
            if os.path.exists(footprintDEM):
                cmd = ('dem_mosaic --weights-exponent %f --this-dem-as-reference %s %s %s -o %s'
                       % (WEIGHT_EXP, footprintDEM, demString, threadText, outputPrefix))
            else:
                cmd = ('dem_mosaic --weights-exponent %f --first-dem-as-reference %s %s -o %s'
                       % (WEIGHT_EXP, demString, threadText, outputPrefix))
            if meanDiff > MEAN_DIFF_BLEND_THRESHOLD:
                cmd += ' --propagate-nodata --use-centerline-weights '
            print(cmd)
            # Execute the blend command.
            # - Sometimes there is junk left from a previous interrupted run. So if we
            #   got so far, recreate all files.
            localRedo = True
            print(cmd)
            asp_system_utils.executeCommand(cmd, blendOutput, suppressOutput, localRedo)
            filesToWipe.append(blendOutput)
            diffPath = outputPrefix + "-diff.csv"
            filesToWipe.append(diffPath)
            # Compute post-blending error to lidar
            cmd = ('geodiff --absolute --csv-format %s %s %s -o %s' %
                   (lidarCsvFormatString, blendOutput, lidarFile, outputPrefix))
            print(cmd)
            asp_system_utils.executeCommand(cmd, diffPath, suppressOutput, redo)
            # Read in and examine the results
            try:
                results = icebridge_common.readGeodiffOutput(diffPath)
                print("Current mean error to lidar is " + str(results['Mean']))
                # Keep the blend with the lowest mean error to lidar.
                if bestMean > float(results['Mean']):
                    bestMean = float(results['Mean'])
                    bestBlend = blendOutput
                    bestVals = demString
                    bestDiff = diffPath
            except Exception as e:
                pass
            logFiles = glob.glob(outputPrefix + "*" + "-log-" + "*")
            filesToWipe += logFiles
        # Update the filenames of the output files
        print("Best mean error to lidar is " + str(bestMean) + " when blending " + bestVals)
        cmd = "mv " + bestBlend + " " + finalBlend
        print(cmd)
        asp_system_utils.executeCommand(cmd, finalBlend, suppressOutput, redo)
        cmd = "mv " + bestDiff + " " + finalDiff
        print(cmd)
        asp_system_utils.executeCommand(cmd, finalDiff, suppressOutput, redo)
        # Generate a thumbnail of the final DEM
        hillOutput = finalOutputPrefix+'_HILLSHADE.tif'
        cmd = 'hillshade ' + finalBlend +' -o ' + hillOutput
        asp_system_utils.executeCommand(cmd, hillOutput, suppressOutput, redo)
        # Generate a low resolution compressed thumbnail of the hillshade for debugging
        thumbOutput = finalOutputPrefix + '_HILLSHADE_browse.tif'
        cmd = 'gdal_translate '+hillOutput+' '+thumbOutput+' -of GTiff -outsize 40% 40% -b 1 -co "COMPRESS=JPEG"'
        asp_system_utils.executeCommand(cmd, thumbOutput, suppressOutput, redo)
        os.system("rm -f " + hillOutput) # Remove this file to keep down the file count
        # Do another blend, to this DEM's footprint, but not using it
        if fireballDEM != "":
            # Find all the DEMs
            dems = []
            for val in range(0, len(frameOffsets)):
                offset = frameOffsets[val]
                currDemFile, currBatchFolder = \
                    icebridge_common.frameToFile(frame + offset,
                                                 icebridge_common.alignFileName(),
                                                 processFolder, options.bundleLength)
                if currDemFile == "":
                    continue
                dems.append(currDemFile)
            demString = " ".join(dems)
            cmd = ('dem_mosaic --weights-exponent %f --this-dem-as-reference %s %s %s -o %s'
                   % (WEIGHT_EXP, fireballDEM, demString, threadText, fireballOutputPrefix))
            #filesToWipe.append(fireballBlendOutput)
            print(cmd)
            # Sometimes there is junk left from a previous interrupted run. So if we
            # got so far, recreate all files.
            localRedo = True
            asp_system_utils.executeCommand(cmd, fireballBlendOutput, suppressOutput, localRedo)
            #filesToWipe.append(fireballDiffPath)
            cmd = ('geodiff --absolute --csv-format %s %s %s -o %s' %
                   (lidarCsvFormatString, fireballBlendOutput, lidarFile, fireballOutputPrefix))
            print(cmd)
            asp_system_utils.executeCommand(cmd, fireballDiffPath, suppressOutput, redo)
            # Read in and examine the results
            try:
                results = icebridge_common.readGeodiffOutput(fireballDiffPath)
                print("Mean error to lidar in fireball footprint is " + str(results['Mean']))
                cmd = "mv " + fireballBlendOutput + " " + finalFireballOutput
                print(cmd)
                asp_system_utils.executeCommand(cmd, finalFireballOutput, suppressOutput, redo)
            except Exception as e:
                pass
            # Generate a thumbnail of the final DEM
            #hillOutput = fireballOutputPrefix+'_HILLSHADE.tif'
            #cmd = 'hillshade ' + finalFireballOutput +' -o ' + hillOutput
            #print(cmd)
            #asp_system_utils.executeCommand(cmd, hillOutput, suppressOutput, redo)
            ## Generate a low resolution compressed thumbnail of the hillshade for debugging
            #thumbOutput = fireballOutputPrefix + '_HILLSHADE_browse.tif'
            #cmd = 'gdal_translate '+hillOutput+' '+thumbOutput+' -of GTiff -outsize 40% 40% -b 1 -co "COMPRESS=JPEG"'
            #asp_system_utils.executeCommand(cmd, thumbOutput, suppressOutput, redo)
            #os.system("rm -f " + hillOutput) # Remove this file to keep down the file count
            logFiles = glob.glob(fireballOutputPrefix + "*" + "-log-" + "*")
            filesToWipe += logFiles
        # Done with dealing with the fireball footprint
        # Clean up extra files
        for fileName in filesToWipe:
            if os.path.exists(fileName):
                print("Removing: " + fileName)
                os.system("rm -f " + fileName)
        # TODO: Handle this cleanup better!
        os.system('rm -f ' + meanWorkPrefix + '*')
    except Exception as e:
        print('Blending failed!\n' + str(e) + ". " + str(traceback.print_exc()))
    sys.stdout.flush()
def main(argsIn):
    """Parse the command line and blend the DEM of every frame in
    [start-frame, stop-frame), optionally across multiple processes."""
    try:
        # Sample usage:
        # python ~/projects/StereoPipeline/src/asp/IceBridge/blend_dems.py --site GR \
        # --yyyymmdd 20120315 --start-frame 2490 --stop-frame 2491 --bundle-length 2 \
        # --num-threads 8 --num-processes 10
        usage = '''blend_dems.py <options>'''
        parser = argparse.ArgumentParser(usage=usage)
        # Run selection
        parser.add_argument("--yyyymmdd", dest="yyyymmdd", required=True,
                            help="Specify the year, month, and day in one YYYYMMDD string.")
        parser.add_argument("--site", dest="site", required=True,
                            help="Name of the location of the images (AN, GR, or AL)")
        parser.add_argument("--output-folder", dest="outputFolder", default=None,
                            help="Name of the output folder. If not specified, " + \
                            "use something like AN_YYYYMMDD.")
        # Processing options
        parser.add_argument('--bundle-length', dest='bundleLength', default=2,
                            type=int, help="The number of images to bundle adjust and process " + \
                            "in a single batch.")
        parser.add_argument('--start-frame', dest='startFrame', type=int,
                            default=icebridge_common.getSmallestFrame(),
                            help="Frame to start with. Leave this and stop-frame blank to " + \
                            "process all frames.")
        parser.add_argument('--stop-frame', dest='stopFrame', type=int,
                            default=icebridge_common.getLargestFrame(),
                            help='Frame to stop on. This frame will also be processed.')
        parser.add_argument("--processing-subfolder", dest="processingSubfolder", default=None,
                            help="Specify a subfolder name where the processing outputs will go. "+\
                            "The default is no additional folder.")
        parser.add_argument("--compute-diff-to-prev-dem", action="store_true",
                            dest="computeDiffToPrev", default=False,
                            help="Compute the absolute difference between the current DEM " +
                            "and the one before it.")
        parser.add_argument("--blend-to-fireball-footprint", action="store_true",
                            dest="blendToFireball", default=False,
                            help="Create additional blended DEMs having the same " + \
                            "footprint as Fireball DEMs.")
        # Performance options
        parser.add_argument('--num-processes', dest='numProcesses', default=1,
                            type=int, help='The number of simultaneous processes to run.')
        parser.add_argument('--num-threads', dest='numThreads', default=8,
                            type=int, help='The number of threads per process.')
        options = parser.parse_args(argsIn)
    except argparse.ArgumentError as msg:
        parser.error(msg)
    icebridge_common.switchWorkDir()
    os.system("ulimit -c 0") # disable core dumps
    os.system("rm -f core.*") # these keep on popping up
    os.system("umask 022")   # enforce files be readable by others
    if len(options.yyyymmdd) != 8 and len(options.yyyymmdd) != 9:
        # Make an exception for 20100422a
        raise Exception("The --yyyymmdd field must have length 8 or 9.")
    if options.outputFolder is None:
        options.outputFolder = icebridge_common.outputFolder(options.site, options.yyyymmdd)
    os.system('mkdir -p ' + options.outputFolder)
    logLevel = logging.INFO # Make this an option??
    logger = icebridge_common.setUpLogger(options.outputFolder, logLevel,
                                          'icebridge_blend_log')
    (out, err, status) = asp_system_utils.executeCommand(['uname', '-a'],
                                                         suppressOutput = True)
    logger.info("Running on machine: " + out)
    logger.info(str(argsIn))
    processFolder = os.path.join(options.outputFolder, 'processed')
    # Handle subfolder option. This is useful for comparing results with different parameters!
    if options.processingSubfolder:
        processFolder = os.path.join(processFolder, options.processingSubfolder)
        logger.info('Reading from processing subfolder: ' + options.processingSubfolder)
    orthoFolder = icebridge_common.getOrthoFolder(options.outputFolder)
    orthoIndexPath = icebridge_common.csvIndexFile(orthoFolder)
    if not os.path.exists(orthoIndexPath):
        raise Exception("Error: Missing ortho index file: " + orthoIndexPath + ".")
    (orthoFrameDict, orthoUrlDict) = icebridge_common.readIndexFile(orthoIndexPath)
    if options.blendToFireball:
        fireballFrameDict = icebridge_common.getCorrectedFireballDems(options.outputFolder)
    lidarFolder = icebridge_common.getLidarFolder(options.outputFolder)
    threadText = ''
    if options.numThreads:
        threadText = '--threads ' + str(options.numThreads)
    redo = False
    suppressOutput = True
    taskHandles = []
    if options.numProcesses > 1:
        pool = multiprocessing.Pool(options.numProcesses)
    # Bound the frames to the range present in the ortho index.
    sortedFrames = sorted(orthoFrameDict.keys())
    if len(sortedFrames) > 0:
        if options.startFrame < sortedFrames[0]:
            options.startFrame = sortedFrames[0]
        if options.stopFrame > sortedFrames[-1] + 1:
            options.stopFrame = sortedFrames[-1] + 1
    else:
        # No ortho files, that means nothing to do
        options.startFrame = 0
        options.stopFrame = 0
    for frame in range(options.startFrame, options.stopFrame):
        if not frame in orthoFrameDict:
            logger.info("Error: Missing ortho file for frame: " + str(frame) + ".")
            continue
        orthoFile = orthoFrameDict[frame]
        try:
            lidarFile = icebridge_common.findMatchingLidarFile(orthoFile, lidarFolder)
        except: # Skip if no lidar file matches this frame
            continue
        fireballDEM = ""
        if options.blendToFireball:
            if frame in fireballFrameDict:
                fireballDEM = fireballFrameDict[frame]
            else:
                logger.info("No fireball DEM for frame: " + str(frame))
        args = (frame, processFolder, lidarFile, fireballDEM, options, threadText,
                redo, suppressOutput)
        # Run things sequentially if only one process, to make it easy to debug
        if options.numProcesses > 1:
            taskHandles.append(pool.apply_async(runBlend, args))
        else:
            runBlend(*args)
    if options.numProcesses > 1:
        icebridge_common.waitForTaskCompletionOrKeypress(taskHandles, logger, interactive = False,
                                                         quitKey='q', sleepTime=20)
        icebridge_common.stopTaskPool(pool)
# Run main function if file used from shell
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| |
#----------------------------------------------------------------------
#
# Author: Laszlo Nagy
#
# Copyright: (c) 2005 by Szoftver Messias Bt.
# Licence: BSD style
#
#
#----------------------------------------------------------------------
import os
import md5
import urllib
import urllib2
import mimetypes
#from gzip import GzipFile
import cStringIO
from cPickle import loads,dumps
import cookielib
class MozillaCacher(object):
    """A dictionary like object, that can cache results on a storage device.

    Each key is stored as a separate file named after the key inside
    ``cachedir``.  Keys must be plain ``str`` instances.  (Python 2 code:
    uses the ``file`` builtin and ``has_key``.)
    """
    def __init__(self,cachedir='.cache'):
        # Create the cache directory on first use.
        self.cachedir = cachedir
        if not os.path.isdir(cachedir):
            os.mkdir(cachedir)
    def name2fname(self,name):
        # Map a cache key to the path of its backing file.
        return os.path.join(self.cachedir,name)
    def __getitem__(self,name):
        if not isinstance(name,str):
            raise TypeError()
        fname = self.name2fname(name)
        if os.path.isfile(fname):
            # File handle is closed only when garbage collected.
            return file(fname,'rb').read()
        else:
            # NOTE(review): dict-like objects conventionally raise KeyError here.
            raise IndexError()
    def __setitem__(self,name,value):
        if not isinstance(name,str):
            raise TypeError()
        fname = self.name2fname(name)
        # Replace any existing entry for this key.
        if os.path.isfile(fname):
            os.unlink(fname)
        f = file(fname,'wb+')
        try:
            f.write(value)
        finally:
            f.close()
    def __delitem__(self,name):
        # Deleting a missing key is a silent no-op.
        if not isinstance(name,str):
            raise TypeError()
        fname = self.name2fname(name)
        if os.path.isfile(fname):
            os.unlink(fname)
    def __iter__(self):
        raise NotImplementedError()
    def has_key(self,name):
        # Python 2 style membership test used by MozillaEmulator.download().
        return os.path.isfile(self.name2fname(name))
class MozillaEmulator(object):
    """Firefox-like HTTP client built on urllib2 (Python 2) with cookie
    handling, optional on-disk caching and configurable retries."""
    def __init__(self,cacher={},trycount=0):
        """Create a new MozillaEmulator object.
        @param cacher: A dictionary like object, that can cache search results on a storage device.
            You can use a simple dictionary here, but it is not recommended.
            You can also put None here to disable caching completely.
        @param trycount: The download() method will retry the operation if it fails. You can specify -1 for infinite retrying.
            A value of 0 means no retrying. A value of 1 means one retry. etc."""
        # NOTE(review): the mutable default {} is shared by every instance
        # constructed without an explicit cacher argument.
        self.cacher = cacher
        self.cookies = cookielib.CookieJar()
        self.debug = False
        self.trycount = trycount
    def _hash(self,data):
        # MD5 hex digest; download() uses this on the URL as the cache key.
        h = md5.new()
        h.update(data)
        return h.hexdigest()
    def build_opener(self,url,postdata=None,extraheaders={},forbid_redirect=False):
        # Build a urllib2 (Request, OpenerDirector) pair carrying Firefox-like
        # headers, this emulator's cookie jar, and optionally a handler that
        # refuses redirects.
        txheaders = {
            'Accept':'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
            'Accept-Language':'en,hu;q=0.8,en-us;q=0.5,hu-hu;q=0.3',
            # 'Accept-Encoding': 'gzip, deflate',
            'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
            # 'Keep-Alive': '300',
            # 'Connection': 'keep-alive',
            # 'Cache-Control': 'max-age=0',
        }
        for key,value in extraheaders.iteritems():
            txheaders[key] = value
        req = urllib2.Request(url, postdata, txheaders)
        self.cookies.add_cookie_header(req)
        if forbid_redirect:
            redirector = HTTPNoRedirector()
        else:
            redirector = urllib2.HTTPRedirectHandler()
        http_handler = urllib2.HTTPHandler(debuglevel=self.debug)
        https_handler = urllib2.HTTPSHandler(debuglevel=self.debug)
        u = urllib2.build_opener(http_handler,https_handler,urllib2.HTTPCookieProcessor(self.cookies),redirector)
        u.addheaders = [('User-Agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; hu-HU; rv:1.7.8) Gecko/20050511 Firefox/1.0.4')]
        if not postdata is None:
            req.add_data(postdata)
        return (req,u)
    def download(self,url,postdata=None,extraheaders={},forbid_redirect=False,
            trycount=None,fd=None,onprogress=None,only_head=False):
        """Download an URL with GET or POST methods.
        @param postdata: It can be a string that will be POST-ed to the URL.
            When None is given, the method will be GET instead.
        @param extraheaders: You can add/modify HTTP headers with a dict here.
        @param forbid_redirect: Set this flag if you do not want to handle
            HTTP 301 and 302 redirects.
        @param trycount: Specify the maximum number of retries here.
            0 means no retry on error. Using -1 means infinite retring.
            None means the default value (that is self.trycount).
        @param fd: You can pass a file descriptor here. In this case,
            the data will be written into the file. Please note that
            when you save the raw data into a file then it won't be cached.
        @param onprogress: A function that has two parameters:
            the size of the resource and the downloaded size. This will be
            called for each 1KB chunk. (If the HTTP header does not contain
            the content-length field, then the size parameter will be zero!)
        @param only_head: Create the openerdirector and return it. In other
            words, this will not retrieve any content except HTTP headers.
        @return: The raw HTML page data, unless fd was specified. When fd
            was given, the return value is undefined.
        """
        if trycount is None:
            trycount = self.trycount
        cnt = 0
        while True:
            try:
                # NOTE(review): the cache key is derived from the URL only, so
                # POST requests to the same URL share one cache entry.
                key = self._hash(url)
                if (self.cacher is None) or (not self.cacher.has_key(key)):
                    req,u = self.build_opener(url,postdata,extraheaders,forbid_redirect)
                    openerdirector = u.open(req)
                    if self.debug:
                        print req.get_method(),url
                        print openerdirector.code,openerdirector.msg
                        print openerdirector.headers
                    self.cookies.extract_cookies(openerdirector,req)
                    if only_head:
                        return openerdirector
                    if openerdirector.headers.has_key('content-length'):
                        length = long(openerdirector.headers['content-length'])
                    else:
                        length = 0
                    dlength = 0
                    if fd:
                        # Stream 1 KB chunks straight into the caller's file.
                        while True:
                            data = openerdirector.read(1024)
                            dlength += len(data)
                            fd.write(data)
                            if onprogress:
                                onprogress(length,dlength)
                            if not data:
                                break
                    else:
                        # Accumulate 1 KB chunks in memory.
                        data = ''
                        while True:
                            newdata = openerdirector.read(1024)
                            dlength += len(newdata)
                            data += newdata
                            if onprogress:
                                onprogress(length,dlength)
                            if not newdata:
                                break
                    #data = openerdirector.read()
                    if not (self.cacher is None):
                        self.cacher[key] = data
                else:
                    data = self.cacher[key]
                #try:
                #    d2= GzipFile(fileobj=cStringIO.StringIO(data)).read()
                #    data = d2
                #except IOError:
                #    pass
                return data
            except urllib2.URLError:
                cnt += 1
                if (trycount > -1) and (trycount < cnt):
                    raise
                # Retry :-)
                if self.debug:
                    print "MozillaEmulator: urllib2.URLError, retryting ",cnt
    def post_multipart(self,url,fields, files, forbid_redirect=True):
        """Post fields and files to an http host as multipart/form-data.
        fields is a sequence of (name, value) elements for regular form fields.
        files is a sequence of (name, filename, value) elements for data to be uploaded as files
        Return the server's response page.
        """
        content_type, post_data = encode_multipart_formdata(fields, files)
        result = self.download(url,post_data,{
                'Content-Type': content_type,
                'Content-Length': str(len(post_data))
            },forbid_redirect=forbid_redirect
        )
        return result
class HTTPNoRedirector(urllib2.HTTPRedirectHandler):
    """This is a custom http redirect handler that FORBIDS redirection."""
    def http_error_302(self, req, fp, code, msg, headers):
        # Instead of following the redirect, raise the HTTPError; for 301/302
        # the redirect target (Location/URI header) is attached as e.newurl
        # so the caller can inspect it.
        e = urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
        if e.code in (301,302):
            if 'location' in headers:
                newurl = headers.getheaders('location')[0]
            elif 'uri' in headers:
                newurl = headers.getheaders('uri')[0]
            e.newurl = newurl
        raise e
def encode_multipart_formdata(fields, files):
    """
    Encode form fields and file payloads as a multipart/form-data body.
    fields is a sequence of (name, value) elements for regular form fields.
    files is a sequence of (name, filename, value) elements for data to be uploaded as files
    Return (content_type, body) ready for httplib.HTTP instance
    """
    BOUNDARY = '----------ThIs_Is_tHe_bouNdaRY_$'
    CRLF = '\r\n'
    parts = []
    for name, value in fields:
        parts.extend([
            '--' + BOUNDARY,
            'Content-Disposition: form-data; name="%s"' % name,
            '',
            value,
        ])
    for name, filename, value in files:
        parts.extend([
            '--' + BOUNDARY,
            'Content-Disposition: form-data; name="%s"; filename="%s"' % (name, filename),
            'Content-Type: %s' % get_content_type(filename),
            '',
            value,
        ])
    # Closing boundary plus a trailing CRLF.
    parts.extend(['--' + BOUNDARY + '--', ''])
    content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
    return content_type, CRLF.join(parts)
def get_content_type(filename):
    """Guess the MIME type for *filename*, defaulting to a generic binary type."""
    guessed, _encoding = mimetypes.guess_type(filename)
    if guessed is None:
        return 'application/octet-stream'
    return guessed
# HOW TO USE
# NOTE(review): this demo code runs at module import time and performs live
# network requests against placeholder hosts ("somesite.net"); it also reads
# inventory.txt from the current directory. It looks like example code that
# should be guarded or commented out — confirm intent.
dl = MozillaEmulator()
# Make sure that we get cookies from the server before logging in
frontpage = dl.download("https://somesite.net/login.php")
# Sign in POST
post_data = "action=sign_in&username=user1&password=pwd1"
page = dl.download("https://somesite.net/sign_in.php",post_data)
if "Welcome" in page:
    # Send a file
    fdata = file("inventory.txt","rb").read()
    # NOTE(review): upload host 'somesimte.net' differs from the login host
    # 'somesite.net' — possible typo, verify.
    dl.post_multipart('https://somesimte.net/upload-file.php',
        [('uploadType','Inventory'),('otherfield','othervalue')],
        [('uploadFileName','inventory.txt',fdata)]
    )
| |
"""
Project related model.
"""
from string import capwords
from zope.interface import implementer
from sqlalchemy import (
Column,
String,
Boolean,
Unicode,
Integer,
ForeignKey,
desc,
UniqueConstraint,
and_,
cast,
Text,
Index,
)
from sqlalchemy.orm import relationship, backref, joinedload
from sqlalchemy.dialects.postgresql import TSVECTOR
from sqlalchemy.sql.expression import func
from clld.interfaces import ISource, ILanguage
from clld.db.meta import DBSession, Base, CustomModelMixin
from clld.db.models.common import (
Language, Source, IdNameDescriptionMixin, IdentifierType, Identifier, Parameter,
)
from clld.util import DeclEnum
from glottolog3.interfaces import IProvider
from glottolog3.config import github
def get_parameter(pid):
    """Return the Parameter with id *pid*, eagerly loading its domain.

    Raises if zero or more than one row matches (``.one()``).
    """
    return DBSession.query(Parameter)\
        .filter(Parameter.id == pid)\
        .options(joinedload(Parameter.domain))\
        .one()
def get_source(key):
    # Look up a Ref by its provider-specific Refprovider id; raises if not
    # exactly one match. NOTE(review): Ref is defined elsewhere in this
    # module — confirm import/ordering.
    return DBSession.query(Ref).join(Refprovider).filter(Refprovider.id == key).one()
@implementer(IProvider)
class Provider(Base, IdNameDescriptionMixin):
    """A provider of bibliographical data, i.e. entries in Ref.

    Legacy-to-current column mapping:
    name -> id
    description -> name
    comment -> description
    """
    # Short display abbreviation of the provider.
    abbr = Column(Unicode)
    # Homepage of the provider.
    url = Column(Unicode)
    # if refurl is given, we can construct direct links to the provider's catalog ...
    refurl = Column(Unicode)
    # ... using the content of the bibfield attribute of a Ref instance.
    bibfield = Column(Unicode)
    @property
    def github_url(self):
        # Link to this provider's BibTeX file in the data repository.
        return github('references/bibtex/{0}.bib'.format(self.id))
class Doctype(Base, IdNameDescriptionMixin):
    """Document type of a reference.

    Legacy-to-current column mapping:
    id -> pk
    id -> id
    abbr -> abbr
    name -> name
    """
    abbr = Column(Unicode)
    # Rank used for ordering doctypes in listings.
    ord = Column(Integer)
    def __str__(self):
        # Human-readable label: underscores become spaces, words capitalized.
        return capwords(self.name.replace('_', ' '))
class Refdoctype(Base):
    """Association table linking references (Ref) to document types."""
    __table_args__ = (UniqueConstraint('ref_pk', 'doctype_pk'),)
    doctype_pk = Column(Integer, ForeignKey('doctype.pk'), nullable=False)
    ref_pk = Column(Integer, ForeignKey('ref.pk'), nullable=False)
class Refprovider(Base):
    """Association of a Ref with the Provider it came from.

    ``id`` is the provider-specific identifier of the reference and is
    globally unique.
    """
    __table_args__ = (UniqueConstraint('ref_pk', 'provider_pk', 'id'),)
    provider_pk = Column(Integer, ForeignKey('provider.pk'), nullable=False)
    ref_pk = Column(Integer, ForeignKey('ref.pk'), nullable=False)
    id = Column(Unicode, unique=True, nullable=False)
    provider = relationship(Provider)
    @classmethod
    def get_stats(cls):
        # Map provider pk -> number of associated references,
        # computed in one grouped query, largest counts first.
        return {
            row[0]: row[1] for row in
            DBSession.query(Provider.pk, func.count(cls.ref_pk).label('c'))
            .filter(Provider.pk == cls.provider_pk)
            .group_by(Provider.pk)
            .order_by(desc('c'))
            .all()}
#-----------------------------------------------------------------------------
# specialized common mapper classes
#-----------------------------------------------------------------------------
class LanguoidLevel(DeclEnum):
    # The three levels of the Glottolog classification tree;
    # each member is (db value, description).
    family = 'family', 'family'
    language = 'language', 'language'
    dialect = 'dialect', 'dialect'
@implementer(ILanguage)
class Languoid(CustomModelMixin, Language):
    """
    id -> pk
    alnumcode -> id
    primaryname -> name
    names, codes -> languageidentifier
    refs -> languagesource
    """
    GLOTTOLOG_NAME = u'glottolog'
    pk = Column(Integer, ForeignKey('language.pk'), primary_key=True)
    # hid is the id of a language in Harald's classification. I.e. if hid is None, the
    # languoid is not a H-Language.
    hid = Column(Unicode, unique=True)
    # Direct parent and top-level family, both self-referential.
    father_pk = Column(Integer, ForeignKey('languoid.pk'))
    family_pk = Column(Integer, ForeignKey('languoid.pk'))
    level = Column(LanguoidLevel.db_type())
    bookkeeping = Column(Boolean, default=False)
    category = Column(Unicode)
    # Newick representation of the subtree rooted at this languoid.
    newick = Column(Unicode)
    # Denormalized counts of children by level.
    child_family_count = Column(Integer)
    child_language_count = Column(Integer)
    child_dialect_count = Column(Integer)
    # Comma-separated macroarea names (see __rdf__ below).
    macroareas = Column(Unicode)
    # All languoids in this languoid's family (via family_pk).
    descendants = relationship(
        'Languoid',
        order_by='Languoid.name, Languoid.id',
        foreign_keys=[family_pk],
        backref=backref('family', remote_side=[pk]))
    # Direct children (via father_pk).
    children = relationship(
        'Languoid',
        order_by='Languoid.name, Languoid.id',
        foreign_keys=[father_pk],
        backref=backref('father', remote_side=[pk]))
    def get_identifier_objs(self, type_):
        """Return identifiers of `type_`; glottolog codes are synthesized from id."""
        if getattr(type_, 'value', type_) == IdentifierType.glottolog.value:
            return [
                Identifier(name=self.id, type=IdentifierType.glottolog.value)]
        return Language.get_identifier_objs(self, type_)
    def get_ancestors(self, session=None):
        """
        :return: Iterable of ancestors of self back to the top-level family.
        """
        session = session or DBSession
        # retrieve the ancestors ordered by distance, i.e. from direct parent
        # to top-level family:
        return session.query(Languoid)\
            .join(TreeClosureTable, and_(
                TreeClosureTable.parent_pk == Languoid.pk,
                TreeClosureTable.depth > 0))\
            .filter(TreeClosureTable.child_pk == self.pk)\
            .order_by(TreeClosureTable.depth)
    @property
    def github_url(self):
        """Link to this languoid's md.ini in the data repository tree."""
        path = [self.id]
        for l in self.get_ancestors():
            path.append(l.id)
        return github('languoids/tree/{0}/md.ini'.format('/'.join(reversed(path))))
    def __json__(self, req=None, core=False):
        """JSON serialization; with core=False the full classification is included."""
        def ancestor(l):
            r = {"name": l.name, "id": l.id}
            if req:
                r['url'] = req.resource_url(l)
            return r
        res = super(Languoid, self).__json__(req)
        if not core:
            res['classification'] = [ancestor(l) for l in reversed(list(self.get_ancestors()))]
            if self.iso_code:
                res[IdentifierType.iso.value] = self.iso_code
        res['macroareas'] = {}#{ma.id: ma.name for ma in self.macroareas}
        return res
    def get_geocoords(self):
        """
        :return: sqlalchemy Query selecting quadruples \
        (lid, primaryname, longitude, latitude) where lid is the Languoidbase.id of one\
        of the children of self.
        .. note::
            This method does not return the geo coordinates of the Languoid self, but of
            its descendants.
        """
        child_pks = DBSession.query(Languoid.pk)\
            .filter(Languoid.father_pk == self.pk).subquery()
        return DBSession.query(
            TreeClosureTable.parent_pk,
            Language.name,
            Language.longitude,
            Language.latitude,
            Language.id)\
            .filter(Language.pk == TreeClosureTable.child_pk)\
            .filter(TreeClosureTable.parent_pk.in_(child_pks))\
            .filter(Language.latitude != None)  # noqa: `!= None` is intentional, SQLAlchemy renders IS NOT NULL
    @property
    def valueset_dict(self):
        # Map parameter id -> valueset for quick lookup in classification().
        return {vs.parameter.id: vs for vs in self.valuesets}
    def classification(self, type_):
        """Return the family ('fc') or subclassification ('sc') valueset, if any."""
        assert type_ in ['fc', 'sc']
        return self.valueset_dict.get(type_)
    @property
    def fc(self):
        """Family classification valueset, only if it carries a description."""
        c = self.classification('fc')
        if c and c.description:
            return c
    @property
    def sc(self):
        """Subclassification valueset, only if it carries a description."""
        c = self.classification('sc')
        if c and c.description:
            return c
    def _crefs(self, t):
        # References attached to the 'fc' or 'sc' classification valueset.
        c = self.classification(t)
        if c:
            return list(c.references)
        return []
    @property
    def crefs(self):
        """All classification refs, newest first (missing years sort last)."""
        return sorted(
            self._crefs('fc') + self.screfs, key=lambda r: -(r.source.year_int or 0))
    @property
    def screfs(self):
        """
        The subclassification justification have a hereditary semantics. I.e.,
        if there is a reference to, e.g., Indo-European it justifies every child
        such as Germanic, Northwest Germanic and so on unless the child has its
        own justification (in which case that ref supersedes and takes over everything
        below). Suppose one is looking at a subfamily without its own
        explicit justification, then one should get the parent justification.
        """
        res = self._crefs('sc')
        if not res:
            if self.father:
                res = self.father.screfs
        return res
    def __rdf__(self, request):
        """Yield (predicate, object) pairs for RDF serialization."""
        gold_type = None
        if self.level == LanguoidLevel.family:
            gold_type = 'LanguageSubfamily' if self.father_pk else 'LanguageFamily'
        elif self.level == LanguoidLevel.language:
            gold_type = 'Language'
        elif self.level == LanguoidLevel.dialect:
            gold_type = 'Dialect'
        if gold_type:
            yield 'rdf:type', 'http://purl.org/linguistics/gold/' + gold_type
        if self.family:
            yield 'skos:broaderTransitive', request.resource_url(self.family)
        if self.father:
            yield 'skos:broader', request.resource_url(self.father)
        for child in self.children:
            yield 'skos:narrower', request.resource_url(child)
        if not self.active:
            yield 'skos:changeNote', 'obsolete'
        for area in (self.macroareas or '').split(', '):
            yield 'dcterms:spatial', area
    def jqtree(self, icon_map=None):
        """Return the languoid's family tree as nested dicts for the jqTree widget."""
        tree_ = []
        children_map = {}
        children_of_self = [c.pk for c in self.children]
        # One query for the whole family subtree, ordered so parents come
        # before their children (by closure-table depth).
        query = DBSession.query(
            Languoid.father_pk,
            Languoid.pk, Languoid.id, Languoid.name,
            Languoid.latitude, Languoid.hid,
            cast(Languoid.level, Text),
            Languoid.child_language_count, TreeClosureTable.depth)\
            .select_from(Languoid).join(TreeClosureTable,
                                        Languoid.pk == TreeClosureTable.child_pk)\
            .filter(TreeClosureTable.parent_pk == (self.family_pk or self.pk))\
            .order_by(TreeClosureTable.depth, Languoid.name)
        for row in query:
            fpk, cpk, id_, name, lat, hid, level, clc, depth = row
            # Only three-letter hids are ISO codes; discard others.
            if hid and len(hid) != 3:
                hid = None
            label = name
            if clc:
                label += ' (%s)' % clc
            #label = '%s [%s]' % (name, id_)
            #if level == 'language' and hid and len(hid) == 3:
            #    label += '[%s]' % hid
            node = {'id': id_, 'pk': cpk, 'iso': hid, 'level': level, 'label': label, 'children': []}
            if icon_map and id_ == self.id and lat:
                node['map_marker'] = icon_map[cpk]
            if cpk in children_of_self:
                node['child'] = True
                if icon_map and (level == 'family' or lat):
                    node['map_marker'] = icon_map[cpk]
            children_map[cpk] = node['children']
            if not fpk:
                tree_.append(node)
            else:
                if fpk not in children_map:
                    # this can be the case for dialects attached to inactive nodes
                    continue
                children_map[fpk].append(node)
        return tree_
# index datatables.Refs.default_order: newest-updated sources first, pk as a
# unique tie-breaker.
source_order_index = Index('source_updated_desc_pk_desc_key',
                           Source.updated.desc(), Source.pk.desc(), unique=True)
@implementer(ISource)
class Ref(CustomModelMixin, Source):
    """
    id -> pk
    bibtexkey -> id
    author + year -> name
    title -> description
    """
    pk = Column(Integer, ForeignKey('source.pk'), primary_key=True)
    # Postgres full-text search vector over the reference's fields.
    fts = Column(TSVECTOR)
    # (attribute name, human readable label) pairs used when rendering a Ref.
    field_labels = [
        ('author', 'author'),
        ('editor', 'editor'),
        ('year', 'year'),
        ('title', 'title'),
        ('address', 'city'),
        ('publisher', 'publisher'),
        ('pages', 'pages'),
        ('startpage', 'start page'),
        ('endpage', 'end page'),
        ('numberofpages', 'number of pages'),
        ('type', 'bibliographic type'),
        ('ozbib_id', 'OZBIB ID'),
    ]
    endpage_int = Column(Integer)
    # Language the reference is written in (name and code).
    inlg = Column(Unicode, index=True)
    inlg_code = Column(Unicode, index=True)
    subject = Column(Unicode)
    subject_headings = Column(Unicode)
    keywords = Column(Unicode)
    normalizedauthorstring = Column(Unicode)
    normalizededitorstring = Column(Unicode)
    ozbib_id = Column(Integer)
    language_note = Column(Unicode)
    srctrickle = Column(Unicode)
    # Google Books and Internet Archive identifiers.
    gbid = Column(Unicode)
    iaid = Column(Unicode)
    #: store the trigger for computerized assignment of languages
    ca_language_trigger = Column(Unicode)
    #: store the trigger for computerized assignment of doctype
    ca_doctype_trigger = Column(Unicode)
    doctypes = relationship(
        Doctype,
        secondary=Refdoctype.__table__,
        order_by='Doctype.ord',
        backref=backref(
            'refs', order_by='Source.author, Source.year, Source.description'))
    macroareas = Column(Unicode)
    # MED = Most Extensive Description bookkeeping.
    med_index = Column(Integer)
    med_pages = Column(Integer)
    med_type = Column(Unicode)
    providers = relationship(
        Provider,
        viewonly=True,
        secondary=Refprovider.__table__,
        order_by='Provider.id',
        backref=backref(
            'refs', order_by='Source.author, Source.year, Source.description'))
    bibkeys = relationship(Refprovider, order_by='Refprovider.provider_pk', viewonly=True)
    def __rdf__(self, request):
        # NOTE(review): the docstring below appears to be the mako markup used
        # for RDF serialization; the method body itself is empty and returns
        # None -- confirm how the rdf machinery consumes it.
        """
        % for provider in ctx.providers:
        <dcterms:provenance rdf:parseType="Resource">
        <dcterms:description rdf:resource="${request.route_url('langdoc.meta', _anchor='provider-' + str(provider.id))}"/>
        <rdfs:label>${provider.description}</rdfs:label>
        </dcterms:provenance>
        % endfor
        % for ma in ctx.macroareas:
        <dcterms:spatial rdf:parseType="Resource">
        <dcterms:description rdf:resource="${request.route_url('home.glossary', _anchor='macroarea-'+str(ma.id))}"/>
        <rdfs:label>${ma.name}</rdfs:label>
        </dcterms:spatial>
        % endfor
        % for dt in ctx.doctypes:
        <dc:type rdf:parseType="Resource">
        <dcterms:description rdf:resource="${request.route_url('home.glossary', _anchor='doctype-'+str(dt.id))}"/>
        <rdfs:label>${dt.name}</rdfs:label>
        </dc:type>
        % endfor
        """
    def __bibtex__(self):
        """Return non-standard bibtex fields as a dict of strings."""
        res = {}
        for attr in 'inlg inlg_code subject subject_headings keywords ozbib_id'.split():
            v = getattr(self, attr, None)
            if v:
                res[attr] = '%s' % v
        return res
Refprovider.ref = relationship(Ref)
class TreeClosureTable(Base):
    """Transitive closure of the languoid tree: one row per ancestor/descendant
    pair, with the distance between the two stored in `depth`.
    Note: Languoid.get_ancestors filters on depth > 0, which suggests the table
    also stores depth-0 self-pairs -- confirm against the loader.
    """
    __table_args__ = (UniqueConstraint('parent_pk', 'child_pk'),)
    parent_pk = Column(Integer, ForeignKey('languoid.pk'))
    child_pk = Column(Integer, ForeignKey('languoid.pk'))
    depth = Column(Integer)
class LegacyCode(Base):
    """A languoid code from an earlier Glottolog release, linking to the
    archived static page of that release.
    """
    id = Column(String, unique=True)
    version = Column(String)
    def url(self, req):
        """Return the static URL of the archived page for this code."""
        # 'clld.files' is path-like (supports the '/' operator) -- the archived
        # pages live under files/glottolog-<version>/<id>.html.
        files = req.registry.settings['clld.files']
        page_url = str(files / 'glottolog-{0}'.format(self.version) / '{0}.html'.format(self.id))
        return req.static_url(page_url)
| |
import datetime
import re
from urllib import urlencode
from xmlrpclib import Fault, loads
from backlinks.models import InboundBacklink
from backlinks.tests.xmlrpc import TestClientServerProxy
from django import template
from django.contrib.auth.models import User
from django.test import TestCase, Client
from committees.models import Committee
from laws.models import Law, PrivateProposal, Bill, Vote, VoteAction
from mks.mock import PINGABLE_MEMBER_ID
from mks.models import Party, Member
from mks.tests.base import TRACKBACK_CONTENT_TYPE
class MemberBacklinksViewsTestCase(TestCase):
    """Exercise the pingback (XML-RPC) and trackback (POST) views for Members.

    Creates two members -- mk_1 with backlinks enabled, mk_2 without -- plus
    the committee/law/vote fixtures referenced by the linking documents, then
    drives the pingback server and trackback endpoints through success and
    error paths.

    Fixes over the previous revision: Python-2-only ``except Fault, f`` /
    ``raise exc, msg`` syntax replaced with the forward-compatible forms,
    deprecated ``assertEquals`` replaced with ``assertEqual``, and a
    copy-pasted failure message corrected in testPingSourceURILinks.
    """
    urls = 'mks.server_urls'

    def setUp(self):
        """Build members, meetings, a bill and a vote for the link targets."""
        super(MemberBacklinksViewsTestCase, self).setUp()
        self.party_1 = Party.objects.create(name='party 1')
        self.party_2 = Party.objects.create(name='party 2')
        # mk_1 accepts backlinks, mk_2 does not; tests rely on this contrast.
        self.mk_1 = Member.objects.create(name='mk_1',
                                          start_date=datetime.date(2010, 1, 1),
                                          current_party=self.party_1,
                                          backlinks_enabled=True)
        self.mk_2 = Member.objects.create(name='mk_2',
                                          start_date=datetime.date(2010, 1, 1),
                                          current_party=self.party_1,
                                          backlinks_enabled=False)
        self.jacob = User.objects.create_user('jacob', 'jacob@jacobian.org',
                                              'JKM')
        self.mk_1.save()
        self.mk_2.save()
        self.committee_1 = Committee.objects.create(name='c1')
        self.meeting_1 = self.committee_1.meetings.create(
            date=datetime.date.today() - datetime.timedelta(1),
            protocol_text='jacob:\nI am a perfectionist\nadrian:\nI have a deadline')
        self.meeting_2 = self.committee_1.meetings.create(
            date=datetime.date.today() - datetime.timedelta(2),
            protocol_text='adrian:\nYou are a perfectionist\njacob:\nYou have a deadline')
        self.law = Law.objects.create(title='law 1')
        self.pp = PrivateProposal.objects.create(
            title='private proposal 1',
            date=datetime.date.today() - datetime.timedelta(3))
        self.pp.proposers.add(self.mk_1)
        self.bill_1 = Bill.objects.create(stage='1', title='bill 1', law=self.law)
        self.bill_1.proposals.add(self.pp)
        self.bill_1.proposers.add(self.mk_1)
        self.meeting_1.mks_attended.add(self.mk_1)
        self.meeting_1.save()
        self.meeting_2.mks_attended.add(self.mk_1)
        self.meeting_2.save()
        self.vote = Vote.objects.create(title='vote 1', time=datetime.datetime.now())
        self.vote_action = VoteAction.objects.create(
            member=self.mk_1, vote=self.vote, type='for',
            party=self.mk_1.current_party)
        self.client = Client(SERVER_NAME='example.com')
        self.xmlrpc_client = TestClientServerProxy('/pingback/')
        self.PINGABLE_MEMBER_ID = str(self.mk_1.id)
        self.NON_PINGABLE_MEMBER_ID = str(self.mk_2.id)

    def trackbackPOSTRequest(self, path, params):
        """POST `params` form-encoded to `path` with the trackback content type."""
        return self.client.post(path, urlencode(params),
                                content_type=TRACKBACK_CONTENT_TYPE)

    def assertTrackBackErrorResponse(self, response, msg):
        """Fail with `msg` unless `response` is a trackback error document."""
        if response.content.find('<error>1</error>') == -1:
            raise self.failureException(msg)
    '''
    def testTrackBackRDFTemplateTag(self):
        t = template.Template("{% load trackback_tags %}{% trackback_rdf object_url object_title trackback_url True %}")
        c = template.Context({'trackback_url': '/trackback/member/'+self.PINGABLE_MEMBER_ID+'/',
                              'object_url': self.pingableTargetUrl,
                              'object_title': 'Pingable Test Entry'})
        rendered = t.render(c)
        link_re = re.compile(r'dc:identifier="(?P<link>[^"]+)"')
        match = link_re.search(rendered)
        self.assertTrue(bool(match), 'TrackBack RDF not rendered')
        self.assertEquals(match.groups('link')[0], self.pingableTargetUrl,
                          'TrackBack RDF did not contain a valid target URI')
        ping_re = re.compile(r'trackback:ping="(?P<link>[^"]+)"')
        match = ping_re.search(rendered)
        self.assertTrue(bool(match), 'TrackBack RDF not rendered')
        self.assertEquals(match.groups('link')[0], '/trackback/member/'+self.PINGABLE_MEMBER_ID+'/',
                          'TrackBack RDF did not contain a TrackBack server URI')
    '''
    def testPingNonLinkingSourceURI(self):
        """A source document that does not link to the target is rejected (fault 17)."""
        self.assertRaises(Fault,
                          self.xmlrpc_client.pingback.ping,
                          'http://example.com/bad-source-document/',
                          'http://example.com/member/' + PINGABLE_MEMBER_ID + '/')
        try:
            self.xmlrpc_client.pingback.ping('http://example.com/bad-source-document/',
                                             'http://example.com/member/' + PINGABLE_MEMBER_ID + '/')
        except Fault as f:
            self.assertEqual(f.faultCode,
                             17,
                             'Server did not return "source URI does not link" response')

    def testDisallowedMethod(self):
        """GET on the pingback endpoint must return 405."""
        response = self.client.get('/pingback/')
        self.assertEqual(response.status_code,
                         405,
                         'Server returned incorrect status code for disallowed HTTP method')

    def testNonExistentRPCMethod(self):
        """Calling an unknown XML-RPC method raises a Fault."""
        self.assertRaises(Fault, self.xmlrpc_client.foo)

    def testBadPostData(self):
        """Form-encoded (non-XML) POST data yields an unparsable XML-RPC response."""
        post_data = urlencode({'sourceURI': 'http://example.com/good-source-document/',
                               'targetURI': 'http://example.com/member/' + PINGABLE_MEMBER_ID + '/'})
        response = self.client.post('/pingback/', post_data, TRACKBACK_CONTENT_TYPE)
        self.assertRaises(Fault,
                          loads,
                          response.content)

    def testPingNonExistentTargetURI(self):
        """Pinging a target that does not exist returns fault 32."""
        self.assertRaises(Fault,
                          self.xmlrpc_client.pingback.ping,
                          'http://example.com/member/non-existent-resource/',
                          'http://example.com/member/non-existent-resource')
        try:
            self.xmlrpc_client.pingback.ping('http://example.com/member/non-existent-resource/',
                                             'http://example.com/member/non-existent-resource')
        except Fault as f:
            self.assertEqual(f.faultCode,
                             32,
                             'Server did not return "target does not exist" error')

    def testPingAlreadyRegistered(self):
        """Registering the same ping twice returns fault 48."""
        self.xmlrpc_client.pingback.ping('http://example.com/another-good-source-document/',
                                         'http://example.com/member/' + PINGABLE_MEMBER_ID + '/')
        self.assertRaises(Fault,
                          self.xmlrpc_client.pingback.ping,
                          'http://example.com/another-good-source-document/',
                          'http://example.com/member/' + PINGABLE_MEMBER_ID + '/')
        try:
            self.xmlrpc_client.pingback.ping('http://example.com/another-good-source-document/',
                                             'http://example.com/member/' + PINGABLE_MEMBER_ID + '/')
        except Fault as f:
            self.assertEqual(f.faultCode,
                             48,
                             'Server did not return "ping already registered" error')

    def testPingbackLinkTemplateTag(self):
        """The pingback_link tag renders an absolute <link rel="pingback"> element."""
        t = template.Template("{% load pingback_tags %}{% pingback_link pingback_path %}")
        c = template.Context({'pingback_path': '/pingback/'})
        rendered = t.render(c)
        link_re = re.compile(r'<link rel="pingback" href="([^"]+)" ?/?>')
        match = link_re.search(rendered)
        self.assertTrue(bool(match), 'Pingback link tag did not render')
        self.assertEqual(match.groups(0)[0], 'http://example.com/pingback/',
                         'Pingback link tag rendered incorrectly')

    def testPingNonPingableTargetURI(self):
        """Pinging a member with backlinks disabled returns fault 33."""
        self.assertRaises(Fault,
                          self.xmlrpc_client.pingback.ping,
                          'http://example.com/member/non-existent-resource/',
                          'http://example.com/member/' + str(self.NON_PINGABLE_MEMBER_ID) + '/')
        try:
            self.xmlrpc_client.pingback.ping('http://example.com/member/non-existent-resource/',
                                             'http://example.com/member/' + str(self.NON_PINGABLE_MEMBER_ID) + '/')
        except Fault as f:
            self.assertEqual(f.faultCode,
                             33,
                             'Server did not return "target not pingable" error')

    def testPingSourceURILinks(self):
        """A valid ping is registered and points at the pinged member."""
        r = self.xmlrpc_client.pingback.ping('http://example.com/good-source-document/',
                                             'http://example.com/member/' + self.PINGABLE_MEMBER_ID + '/')
        self.assertEqual(r,
                         "Ping from http://example.com/good-source-document/ to http://example.com/member/1/ registered",
                         "Failed registering ping")
        registered_ping = InboundBacklink.objects.get(source_url='http://example.com/good-source-document/',
                                                      target_url='http://example.com/member/' + self.PINGABLE_MEMBER_ID + '/')
        self.assertEqual(str(registered_ping.target_object.id),
                         PINGABLE_MEMBER_ID,
                         'Registered ping does not reference the pinged member')

    def testDisallowedTrackbackMethod(self):
        """GET on a trackback endpoint must return 405."""
        response = self.client.get('/trackback/member/' + PINGABLE_MEMBER_ID + '/')
        self.assertEqual(response.status_code,
                         405,
                         'Server returned incorrect status code for disallowed HTTP method')

    def testPingNoURLParameter(self):
        """A trackback ping without a url parameter is an error response."""
        params = {'title': 'Example', 'excerpt': 'Example'}
        response = self.trackbackPOSTRequest('/trackback/member/' + self.PINGABLE_MEMBER_ID + '/',
                                             params)
        self.assertTrackBackErrorResponse(response,
                                          'Server did not return error response'
                                          'for ping with no URL parameter')

    def testPingBadURLParameter(self):
        """A trackback ping with a malformed url parameter is an error response."""
        params = {'url': 'bad url'}
        response = self.trackbackPOSTRequest('http://example.com/trackback/member/' + self.PINGABLE_MEMBER_ID + '/',
                                             params)
        self.assertTrackBackErrorResponse(response,
                                          'Server did not return error response for ping with bad URL parameter')

    def testPingNonExistentTarget(self):
        """A trackback ping against an unknown member id is an error response."""
        params = {'url': 'http://example.com/good-source-document/'}
        response = self.trackbackPOSTRequest('/trackback/member/5000/',
                                             params)
        self.assertTrackBackErrorResponse(response,
                                          'Server did not return error response for ping against non-existent resource')

    def testPingNonPingableTarget(self):
        """A trackback ping against a backlinks-disabled member is an error response."""
        params = {'url': 'http://example.com/member/' + PINGABLE_MEMBER_ID + '/'}
        response = self.trackbackPOSTRequest('/trackback/member/' + self.NON_PINGABLE_MEMBER_ID + '/',
                                             params)
        self.assertTrackBackErrorResponse(response,
                                          'Server did not return error response for ping against non-pingable resource')

    def testPingSuccess(self):
        """A valid trackback ping is registered with its title and excerpt."""
        title = 'Backlinks Test - Test Good Source Document'
        excerpt = 'This is a summary of the good source document'
        params = {'url': 'http://example.com/good-source-document/', 'title': title, 'excerpt': excerpt}
        track_target = '/trackback/member/' + self.PINGABLE_MEMBER_ID + '/'
        response = self.trackbackPOSTRequest(track_target,
                                             params)
        self.assertTrue(response.content.find('<error>0</error>') > -1,
                        'Server did not return success response for a valid ping request')
        registered_ping = InboundBacklink.objects.get(source_url='http://example.com/good-source-document/',
                                                      target_url='http://example.com' + self.mk_1.get_absolute_url())
        self.assertEqual(registered_ping.title,
                         title,
                         'Server did not use title from ping request when registering')
        self.assertEqual(registered_ping.excerpt,
                         excerpt,
                         'Server did not use excerpt from ping request when registering')

    def tearDown(self):
        """Remove the fixtures created in setUp."""
        super(MemberBacklinksViewsTestCase, self).tearDown()
        self.party_1.delete()
        self.party_2.delete()
        self.mk_1.delete()
        self.mk_2.delete()
        self.jacob.delete()
| |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class implementing a multi-worker parameter server tf.distribute strategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.python.distribute import cross_device_ops as cross_device_ops_lib
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import input_lib
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.distribute import multi_worker_util
from tensorflow.python.distribute import numpy_dataset
from tensorflow.python.distribute import values
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from tensorflow.python.distribute.cluster_resolver import TFConfigClusterResolver
from tensorflow.python.eager import context
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import device_setter
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
# Default device used for variables and reductions when nothing else applies.
_LOCAL_CPU = "/device:CPU:0"
# TODO(yuefengz): maybe cache variables on local CPU.
@tf_export("distribute.experimental.ParameterServerStrategy", v1=[])
class ParameterServerStrategy(distribute_lib.Strategy):
  """An asynchronous multi-worker parameter server tf.distribute strategy.
  This strategy requires two roles: workers and parameter servers. Variables and
  updates to those variables will be assigned to parameter servers and other
  operations are assigned to workers.
  When each worker has more than one GPU, operations will be replicated on all
  GPUs. Even though operations may be replicated, variables are not and each
  worker shares a common view for which parameter server a variable is assigned
  to.
  By default it uses `TFConfigClusterResolver` to detect configurations for
  multi-worker training. This requires a 'TF_CONFIG' environment variable and
  the 'TF_CONFIG' must have a cluster spec.
  This class assumes each worker is running the same code independently, but
  parameter servers are running a standard server. This means that while each
  worker will synchronously compute a single gradient update across all GPUs,
  updates between workers proceed asynchronously. Operations that occur only on
  the first replica (such as incrementing the global step), will occur on the
  first replica *of every worker*.
  It is expected to call `call_for_each_replica(fn, ...)` for any
  operations which potentially can be replicated across replicas (i.e. multiple
  GPUs) even if there is only CPU or one GPU. When defining the `fn`, extra
  caution needs to be taken:
  1) It is generally not recommended to open a device scope under the strategy's
  scope. A device scope (i.e. calling `tf.device`) will be merged with or
  override the device for operations but will not change the device for
  variables.
  2) It is also not recommended to open a colocation scope (i.e. calling
  `tf.compat.v1.colocate_with`) under the strategy's scope. For colocating
  variables, use `strategy.extended.colocate_vars_with` instead. Colocation of
  ops will possibly create device assignment conflicts.
  Note: This strategy only works with the Estimator API. Pass an instance of
  this strategy to the `experimental_distribute` argument when you create the
  `RunConfig`. This instance of `RunConfig` should then be passed to the
  `Estimator` instance on which `train_and_evaluate` is called.
  For Example:
  ```
  strategy = tf.distribute.experimental.ParameterServerStrategy()
  run_config = tf.estimator.RunConfig(
      experimental_distribute.train_distribute=strategy)
  estimator = tf.estimator.Estimator(config=run_config)
  tf.estimator.train_and_evaluate(estimator,...)
  ```
  """
  def __init__(self, cluster_resolver=None):
    """Initializes this strategy with an optional `cluster_resolver`.
    Args:
      cluster_resolver: Optional
        `tf.distribute.cluster_resolver.ClusterResolver` object. Defaults to a
        `tf.distribute.cluster_resolver.TFConfigClusterResolver`.
    Raises:
      ValueError: if the resolver's cluster spec is empty.
    """
    if cluster_resolver is None:
      cluster_resolver = TFConfigClusterResolver()
    if not cluster_resolver.cluster_spec():
      raise ValueError("Cluster spec must be non-empty in `cluster_resolver`.")
    extended = ParameterServerStrategyExtended(
        self, cluster_resolver=cluster_resolver)
    super(ParameterServerStrategy, self).__init__(extended)
    # Record strategy choice and cluster size in the V2 usage metrics.
    distribute_lib.distribution_strategy_gauge.get_cell("V2").set(
        "ParameterServerStrategy")
    distribute_lib.distribution_strategy_replica_gauge.get_cell("num_ps").set(
        len(self.extended.parameter_devices))
@tf_export(v1=["distribute.experimental.ParameterServerStrategy"])  # pylint: disable=missing-docstring
class ParameterServerStrategyV1(distribute_lib.StrategyV1):
  # Reuse the V2 class documentation verbatim.
  __doc__ = ParameterServerStrategy.__doc__
  def __init__(self, cluster_resolver=None):
    """Initializes this strategy."""
    super(ParameterServerStrategyV1, self).__init__(
        ParameterServerStrategyExtended(
            self, cluster_resolver=cluster_resolver))
    # Record strategy choice and cluster size in the V1 usage metrics.
    distribute_lib.distribution_strategy_gauge.get_cell("V1").set(
        "ParameterServerStrategy")
    distribute_lib.distribution_strategy_replica_gauge.get_cell("num_ps").set(
        len(self.extended.parameter_devices))
  __init__.__doc__ = ParameterServerStrategy.__init__.__doc__
# TODO(josh11b): Switch to V2 when we no longer need to support tf.compat.v1.
class ParameterServerStrategyExtended(distribute_lib.StrategyExtendedV1):
"""Implementation of ParameterServerStrategy and CentralStorageStrategy."""
  def __init__(self,
               container_strategy,
               cluster_resolver=None,
               compute_devices=None,
               parameter_device=None):
    """Sets up devices; `compute_devices`/`parameter_device` apply to the
    local (CentralStorageStrategy) path only."""
    super(ParameterServerStrategyExtended, self).__init__(container_strategy)
    self._initialize_strategy(
        cluster_resolver=cluster_resolver,
        compute_devices=compute_devices,
        parameter_device=parameter_device)
    # We typically don't need to do all-reduce in this strategy.
    self._cross_device_ops = (
        cross_device_ops_lib.ReductionToOneDevice(reduce_to_device=_LOCAL_CPU))
def _initialize_strategy(self,
cluster_resolver=None,
compute_devices=None,
parameter_device=None):
if cluster_resolver and cluster_resolver.cluster_spec():
self._initialize_multi_worker(cluster_resolver)
else:
self._initialize_local(
compute_devices, parameter_device, cluster_resolver=cluster_resolver)
  def _initialize_multi_worker(self, cluster_resolver):
    """Initialize devices for multiple workers.
    It creates variable devices and compute devices. Variables and operations
    will be assigned to them respectively. We have one compute device per
    replica. The variable device is a device function or device string. The
    default variable device assigns variables to parameter servers in a
    round-robin fashion.
    Args:
      cluster_resolver: a descendant of `ClusterResolver` object.
    Raises:
      ValueError: if the cluster doesn't have ps jobs, or task_type/task_id
        are missing from the resolver.
    """
    # TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
    # some cases.
    if isinstance(cluster_resolver, TFConfigClusterResolver):
      num_gpus = context.num_gpus()
    else:
      num_gpus = cluster_resolver.num_accelerators().get("GPU", 0)
    # Save the num_gpus_per_worker for configure method.
    self._num_gpus_per_worker = num_gpus
    cluster_spec = cluster_resolver.cluster_spec()
    task_type = cluster_resolver.task_type
    task_id = cluster_resolver.task_id
    if not task_type or task_id is None:
      raise ValueError("When `cluster_spec` is given, you must also specify "
                       "`task_type` and `task_id`")
    cluster_spec = multi_worker_util.normalize_cluster_spec(cluster_spec)
    assert cluster_spec.as_dict()
    worker_device = "/job:%s/task:%d" % (task_type, task_id)
    self._input_host_device = numpy_dataset.SingleDevice(worker_device)
    # Define compute devices which is a list of device strings and one for each
    # replica. When there are GPUs, replicate operations on these GPUs.
    # Otherwise, place operations on CPU.
    if num_gpus > 0:
      compute_devices = tuple(
          "%s/device:GPU:%d" % (worker_device, i) for i in range(num_gpus))
    else:
      compute_devices = (worker_device,)
    self._compute_devices = [
        device_util.canonicalize(d) for d in compute_devices]
    self._input_workers = input_lib.InputWorkers(
        [(worker_device, compute_devices)])
    # In distributed mode, place variables on ps jobs in a round-robin fashion.
    # Note that devices returned from `replica_device_setter` are not
    # canonical and therefore we don't canonicalize all variable devices to
    # make them consistent.
    # TODO(yuefengz): support passing a strategy object to control variable
    # assignment.
    # TODO(yuefengz): merge the logic of replica_device_setter into this
    # class.
    num_ps_replicas = len(cluster_spec.as_dict().get("ps", []))
    if num_ps_replicas == 0:
      raise ValueError("The cluster spec needs to have `ps` jobs.")
    self._variable_device = device_setter.replica_device_setter(
        ps_tasks=num_ps_replicas,
        worker_device=worker_device,
        merge_devices=True,
        cluster=cluster_spec)
    # The `_parameter_devices` is needed for the `parameter_devices` property
    # and is a list of all variable devices. Here parameter devices are all
    # tasks of the "ps" job.
    self._parameter_devices = tuple(map("/job:ps/task:{}".format,
                                        range(num_ps_replicas)))
    # Add a default device so that ops without specified devices will not end up
    # on other workers.
    self._default_device = worker_device
    # Chief bookkeeping: only the chief task performs once-per-cluster work.
    self._is_chief = multi_worker_util.is_chief(cluster_spec, task_type,
                                                task_id)
    self._cluster_spec = cluster_spec
    self._task_type = task_type
    self._task_id = task_id
    logging.info(
        "Multi-worker ParameterServerStrategy with "
        "cluster_spec = %r, task_type = %r, task_id = %r, "
        "num_ps_replicas = %r, is_chief = %r, compute_devices = %r, "
        "variable_device = %r", cluster_spec.as_dict(), task_type, task_id,
        num_ps_replicas, self._is_chief, self._compute_devices,
        self._variable_device)
# TODO(yuefengz): get rid of cluster_resolver argument when contrib's
# version no longer depends on this class.
  def _initialize_local(self,
                        compute_devices,
                        parameter_device,
                        cluster_resolver=None):
    """Initialize local devices for training.
    Single-machine (CentralStorageStrategy) setup: all compute devices live on
    this host and variables are placed on one parameter device.
    """
    worker_device = device_util.canonicalize("/device:CPU:0")
    self._input_host_device = numpy_dataset.SingleDevice(worker_device)
    if compute_devices is None:
      if not cluster_resolver:
        num_gpus = context.num_gpus()
      else:
        num_gpus = cluster_resolver.num_accelerators().get("GPU", 0)
      # Save the num_gpus_per_worker for configure method which is used by the
      # contrib version.
      self._num_gpus_per_worker = num_gpus
      compute_devices = device_util.local_devices_from_num_gpus(num_gpus)
    compute_devices = [device_util.canonicalize(d) for d in compute_devices]
    if parameter_device is None:
      # If there is only one GPU, put everything on that GPU. Otherwise, place
      # variables on CPU.
      if len(compute_devices) == 1:
        parameter_device = compute_devices[0]
      else:
        parameter_device = _LOCAL_CPU
    self._input_workers = input_lib.InputWorkers(
        [(worker_device, compute_devices)])
    self._variable_device = parameter_device
    self._compute_devices = compute_devices
    self._parameter_devices = (parameter_device,)
    # Single machine: this task is always chief and has no cluster identity.
    self._is_chief = True
    self._cluster_spec = None
    self._task_type = None
    self._task_id = None
    logging.info(
        "ParameterServerStrategy (CentralStorageStrategy if you are using a "
        "single machine) with compute_devices = %r, variable_device = %r",
        compute_devices, self._variable_device)
  def _validate_colocate_with_variable(self, colocate_with_variable):
    # Delegate to values.validate_colocate, which rejects variables that were
    # not created under this strategy.
    values.validate_colocate(colocate_with_variable, self)
  def _experimental_distribute_dataset(self, dataset):
    # Distribute `dataset` over the local input workers; the batch is split by
    # the number of in-sync replicas so each replica gets its share.
    return input_lib.get_distributed_dataset(
        dataset,
        self._input_workers,
        self._container_strategy(),
        split_batch_by=self._num_replicas_in_sync)
  def _make_dataset_iterator(self, dataset):
    # V1-style iterator over `dataset`, split across the in-sync replicas.
    return input_lib.DatasetIterator(
        dataset,
        self._input_workers,
        self._container_strategy(),
        split_batch_by=self._num_replicas_in_sync)
  def _make_input_fn_iterator(
      self,
      input_fn,
      replication_mode=distribute_lib.InputReplicationMode.PER_WORKER):
    """Distributes the dataset to each local GPU."""
    # In multi-worker mode each worker owns one input pipeline identified by
    # its position in the cluster; locally there is a single pipeline.
    if self._cluster_spec:
      input_pipeline_id = multi_worker_util.id_in_cluster(
          self._cluster_spec, self._task_type, self._task_id)
      num_input_pipelines = multi_worker_util.worker_count(
          self._cluster_spec, self._task_type)
    else:
      input_pipeline_id = 0
      num_input_pipelines = 1
    input_context = distribute_lib.InputContext(
        num_input_pipelines=num_input_pipelines,
        input_pipeline_id=input_pipeline_id,
        num_replicas_in_sync=self._num_replicas_in_sync)
    return input_lib.InputFunctionIterator(input_fn, self._input_workers,
                                           [input_context],
                                           self._container_strategy())
  def _experimental_make_numpy_dataset(self, numpy_input, session):
    """Wrap `numpy_input` as a dataset hosted on the input host device."""
    return numpy_dataset.one_host_numpy_dataset(
        numpy_input, self._input_host_device, session)
  def _experimental_distribute_datasets_from_function(self, dataset_fn):
    """Call `dataset_fn` with an `InputContext` describing this worker's shard."""
    # Mirrors _make_input_fn_iterator: one pipeline per worker in a cluster,
    # a single pipeline when running locally.
    if self._cluster_spec:
      input_pipeline_id = multi_worker_util.id_in_cluster(
          self._cluster_spec, self._task_type, self._task_id)
      num_input_pipelines = multi_worker_util.worker_count(
          self._cluster_spec, self._task_type)
    else:
      input_pipeline_id = 0
      num_input_pipelines = 1
    input_context = distribute_lib.InputContext(
        num_input_pipelines=num_input_pipelines,
        input_pipeline_id=input_pipeline_id,
        num_replicas_in_sync=self._num_replicas_in_sync)
    return input_lib.get_distributed_datasets_from_function(
        dataset_fn,
        self._input_workers,
        [input_context],
        self._container_strategy())
  def _experimental_distribute_values_from_function(self, value_fn):
    # TODO(b/137795644): Implement this method for ParameterServerStrategy if
    # needed.
    raise NotImplementedError("_experimental_distribute_values_from_function "
                              "not yet implemented in ParameterServerStrategy.")
  def _broadcast_to(self, tensor, destinations):
    """Broadcast `tensor` to `destinations` (all compute devices by default)."""
    # This is both a fast path for Python constants, and a way to delay
    # converting Python values to a tensor until we know what type it
    # should be converted to. Otherwise we have trouble with:
    #   global_step.assign_add(1)
    # since the `1` gets broadcast as an int32 but global_step is int64.
    if isinstance(tensor, (float, int)):
      return tensor
    if not cross_device_ops_lib.check_destinations(destinations):
      # TODO(josh11b): Use current logical device instead of 0 here.
      destinations = self._compute_devices
    return self._cross_device_ops.broadcast(tensor, destinations)
  def _allow_variable_partition(self):
    # Partitioned variables are only supported in graph mode.
    return not context.executing_eagerly()
  # TODO(yuefengz): Not all ops in device_setter.STANDARD_PS_OPS will go through
  # this creator, such as "MutableHashTable".
  def _create_variable(self, next_creator, **kwargs):
    """Create a variable placed on the parameter device.

    With more than one replica in sync the variable is wrapped in an
    AggregatingVariable so per-replica updates are combined according to the
    requested aggregation mode.
    """
    if self._num_replicas_in_sync > 1:
      aggregation = kwargs.pop("aggregation", vs.VariableAggregation.NONE)
      if aggregation not in (
          vs.VariableAggregation.NONE,
          vs.VariableAggregation.SUM,
          vs.VariableAggregation.MEAN,
          vs.VariableAggregation.ONLY_FIRST_REPLICA
      ):
        raise ValueError("Invalid variable aggregation mode: " + aggregation +
                         " for variable: " + kwargs["name"])

      def var_creator(**kwargs):
        """Create an AggregatingVariable and fix up collections."""
        # Record what collections this variable should be added to.
        collections = kwargs.pop("collections", None)
        if collections is None:
          collections = [ops.GraphKeys.GLOBAL_VARIABLES]
        # Suppress collection handling in next_creator(); the wrapper (not
        # the inner variable) is added to the collections below.
        kwargs["collections"] = []

        # Create and wrap the variable.
        v = next_creator(**kwargs)
        wrapped = values.AggregatingVariable(
            self._container_strategy(), v, aggregation)

        # Add the wrapped variable to the requested collections.
        # The handling of eager mode and the global step matches
        # ResourceVariable._init_from_args().
        if not context.executing_eagerly():
          g = ops.get_default_graph()
          # If "trainable" is True, next_creator() will add the contained
          # variable to the TRAINABLE_VARIABLES collection, so we manually
          # remove it and replace with the wrapper. We can't set "trainable"
          # to False for next_creator() since that causes functions like
          # implicit_gradients to skip those variables.
          if kwargs.get("trainable", True):
            collections.append(ops.GraphKeys.TRAINABLE_VARIABLES)
            l = g.get_collection_ref(ops.GraphKeys.TRAINABLE_VARIABLES)
            if v in l:
              l.remove(v)
          g.add_to_collections(collections, wrapped)
        elif ops.GraphKeys.GLOBAL_STEP in collections:
          ops.add_to_collections(ops.GraphKeys.GLOBAL_STEP, wrapped)

        return wrapped
    else:
      var_creator = next_creator

    if "colocate_with" in kwargs:
      colocate_with = kwargs["colocate_with"]
      if isinstance(colocate_with, numpy_dataset.SingleDevice):
        with ops.device(colocate_with.device):
          return var_creator(**kwargs)
      # Clear any inherited device before colocating with the given op/var.
      with ops.device(None):
        with ops.colocate_with(colocate_with):
          return var_creator(**kwargs)

    # Default placement: the parameter device, ignoring any surrounding
    # colocation constraints.
    with ops.colocate_with(None, ignore_existing=True):
      with ops.device(self._variable_device):
        return var_creator(**kwargs)
  def _call_for_each_replica(self, fn, args, kwargs):
    # Reuse MirroredStrategy's replica dispatch over the local compute devices.
    # pylint: disable=protected-access
    return mirrored_strategy._call_for_each_replica(
        self._container_strategy(), self._compute_devices, fn, args, kwargs)
def _verify_destinations_not_different_worker(self, destinations):
if not self._cluster_spec:
return
if destinations is None:
return
for d in cross_device_ops_lib.get_devices_from(destinations):
d_spec = tf_device.DeviceSpec.from_string(d)
if d_spec.job == self._task_type and d_spec.task != self._task_id:
raise ValueError(
"Cannot reduce to another worker: %r, current worker is %r" %
(d, self._input_workers.worker_devices[0]))
  def _reduce_to(self, reduce_op, value, destinations, experimental_hints):
    """Reduce `value` to `destinations`, which must be on this worker."""
    self._verify_destinations_not_different_worker(destinations)
    if not isinstance(value, values.DistributedValues):
      # A plain (non-distributed) value takes a simpler path.
      # pylint: disable=protected-access
      return cross_device_ops_lib.reduce_non_distributed_value(
          reduce_op, value, destinations, self._num_replicas_in_sync)
    return self._cross_device_ops.reduce(
        reduce_op,
        value,
        destinations=destinations,
        experimental_hints=experimental_hints)
  def _batch_reduce_to(self, reduce_op, value_destination_pairs,
                       experimental_hints):
    """Batch-reduce; every destination must live on this worker."""
    for _, destinations in value_destination_pairs:
      self._verify_destinations_not_different_worker(destinations)
    return self._cross_device_ops.batch_reduce(reduce_op,
                                               value_destination_pairs,
                                               experimental_hints)
  def _select_single_value(self, structured):
    """Select any single value in `structured`.

    Mirrored values with a single component are unwrapped; multi-component
    Mirrored or PerReplica values are rejected because a parameter-server
    update needs exactly one value.
    """

    def _select_fn(x):  # pylint: disable=g-missing-docstring
      if isinstance(x, values.Mirrored):
        if len(x._devices) == 1:  # pylint: disable=protected-access
          return x._primary  # pylint: disable=protected-access
        else:
          raise ValueError(
              "You cannot update variable with a Mirrored object with multiple "
              "components %r when using ParameterServerStrategy. You must "
              "specify a single value or a Mirrored with a single value." % x)
      elif isinstance(x, values.PerReplica):
        raise ValueError(
            "You cannot update variable with a PerReplica object %r when using "
            "ParameterServerStrategy. You must specify a single value or a "
            "Mirrored with a single value" % x)
      else:
        return x

    return nest.map_structure(_select_fn, structured)
  def _update(self, var, fn, args, kwargs, group):
    """Run `fn` on `var`, colocated with the variable's device."""
    # Unwrap the aggregating wrapper: the update applies to the underlying
    # resource variable on the parameter device.
    if isinstance(var, values.AggregatingVariable):
      var = var.get()
    if not resource_variable_ops.is_resource_variable(var):
      raise ValueError(
          "You can not update `var` %r. It must be a Variable." % var)
    with ops.colocate_with(var), distribute_lib.UpdateContext(var.device):
      result = fn(var, *self._select_single_value(args),
                  **self._select_single_value(kwargs))
      if group:
        return result
      else:
        # Ungrouped: expose the per-device components of the result.
        return nest.map_structure(self._local_results, result)
  # TODO(yuefengz): does it need to call _select_single_value?
  def _update_non_slot(self, colocate_with, fn, args, kwargs, group):
    """Run `fn` on the device of `colocate_with` (a non-variable target)."""
    with ops.device(
        colocate_with.device), distribute_lib.UpdateContext(colocate_with):
      result = fn(*args, **kwargs)
      if group:
        return result
      else:
        return nest.map_structure(self._local_results, result)
  def _local_results(self, val):
    # Normalize `val` to a tuple of per-device component values.
    if isinstance(val, values.DistributedValues):
      return val.values
    return (val,)
  def value_container(self, val):
    """Return the AggregatingVariable wrapper of `val`, or `val` itself."""
    if (hasattr(val, "_aggregating_container") and
        not isinstance(val, values.AggregatingVariable)):
      wrapper = val._aggregating_container()  # pylint: disable=protected-access
      if wrapper is not None:
        return wrapper
    return val
  def read_var(self, var):
    """Return the current value of `var` as a tensor."""
    # No need to distinguish between normal variables and replica-local
    # variables.
    return array_ops.identity(var)
  def _configure(self,
                 session_config=None,
                 cluster_spec=None,
                 task_type=None,
                 task_id=None):
    """Configures the strategy class with `cluster_spec`.

    The strategy object will be re-initialized if `cluster_spec` is passed to
    `configure` but was not passed when instantiating the strategy.

    Args:
      session_config: Session config object.
      cluster_spec: a dict, ClusterDef or ClusterSpec object specifying the
        cluster configurations.
      task_type: the current task type.
      task_id: the current task id.

    Raises:
      ValueError: if `cluster_spec` is given but `task_type` or `task_id` is
        not.
    """
    if cluster_spec:
      # Use the num_gpus_per_worker recorded in constructor since _configure
      # doesn't take num_gpus.
      cluster_resolver = SimpleClusterResolver(
          cluster_spec=multi_worker_util.normalize_cluster_spec(cluster_spec),
          task_type=task_type,
          task_id=task_id,
          num_accelerators={"GPU": self._num_gpus_per_worker})
      self._initialize_multi_worker(cluster_resolver)

    if session_config:
      # Mutate the caller's config proto in place with the updated settings.
      session_config.CopyFrom(self._update_config_proto(session_config))
  def _update_config_proto(self, config_proto):
    """Return a copy of `config_proto` adjusted for this cluster setup."""
    updated_config = copy.deepcopy(config_proto)
    if not self._cluster_spec:
      # Local mode: keep session state isolated.
      updated_config.isolate_session_state = True
      return updated_config
    updated_config.isolate_session_state = False
    assert self._task_type
    assert self._task_id is not None
    # The device filters prevent communication between workers.
    del updated_config.device_filters[:]
    if self._task_type in ["chief", "worker"]:
      # Workers see only their own devices and the parameter servers.
      updated_config.device_filters.extend(
          ["/job:%s/task:%d" % (self._task_type, self._task_id), "/job:ps"])
    elif self._task_type == "evaluator":
      # The evaluator is fully isolated from the rest of the cluster.
      updated_config.device_filters.append(
          "/job:%s/task:%d" % (self._task_type, self._task_id))
    return updated_config
  def _in_multi_worker_mode(self):
    """Whether this strategy indicates working in multi-worker settings."""
    return self._cluster_spec is not None
  @property
  def _num_replicas_in_sync(self):
    # One replica per local compute device.
    return len(self._compute_devices)
  @property
  def worker_devices(self):
    # Devices that replicas run on.
    return self._compute_devices
  @property
  def worker_devices_by_replica(self):
    # Each replica runs on exactly one device.
    return [[d] for d in self._compute_devices]
  @property
  def parameter_devices(self):
    # Devices that variables are placed on.
    return self._parameter_devices
  def non_slot_devices(self, var_list):
    # Deterministic choice: the variable whose name sorts first.
    return min(var_list, key=lambda x: x.name)
  @property
  def experimental_between_graph(self):
    # TODO(yuefengz): Should this return False in the local case?
    return True
  @property
  def experimental_should_init(self):
    # Only the chief initializes.
    return self._is_chief
  @property
  def should_checkpoint(self):
    # Only the chief writes checkpoints.
    return self._is_chief
  @property
  def should_save_summary(self):
    # Only the chief writes summaries.
    return self._is_chief
  # TODO(priyag): Delete this once all strategies use global batch size.
  @property
  def _global_batch_size(self):
    """`make_dataset_iterator` and `make_numpy_iterator` use global batch size.

    `make_input_fn_iterator` assumes per-replica batching.

    Returns:
      Boolean.
    """
    return True
| |
# -*- coding: utf-8 -*-
"""
sphinx.directives.code
~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
import codecs
from difflib import unified_diff
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from docutils.statemachine import ViewList
from six import string_types
from sphinx import addnodes
from sphinx.locale import _
from sphinx.util import parselinenos
from sphinx.util.nodes import set_source_info
class Highlight(Directive):
    """
    Directive to set the highlighting language for code blocks, as well
    as the threshold for line numbers.
    """

    has_content = False
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        'linenothreshold': directives.unchanged,
    }

    def run(self):
        if 'linenothreshold' in self.options:
            try:
                linenothreshold = int(self.options['linenothreshold'])
            except ValueError:
                # A non-numeric threshold silently falls back to a default
                # instead of aborting the build.  (Was a blanket ``except
                # Exception``; ``int()`` on a string only raises ValueError.)
                linenothreshold = 10
        else:
            # Without an explicit threshold, never enable line numbers
            # automatically.
            linenothreshold = sys.maxsize
        return [addnodes.highlightlang(lang=self.arguments[0].strip(),
                                       linenothreshold=linenothreshold)]
def dedent_lines(lines, dedent):
    """Strip *dedent* leading characters from each line.

    A line whose content is entirely consumed keeps a bare newline when it
    originally ended with one.  With a falsy *dedent* the input list is
    returned unchanged.
    """
    if not dedent:
        return lines
    result = []
    for raw in lines:
        stripped = raw[dedent:]
        if not stripped and raw.endswith('\n'):
            stripped = '\n'  # keep CRLF
        result.append(stripped)
    return result
def container_wrapper(directive, literal_node, caption):
    """Wrap `literal_node` in a container node together with its caption.

    The caption string is parsed as reST so it may carry inline markup.  A
    parse failure surfaces as a ValueError wrapping the resulting
    system_message node.
    """
    container_node = nodes.container('', literal_block=True,
                                     classes=['literal-block-wrapper'])
    parsed = nodes.Element()
    directive.state.nested_parse(ViewList([caption], source=''),
                                 directive.content_offset, parsed)
    if isinstance(parsed[0], nodes.system_message):
        raise ValueError(parsed[0])
    caption_node = nodes.caption(parsed[0].rawsource, '',
                                 *parsed[0].children)
    # Inherit the source location of the wrapped block so warnings point at
    # the right place.
    caption_node.source = literal_node.source
    caption_node.line = literal_node.line
    container_node += caption_node
    container_node += literal_node
    return container_node
class CodeBlock(Directive):
    """
    Directive for a code block with special highlighting or line numbering
    settings.
    """

    has_content = True
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        'linenos': directives.flag,
        'dedent': int,
        'lineno-start': int,
        'emphasize-lines': directives.unchanged_required,
        'caption': directives.unchanged_required,
        'class': directives.class_option,
        'name': directives.unchanged,
    }

    def run(self):
        code = u'\n'.join(self.content)

        linespec = self.options.get('emphasize-lines')
        if linespec:
            try:
                nlines = len(self.content)
                # parselinenos returns 0-based indices; the highlighter
                # expects 1-based line numbers.
                hl_lines = [x + 1 for x in parselinenos(linespec, nlines)]
            except ValueError as err:
                document = self.state.document
                return [document.reporter.warning(str(err), line=self.lineno)]
        else:
            hl_lines = None

        if 'dedent' in self.options:
            lines = code.split('\n')
            lines = dedent_lines(lines, self.options['dedent'])
            code = '\n'.join(lines)

        literal = nodes.literal_block(code, code)
        literal['language'] = self.arguments[0]
        literal['linenos'] = 'linenos' in self.options or \
                             'lineno-start' in self.options
        literal['classes'] += self.options.get('class', [])
        extra_args = literal['highlight_args'] = {}
        if hl_lines is not None:
            extra_args['hl_lines'] = hl_lines
        if 'lineno-start' in self.options:
            extra_args['linenostart'] = self.options['lineno-start']
        set_source_info(self, literal)

        caption = self.options.get('caption')
        if caption:
            try:
                literal = container_wrapper(self, literal, caption)
            except ValueError as exc:
                document = self.state.document
                # BUG FIX: ``exc[0][0]`` relies on Python-2-only exception
                # indexing and raises TypeError on Python 3; index the
                # ``args`` tuple explicitly instead.
                errmsg = _('Invalid caption: %s' % exc.args[0][0].astext())
                return [document.reporter.warning(errmsg, line=self.lineno)]

        # literal will be note_implicit_target that is linked from caption and numref.
        # when options['name'] is provided, it should be primary ID.
        self.add_name(literal)

        return [literal]
class LiteralInclude(Directive):
    """
    Like ``.. include:: :literal:``, but only warns if the include file is
    not found, and does not raise errors.  Also has several options for
    selecting what to include.
    """

    has_content = False
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {
        'dedent': int,
        'linenos': directives.flag,
        'lineno-start': int,
        'lineno-match': directives.flag,
        'tab-width': int,
        'language': directives.unchanged_required,
        'encoding': directives.encoding,
        'pyobject': directives.unchanged_required,
        'lines': directives.unchanged_required,
        'start-after': directives.unchanged_required,
        'end-before': directives.unchanged_required,
        'start-at': directives.unchanged_required,
        'end-at': directives.unchanged_required,
        'prepend': directives.unchanged_required,
        'append': directives.unchanged_required,
        'emphasize-lines': directives.unchanged_required,
        'caption': directives.unchanged,
        'class': directives.class_option,
        'name': directives.unchanged,
        'diff': directives.unchanged_required,
    }

    def read_with_encoding(self, filename, document, codec_info, encoding):
        """Read *filename* decoded with *encoding*; on failure return a
        one-element list holding a warning node instead of lines."""
        try:
            with codecs.StreamReaderWriter(open(filename, 'rb'), codec_info[2],
                                           codec_info[3], 'strict') as f:
                lines = f.readlines()
                lines = dedent_lines(lines, self.options.get('dedent'))
                return lines
        except (IOError, OSError):
            return [document.reporter.warning(
                'Include file %r not found or reading it failed' % filename,
                line=self.lineno)]
        except UnicodeError:
            return [document.reporter.warning(
                'Encoding %r used for reading included file %r seems to '
                'be wrong, try giving an :encoding: option' %
                (encoding, filename))]

    def run(self):
        document = self.state.document
        if not document.settings.file_insertion_enabled:
            return [document.reporter.warning('File insertion disabled',
                                              line=self.lineno)]
        env = document.settings.env
        rel_filename, filename = env.relfn2path(self.arguments[0])

        # Reject mutually exclusive option combinations up front.
        if 'pyobject' in self.options and 'lines' in self.options:
            return [document.reporter.warning(
                'Cannot use both "pyobject" and "lines" options',
                line=self.lineno)]

        if 'lineno-match' in self.options and 'lineno-start' in self.options:
            return [document.reporter.warning(
                'Cannot use both "lineno-match" and "lineno-start"',
                line=self.lineno)]

        if 'lineno-match' in self.options and \
                (set(['append', 'prepend']) & set(self.options.keys())):
            return [document.reporter.warning(
                'Cannot use "lineno-match" and "append" or "prepend"',
                line=self.lineno)]

        if 'start-after' in self.options and 'start-at' in self.options:
            return [document.reporter.warning(
                'Cannot use both "start-after" and "start-at" options',
                line=self.lineno)]

        if 'end-before' in self.options and 'end-at' in self.options:
            return [document.reporter.warning(
                'Cannot use both "end-before" and "end-at" options',
                line=self.lineno)]

        encoding = self.options.get('encoding', env.config.source_encoding)
        codec_info = codecs.lookup(encoding)

        lines = self.read_with_encoding(filename, document,
                                        codec_info, encoding)
        if lines and not isinstance(lines[0], string_types):
            # read_with_encoding returned a warning node, not file content.
            return lines

        diffsource = self.options.get('diff')
        if diffsource is not None:
            tmp, fulldiffsource = env.relfn2path(diffsource)

            difflines = self.read_with_encoding(fulldiffsource, document,
                                                codec_info, encoding)
            # BUG FIX: guard against an empty file before inspecting
            # difflines[0] (previously raised IndexError).
            if difflines and not isinstance(difflines[0], string_types):
                return difflines
            diff = unified_diff(
                difflines,
                lines,
                diffsource,
                self.arguments[0])
            lines = list(diff)

        linenostart = self.options.get('lineno-start', 1)
        objectname = self.options.get('pyobject')
        if objectname is not None:
            from sphinx.pycode import ModuleAnalyzer
            analyzer = ModuleAnalyzer.for_file(filename, '')
            tags = analyzer.find_tags()
            if objectname not in tags:
                return [document.reporter.warning(
                    'Object named %r not found in include file %r' %
                    (objectname, filename), line=self.lineno)]
            else:
                lines = lines[tags[objectname][1] - 1: tags[objectname][2] - 1]
                if 'lineno-match' in self.options:
                    linenostart = tags[objectname][1]

        linespec = self.options.get('lines')
        if linespec:
            try:
                linelist = parselinenos(linespec, len(lines))
            except ValueError as err:
                return [document.reporter.warning(str(err), line=self.lineno)]

            if 'lineno-match' in self.options:
                # make sure the line list is not "disjoint".
                previous = linelist[0]
                for line_number in linelist[1:]:
                    if line_number == previous + 1:
                        previous = line_number
                        continue
                    return [document.reporter.warning(
                        'Cannot use "lineno-match" with a disjoint set of '
                        '"lines"', line=self.lineno)]
                linenostart = linelist[0] + 1
            # just ignore non-existing lines
            lines = [lines[i] for i in linelist if i < len(lines)]
            if not lines:
                return [document.reporter.warning(
                    'Line spec %r: no lines pulled from include file %r' %
                    (linespec, filename), line=self.lineno)]

        linespec = self.options.get('emphasize-lines')
        if linespec:
            try:
                hl_lines = [x + 1 for x in parselinenos(linespec, len(lines))]
            except ValueError as err:
                return [document.reporter.warning(str(err), line=self.lineno)]
        else:
            hl_lines = None

        start_str = self.options.get('start-after')
        start_inclusive = False
        if self.options.get('start-at') is not None:
            start_str = self.options.get('start-at')
            start_inclusive = True
        end_str = self.options.get('end-before')
        end_inclusive = False
        if self.options.get('end-at') is not None:
            end_str = self.options.get('end-at')
            end_inclusive = True

        if start_str is not None or end_str is not None:
            use = not start_str
            res = []
            for line_number, line in enumerate(lines):
                if not use and start_str and start_str in line:
                    if 'lineno-match' in self.options:
                        linenostart += line_number + 1
                    use = True
                    if start_inclusive:
                        res.append(line)
                elif use and end_str and end_str in line:
                    if end_inclusive:
                        res.append(line)
                    break
                elif use:
                    res.append(line)
            lines = res

        prepend = self.options.get('prepend')
        if prepend:
            lines.insert(0, prepend + '\n')

        append = self.options.get('append')
        if append:
            lines.append(append + '\n')

        text = ''.join(lines)
        if self.options.get('tab-width'):
            text = text.expandtabs(self.options['tab-width'])
        retnode = nodes.literal_block(text, text, source=filename)
        set_source_info(self, retnode)
        if diffsource:  # if diff is set, set udiff
            retnode['language'] = 'udiff'
        if 'language' in self.options:
            retnode['language'] = self.options['language']
        retnode['linenos'] = 'linenos' in self.options or \
                             'lineno-start' in self.options or \
                             'lineno-match' in self.options
        retnode['classes'] += self.options.get('class', [])
        extra_args = retnode['highlight_args'] = {}
        if hl_lines is not None:
            extra_args['hl_lines'] = hl_lines
        extra_args['linenostart'] = linenostart
        env.note_dependency(rel_filename)

        caption = self.options.get('caption')
        if caption is not None:
            if not caption:
                caption = self.arguments[0]
            try:
                retnode = container_wrapper(self, retnode, caption)
            except ValueError as exc:
                document = self.state.document
                # BUG FIX: ``exc[0][0]`` is Python-2-only exception indexing
                # and raises TypeError on Python 3; use ``exc.args``.
                errmsg = _('Invalid caption: %s' % exc.args[0][0].astext())
                return [document.reporter.warning(errmsg, line=self.lineno)]

        # retnode will be note_implicit_target that is linked from caption and numref.
        # when options['name'] is provided, it should be primary ID.
        self.add_name(retnode)

        return [retnode]
def setup(app):
    """Register this module's directives with Sphinx."""
    registrations = [
        ('highlight', Highlight),
        ('highlightlang', Highlight),  # old name, kept for compatibility
        ('code-block', CodeBlock),
        ('sourcecode', CodeBlock),
        ('literalinclude', LiteralInclude),
    ]
    for directive_name, directive_class in registrations:
        directives.register_directive(directive_name, directive_class)
    return {
        'version': 'builtin',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
| |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""Core classes for markup processing."""
try:
reduce # builtin in Python < 3
except NameError:
from functools import reduce
import sys
from itertools import chain
import operator
from genshi.util import plaintext, stripentities, striptags, stringrepr
__all__ = ['Stream', 'Markup', 'escape', 'unescape', 'Attrs', 'Namespace',
'QName']
__docformat__ = 'restructuredtext en'
class StreamEventKind(str):
    """A kind of event on a markup stream.

    Instances are interned: constructing the same value twice yields the
    very same object, so event kinds can be compared with ``is``.
    """
    __slots__ = []
    _instances = {}

    def __new__(cls, val):
        # Fast path for an already-interned kind; otherwise create and
        # register it (setdefault keeps this safe against races).
        interned = cls._instances.get(val)
        if interned is None:
            interned = cls._instances.setdefault(val, str.__new__(cls, val))
        return interned
class Stream(object):
    """Represents a stream of markup events.

    This class is basically an iterator over the events.

    Stream events are tuples of the form::

      (kind, data, position)

    where ``kind`` is the event kind (such as `START`, `END`, `TEXT`, etc),
    ``data`` depends on the kind of event, and ``position`` is a
    ``(filename, line, offset)`` tuple that contains the location of the
    original element or text in the input. If the original location is
    unknown, ``position`` is ``(None, -1, -1)``.

    Also provided are ways to serialize the stream to text. The `serialize()`
    method will return an iterator over generated strings, while `render()`
    returns the complete generated text at once. Both accept various
    parameters that impact the way the stream is serialized.
    """
    __slots__ = ['events', 'serializer']

    # Interned event-kind singletons; consumers may compare kinds with ``is``.
    START = StreamEventKind('START')  #: a start tag
    END = StreamEventKind('END')  #: an end tag
    TEXT = StreamEventKind('TEXT')  #: literal text
    XML_DECL = StreamEventKind('XML_DECL')  #: XML declaration
    DOCTYPE = StreamEventKind('DOCTYPE')  #: doctype declaration
    START_NS = StreamEventKind('START_NS')  #: start namespace mapping
    END_NS = StreamEventKind('END_NS')  #: end namespace mapping
    START_CDATA = StreamEventKind('START_CDATA')  #: start CDATA section
    END_CDATA = StreamEventKind('END_CDATA')  #: end CDATA section
    PI = StreamEventKind('PI')  #: processing instruction
    COMMENT = StreamEventKind('COMMENT')  #: comment

    def __init__(self, events, serializer=None):
        """Initialize the stream with a sequence of markup events.

        :param events: a sequence or iterable providing the events
        :param serializer: the default serialization method to use for this
                           stream
        :note: Changed in 0.5: added the `serializer` argument
        """
        self.events = events  #: The underlying iterable producing the events
        self.serializer = serializer  #: The default serialization method

    def __iter__(self):
        return iter(self.events)

    def __or__(self, function):
        """Override the "bitwise or" operator to apply filters or serializers
        to the stream, providing a syntax similar to pipes on Unix shells.

        Assume the following stream produced by the `HTML` function:

        >>> from genshi.input import HTML
        >>> html = HTML('''<p onclick="alert('Whoa')">Hello, world!</p>''', encoding='utf-8')
        >>> print(html)
        <p onclick="alert('Whoa')">Hello, world!</p>

        A filter such as the HTML sanitizer can be applied to that stream using
        the pipe notation as follows:

        >>> from genshi.filters import HTMLSanitizer
        >>> sanitizer = HTMLSanitizer()
        >>> print(html | sanitizer)
        <p>Hello, world!</p>

        Filters can be any function that accepts and produces a stream (where
        a stream is anything that iterates over events):

        >>> def uppercase(stream):
        ...     for kind, data, pos in stream:
        ...         if kind is TEXT:
        ...             data = data.upper()
        ...         yield kind, data, pos
        >>> print(html | sanitizer | uppercase)
        <p>HELLO, WORLD!</p>

        Serializers can also be used with this notation:

        >>> from genshi.output import TextSerializer
        >>> output = TextSerializer()
        >>> print(html | sanitizer | uppercase | output)
        HELLO, WORLD!

        Commonly, serializers should be used at the end of the "pipeline";
        using them somewhere in the middle may produce unexpected results.

        :param function: the callable object that should be applied as a filter
        :return: the filtered stream
        :rtype: `Stream`
        """
        # The new stream inherits this stream's default serializer.
        return Stream(_ensure(function(self)), serializer=self.serializer)

    def filter(self, *filters):
        """Apply filters to the stream.

        This method returns a new stream with the given filters applied. The
        filters must be callables that accept the stream object as parameter,
        and return the filtered stream.

        The call::

            stream.filter(filter1, filter2)

        is equivalent to::

            stream | filter1 | filter2

        :param filters: one or more callable objects that should be applied as
                        filters
        :return: the filtered stream
        :rtype: `Stream`
        """
        # Folds the filters through __or__ above.
        return reduce(operator.or_, (self,) + filters)

    def render(self, method=None, encoding=None, out=None, **kwargs):
        """Return a string representation of the stream.

        Any additional keyword arguments are passed to the serializer, and thus
        depend on the `method` parameter value.

        :param method: determines how the stream is serialized; can be either
                       "xml", "xhtml", "html", "text", or a custom serializer
                       class; if `None`, the default serialization method of
                       the stream is used
        :param encoding: how the output string should be encoded; if set to
                         `None`, this method returns a `unicode` object
        :param out: a file-like object that the output should be written to
                    instead of being returned as one big string; note that if
                    this is a file or socket (or similar), the `encoding` must
                    not be `None` (that is, the output must be encoded)
        :return: a `str` or `unicode` object (depending on the `encoding`
                 parameter), or `None` if the `out` parameter is provided
        :rtype: `basestring`
        :see: XMLSerializer, XHTMLSerializer, HTMLSerializer, TextSerializer
        :note: Changed in 0.5: added the `out` parameter
        """
        # Deferred import, presumably to avoid a circular import between this
        # module and genshi.output -- TODO confirm.
        from genshi.output import encode
        if method is None:
            method = self.serializer or 'xml'
        generator = self.serialize(method=method, **kwargs)
        return encode(generator, method=method, encoding=encoding, out=out)

    def select(self, path, namespaces=None, variables=None):
        """Return a new stream that contains the events matching the given
        XPath expression.

        >>> from genshi import HTML
        >>> stream = HTML('<doc><elem>foo</elem><elem>bar</elem></doc>', encoding='utf-8')
        >>> print(stream.select('elem'))
        <elem>foo</elem><elem>bar</elem>
        >>> print(stream.select('elem/text()'))
        foobar

        Note that the outermost element of the stream becomes the *context
        node* for the XPath test. That means that the expression "doc" would
        not match anything in the example above, because it only tests against
        child elements of the outermost element:

        >>> print(stream.select('doc'))
        <BLANKLINE>

        You can use the "." expression to match the context node itself
        (although that usually makes little sense):

        >>> print(stream.select('.'))
        <doc><elem>foo</elem><elem>bar</elem></doc>

        :param path: a string containing the XPath expression
        :param namespaces: mapping of namespace prefixes used in the path
        :param variables: mapping of variable names to values
        :return: the selected substream
        :rtype: `Stream`
        :raises PathSyntaxError: if the given path expression is invalid or not
                                 supported
        """
        # Deferred import as in render() above.
        from genshi.path import Path
        return Path(path).select(self, namespaces, variables)

    def serialize(self, method='xml', **kwargs):
        """Generate strings corresponding to a specific serialization of the
        stream.

        Unlike the `render()` method, this method is a generator that returns
        the serialized output incrementally, as opposed to returning a single
        string.

        Any additional keyword arguments are passed to the serializer, and thus
        depend on the `method` parameter value.

        :param method: determines how the stream is serialized; can be either
                       "xml", "xhtml", "html", "text", or a custom serializer
                       class; if `None`, the default serialization method of
                       the stream is used
        :return: an iterator over the serialization results (`Markup` or
                 `unicode` objects, depending on the serialization method)
        :rtype: ``iterator``
        :see: XMLSerializer, XHTMLSerializer, HTMLSerializer, TextSerializer
        """
        # Deferred import as in render() above.
        from genshi.output import get_serializer
        if method is None:
            method = self.serializer or 'xml'
        return get_serializer(method, **kwargs)(_ensure(self))

    def __str__(self):
        return self.render()

    def __unicode__(self):
        return self.render(encoding=None)

    def __html__(self):
        # Already markup: returned as-is by markup-aware consumers.
        return self
# Module-level aliases for the event kinds, so callers can write
# ``from genshi.core import START, TEXT`` instead of going through `Stream`.
START = Stream.START
END = Stream.END
TEXT = Stream.TEXT
XML_DECL = Stream.XML_DECL
DOCTYPE = Stream.DOCTYPE
START_NS = Stream.START_NS
END_NS = Stream.END_NS
START_CDATA = Stream.START_CDATA
END_CDATA = Stream.END_CDATA
PI = Stream.PI
COMMENT = Stream.COMMENT
def _ensure(stream):
    """Ensure that every item on the stream is actually a markup event.

    If the first item already looks like a ``(kind, data, position)`` tuple,
    the stream is passed through unchanged; otherwise every item is converted
    (via its ``totuple()`` method, or by stringifying it into a TEXT event).
    """
    stream = iter(stream)
    try:
        # BUG FIX: was ``stream.next()``, which only exists on Python 2;
        # the ``next()`` builtin works on both 2.6+ and 3.x.
        event = next(stream)
    except StopIteration:
        # An empty stream simply yields nothing.  Letting the StopIteration
        # escape would be converted into a RuntimeError by PEP 479 on
        # Python 3.7+.
        return

    # Check whether the iterable is a real markup event stream by examining the
    # first item it yields; if it's not we'll need to do some conversion
    if type(event) is not tuple or len(event) != 3:
        # BUG FIX: ``unicode`` is undefined on Python 3; resolve the text
        # type once, following the same shim style as the ``reduce`` import
        # at the top of this module.
        try:
            text_type = unicode  # Python 2
        except NameError:
            text_type = str  # Python 3
        for event in chain([event], stream):
            if hasattr(event, 'totuple'):
                event = event.totuple()
            else:
                event = TEXT, text_type(event), (None, -1, -1)
            yield event
        return

    # This looks like a markup event stream, so we'll just pass it through
    # unchanged
    yield event
    for event in stream:
        yield event
class Attrs(tuple):
    """Immutable sequence type holding the attributes of an element.

    Attribute order is preserved, and attributes can also be looked up by
    name.

    >>> attrs = Attrs([('href', '#'), ('title', 'Foo')])
    >>> attrs
    Attrs([('href', '#'), ('title', 'Foo')])

    >>> 'href' in attrs
    True
    >>> 'tabindex' in attrs
    False
    >>> attrs.get('title')
    'Foo'

    Instances are never modified in place; the ``|`` and ``-`` operators
    produce new instances with attributes added, replaced or removed.

    Removing uses ``-`` with either a single name or a set/sequence of names:

    >>> attrs - 'title'
    Attrs([('href', '#')])
    >>> attrs - ('title', 'href')
    Attrs()

    The original is untouched, but augmented assignment works as expected:

    >>> attrs
    Attrs([('href', '#'), ('title', 'Foo')])
    >>> attrs -= 'title'
    >>> attrs
    Attrs([('href', '#')])

    Adding uses ``|`` with a sequence of ``(name, value)`` tuples (which
    includes other `Attrs` instances):

    >>> attrs | [('title', 'Bar')]
    Attrs([('href', '#'), ('title', 'Bar')])

    An already-present name has its value replaced:

    >>> attrs | [('href', 'http://example.org/')]
    Attrs([('href', 'http://example.org/')])
    """
    __slots__ = []

    def __contains__(self, name):
        """Return whether the list includes an attribute with the specified
        name.

        :return: `True` if the list includes the attribute
        :rtype: `bool`
        """
        for key, _ in self:
            if key == name:
                return True

    def __getitem__(self, i):
        """Return an item or slice of the attributes list.

        >>> attrs = Attrs([('href', '#'), ('title', 'Foo')])
        >>> attrs[1]
        ('title', 'Foo')
        >>> attrs[1:]
        Attrs([('title', 'Foo')])
        """
        value = tuple.__getitem__(self, i)
        if type(i) is slice:
            # Keep slices in the Attrs type rather than a plain tuple.
            value = Attrs(value)
        return value

    def __getslice__(self, i, j):
        """Return a slice of the attributes list (Python 2 slice protocol).

        >>> attrs = Attrs([('href', '#'), ('title', 'Foo')])
        >>> attrs[1:]
        Attrs([('title', 'Foo')])
        """
        return Attrs(tuple.__getslice__(self, i, j))

    def __or__(self, attrs):
        """Return a new instance merging in the attributes from `attrs`.
        Any attribute in the new set whose value is `None` is removed.

        :return: a new instance with the merged attributes
        :rtype: `Attrs`
        """
        removed = set()
        replaced = {}
        for name, value in attrs:
            if value is None:
                removed.add(name)
            elif name in self:
                replaced[name] = value
        merged = [(name, replaced.get(name, value))
                  for name, value in self if name not in removed]
        merged.extend((name, value) for name, value in attrs
                      if name not in self and name not in removed)
        return Attrs(merged)

    def __repr__(self):
        if not self:
            return 'Attrs()'
        return 'Attrs([%s])' % ', '.join(repr(item) for item in self)

    def __sub__(self, names):
        """Return a new instance with every attribute named in `names`
        removed.

        :param names: the names of the attributes to remove
        :return: a new instance with the attribute removed
        :rtype: `Attrs`
        """
        if isinstance(names, basestring):
            names = (names,)
        return Attrs([item for item in self if item[0] not in names])

    def get(self, name, default=None):
        """Return the value of the attribute with the specified name, or the
        `default` value if no such attribute exists.

        :param name: the name of the attribute
        :param default: the value to return when the attribute does not exist
        :return: the attribute value, or the `default` value if that attribute
                 does not exist
        :rtype: `object`
        """
        for key, value in self:
            if key == name:
                return value
        return default

    def totuple(self):
        """Return the attributes as a markup event.

        The returned event is a `TEXT` event whose data is the concatenation
        of all attribute values.

        >>> Attrs([('href', '#'), ('title', 'Foo')]).totuple()
        ('TEXT', '#Foo', (None, -1, -1))

        :return: a `TEXT` event
        :rtype: `tuple`
        """
        return TEXT, ''.join([value for _, value in self]), (None, -1, -1)
class Markup(unicode):
    """Marks a string as being safe for inclusion in HTML/XML output without
    needing to be escaped.
    """
    __slots__ = []

    def __add__(self, other):
        return Markup(unicode.__add__(self, escape(other)))

    def __radd__(self, other):
        return Markup(unicode.__add__(escape(other), self))

    def __mod__(self, args):
        # Escape every interpolated value; mapping, sequence and scalar
        # arguments are all supported, mirroring unicode.__mod__.
        if isinstance(args, dict):
            args = dict(zip(args.keys(), map(escape, args.values())))
        elif isinstance(args, (list, tuple)):
            args = tuple(map(escape, args))
        else:
            args = escape(args)
        return Markup(unicode.__mod__(self, args))

    def __mul__(self, num):
        return Markup(unicode.__mul__(self, num))
    __rmul__ = __mul__

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, unicode.__repr__(self))

    def join(self, seq, escape_quotes=True):
        """Return a `Markup` object which is the concatenation of the strings
        in the given sequence, where this `Markup` object is the separator
        between the joined elements.

        Any element in the sequence that is not a `Markup` instance is
        automatically escaped.

        :param seq: the sequence of strings to join
        :param escape_quotes: whether double quote characters in the elements
                              should be escaped
        :return: the joined `Markup` object
        :rtype: `Markup`
        :see: `escape`
        """
        return Markup(unicode.join(self, [escape(item, quotes=escape_quotes)
                                          for item in seq]))

    @classmethod
    def escape(cls, text, quotes=True):
        """Create a Markup instance from a string and escape special characters
        it may contain (<, >, & and \").

        >>> escape('"1 < 2"')
        <Markup u'&#34;1 &lt; 2&#34;'>

        If the `quotes` parameter is set to `False`, the \" character is left
        as is. Escaping quotes is generally only required for strings that are
        to be used in attribute values.

        >>> escape('"1 < 2"', quotes=False)
        <Markup u'"1 &lt; 2"'>

        :param text: the text to escape
        :param quotes: if ``True``, double quote characters are escaped in
                       addition to the other special characters
        :return: the escaped `Markup` string
        :rtype: `Markup`
        """
        if not text:
            return cls()
        if type(text) is cls:
            return text
        if hasattr(text, '__html__'):
            return cls(text.__html__())

        # The ampersand must be replaced first, otherwise the entities
        # produced by the later replacements would themselves be escaped.
        text = text.replace('&', '&amp;') \
                   .replace('<', '&lt;') \
                   .replace('>', '&gt;')
        if quotes:
            text = text.replace('"', '&#34;')
        return cls(text)

    def unescape(self):
        """Reverse-escapes &, <, >, and \" and returns a `unicode` object.

        >>> Markup('1 &lt; 2').unescape()
        u'1 < 2'

        :return: the unescaped string
        :rtype: `unicode`
        :see: `genshi.core.unescape`
        """
        if not self:
            return ''
        # Inverse order of escape(): the ampersand is restored last so that
        # e.g. '&amp;lt;' round-trips to '&lt;' and not '<'.
        return unicode(self).replace('&#34;', '"') \
                            .replace('&gt;', '>') \
                            .replace('&lt;', '<') \
                            .replace('&amp;', '&')

    def stripentities(self, keepxmlentities=False):
        """Return a copy of the text with any character or numeric entities
        replaced by the equivalent UTF-8 characters.

        If the `keepxmlentities` parameter is provided and evaluates to `True`,
        the core XML entities (``&amp;``, ``&apos;``, ``&gt;``, ``&lt;`` and
        ``&quot;``) are not stripped.

        :return: a `Markup` instance with entities removed
        :rtype: `Markup`
        :see: `genshi.util.stripentities`
        """
        return Markup(stripentities(self, keepxmlentities=keepxmlentities))

    def striptags(self):
        """Return a copy of the text with all XML/HTML tags removed.

        :return: a `Markup` instance with all tags removed
        :rtype: `Markup`
        :see: `genshi.util.striptags`
        """
        return Markup(striptags(self))
try:
    # Prefer the C implementation of Markup when the optional _speedups
    # extension is built; it is API-compatible with the class above.
    from genshi._speedups import Markup
except ImportError:
    pass  # just use the Python implementation

# Module-level alias for the frequently used classmethod.
escape = Markup.escape
def unescape(text):
    """Reverse-escapes &, <, >, and \" and returns a `unicode` object.

    >>> unescape(Markup('1 &lt; 2'))
    u'1 < 2'

    If the provided `text` object is not a `Markup` instance, it is returned
    unchanged.

    >>> unescape('1 &lt; 2')
    '1 &lt; 2'

    :param text: the text to unescape
    :return: the unescaped string
    :rtype: `unicode`
    """
    if not isinstance(text, Markup):
        return text
    return text.unescape()
class Namespace(object):
    """Utility class creating and testing elements with a namespace.

    Internally, namespace URIs are encoded in the `QName` of any element or
    attribute, the namespace URI being enclosed in curly braces. This class
    helps create and test these strings.

    A `Namespace` object is instantiated with the namespace URI.

    >>> html = Namespace('http://www.w3.org/1999/xhtml')
    >>> html
    Namespace('http://www.w3.org/1999/xhtml')
    >>> html.uri
    u'http://www.w3.org/1999/xhtml'

    The `Namespace` object can than be used to generate `QName` objects with
    that namespace:

    >>> html.body
    QName('http://www.w3.org/1999/xhtml}body')
    >>> html.body.localname
    u'body'
    >>> html.body.namespace
    u'http://www.w3.org/1999/xhtml'

    The same works using item access notation, which is useful for element or
    attribute names that are not valid Python identifiers:

    >>> html['body']
    QName('http://www.w3.org/1999/xhtml}body')

    A `Namespace` object can also be used to test whether a specific `QName`
    belongs to that namespace using the ``in`` operator:

    >>> qname = html.body
    >>> qname in html
    True
    >>> qname in Namespace('http://www.w3.org/2002/06/xhtml2')
    False
    """

    def __new__(cls, uri):
        # Passing an existing Namespace through returns it unchanged.
        if type(uri) is cls:
            return uri
        return object.__new__(cls)

    # __getnewargs__/__getstate__/__setstate__ make instances picklable
    # despite the custom __new__.
    def __getnewargs__(self):
        return (self.uri,)

    def __getstate__(self):
        return self.uri

    def __setstate__(self, uri):
        self.uri = uri

    def __init__(self, uri):
        self.uri = unicode(uri)

    def __contains__(self, qname):
        return qname.namespace == self.uri

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        # Compares equal both to other Namespace objects and to plain URI
        # strings.
        if isinstance(other, Namespace):
            return self.uri == other.uri
        return self.uri == other

    def __getitem__(self, name):
        return QName(self.uri + '}' + name)
    __getattr__ = __getitem__

    def __hash__(self):
        return hash(self.uri)

    if sys.version_info[0] == 2:
        # Only use stringrepr in python 2
        # (stringrepr is presumably a helper imported elsewhere in this
        # module — verify before refactoring.)
        def __repr__(self):
            return '%s(%s)' % (type(self).__name__, stringrepr(self.uri))
    else:
        def __repr__(self):
            return '%s(%r)' % (type(self).__name__, self.uri)

    def __str__(self):
        return self.uri.encode('utf-8')

    def __unicode__(self):
        return self.uri
# The namespace used by attributes such as xml:lang and xml:space, as
# defined by the "Namespaces in XML" specification.
XML_NAMESPACE = Namespace('http://www.w3.org/XML/1998/namespace')
class QName(unicode):
    """A qualified element or attribute name.

    The unicode value of instances of this class contains the qualified name of
    the element or attribute, in the form ``{namespace-uri}local-name``. The
    namespace URI can be obtained through the additional `namespace` attribute,
    while the local name can be accessed through the `localname` attribute.

    >>> qname = QName('foo')
    >>> qname
    QName('foo')
    >>> qname.localname
    u'foo'
    >>> qname.namespace

    >>> qname = QName('http://www.w3.org/1999/xhtml}body')
    >>> qname
    QName('http://www.w3.org/1999/xhtml}body')
    >>> qname.localname
    u'body'
    >>> qname.namespace
    u'http://www.w3.org/1999/xhtml'
    """
    # Slots keep instances lightweight; `namespace` is None for unqualified
    # names.
    __slots__ = ['namespace', 'localname']

    def __new__(cls, qname):
        """Create the `QName` instance.

        :param qname: the qualified name as a string of the form
                      ``{namespace-uri}local-name``, where the leading curly
                      brace is optional
        """
        # Passing an existing QName through returns it unchanged.
        if type(qname) is cls:
            return qname

        # The leading '{' is optional; the stored unicode value is
        # re-normalized to start with it when a namespace is present.
        qname = qname.lstrip('{')
        parts = qname.split('}', 1)
        if len(parts) > 1:
            self = unicode.__new__(cls, '{%s' % qname)
            self.namespace, self.localname = map(unicode, parts)
        else:
            self = unicode.__new__(cls, qname)
            self.namespace, self.localname = None, unicode(qname)
        return self

    def __getnewargs__(self):
        # Pickle support: reconstruct from the brace-less form.
        return (self.lstrip('{'),)

    if sys.version_info[0] == 2:
        # Only use stringrepr in python 2
        def __repr__(self):
            return '%s(%s)' % (type(self).__name__, stringrepr(self.lstrip('{')))
    else:
        def __repr__(self):
            return '%s(%r)' % (type(self).__name__, self.lstrip('{'))
| |
"""
PynamoDB attributes
"""
import six
import json
from base64 import b64encode, b64decode
from delorean import Delorean, parse
from pynamodb.constants import (
STRING, NUMBER, BINARY, UTC, DATETIME_FORMAT, BINARY_SET, STRING_SET, NUMBER_SET,
DEFAULT_ENCODING
)
class Attribute(object):
    """
    A single attribute (field) of a model.
    """
    # Name under which the value is stored in DynamoDB.
    attr_name = None
    # DynamoDB type constant; set by concrete subclasses.
    attr_type = None
    # Whether the attribute may be absent.
    null = False

    def __init__(self,
                 hash_key=False,
                 range_key=False,
                 null=None,
                 default=None,
                 attr_name=None
                 ):
        self.default = default
        # Only override the class-level `null` when explicitly given, so a
        # subclass default (e.g. set types) is preserved.
        if null is not None:
            self.null = null
        self.is_hash_key = hash_key
        self.is_range_key = range_key
        if attr_name is not None:
            self.attr_name = attr_name

    def __set__(self, instance, value):
        """Descriptor write: stash the value on the owning instance."""
        if isinstance(value, Attribute):
            return self
        if instance:
            instance.attribute_values[self.attr_name] = value

    def __get__(self, instance, owner):
        """Descriptor read: class-level access yields the descriptor itself."""
        if not instance:
            return self
        return instance.attribute_values.get(self.attr_name, None)

    def serialize(self, value):
        """
        Return a DynamoDB-compatible value; the base class is a no-op.
        """
        return value

    def deserialize(self, value):
        """
        Reverse of `serialize`; the base class is a no-op.
        """
        return value
class SetMixin(object):
    """
    Mixin providing (de)serialization for DynamoDB set types.
    """

    def serialize(self, value):
        """
        Serialize a set as a sorted list of JSON-encoded members.

        DynamoDB does not store empty attributes, so None and empty
        collections serialize to None. A non-iterable scalar is treated
        as a one-element set.
        """
        if value is None:
            return None
        try:
            iter(value)
        except TypeError:
            value = [value]
        if not len(value):
            return None
        return [json.dumps(member) for member in sorted(value)]

    def deserialize(self, value):
        """
        Deserialize a list of JSON-encoded members into a set.

        Empty or missing input yields None (implicitly).
        """
        if value and len(value):
            return set(json.loads(member) for member in value)
class BinaryAttribute(Attribute):
    """
    A binary attribute
    """
    attr_type = BINARY

    def serialize(self, value):
        """
        Encode the raw bytes as base64 text for storage.
        """
        encoded = b64encode(value)
        return encoded.decode(DEFAULT_ENCODING)

    def deserialize(self, value):
        """
        Decode the stored base64 text back into raw bytes.
        """
        return b64decode(value.encode(DEFAULT_ENCODING))
class BinarySetAttribute(SetMixin, Attribute):
    """
    A binary set
    """
    attr_type = BINARY_SET
    null = True

    def serialize(self, value):
        """
        Encode each member as base64 text; empty input serializes to None
        because DynamoDB does not store empty attributes.
        """
        if value and len(value):
            return [b64encode(member).decode(DEFAULT_ENCODING)
                    for member in sorted(value)]
        return None

    def deserialize(self, value):
        """
        Decode each base64 member back into raw bytes; empty input yields
        None (implicitly).
        """
        if value and len(value):
            return set(b64decode(member.encode(DEFAULT_ENCODING))
                       for member in value)
class UnicodeSetAttribute(SetMixin, Attribute):
    """
    A unicode set
    """
    # Members are JSON-encoded/decoded by SetMixin; sets are nullable
    # because DynamoDB cannot store empty attributes.
    attr_type = STRING_SET
    null = True
class UnicodeAttribute(Attribute):
    """
    A unicode attribute
    """
    attr_type = STRING

    def serialize(self, value):
        """
        Return the value as a unicode string; None/empty becomes None
        because DynamoDB does not store empty attributes.
        """
        if value is None or not len(value):
            return None
        if isinstance(value, six.text_type):
            return value
        return six.u(value)
class JSONAttribute(Attribute):
    """
    A JSON Attribute

    Encodes JSON to unicode internally
    """
    attr_type = STRING

    def serialize(self, value):
        """
        Serializes JSON to unicode

        :param value: any JSON-serializable object, or None
        :return: the JSON text, or None when value is None
        """
        if value is None:
            return None
        encoded = json.dumps(value)
        # On Python 2, json.dumps returns a byte string that must be
        # decoded; on Python 3 it already returns str, and calling
        # .decode() on it would raise AttributeError.
        # (changed from six.u because it improperly removes escaping)
        if isinstance(encoded, bytes):
            encoded = encoded.decode('utf-8')
        return encoded

    def deserialize(self, value):
        """
        Deserializes JSON

        :param value: JSON text as produced by `serialize`
        :return: the decoded Python object
        """
        # strict=False permits control characters inside strings, matching
        # what may have been stored by older writers.
        return json.loads(value, strict=False)
class BooleanAttribute(Attribute):
    """
    A class for boolean attributes

    This attribute type uses a number attribute to save space
    """
    attr_type = NUMBER

    def serialize(self, value):
        """
        Encodes True as 1, False as 0; None stays None.
        """
        if value is None:
            return None
        return json.dumps(1 if value else 0)

    def deserialize(self, value):
        """
        Decodes the stored number back into a bool.
        """
        return bool(json.loads(value))
class NumberSetAttribute(SetMixin, Attribute):
    """
    A number set attribute
    """
    # Members are JSON-encoded/decoded by SetMixin; sets are nullable
    # because DynamoDB cannot store empty attributes.
    attr_type = NUMBER_SET
    null = True
class NumberAttribute(Attribute):
    """
    A number attribute
    """
    attr_type = NUMBER

    def serialize(self, value):
        """
        Encode numbers as JSON

        JSON text round-trips ints and floats losslessly for DynamoDB's
        string-based number transport.
        """
        return json.dumps(value)

    def deserialize(self, value):
        """
        Decode numbers from JSON
        """
        return json.loads(value)
class UTCDateTimeAttribute(Attribute):
    """
    An attribute for storing a UTC Datetime
    """
    # Stored as a formatted string (DATETIME_FORMAT from pynamodb.constants).
    attr_type = STRING

    def serialize(self, value):
        """
        Takes a datetime object and returns a string
        """
        # Delorean wraps the value with the UTC timezone before formatting;
        # NOTE(review): behavior for already-aware non-UTC datetimes depends
        # on delorean's normalization — confirm against the delorean docs.
        fmt = Delorean(value, timezone=UTC).datetime.strftime(DATETIME_FORMAT)
        return six.u(fmt)

    def deserialize(self, value):
        """
        Takes a UTC datetime string and returns a datetime object
        """
        return parse(value).datetime
| |
"""
homeassistant.bootstrap
~~~~~~~~~~~~~~~~~~~~~~~
Provides methods to bootstrap a home assistant instance.
Each method will return a tuple (bus, statemachine).
After bootstrapping you can add your own components or
start by calling homeassistant.start_home_assistant(bus)
"""
import os
import sys
import logging
import logging.handlers
from collections import defaultdict
import homeassistant.core as core
import homeassistant.util.dt as date_util
import homeassistant.util.package as pkg_util
import homeassistant.util.location as loc_util
import homeassistant.config as config_util
import homeassistant.loader as loader
import homeassistant.components as core_components
import homeassistant.components.group as group
from homeassistant.helpers.entity import Entity
from homeassistant.const import (
EVENT_COMPONENT_LOADED, CONF_LATITUDE, CONF_LONGITUDE,
CONF_TEMPERATURE_UNIT, CONF_NAME, CONF_TIME_ZONE, CONF_CUSTOMIZE,
TEMP_CELCIUS, TEMP_FAHRENHEIT)
_LOGGER = logging.getLogger(__name__)

# Event-data key used when firing EVENT_COMPONENT_LOADED.
ATTR_COMPONENT = 'component'

# Component path template: "<domain>.<platform>", e.g. "light.hue".
PLATFORM_FORMAT = '{}.{}'
def setup_component(hass, domain, config=None):
    """ Setup a component and all its dependencies. """
    # Already loaded components count as success.
    if domain in hass.config.components:
        return True

    _ensure_loader_prepared(hass)

    if config is None:
        config = defaultdict(dict)

    # Resolve the component plus its dependency closure, in load order.
    components = loader.load_order_component(domain)

    # OrderedSet is empty if component or dependencies could not be resolved
    if not components:
        return False

    # Stop at the first component that fails to set up.
    return all(_setup_component(hass, component, config)
               for component in components)
def _handle_requirements(hass, component, name):
    """ Installs requirements for component. """
    # Nothing to do when pip is disabled or the component declares no
    # requirements.
    if hass.config.skip_pip or not hasattr(component, 'REQUIREMENTS'):
        return True

    target = hass.config.path('lib')
    for requirement in component.REQUIREMENTS:
        if not pkg_util.install_package(requirement, target=target):
            _LOGGER.error('Not initializing %s because could not install '
                          'dependency %s', name, requirement)
            return False

    return True
def _setup_component(hass, domain, config):
    """ Setup a component for Home Assistant. """
    # Idempotent: an already-loaded component is a success.
    if domain in hass.config.components:
        return True
    component = loader.get_component(domain)

    # setup_component loads components in dependency order, so all
    # dependencies should already be in hass.config.components here.
    missing_deps = [dep for dep in component.DEPENDENCIES
                    if dep not in hass.config.components]

    if missing_deps:
        _LOGGER.error(
            'Not initializing %s because not all dependencies loaded: %s',
            domain, ", ".join(missing_deps))

        return False

    if not _handle_requirements(hass, component, domain):
        return False

    try:
        if not component.setup(hass, config):
            _LOGGER.error('component %s failed to initialize', domain)
            return False
    except Exception:  # pylint: disable=broad-except
        # A misbehaving component must not crash the whole bootstrap.
        _LOGGER.exception('Error during setup of component %s', domain)
        return False

    # Mark loaded before firing the event so listeners see consistent state.
    hass.config.components.append(component.DOMAIN)

    # Assumption: if a component does not depend on groups
    # it communicates with devices
    if group.DOMAIN not in component.DEPENDENCIES:
        hass.pool.add_worker()

    hass.bus.fire(
        EVENT_COMPONENT_LOADED, {ATTR_COMPONENT: component.DOMAIN})

    return True
def prepare_setup_platform(hass, config, domain, platform_name):
    """ Loads a platform and makes sure dependencies are setup. """
    _ensure_loader_prepared(hass)

    platform_path = PLATFORM_FORMAT.format(domain, platform_name)
    platform = loader.get_component(platform_path)

    if platform is None:
        # Not found
        _LOGGER.error('Unable to find platform %s', platform_path)
        return None

    if platform_path in hass.config.components:
        # Already loaded; nothing more to prepare.
        return platform

    # Set up the platform's dependencies (if it declares any) before the
    # platform itself.
    for component in getattr(platform, 'DEPENDENCIES', ()):
        if not setup_component(hass, component, config):
            _LOGGER.error(
                'Unable to prepare setup for platform %s because '
                'dependency %s could not be initialized', platform_path,
                component)
            return None

    if not _handle_requirements(hass, platform, platform_path):
        return None

    return platform
def mount_local_lib_path(config_dir):
    """ Add local library to Python Path """
    # Prepend so locally installed packages shadow system-wide ones.
    lib_dir = os.path.join(config_dir, 'lib')
    sys.path.insert(0, lib_dir)
# pylint: disable=too-many-branches, too-many-statements, too-many-arguments
def from_config_dict(config, hass=None, config_dir=None, enable_log=True,
                     verbose=False, daemon=False, skip_pip=False,
                     log_rotate_days=None):
    """
    Tries to configure Home Assistant from a config dict.

    Dynamically loads required components and its dependencies.

    :param config: mapping of component domain -> component config
    :param hass: existing HomeAssistant instance to configure, or None to
                 create one
    :param config_dir: only applied when a new instance is created
    :param enable_log: set False when the caller (e.g. from_config_file)
                       has already configured logging
    """
    if hass is None:
        hass = core.HomeAssistant()

        if config_dir is not None:
            config_dir = os.path.abspath(config_dir)
            hass.config.config_dir = config_dir
            mount_local_lib_path(config_dir)

    process_ha_core_config(hass, config.get(core.DOMAIN, {}))

    if enable_log:
        enable_logging(hass, verbose, daemon, log_rotate_days)

    hass.config.skip_pip = skip_pip
    if skip_pip:
        _LOGGER.warning('Skipping pip installation of required modules. '
                        'This may cause issues.')

    _ensure_loader_prepared(hass)

    # Make a copy because we are mutating it.
    # Convert it to defaultdict so components can always have config dict
    # Convert values to dictionaries if they are None
    config = defaultdict(
        dict, {key: value or {} for key, value in config.items()})

    # Filter out the repeating and common config section [homeassistant]
    # (keys containing a space are platform sections like "light 2").
    components = (key for key in config.keys()
                  if ' ' not in key and key != core.DOMAIN)

    if not core_components.setup(hass, config):
        _LOGGER.error('Home Assistant core failed to initialize. '
                      'Further initialization aborted.')

        return hass

    _LOGGER.info('Home Assistant core initialized')

    # Setup the components; individual failures are logged inside
    # _setup_component and do not abort the remaining components.
    for domain in loader.load_order_components(components):
        _setup_component(hass, domain, config)

    return hass
def from_config_file(config_path, hass=None, verbose=False, daemon=False,
                     skip_pip=True, log_rotate_days=None):
    """
    Reads the configuration file and tries to start all the required
    functionality. Will add functionality to 'hass' parameter if given,
    instantiates a new Home Assistant object if 'hass' is not given.
    """
    if hass is None:
        hass = core.HomeAssistant()

    # Set config dir to directory holding config file
    config_dir = os.path.abspath(os.path.dirname(config_path))
    hass.config.config_dir = config_dir
    mount_local_lib_path(config_dir)

    # Logging is configured here so that from_config_dict (called with
    # enable_log=False) does not configure it a second time.
    enable_logging(hass, verbose, daemon, log_rotate_days)

    config_dict = config_util.load_config_file(config_path)

    # NOTE(review): skip_pip defaults to True here but to False in
    # from_config_dict — confirm the asymmetry is intentional.
    return from_config_dict(config_dict, hass, enable_log=False,
                            skip_pip=skip_pip)
def enable_logging(hass, verbose=False, daemon=False, log_rotate_days=None):
    """ Setup the logging for home assistant.

    Console logging is skipped in daemon mode (no attached terminal); file
    logging goes to home-assistant.log in the config dir when writable.
    """
    if not daemon:
        logging.basicConfig(level=logging.INFO)
        fmt = ("%(log_color)s%(asctime)s %(levelname)s (%(threadName)s) "
               "[%(name)s] %(message)s%(reset)s")
        try:
            # colorlog is optional; fall back to plain output if missing.
            from colorlog import ColoredFormatter
            logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
                fmt,
                datefmt='%y-%m-%d %H:%M:%S',
                reset=True,
                log_colors={
                    'DEBUG': 'cyan',
                    'INFO': 'green',
                    'WARNING': 'yellow',
                    'ERROR': 'red',
                    'CRITICAL': 'red',
                }
            ))
        except ImportError:
            _LOGGER.warning(
                "Colorlog package not found, console coloring disabled")

    # Log errors to a file if we have write access to file or config dir
    err_log_path = hass.config.path('home-assistant.log')
    err_path_exists = os.path.isfile(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
       (not err_path_exists and os.access(hass.config.config_dir, os.W_OK)):

        if log_rotate_days:
            err_handler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when='midnight', backupCount=log_rotate_days)
        else:
            # delay=True defers creating the file until something is logged.
            err_handler = logging.FileHandler(
                err_log_path, mode='w', delay=True)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(
            logging.Formatter('%(asctime)s %(name)s: %(message)s',
                              datefmt='%y-%m-%d %H:%M:%S'))
        logger = logging.getLogger('')
        logger.addHandler(err_handler)
        logger.setLevel(logging.INFO)  # this sets the minimum log level

    else:
        _LOGGER.error(
            'Unable to setup error log %s (access denied)', err_log_path)
def process_ha_core_config(hass, config):
    """ Processes the [homeassistant] section from the config.

    Applies location, name, time zone, temperature unit and per-entity
    attribute overrides to ``hass.config``; any of those still missing
    afterwards are auto-detected from the network location.
    """
    hac = hass.config

    def set_time_zone(time_zone_str):
        """ Helper method to set time zone in HA. """
        if time_zone_str is None:
            return

        time_zone = date_util.get_time_zone(time_zone_str)

        if time_zone:
            hac.time_zone = time_zone
            date_util.set_default_time_zone(time_zone)
        else:
            _LOGGER.error('Received invalid time zone %s', time_zone_str)

    for key, attr in ((CONF_LATITUDE, 'latitude'),
                      (CONF_LONGITUDE, 'longitude'),
                      (CONF_NAME, 'location_name')):
        if key in config:
            setattr(hac, attr, config[key])

    set_time_zone(config.get(CONF_TIME_ZONE))

    customize = config.get(CONF_CUSTOMIZE)

    if isinstance(customize, dict):
        # Reuse the value fetched above instead of looking it up again.
        for entity_id, attrs in customize.items():
            if not isinstance(attrs, dict):
                continue
            Entity.overwrite_attribute(entity_id, attrs.keys(), attrs.values())

    if CONF_TEMPERATURE_UNIT in config:
        unit = config[CONF_TEMPERATURE_UNIT]

        if unit == 'C':
            hac.temperature_unit = TEMP_CELCIUS
        elif unit == 'F':
            hac.temperature_unit = TEMP_FAHRENHEIT

    # If we miss some of the needed values, auto detect them
    if None not in (
            hac.latitude, hac.longitude, hac.temperature_unit, hac.time_zone):
        return

    _LOGGER.info('Auto detecting location and temperature unit')

    info = loc_util.detect_location_info()

    if info is None:
        _LOGGER.error('Could not detect location information')
        return

    # Only override coordinates when BOTH are unset, so a half-configured
    # pair is left for the user to complete.
    if hac.latitude is None and hac.longitude is None:
        hac.latitude = info.latitude
        hac.longitude = info.longitude

    if hac.temperature_unit is None:
        if info.use_fahrenheit:
            hac.temperature_unit = TEMP_FAHRENHEIT
        else:
            hac.temperature_unit = TEMP_CELCIUS

    if hac.location_name is None:
        hac.location_name = info.city

    if hac.time_zone is None:
        set_time_zone(info.time_zone)
def _ensure_loader_prepared(hass):
    """ Ensure Home Assistant loader is prepared. """
    # loader.prepare scans for available components; the PREPARED flag
    # guards against repeating that work on every call.
    if not loader.PREPARED:
        loader.prepare(hass)
| |
from __future__ import unicode_literals
import datetime
from django.contrib import admin
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.templatetags.admin_list import pagination
from django.contrib.admin.views.main import ChangeList, SEARCH_VAR, ALL_VAR
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.template import Context, Template
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from django.utils import formats
from django.utils import six
from .admin import (ChildAdmin, QuartetAdmin, BandAdmin, ChordsBandAdmin,
GroupAdmin, ParentAdmin, DynamicListDisplayChildAdmin,
DynamicListDisplayLinksChildAdmin, CustomPaginationAdmin,
FilteredChildAdmin, CustomPaginator, site as custom_site,
SwallowAdmin, DynamicListFilterChildAdmin, InvitationAdmin,
DynamicSearchFieldsChildAdmin, NoListDisplayLinksParentAdmin)
from .models import (Event, Child, Parent, Genre, Band, Musician, Group,
Quartet, Membership, ChordsMusician, ChordsBand, Invitation, Swallow,
UnorderedObject, OrderedObject, CustomIdUser)
@override_settings(ROOT_URLCONF="admin_changelist.urls")
class ChangeListTests(TestCase):
    def setUp(self):
        # RequestFactory builds request objects without running middleware
        # or URL routing — each test constructs its ChangeList directly.
        self.factory = RequestFactory()

    def _create_superuser(self, username):
        # Helper: minimal superuser row; no password is set because these
        # tests never authenticate through a login form.
        return User.objects.create(username=username, is_superuser=True)

    def _mocked_authenticated_request(self, url, user):
        # Helper: GET request with `user` attached, mimicking what the
        # authentication middleware would normally do.
        request = self.factory.get(url)
        request.user = user
        return request
    def test_select_related_preserved(self):
        """
        Regression test for #10348: ChangeList.get_queryset() shouldn't
        overwrite a custom select_related provided by ModelAdmin.get_queryset().
        """
        m = ChildAdmin(Child, admin.site)
        request = self.factory.get('/child/')
        cl = ChangeList(request, Child, m.list_display, m.list_display_links,
                        m.list_filter, m.date_hierarchy, m.search_fields,
                        m.list_select_related, m.list_per_page,
                        m.list_max_show_all, m.list_editable, m)
        # The nested dict mirrors ChildAdmin.get_queryset()'s
        # select_related('parent__name') — it must survive untouched.
        self.assertEqual(cl.queryset.query.select_related, {
            'parent': {'name': {}}
        })
    def test_select_related_as_tuple(self):
        # list_select_related given as a tuple should be applied verbatim.
        ia = InvitationAdmin(Invitation, admin.site)
        request = self.factory.get('/invitation/')
        # NOTE(review): the model argument is Child while the admin is
        # InvitationAdmin — looks inconsistent; confirm it is intentional.
        cl = ChangeList(request, Child, ia.list_display, ia.list_display_links,
                        ia.list_filter, ia.date_hierarchy, ia.search_fields,
                        ia.list_select_related, ia.list_per_page,
                        ia.list_max_show_all, ia.list_editable, ia)
        self.assertEqual(cl.queryset.query.select_related, {'player': {}})
    def test_select_related_as_empty_tuple(self):
        # An empty list_select_related tuple must disable select_related
        # entirely (query attribute False), not select everything.
        ia = InvitationAdmin(Invitation, admin.site)
        ia.list_select_related = ()
        request = self.factory.get('/invitation/')
        # NOTE(review): the model argument is Child while the admin is
        # InvitationAdmin — looks inconsistent; confirm it is intentional.
        cl = ChangeList(request, Child, ia.list_display, ia.list_display_links,
                        ia.list_filter, ia.date_hierarchy, ia.search_fields,
                        ia.list_select_related, ia.list_per_page,
                        ia.list_max_show_all, ia.list_editable, ia)
        self.assertEqual(cl.queryset.query.select_related, False)
    def test_result_list_empty_changelist_value(self):
        """
        Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored
        for relationship fields
        """
        new_child = Child.objects.create(name='name', parent=None)
        request = self.factory.get('/child/')
        m = ChildAdmin(Child, admin.site)
        list_display = m.get_list_display(request)
        list_display_links = m.get_list_display_links(request, list_display)
        cl = ChangeList(request, Child, list_display, list_display_links,
                        m.list_filter, m.date_hierarchy, m.search_fields,
                        m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
        cl.formset = None
        template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
        context = Context({'cl': cl})
        table_output = template.render(context)
        link = reverse('admin:admin_changelist_child_change', args=(new_child.id,))
        # The null FK must render as the "(None)" placeholder rather than an
        # empty cell or an error.
        row_html = '<tbody><tr class="row1"><th class="field-name"><a href="%s">name</a></th><td class="field-parent nowrap">(None)</td></tr></tbody>' % link
        self.assertNotEqual(table_output.find(row_html), -1,
                            'Failed to find expected row element: %s' % table_output)
    def test_result_list_html(self):
        """
        Verifies that inclusion tag result_list generates a table when with
        default ModelAdmin settings.
        """
        new_parent = Parent.objects.create(name='parent')
        new_child = Child.objects.create(name='name', parent=new_parent)
        request = self.factory.get('/child/')
        m = ChildAdmin(Child, admin.site)
        list_display = m.get_list_display(request)
        list_display_links = m.get_list_display_links(request, list_display)
        cl = ChangeList(request, Child, list_display, list_display_links,
                        m.list_filter, m.date_hierarchy, m.search_fields,
                        m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
        # No list_editable in this scenario, so no formset is attached.
        cl.formset = None
        template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
        context = Context({'cl': cl})
        table_output = template.render(context)
        link = reverse('admin:admin_changelist_child_change', args=(new_child.id,))
        # Expect one row: linked name cell plus the related parent's default
        # str() representation ("Parent object").
        row_html = '<tbody><tr class="row1"><th class="field-name"><a href="%s">name</a></th><td class="field-parent nowrap">Parent object</td></tr></tbody>' % link
        self.assertNotEqual(table_output.find(row_html), -1,
                            'Failed to find expected row element: %s' % table_output)
    def test_result_list_editable_html(self):
        """
        Regression tests for #11791: Inclusion tag result_list generates a
        table and this checks that the items are nested within the table
        element tags.
        Also a regression test for #13599, verifies that hidden fields
        when list_editable is enabled are rendered in a div outside the
        table.
        """
        new_parent = Parent.objects.create(name='parent')
        new_child = Child.objects.create(name='name', parent=new_parent)
        request = self.factory.get('/child/')
        m = ChildAdmin(Child, admin.site)

        # Test with list_editable fields
        m.list_display = ['id', 'name', 'parent']
        m.list_display_links = ['id']
        m.list_editable = ['name']
        cl = ChangeList(request, Child, m.list_display, m.list_display_links,
                        m.list_filter, m.date_hierarchy, m.search_fields,
                        m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
        # list_editable requires a bound formset over the visible results.
        FormSet = m.get_changelist_formset(request)
        cl.formset = FormSet(queryset=cl.result_list)
        template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
        context = Context({'cl': cl})
        table_output = template.render(context)
        # make sure that hidden fields are in the correct place
        hiddenfields_div = '<div class="hiddenfields"><input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></div>' % new_child.id
        self.assertInHTML(hiddenfields_div, table_output, msg_prefix='Failed to find hidden fields')

        # make sure that list editable fields are rendered in divs correctly
        editable_name_field = '<input name="form-0-name" value="name" class="vTextField" maxlength="30" type="text" id="id_form-0-name" />'
        self.assertInHTML('<td class="field-name">%s</td>' % editable_name_field, table_output, msg_prefix='Failed to find "name" list_editable field')
def test_result_list_editable(self):
    """
    Regression test for #14312: list_editable with pagination
    """
    new_parent = Parent.objects.create(name='parent')
    for i in range(200):
        Child.objects.create(name='name %s' % i, parent=new_parent)
    # A page number outside the valid range must raise
    # IncorrectLookupParameters rather than crash.
    request = self.factory.get('/child/', data={'p': -1})  # Anything outside range
    m = ChildAdmin(Child, admin.site)
    # Test with list_editable fields
    m.list_display = ['id', 'name', 'parent']
    m.list_display_links = ['id']
    m.list_editable = ['name']
    self.assertRaises(IncorrectLookupParameters, lambda:
        ChangeList(request, Child, m.list_display, m.list_display_links,
                   m.list_filter, m.date_hierarchy, m.search_fields,
                   m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m))
def test_custom_paginator(self):
    """An admin overriding the paginator gets its custom class instantiated."""
    root = Parent.objects.create(name='parent')
    for idx in range(200):
        Child.objects.create(name='name %s' % idx, parent=root)
    model_admin = CustomPaginationAdmin(Child, admin.site)
    request = self.factory.get('/child/')
    changelist = ChangeList(
        request, Child, model_admin.list_display,
        model_admin.list_display_links, model_admin.list_filter,
        model_admin.date_hierarchy, model_admin.search_fields,
        model_admin.list_select_related, model_admin.list_per_page,
        model_admin.list_max_show_all, model_admin.list_editable,
        model_admin,
    )
    changelist.get_results(request)
    self.assertIsInstance(changelist.paginator, CustomPaginator)
def test_distinct_for_m2m_in_list_filter(self):
    """
    Regression test for #13902: When using a ManyToMany in list_filter,
    results shouldn't appear more than once. Basic ManyToMany.
    """
    blues = Genre.objects.create(name='Blues')
    band = Band.objects.create(name='B.B. King Review', nr_of_members=11)
    # Adding the same genre twice must not duplicate the band in results.
    for _ in range(2):
        band.genres.add(blues)
    model_admin = BandAdmin(Band, admin.site)
    request = self.factory.get('/band/', data={'genres': blues.pk})
    changelist = ChangeList(
        request, Band, model_admin.list_display,
        model_admin.list_display_links, model_admin.list_filter,
        model_admin.date_hierarchy, model_admin.search_fields,
        model_admin.list_select_related, model_admin.list_per_page,
        model_admin.list_max_show_all, model_admin.list_editable,
        model_admin,
    )
    changelist.get_results(request)
    # There's only one Band instance
    self.assertEqual(changelist.result_count, 1)
def test_distinct_for_through_m2m_in_list_filter(self):
    """
    Regression test for #13902: When using a ManyToMany in list_filter,
    results shouldn't appear more than once. With an intermediate model.
    """
    lead = Musician.objects.create(name='Vox')
    band = Group.objects.create(name='The Hype')
    # Two memberships through the intermediate model for the same pair.
    Membership.objects.create(group=band, music=lead, role='lead voice')
    Membership.objects.create(group=band, music=lead, role='bass player')
    m = GroupAdmin(Group, admin.site)
    request = self.factory.get('/group/', data={'members': lead.pk})
    cl = ChangeList(request, Group, m.list_display,
                    m.list_display_links, m.list_filter, m.date_hierarchy,
                    m.search_fields, m.list_select_related, m.list_per_page,
                    m.list_max_show_all, m.list_editable, m)
    cl.get_results(request)
    # There's only one Group instance
    self.assertEqual(cl.result_count, 1)
def test_distinct_for_inherited_m2m_in_list_filter(self):
    """
    Regression test for #13902: When using a ManyToMany in list_filter,
    results shouldn't appear more than once. Model managed in the
    admin inherits from the one that defines the relationship.
    """
    lead = Musician.objects.create(name='John')
    four = Quartet.objects.create(name='The Beatles')
    Membership.objects.create(group=four, music=lead, role='lead voice')
    Membership.objects.create(group=four, music=lead, role='guitar player')
    m = QuartetAdmin(Quartet, admin.site)
    request = self.factory.get('/quartet/', data={'members': lead.pk})
    cl = ChangeList(request, Quartet, m.list_display,
                    m.list_display_links, m.list_filter, m.date_hierarchy,
                    m.search_fields, m.list_select_related, m.list_per_page,
                    m.list_max_show_all, m.list_editable, m)
    cl.get_results(request)
    # There's only one Quartet instance
    self.assertEqual(cl.result_count, 1)
def test_distinct_for_m2m_to_inherited_in_list_filter(self):
    """
    Regression test for #13902: When using a ManyToMany in list_filter,
    results shouldn't appear more than once. Target of the relationship
    inherits from another.
    """
    lead = ChordsMusician.objects.create(name='Player A')
    three = ChordsBand.objects.create(name='The Chords Trio')
    Invitation.objects.create(band=three, player=lead, instrument='guitar')
    Invitation.objects.create(band=three, player=lead, instrument='bass')
    m = ChordsBandAdmin(ChordsBand, admin.site)
    request = self.factory.get('/chordsband/', data={'members': lead.pk})
    cl = ChangeList(request, ChordsBand, m.list_display,
                    m.list_display_links, m.list_filter, m.date_hierarchy,
                    m.search_fields, m.list_select_related, m.list_per_page,
                    m.list_max_show_all, m.list_editable, m)
    cl.get_results(request)
    # There's only one ChordsBand instance
    self.assertEqual(cl.result_count, 1)
def test_distinct_for_non_unique_related_object_in_list_filter(self):
    """
    Regression tests for #15819: If a field listed in list_filters
    is a non-unique related object, distinct() must be called.
    """
    parent = Parent.objects.create(name='Mary')
    # Two children with the same name
    Child.objects.create(parent=parent, name='Daniel')
    Child.objects.create(parent=parent, name='Daniel')
    m = ParentAdmin(Parent, admin.site)
    request = self.factory.get('/parent/', data={'child__name': 'Daniel'})
    cl = ChangeList(request, Parent, m.list_display, m.list_display_links,
                    m.list_filter, m.date_hierarchy, m.search_fields,
                    m.list_select_related, m.list_per_page,
                    m.list_max_show_all, m.list_editable, m)
    # Make sure distinct() was called
    self.assertEqual(cl.queryset.count(), 1)
def test_distinct_for_non_unique_related_object_in_search_fields(self):
    """
    Regression tests for #15819: If a field listed in search_fields
    is a non-unique related object, distinct() must be called.
    """
    parent = Parent.objects.create(name='Mary')
    # Both children match a case-insensitive search for 'daniel'.
    Child.objects.create(parent=parent, name='Danielle')
    Child.objects.create(parent=parent, name='Daniel')
    m = ParentAdmin(Parent, admin.site)
    request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'})
    cl = ChangeList(request, Parent, m.list_display, m.list_display_links,
                    m.list_filter, m.date_hierarchy, m.search_fields,
                    m.list_select_related, m.list_per_page,
                    m.list_max_show_all, m.list_editable, m)
    # Make sure distinct() was called
    self.assertEqual(cl.queryset.count(), 1)
def test_pagination(self):
    """
    Regression tests for #12893: Pagination in admins changelist doesn't
    use queryset set by modeladmin.
    """
    parent = Parent.objects.create(name='anything')
    # 60 objects total; 30 of them have names starting with 'filtered'.
    for i in range(30):
        Child.objects.create(name='name %s' % i, parent=parent)
        Child.objects.create(name='filtered %s' % i, parent=parent)
    request = self.factory.get('/child/')
    # Test default queryset
    m = ChildAdmin(Child, admin.site)
    cl = ChangeList(request, Child, m.list_display, m.list_display_links,
                    m.list_filter, m.date_hierarchy, m.search_fields,
                    m.list_select_related, m.list_per_page, m.list_max_show_all,
                    m.list_editable, m)
    self.assertEqual(cl.queryset.count(), 60)
    self.assertEqual(cl.paginator.count, 60)
    self.assertEqual(list(cl.paginator.page_range), [1, 2, 3, 4, 5, 6])
    # Test custom queryset
    m = FilteredChildAdmin(Child, admin.site)
    cl = ChangeList(request, Child, m.list_display, m.list_display_links,
                    m.list_filter, m.date_hierarchy, m.search_fields,
                    m.list_select_related, m.list_per_page, m.list_max_show_all,
                    m.list_editable, m)
    self.assertEqual(cl.queryset.count(), 30)
    self.assertEqual(cl.paginator.count, 30)
    self.assertEqual(list(cl.paginator.page_range), [1, 2, 3])
def test_computed_list_display_localization(self):
    """
    Regression test for #13196: output of functions should be localized
    in the changelist.
    """
    User.objects.create_superuser(
        username='super', email='super@localhost', password='secret')
    self.client.login(username='super', password='secret')
    event = Event.objects.create(date=datetime.date.today())
    response = self.client.get('/admin/admin_changelist/event/')
    # The localized rendering must appear; the raw str() form must not.
    self.assertContains(response, formats.localize(event.date))
    self.assertNotContains(response, six.text_type(event.date))
def test_dynamic_list_display(self):
    """
    Regression tests for #14206: dynamic list_display support.
    """
    parent = Parent.objects.create(name='parent')
    for i in range(10):
        Child.objects.create(name='child %s' % i, parent=parent)
    user_noparents = self._create_superuser('noparents')
    user_parents = self._create_superuser('parents')
    # Test with user 'noparents'
    m = custom_site._registry[Child]
    request = self._mocked_authenticated_request('/child/', user_noparents)
    response = m.changelist_view(request)
    self.assertNotContains(response, 'Parent object')
    list_display = m.get_list_display(request)
    list_display_links = m.get_list_display_links(request, list_display)
    self.assertEqual(list_display, ['name', 'age'])
    self.assertEqual(list_display_links, ['name'])
    # Test with user 'parents'
    m = DynamicListDisplayChildAdmin(Child, admin.site)
    request = self._mocked_authenticated_request('/child/', user_parents)
    response = m.changelist_view(request)
    self.assertContains(response, 'Parent object')
    # Unregister so the default ChildAdmin can be re-registered below.
    custom_site.unregister(Child)
    list_display = m.get_list_display(request)
    list_display_links = m.get_list_display_links(request, list_display)
    self.assertEqual(list_display, ('parent', 'name', 'age'))
    self.assertEqual(list_display_links, ['parent'])
    # Test default implementation
    custom_site.register(Child, ChildAdmin)
    m = custom_site._registry[Child]
    request = self._mocked_authenticated_request('/child/', user_noparents)
    response = m.changelist_view(request)
    self.assertContains(response, 'Parent object')
def test_show_all(self):
    """The ALL_VAR 'show all' link honors list_max_show_all as a ceiling."""
    parent = Parent.objects.create(name='anything')
    # 60 objects total.
    for i in range(30):
        Child.objects.create(name='name %s' % i, parent=parent)
        Child.objects.create(name='filtered %s' % i, parent=parent)
    # Add "show all" parameter to request
    request = self.factory.get('/child/', data={ALL_VAR: ''})
    # Test valid "show all" request (number of total objects is under max)
    m = ChildAdmin(Child, admin.site)
    # 200 is the max we'll pass to ChangeList
    cl = ChangeList(request, Child, m.list_display, m.list_display_links,
                    m.list_filter, m.date_hierarchy, m.search_fields,
                    m.list_select_related, m.list_per_page, 200, m.list_editable, m)
    cl.get_results(request)
    self.assertEqual(len(cl.result_list), 60)
    # Test invalid "show all" request (number of total objects over max)
    # falls back to paginated pages
    m = ChildAdmin(Child, admin.site)
    # 30 is the max we'll pass to ChangeList for this test
    cl = ChangeList(request, Child, m.list_display, m.list_display_links,
                    m.list_filter, m.date_hierarchy, m.search_fields,
                    m.list_select_related, m.list_per_page, 30, m.list_editable, m)
    cl.get_results(request)
    self.assertEqual(len(cl.result_list), 10)
def test_dynamic_list_display_links(self):
    """
    Regression tests for #16257: dynamic list_display_links support.
    """
    parent = Parent.objects.create(name='parent')
    for i in range(1, 10):
        Child.objects.create(id=i, name='child %s' % i, parent=parent, age=i)
    m = DynamicListDisplayLinksChildAdmin(Child, admin.site)
    superuser = self._create_superuser('superuser')
    request = self._mocked_authenticated_request('/child/', superuser)
    response = m.changelist_view(request)
    # Every row's 'age' column (age == id here) must link to the change page.
    for i in range(1, 10):
        link = reverse('admin:admin_changelist_child_change', args=(i,))
        self.assertContains(response, '<a href="%s">%s</a>' % (link, i))
    list_display = m.get_list_display(request)
    list_display_links = m.get_list_display_links(request, list_display)
    self.assertEqual(list_display, ('parent', 'name', 'age'))
    self.assertEqual(list_display_links, ['age'])
def test_no_list_display_links(self):
    """#15185 -- Allow no links from the 'change list' view grid."""
    parent = Parent.objects.create(name='parent')
    model_admin = NoListDisplayLinksParentAdmin(Parent, admin.site)
    request = self._mocked_authenticated_request(
        '/parent/', self._create_superuser('superuser'))
    response = model_admin.changelist_view(request)
    # With list_display_links disabled, no row links to the change form.
    change_url = reverse('admin:admin_changelist_parent_change', args=(parent.pk,))
    self.assertNotContains(response, '<a href="%s">' % change_url)
def test_tuple_list_display(self):
    """
    Regression test for #17128
    (ChangeList failing under Python 2.5 after r16319)
    """
    swallow = Swallow.objects.create(
        origin='Africa', load='12.34', speed='22.2')
    model_admin = SwallowAdmin(Swallow, admin.site)
    superuser = self._create_superuser('superuser')
    request = self._mocked_authenticated_request('/swallow/', superuser)
    response = model_admin.changelist_view(request)
    # just want to ensure it doesn't blow up during rendering
    self.assertContains(response, six.text_type(swallow.origin))
    self.assertContains(response, six.text_type(swallow.load))
    self.assertContains(response, six.text_type(swallow.speed))
def test_deterministic_order_for_unordered_model(self):
    """
    Ensure that the primary key is systematically used in the ordering of
    the changelist's results to guarantee a deterministic order, even
    when the Model doesn't have any default ordering defined.
    Refs #17198.
    """
    superuser = self._create_superuser('superuser')
    for counter in range(1, 51):
        UnorderedObject.objects.create(id=counter, bool=True)

    class UnorderedObjectAdmin(admin.ModelAdmin):
        list_per_page = 10

    def check_results_order(ascending=False):
        # Walk all five pages and assert that ids arrive in strictly
        # ascending (1..50) or descending (50..1) sequence.
        admin.site.register(UnorderedObject, UnorderedObjectAdmin)
        model_admin = UnorderedObjectAdmin(UnorderedObject, admin.site)
        counter = 0 if ascending else 51
        for page in range(0, 5):
            request = self._mocked_authenticated_request('/unorderedobject/?p=%s' % page, superuser)
            response = model_admin.changelist_view(request)
            for result in response.context_data['cl'].result_list:
                counter += 1 if ascending else -1
                self.assertEqual(result.id, counter)
        admin.site.unregister(UnorderedObject)

    # When no order is defined at all, everything is ordered by '-pk'.
    check_results_order()
    # When an order field is defined but multiple records have the same
    # value for that field, make sure everything gets ordered by -pk as well.
    UnorderedObjectAdmin.ordering = ['bool']
    check_results_order()
    # When order fields are defined, including the pk itself, use them.
    UnorderedObjectAdmin.ordering = ['bool', '-pk']
    check_results_order()
    UnorderedObjectAdmin.ordering = ['bool', 'pk']
    check_results_order(ascending=True)
    UnorderedObjectAdmin.ordering = ['-id', 'bool']
    check_results_order()
    UnorderedObjectAdmin.ordering = ['id', 'bool']
    check_results_order(ascending=True)
def test_deterministic_order_for_model_ordered_by_its_manager(self):
    """
    Ensure that the primary key is systematically used in the ordering of
    the changelist's results to guarantee a deterministic order, even
    when the Model has a manager that defines a default ordering.
    Refs #17198.
    """
    superuser = self._create_superuser('superuser')
    for counter in range(1, 51):
        OrderedObject.objects.create(id=counter, bool=True, number=counter)

    class OrderedObjectAdmin(admin.ModelAdmin):
        list_per_page = 10

    def check_results_order(ascending=False):
        # Walk all five pages and assert that ids arrive in strictly
        # ascending (1..50) or descending (50..1) sequence.
        admin.site.register(OrderedObject, OrderedObjectAdmin)
        model_admin = OrderedObjectAdmin(OrderedObject, admin.site)
        counter = 0 if ascending else 51
        for page in range(0, 5):
            request = self._mocked_authenticated_request('/orderedobject/?p=%s' % page, superuser)
            response = model_admin.changelist_view(request)
            for result in response.context_data['cl'].result_list:
                counter += 1 if ascending else -1
                self.assertEqual(result.id, counter)
        admin.site.unregister(OrderedObject)

    # When no order is defined at all, use the model's default ordering (i.e. 'number')
    check_results_order(ascending=True)
    # When an order field is defined but multiple records have the same
    # value for that field, make sure everything gets ordered by -pk as well.
    OrderedObjectAdmin.ordering = ['bool']
    check_results_order()
    # When order fields are defined, including the pk itself, use them.
    OrderedObjectAdmin.ordering = ['bool', '-pk']
    check_results_order()
    OrderedObjectAdmin.ordering = ['bool', 'pk']
    check_results_order(ascending=True)
    OrderedObjectAdmin.ordering = ['-id', 'bool']
    check_results_order()
    OrderedObjectAdmin.ordering = ['id', 'bool']
    check_results_order(ascending=True)
def test_dynamic_list_filter(self):
    """
    Regression tests for ticket #17646: dynamic list_filter support.
    """
    parent = Parent.objects.create(name='parent')
    for i in range(10):
        Child.objects.create(name='child %s' % i, parent=parent)
    user_noparents = self._create_superuser('noparents')
    user_parents = self._create_superuser('parents')
    # Test with user 'noparents'
    m = DynamicListFilterChildAdmin(Child, admin.site)
    request = self._mocked_authenticated_request('/child/', user_noparents)
    response = m.changelist_view(request)
    self.assertEqual(response.context_data['cl'].list_filter, ['name', 'age'])
    # Test with user 'parents'
    m = DynamicListFilterChildAdmin(Child, admin.site)
    request = self._mocked_authenticated_request('/child/', user_parents)
    response = m.changelist_view(request)
    self.assertEqual(response.context_data['cl'].list_filter, ('parent', 'name', 'age'))
def test_dynamic_search_fields(self):
    """Per-request get_search_fields() results must reach the changelist."""
    superuser = self._create_superuser('child')
    model_admin = DynamicSearchFieldsChildAdmin(Child, admin.site)
    request = self._mocked_authenticated_request('/child/', superuser)
    response = model_admin.changelist_view(request)
    self.assertEqual(response.context_data['cl'].search_fields, ('name', 'age'))
def test_pagination_page_range(self):
    """
    Regression tests for ticket #15653: ensure the number of pages
    generated for changelist views are correct.
    """
    # instantiating and setting up ChangeList object
    m = GroupAdmin(Group, admin.site)
    request = self.factory.get('/group/')
    cl = ChangeList(request, Group, m.list_display,
                    m.list_display_links, m.list_filter, m.date_hierarchy,
                    m.search_fields, m.list_select_related, m.list_per_page,
                    m.list_max_show_all, m.list_editable, m)
    per_page = cl.list_per_page = 10
    # Each case: (current page, object count, expected page range where
    # '.' marks an ellipsis of collapsed pages).
    for page_num, objects_count, expected_page_range in [
        (0, per_page, []),
        (0, per_page * 2, list(range(2))),
        (5, per_page * 11, list(range(11))),
        (5, per_page * 12, [0, 1, 2, 3, 4, 5, 6, 7, 8, '.', 10, 11]),
        (6, per_page * 12, [0, 1, '.', 3, 4, 5, 6, 7, 8, 9, 10, 11]),
        (6, per_page * 13, [0, 1, '.', 3, 4, 5, 6, 7, 8, 9, '.', 11, 12]),
    ]:
        # assuming we have exactly `objects_count` objects
        Group.objects.all().delete()
        for i in range(objects_count):
            Group.objects.create(name='test band')
        # setting page number and calculating page range
        cl.page_num = page_num
        cl.get_results(request)
        real_page_range = pagination(cl)['page_range']
        self.assertListEqual(
            expected_page_range,
            list(real_page_range),
        )
class AdminLogNodeTestCase(TestCase):
    """Tests for the {% get_admin_log %} template tag."""

    def test_get_admin_log_templatetag_custom_user(self):
        """
        Regression test for ticket #20088: admin log depends on User model
        having id field as primary key.
        The old implementation raised an AttributeError when trying to use
        the id field.
        """
        context = Context({'user': CustomIdUser()})
        template_string = '{% load log %}{% get_admin_log 10 as admin_log for_user user %}'
        template = Template(template_string)
        # Rendering should be u'' since this templatetag just logs,
        # it doesn't render any string.
        self.assertEqual(template.render(context), '')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_changelist.urls")
class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):
    """In-browser changelist tests, driven through Firefox.

    The Chrome and IE suites below reuse these tests by overriding
    ``webdriver_class``.
    """
    available_apps = ['admin_changelist'] + AdminSeleniumWebDriverTestCase.available_apps
    fixtures = ['users.json']
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def test_add_row_selection(self):
        """
        Ensure that the status line for selected rows gets updated correctly (#22038)
        """
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
                                    '/admin/auth/user/'))
        form_id = '#changelist-form'
        # Test amount of rows in the Changelist
        rows = self.selenium.find_elements_by_css_selector(
            '%s #result_list tbody tr' % form_id)
        self.assertEqual(len(rows), 1)
        # Test current selection
        selection_indicator = self.selenium.find_element_by_css_selector(
            '%s .action-counter' % form_id)
        self.assertEqual(selection_indicator.text, "0 of 1 selected")
        # Select a row and check again
        row_selector = self.selenium.find_element_by_css_selector(
            '%s #result_list tbody tr:first-child .action-select' % form_id)
        row_selector.click()
        self.assertEqual(selection_indicator.text, "1 of 1 selected")
class SeleniumChromeTests(SeleniumFirefoxTests):
    """Run the inherited Selenium changelist tests against Chrome."""
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumIETests(SeleniumFirefoxTests):
    """Run the inherited Selenium changelist tests against Internet Explorer."""
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| |
"""The ClimaCell integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from math import ceil
from typing import Any
from pyclimacell import ClimaCellV3, ClimaCellV4
from pyclimacell.const import CURRENT, DAILY, FORECASTS, HOURLY, NOWCAST
from pyclimacell.exceptions import (
CantConnectException,
InvalidAPIKeyException,
RateLimitedException,
UnknownException,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_API_KEY,
CONF_API_VERSION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import (
ATTRIBUTION,
CC_ATTR_CLOUD_COVER,
CC_ATTR_CONDITION,
CC_ATTR_HUMIDITY,
CC_ATTR_OZONE,
CC_ATTR_PRECIPITATION,
CC_ATTR_PRECIPITATION_PROBABILITY,
CC_ATTR_PRECIPITATION_TYPE,
CC_ATTR_PRESSURE,
CC_ATTR_TEMPERATURE,
CC_ATTR_TEMPERATURE_HIGH,
CC_ATTR_TEMPERATURE_LOW,
CC_ATTR_VISIBILITY,
CC_ATTR_WIND_DIRECTION,
CC_ATTR_WIND_GUST,
CC_ATTR_WIND_SPEED,
CC_SENSOR_TYPES,
CC_V3_ATTR_CLOUD_COVER,
CC_V3_ATTR_CONDITION,
CC_V3_ATTR_HUMIDITY,
CC_V3_ATTR_OZONE,
CC_V3_ATTR_PRECIPITATION,
CC_V3_ATTR_PRECIPITATION_DAILY,
CC_V3_ATTR_PRECIPITATION_PROBABILITY,
CC_V3_ATTR_PRECIPITATION_TYPE,
CC_V3_ATTR_PRESSURE,
CC_V3_ATTR_TEMPERATURE,
CC_V3_ATTR_VISIBILITY,
CC_V3_ATTR_WIND_DIRECTION,
CC_V3_ATTR_WIND_GUST,
CC_V3_ATTR_WIND_SPEED,
CC_V3_SENSOR_TYPES,
CONF_TIMESTEP,
DEFAULT_TIMESTEP,
DOMAIN,
MAX_REQUESTS_PER_DAY,
)
_LOGGER = logging.getLogger(__name__)

# Platforms this integration forwards its config entries to.
PLATFORMS = [SENSOR_DOMAIN, WEATHER_DOMAIN]
def _set_update_interval(hass: HomeAssistant, current_entry: ConfigEntry) -> timedelta:
    """Recalculate update_interval based on existing ClimaCell instances and update them."""
    # A V3 refresh issues four API calls (realtime + hourly + daily + nowcast,
    # see ClimaCellDataUpdateCoordinator._async_update_data). V4 is budgeted as
    # two calls -- TODO confirm against the V4 client's billing.
    api_calls = 4 if current_entry.data[CONF_API_VERSION] == 3 else 2
    # We check how many ClimaCell configured instances are using the same API
    # key and calculate an interval that will not exceed the allowed number of
    # requests. Only 90% of MAX_REQUESTS_PER_DAY is budgeted, to leave a buffer
    # of API calls at the end of the day.
    other_instance_entry_ids = [
        entry.entry_id
        for entry in hass.config_entries.async_entries(DOMAIN)
        if entry.entry_id != current_entry.entry_id
        and entry.data[CONF_API_KEY] == current_entry.data[CONF_API_KEY]
    ]
    interval = timedelta(
        minutes=(
            ceil(
                (24 * 60 * (len(other_instance_entry_ids) + 1) * api_calls)
                / (MAX_REQUESTS_PER_DAY * 0.9)
            )
        )
    )
    # Push the recalculated interval to sibling entries sharing this API key
    # (only those that have already been set up have a coordinator stored).
    for entry_id in other_instance_entry_ids:
        if entry_id in hass.data[DOMAIN]:
            hass.data[DOMAIN][entry_id].update_interval = interval
    return interval
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up ClimaCell API from a config entry."""
    hass.data.setdefault(DOMAIN, {})
    # Collect entry migrations (options/data) and apply them in one update.
    params = {}
    # If config entry options not set up, set them up
    if not entry.options:
        params["options"] = {
            CONF_TIMESTEP: DEFAULT_TIMESTEP,
        }
    else:
        # Use valid timestep if it's invalid
        timestep = entry.options[CONF_TIMESTEP]
        if timestep not in (1, 5, 15, 30):
            # Snap an out-of-range value to the nearest supported timestep.
            if timestep <= 2:
                timestep = 1
            elif timestep <= 7:
                timestep = 5
            elif timestep <= 20:
                timestep = 15
            else:
                timestep = 30
            new_options = entry.options.copy()
            new_options[CONF_TIMESTEP] = timestep
            params["options"] = new_options
    # Add API version if not found
    if CONF_API_VERSION not in entry.data:
        # NOTE(review): entries without a stored version are assumed to
        # predate V4 support and default to the V3 API.
        new_data = entry.data.copy()
        new_data[CONF_API_VERSION] = 3
        params["data"] = new_data
    if params:
        hass.config_entries.async_update_entry(entry, **params)
    api_class = ClimaCellV3 if entry.data[CONF_API_VERSION] == 3 else ClimaCellV4
    api = api_class(
        entry.data[CONF_API_KEY],
        # Fall back to the Home Assistant instance's location.
        entry.data.get(CONF_LATITUDE, hass.config.latitude),
        entry.data.get(CONF_LONGITUDE, hass.config.longitude),
        session=async_get_clientsession(hass),
    )
    coordinator = ClimaCellDataUpdateCoordinator(
        hass,
        entry,
        api,
        _set_update_interval(hass, entry),
    )
    # The first refresh must succeed before platforms are set up.
    await coordinator.async_config_entry_first_refresh()
    hass.data[DOMAIN][entry.entry_id] = coordinator
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    # Tear the platforms down first, then drop the coordinator reference.
    result = await hass.config_entries.async_unload_platforms(
        config_entry, PLATFORMS
    )
    domain_data = hass.data[DOMAIN]
    domain_data.pop(config_entry.entry_id)
    if not domain_data:
        # Last entry for this integration is gone; remove the domain bucket.
        hass.data.pop(DOMAIN)
    return result
class ClimaCellDataUpdateCoordinator(DataUpdateCoordinator):
    """Define an object to hold ClimaCell data."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        api: ClimaCellV3 | ClimaCellV4,
        update_interval: timedelta,
    ) -> None:
        """Initialize."""
        self._config_entry = config_entry
        self._api_version = config_entry.data[CONF_API_VERSION]
        self._api = api
        self.name = config_entry.data[CONF_NAME]
        # Seed with empty buckets so consumers can read `data` before the
        # first successful refresh completes.
        self.data = {CURRENT: {}, FORECASTS: {}}
        super().__init__(
            hass,
            _LOGGER,
            name=config_entry.data[CONF_NAME],
            update_interval=update_interval,
        )

    async def _async_update_data(self) -> dict[str, Any]:
        """Update data via library.

        For V3, four requests are made (realtime + hourly + daily + nowcast)
        and assembled under CURRENT / FORECASTS. For V4, the library's
        combined realtime-and-forecasts payload is returned as-is.

        Raises UpdateFailed on any pyclimacell error.
        """
        data: dict[str, Any] = {FORECASTS: {}}
        try:
            if self._api_version == 3:
                data[CURRENT] = await self._api.realtime(
                    [
                        CC_V3_ATTR_TEMPERATURE,
                        CC_V3_ATTR_HUMIDITY,
                        CC_V3_ATTR_PRESSURE,
                        CC_V3_ATTR_WIND_SPEED,
                        CC_V3_ATTR_WIND_DIRECTION,
                        CC_V3_ATTR_CONDITION,
                        CC_V3_ATTR_VISIBILITY,
                        CC_V3_ATTR_OZONE,
                        CC_V3_ATTR_WIND_GUST,
                        CC_V3_ATTR_CLOUD_COVER,
                        CC_V3_ATTR_PRECIPITATION_TYPE,
                        *(sensor_type.key for sensor_type in CC_V3_SENSOR_TYPES),
                    ]
                )
                data[FORECASTS][HOURLY] = await self._api.forecast_hourly(
                    [
                        CC_V3_ATTR_TEMPERATURE,
                        CC_V3_ATTR_WIND_SPEED,
                        CC_V3_ATTR_WIND_DIRECTION,
                        CC_V3_ATTR_CONDITION,
                        CC_V3_ATTR_PRECIPITATION,
                        CC_V3_ATTR_PRECIPITATION_PROBABILITY,
                    ],
                    None,
                    timedelta(hours=24),
                )
                data[FORECASTS][DAILY] = await self._api.forecast_daily(
                    [
                        CC_V3_ATTR_TEMPERATURE,
                        CC_V3_ATTR_WIND_SPEED,
                        CC_V3_ATTR_WIND_DIRECTION,
                        CC_V3_ATTR_CONDITION,
                        CC_V3_ATTR_PRECIPITATION_DAILY,
                        CC_V3_ATTR_PRECIPITATION_PROBABILITY,
                    ],
                    None,
                    timedelta(days=14),
                )
                data[FORECASTS][NOWCAST] = await self._api.forecast_nowcast(
                    [
                        CC_V3_ATTR_TEMPERATURE,
                        CC_V3_ATTR_WIND_SPEED,
                        CC_V3_ATTR_WIND_DIRECTION,
                        CC_V3_ATTR_CONDITION,
                        CC_V3_ATTR_PRECIPITATION,
                    ],
                    None,
                    # 30 timesteps of nowcast data, capped at 300 minutes.
                    timedelta(
                        minutes=min(300, self._config_entry.options[CONF_TIMESTEP] * 30)
                    ),
                    self._config_entry.options[CONF_TIMESTEP],
                )
            else:
                # V4: a single combined call; first list is realtime fields,
                # second list is forecast fields.
                return await self._api.realtime_and_all_forecasts(
                    [
                        CC_ATTR_TEMPERATURE,
                        CC_ATTR_HUMIDITY,
                        CC_ATTR_PRESSURE,
                        CC_ATTR_WIND_SPEED,
                        CC_ATTR_WIND_DIRECTION,
                        CC_ATTR_CONDITION,
                        CC_ATTR_VISIBILITY,
                        CC_ATTR_OZONE,
                        CC_ATTR_WIND_GUST,
                        CC_ATTR_CLOUD_COVER,
                        CC_ATTR_PRECIPITATION_TYPE,
                        *(sensor_type.key for sensor_type in CC_SENSOR_TYPES),
                    ],
                    [
                        CC_ATTR_TEMPERATURE_LOW,
                        CC_ATTR_TEMPERATURE_HIGH,
                        CC_ATTR_WIND_SPEED,
                        CC_ATTR_WIND_DIRECTION,
                        CC_ATTR_CONDITION,
                        CC_ATTR_PRECIPITATION,
                        CC_ATTR_PRECIPITATION_PROBABILITY,
                    ],
                )
        except (
            CantConnectException,
            InvalidAPIKeyException,
            RateLimitedException,
            UnknownException,
        ) as error:
            raise UpdateFailed from error
        return data
class ClimaCellEntity(CoordinatorEntity):
    """Base ClimaCell Entity."""

    def __init__(
        self,
        config_entry: ConfigEntry,
        coordinator: ClimaCellDataUpdateCoordinator,
        api_version: int,
    ) -> None:
        """Initialize ClimaCell Entity."""
        super().__init__(coordinator)
        self.api_version = api_version
        self._config_entry = config_entry

    @staticmethod
    def _get_cc_value(
        weather_dict: dict[str, Any], key: str
    ) -> int | float | str | None:
        """
        Return property from weather_dict.
        Used for V3 API.
        """
        items = weather_dict.get(key, {})
        # Handle cases where value returned is a list.
        # Optimistically find the best value to return.
        if isinstance(items, list):
            if len(items) == 1:
                return items[0].get("value")
            # Prefer the entry carrying a "max" key, then one carrying
            # "min", and finally fall back to the first entry's value.
            return next(
                (item.get("value") for item in items if "max" in item),
                next(
                    (item.get("value") for item in items if "min" in item),
                    items[0].get("value", None),
                ),
            )
        return items.get("value")

    def _get_current_property(self, property_name: str) -> int | str | float | None:
        """
        Get property from current conditions.
        Used for V4 API.
        """
        return self.coordinator.data.get(CURRENT, {}).get(property_name)

    @property
    def attribution(self):
        """Return the attribution."""
        return ATTRIBUTION

    @property
    def device_info(self) -> DeviceInfo:
        """Return device registry information."""
        # Device identity is keyed on the configured API key.
        return {
            "identifiers": {(DOMAIN, self._config_entry.data[CONF_API_KEY])},
            "name": "ClimaCell",
            "manufacturer": "ClimaCell",
            "sw_version": f"v{self.api_version}",
            "entry_type": "service",
        }
| |
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=E1101
import os
import sys
import re
import time
import tempfile
import shutil
import threading
from subprocess import CalledProcessError
from wlauto.core.extension import Parameter
from wlauto.common.resources import Executable
from wlauto.core.resource import NO_ONE
from wlauto.common.linux.device import BaseLinuxDevice, PsEntry
from wlauto.exceptions import DeviceError, WorkerThreadError, TimeoutError, DeviceNotRespondingError
from wlauto.utils.misc import convert_new_lines
from wlauto.utils.types import boolean, regex
from wlauto.utils.android import (adb_shell, adb_background_shell, adb_list_devices,
adb_command, AndroidProperties, ANDROID_VERSION_MAP)
# Matches the screen power state across several Android releases
# (presumably parsed from `dumpsys` output -- confirm at use sites).
SCREEN_STATE_REGEX = re.compile('(?:mPowerState|mScreenOn|Display Power: state)=([0-9]+|true|false|ON|OFF)', re.I)
# Extracts the usable screen resolution as named groups 'width'/'height'.
SCREEN_SIZE_REGEX = re.compile(r'mUnrestrictedScreen=\(\d+,\d+\)\s+(?P<width>\d+)x(?P<height>\d+)')
class AndroidDevice(BaseLinuxDevice):  # pylint: disable=W0223
    """
    Device running Android OS.
    """

    platform = 'android'

    parameters = [
        Parameter('adb_name',
                  description='The unique ID of the device as output by "adb devices".'),
        Parameter('android_prompt', kind=regex, default=re.compile('^.*(shell|root)@.*:/\S* [#$] ', re.MULTILINE),
                  description='The format of matching the shell prompt in Android.'),
        Parameter('working_directory', default='/sdcard/wa-working',
                  description='Directory that will be used WA on the device for output files etc.'),
        Parameter('binaries_directory', default='/data/local/tmp', override=True,
                  description='Location of binaries on the device.'),
        Parameter('package_data_directory', default='/data/data',
                  description='Location of of data for an installed package (APK).'),
        Parameter('external_storage_directory', default='/sdcard',
                  description='Mount point for external storage.'),
        Parameter('connection', default='usb', allowed_values=['usb', 'ethernet'],
                  description='Specified the nature of adb connection.'),
        Parameter('logcat_poll_period', kind=int,
                  description="""
                  If specified and is not ``0``, logcat will be polled every
                  ``logcat_poll_period`` seconds, and buffered on the host. This
                  can be used if a lot of output is expected in logcat and the fixed
                  logcat buffer on the device is not big enough. The trade off is that
                  this introduces some minor runtime overhead. Not set by default.
                  """),
        Parameter('enable_screen_check', kind=boolean, default=False,
                  description="""
                  Specified whether the device should make sure that the screen is on
                  during initialization.
                  """),
        Parameter('swipe_to_unlock', kind=str, default=None,
                  allowed_values=[None, "horizontal", "vertical"],
                  description="""
                  If set a swipe of the specified direction will be performed.
                  This should unlock the screen.
                  """),
    ]

    # Timing knobs (seconds) used by the boot/connect polling logic.
    default_timeout = 30
    delay = 2
    long_delay = 3 * delay
    ready_timeout = 60
# Overwritten from Device. For documentation, see corresponding method in
# Device.

@property
def is_rooted(self):
    """Whether a root shell is available; probed once via ``adb shell su`` and cached."""
    if self._is_rooted is None:
        try:
            result = adb_shell(self.adb_name, 'su', timeout=1)
            if 'not found' in result:
                self._is_rooted = False
            else:
                self._is_rooted = True
        except TimeoutError:
            # Presumably `su` opened an interactive shell and never
            # returned, so a timeout is taken to mean root is present.
            self._is_rooted = True
        except DeviceError:
            self._is_rooted = False
    return self._is_rooted
@property
def abi(self):
return self.getprop()['ro.product.cpu.abi'].split('-')[0]
@property
def supported_eabi(self):
props = self.getprop()
result = [props['ro.product.cpu.abi']]
if 'ro.product.cpu.abi2' in props:
result.append(props['ro.product.cpu.abi2'])
if 'ro.product.cpu.abilist' in props:
for eabi in props['ro.product.cpu.abilist'].split(','):
if eabi not in result:
result.append(eabi)
return result
def __init__(self, **kwargs):
super(AndroidDevice, self).__init__(**kwargs)
self._logcat_poller = None
def reset(self):
self._is_ready = False
self._just_rebooted = True
adb_command(self.adb_name, 'reboot', timeout=self.default_timeout)
def hard_reset(self):
super(AndroidDevice, self).hard_reset()
self._is_ready = False
self._just_rebooted = True
def boot(self, hard=False, **kwargs):
if hard:
self.hard_reset()
else:
self.reset()
    def connect(self): # NOQA pylint: disable=R0912
        """
        Poll adb until the device is listed and has finished booting.

        :raises DeviceError: if the device does not become available within
            ``ready_timeout``, or if ``adb_name`` is not set and more than
            one device is connected.
        """
        iteration_number = 0
        max_iterations = self.ready_timeout / self.delay
        available = False
        self.logger.debug('Polling for device {}...'.format(self.adb_name))
        # Phase 1: wait for the device to show up in 'adb devices'.
        while iteration_number < max_iterations:
            devices = adb_list_devices()
            if self.adb_name:
                for device in devices:
                    if device.name == self.adb_name and device.status != 'offline':
                        available = True
            else:  # adb_name not set
                if len(devices) == 1:
                    available = True
                elif len(devices) > 1:
                    raise DeviceError('More than one device is connected and adb_name is not set.')
            if available:
                break
            else:
                time.sleep(self.delay)
                iteration_number += 1
        else:  # while loop exhausted without break
            raise DeviceError('Could not boot {} ({}).'.format(self.name, self.adb_name))
        # Phase 2: wait for Android to report boot completion. Note that
        # iteration_number carries over from the loop above, so both phases
        # share a single ready_timeout budget.
        while iteration_number < max_iterations:
            # Prepend '0' so an empty getprop result parses as 0 (not booted).
            available = (int('0' + (adb_shell(self.adb_name, 'getprop sys.boot_completed', timeout=self.default_timeout))) == 1)
            if available:
                break
            else:
                time.sleep(self.delay)
                iteration_number += 1
        else:
            raise DeviceError('Could not boot {} ({}).'.format(self.name, self.adb_name))
        if self._just_rebooted:
            self.logger.debug('Waiting for boot to complete...')
            # On some devices, adb connection gets reset some time after booting.
            # This causes errors during execution. To prevent this, open a shell
            # session and wait for it to be killed. Once its killed, give adb
            # enough time to restart, and then the device should be ready.
            # TODO: This is more of a work-around rather than an actual solution.
            # Need to figure out what is going on the "proper" way of handling it.
            try:
                adb_shell(self.adb_name, '', timeout=20)
                time.sleep(5)  # give adb time to re-initialize
            except TimeoutError:
                pass  # timed out waiting for the session to be killed -- assume not going to be.
            self.logger.debug('Boot completed.')
            self._just_rebooted = False
        self._is_ready = True
    def initialize(self, context):
        """One-off device set-up: deploy sqlite3 and, where possible, make
        sure the screen will not get in the way of the run."""
        self.sqlite = self.deploy_sqlite3(context)  # pylint: disable=attribute-defined-outside-init
        if self.is_rooted:
            # Both of these require root to take effect.
            self.disable_screen_lock()
            self.disable_selinux()
        if self.enable_screen_check:
            self.ensure_screen_is_on()
    def disconnect(self):
        """Shut down the background logcat poller, if one was started."""
        if self._logcat_poller:
            self._logcat_poller.close()
    def ping(self):
        """Check that the device responds to a trivial shell command.

        :raises DeviceNotRespondingError: if adb times out or fails.
        """
        try:
            # May be triggered inside initialize()
            adb_shell(self.adb_name, 'ls /', timeout=10)
        except (TimeoutError, CalledProcessError):
            raise DeviceNotRespondingError(self.adb_name or self.name)
    def start(self):
        """Begin buffering logcat on the host if ``logcat_poll_period`` is set."""
        if self.logcat_poll_period:
            if self._logcat_poller:
                # Discard any poller left over from a previous start.
                self._logcat_poller.close()
            self._logcat_poller = _LogcatPoller(self, self.logcat_poll_period, timeout=self.default_timeout)
            self._logcat_poller.start()
    def stop(self):
        """Stop the background logcat poller, if running."""
        if self._logcat_poller:
            self._logcat_poller.stop()
def get_android_version(self):
return ANDROID_VERSION_MAP.get(self.get_sdk_version(), None)
def get_android_id(self):
"""
Get the device's ANDROID_ID. Which is
"A 64-bit number (as a hex string) that is randomly generated when the user
first sets up the device and should remain constant for the lifetime of the
user's device."
.. note:: This will get reset on userdata erasure.
"""
output = self.execute('content query --uri content://settings/secure --projection value --where "name=\'android_id\'"').strip()
return output.split('value=')[-1]
def get_sdk_version(self):
try:
return int(self.getprop('ro.build.version.sdk'))
except (ValueError, TypeError):
return None
def get_installed_package_version(self, package):
"""
Returns the version (versionName) of the specified package if it is installed
on the device, or ``None`` otherwise.
Added in version 2.1.4
"""
output = self.execute('dumpsys package {}'.format(package))
for line in convert_new_lines(output).split('\n'):
if 'versionName' in line:
return line.split('=', 1)[1]
return None
def list_packages(self):
"""
List packages installed on the device.
Added in version 2.1.4
"""
output = self.execute('pm list packages')
output = output.replace('package:', '')
return output.split()
def package_is_installed(self, package_name):
"""
Returns ``True`` the if a package with the specified name is installed on
the device, and ``False`` otherwise.
Added in version 2.1.4
"""
return package_name in self.list_packages()
    def executable_is_installed(self, executable_name):  # pylint: disable=unused-argument,no-self-use
        # Deliberately unsupported: binary queries must go through
        # get_binary_path()/install_if_needed() instead.
        raise AttributeError("""Instead of using is_installed, please use
            ``get_binary_path`` or ``install_if_needed`` instead. You should
            use the path returned by these functions to then invoke the binary
            please see: https://pythonhosted.org/wlauto/writing_extensions.html""")
    def is_installed(self, name):
        # Only package (APK) queries are supported here; a name without a dot
        # is assumed to be a binary, which callers must look up via
        # get_binary_path()/install_if_needed() (hence the AttributeError).
        if self.package_is_installed(name):
            return True
        elif "." in name:  # assumes android packages have a . in their name and binaries documentation
            return False
        else:
            raise AttributeError("""Instead of using is_installed, please use
            ``get_binary_path`` or ``install_if_needed`` instead. You should
            use the path returned by these functions to then invoke the binary
            please see: https://pythonhosted.org/wlauto/writing_extensions.html""")
def listdir(self, path, as_root=False, **kwargs):
contents = self.execute('ls {}'.format(path), as_root=as_root)
return [x.strip() for x in contents.split()]
    def push_file(self, source, dest, as_root=False, timeout=default_timeout):  # pylint: disable=W0221
        """
        Copy *source* on the host to *dest* on the device.

        When ``as_root`` is set, the file is first pushed into the transfer
        cache and then copied into place with root, since ``adb push`` itself
        cannot write to root-only locations.

        Modified in version 2.1.4: added ``as_root`` parameter.
        """
        self._check_ready()
        try:
            if not as_root:
                adb_command(self.adb_name, "push '{}' '{}'".format(source, dest), timeout=timeout)
            else:
                device_tempfile = self.path.join(self.file_transfer_cache, source.lstrip(self.path.sep))
                self.execute('mkdir -p {}'.format(self.path.dirname(device_tempfile)))
                adb_command(self.adb_name, "push '{}' '{}'".format(source, device_tempfile), timeout=timeout)
                self.execute('cp {} {}'.format(device_tempfile, dest), as_root=True)
        except CalledProcessError as e:
            raise DeviceError(e)
    def pull_file(self, source, dest, as_root=False, timeout=default_timeout):  # pylint: disable=W0221
        """
        Copy *source* on the device to *dest* on the host.

        When ``as_root`` is set, the file is first copied (with root) into
        the transfer cache, which is readable by adb, and pulled from there.

        Modified in version 2.1.4: added ``as_root`` parameter.
        """
        self._check_ready()
        try:
            if not as_root:
                adb_command(self.adb_name, "pull '{}' '{}'".format(source, dest), timeout=timeout)
            else:
                device_tempfile = self.path.join(self.file_transfer_cache, source.lstrip(self.path.sep))
                self.execute('mkdir -p {}'.format(self.path.dirname(device_tempfile)))
                self.execute('cp {} {}'.format(source, device_tempfile), as_root=True)
                adb_command(self.adb_name, "pull '{}' '{}'".format(device_tempfile, dest), timeout=timeout)
        except CalledProcessError as e:
            raise DeviceError(e)
def delete_file(self, filepath, as_root=False): # pylint: disable=W0221
self._check_ready()
adb_shell(self.adb_name, "rm '{}'".format(filepath), as_root=as_root, timeout=self.default_timeout)
def file_exists(self, filepath):
self._check_ready()
output = adb_shell(self.adb_name, 'if [ -e \'{}\' ]; then echo 1; else echo 0; fi'.format(filepath),
timeout=self.default_timeout)
return bool(int(output))
def install(self, filepath, timeout=default_timeout, with_name=None): # pylint: disable=W0221
ext = os.path.splitext(filepath)[1].lower()
if ext == '.apk':
return self.install_apk(filepath, timeout)
else:
return self.install_executable(filepath, with_name)
def install_apk(self, filepath, timeout=default_timeout): # pylint: disable=W0221
self._check_ready()
ext = os.path.splitext(filepath)[1].lower()
if ext == '.apk':
return adb_command(self.adb_name, "install '{}'".format(filepath), timeout=timeout)
else:
raise DeviceError('Can\'t install {}: unsupported format.'.format(filepath))
def install_executable(self, filepath, with_name=None):
"""
Installs a binary executable on device. Returns
the path to the installed binary, or ``None`` if the installation has failed.
Optionally, ``with_name`` parameter may be used to specify a different name under
which the executable will be installed.
Added in version 2.1.3.
Updated in version 2.1.5 with ``with_name`` parameter.
"""
self._ensure_binaries_directory_is_writable()
executable_name = with_name or os.path.basename(filepath)
on_device_file = self.path.join(self.working_directory, executable_name)
on_device_executable = self.path.join(self.binaries_directory, executable_name)
self.push_file(filepath, on_device_file)
self.execute('cp {} {}'.format(on_device_file, on_device_executable), as_root=self.is_rooted)
self.execute('chmod 0777 {}'.format(on_device_executable), as_root=self.is_rooted)
return on_device_executable
def uninstall(self, package):
self._check_ready()
adb_command(self.adb_name, "uninstall {}".format(package), timeout=self.default_timeout)
def uninstall_executable(self, executable_name):
"""
Added in version 2.1.3.
"""
on_device_executable = self.get_binary_path(executable_name, search_system_binaries=False)
if not on_device_executable:
raise DeviceError("Could not uninstall {}, binary not found".format(on_device_executable))
self._ensure_binaries_directory_is_writable()
self.delete_file(on_device_executable, as_root=self.is_rooted)
    def execute(self, command, timeout=default_timeout, check_exit_code=True, background=False,
                as_root=False, busybox=False, **kwargs):
        """
        Execute the specified command on the device using adb.

        Parameters:

            :param command: The command to be executed. It should appear exactly
                            as if you were typing it into a shell.
            :param timeout: Time, in seconds, to wait for adb to return before aborting
                            and raising an error. Defaults to ``AndroidDevice.default_timeout``.
            :param check_exit_code: If ``True``, the return code of the command on the Device will
                                    be checked and an exception will be raised if it is not 0.
                                    Defaults to ``True``.
            :param background: If ``True``, will execute adb in a subprocess, and will return
                               immediately, not waiting for adb to return. Defaults to ``False``
            :param busybox: If ``True``, will use busybox to execute the command. Defaults to ``False``.

                            Added in version 2.1.3

                            .. note:: The device must be rooted to be able to use some busybox features.

            :param as_root: If ``True``, will attempt to execute command in privileged mode. The device
                            must be rooted, otherwise an error will be raised. Defaults to ``False``.

                            Added in version 2.1.3

        :returns: If ``background`` parameter is set to ``True``, the subprocess object will
                  be returned; otherwise, the contents of STDOUT from the device will be returned.

        :raises: DeviceError if adb timed out or if the command returned non-zero exit
                 code on the device, or if attempting to execute a command in privileged mode on an
                 unrooted device.
        """
        self._check_ready()
        if as_root and not self.is_rooted:
            raise DeviceError('Attempting to execute "{}" as root on unrooted device.'.format(command))
        if busybox:
            # Prefix the command with the on-device busybox binary.
            command = ' '.join([self.busybox, command])
        if background:
            return adb_background_shell(self.adb_name, command, as_root=as_root)
        else:
            return adb_shell(self.adb_name, command, timeout, check_exit_code, as_root)
    def kick_off(self, command, as_root=True):
        """
        Like execute but closes adb session and returns immediately, leaving the command running on the
        device (this is different from execute(background=True) which keeps adb connection open and returns
        a subprocess object).

        .. note:: This relies on busybox's nohup applet and so won't work on unrooted devices.

        Added in version 2.1.4
        """
        if not self.is_rooted or not as_root:
            raise DeviceError('kick_off uses busybox\'s nohup applet and so can only be run a rooted device.')
        try:
            command = 'cd {} && busybox nohup {}'.format(self.working_directory, command)
            # The 1-second timeout is *expected* to fire: the nohup'd command
            # keeps the shell session alive, so a TimeoutError means the
            # command is still running in the background -- the success case.
            output = self.execute(command, timeout=1, as_root=True)
        except TimeoutError:
            pass
        else:
            # The command returned within the timeout, i.e. it exited
            # prematurely instead of detaching into the background.
            raise ValueError('Background command exited before timeout; got "{}"'.format(output))
def get_pids_of(self, process_name):
"""Returns a list of PIDs of all processes with the specified name."""
result = self.execute('ps | {} grep {}'.format(self.busybox, process_name),
check_exit_code=False).strip()
if result and 'not found' not in result:
return [int(x.split()[1]) for x in result.split('\n')]
else:
return []
    def ps(self, **kwargs):
        """
        Returns the list of running processes on the device. Keyword arguments may
        be used to specify simple filters for columns.

        Added in version 2.1.4
        """
        # NOTE: .next() and iteritems() below are Python 2 idioms, consistent
        # with the rest of this file.
        lines = iter(convert_new_lines(self.execute('ps')).split('\n'))
        lines.next()  # header
        result = []
        for line in lines:
            parts = line.split()
            if parts:
                # First column is a string, the next four are numeric, and
                # the remainder are passed through as-is.
                result.append(PsEntry(*(parts[0:1] + map(int, parts[1:5]) + parts[5:])))
        if not kwargs:
            return result
        else:
            # Keep only entries whose attributes match *all* supplied filters.
            filtered_result = []
            for entry in result:
                if all(getattr(entry, k) == v for k, v in kwargs.iteritems()):
                    filtered_result.append(entry)
            return filtered_result
    def get_properties(self, context):
        """Captures and saves the information from /system/build.prop and /proc/version"""
        props = super(AndroidDevice, self).get_properties(context)
        props.update(self._get_android_properties(context))
        return props
    def _get_android_properties(self, context):
        """Collect Android-specific properties and save the raw build.prop
        and 'dumpsys window' output as run artifacts."""
        props = {}
        props['android_id'] = self.get_android_id()
        buildprop_file = os.path.join(context.host_working_directory, 'build.prop')
        if not os.path.isfile(buildprop_file):
            # Only pull build.prop if it has not already been pulled this run.
            self.pull_file('/system/build.prop', context.host_working_directory)
        self._update_build_properties(buildprop_file, props)
        context.add_run_artifact('build_properties', buildprop_file, 'export')
        dumpsys_target_file = self.path.join(self.working_directory, 'window.dumpsys')
        dumpsys_host_file = os.path.join(context.host_working_directory, 'window.dumpsys')
        # Redirect dumpsys output to a file on the device, then pull it.
        self.execute('{} > {}'.format('dumpsys window', dumpsys_target_file))
        self.pull_file(dumpsys_target_file, dumpsys_host_file)
        context.add_run_artifact('dumpsys_window', dumpsys_host_file, 'meta')
        return props
    def getprop(self, prop=None):
        """Returns parsed output of Android getprop command. If a property is
        specified, only the value for that property will be returned (with
        ``None`` returned if the property doesn't exist). Otherwise,
        ``wlauto.utils.android.AndroidProperties`` will be returned, which is
        a dict-like object."""
        props = AndroidProperties(self.execute('getprop'))
        if prop:
            return props[prop]
        return props
    def deploy_sqlite3(self, context):
        """Install the sqlite3 binary matching this device's ABI (if not
        already present) and return its on-device path."""
        host_file = context.resolver.get(Executable(NO_ONE, self.abi, 'sqlite3'))
        target_file = self.install_if_needed(host_file)
        return target_file
    # Android-specific methods. These either rely on specifics of adb or other
    # Android-only concepts in their interface and/or implementation.
    def forward_port(self, from_port, to_port):
        """
        Forward a port on the device to a port on localhost.

        :param from_port: Port on the device which to forward.
        :param to_port: Port on the localhost to which the device port will be forwarded.

        Ports should be specified using adb spec (e.g. ``tcp:5000``). See the
        "adb forward" section in "adb help".
        """
        adb_command(self.adb_name, 'forward {} {}'.format(from_port, to_port), timeout=self.default_timeout)
def dump_logcat(self, outfile, filter_spec=None):
"""
Dump the contents of logcat, for the specified filter spec to the
specified output file.
See http://developer.android.com/tools/help/logcat.html
:param outfile: Output file on the host into which the contents of the
log will be written.
:param filter_spec: Logcat filter specification.
see http://developer.android.com/tools/debugging/debugging-log.html#filteringOutput
"""
if self._logcat_poller:
return self._logcat_poller.write_log(outfile)
else:
if filter_spec:
command = 'logcat -d -s {} > {}'.format(filter_spec, outfile)
else:
command = 'logcat -d > {}'.format(outfile)
return adb_command(self.adb_name, command, timeout=self.default_timeout)
def clear_logcat(self):
"""Clear (flush) logcat log."""
if self._logcat_poller:
return self._logcat_poller.clear_buffer()
else:
return adb_shell(self.adb_name, 'logcat -c', timeout=self.default_timeout)
def get_screen_size(self):
output = self.execute('dumpsys window')
match = SCREEN_SIZE_REGEX.search(output)
if match:
return (int(match.group('width')),
int(match.group('height')))
else:
return (0, 0)
def perform_unlock_swipe(self):
width, height = self.get_screen_size()
command = 'input swipe {} {} {} {}'
if self.swipe_to_unlock == "horizontal":
swipe_heigh = height * 2 // 3
start = 100
stop = width - start
self.execute(command.format(start, swipe_heigh, stop, swipe_heigh))
if self.swipe_to_unlock == "vertical":
swipe_middle = height / 2
swipe_heigh = height * 2 // 3
self.execute(command.format(swipe_middle, swipe_heigh, swipe_middle, 0))
else: # Should never reach here
raise DeviceError("Invalid swipe direction: {}".format(self.swipe_to_unlock))
def capture_screen(self, filepath):
"""Caputers the current device screen into the specified file in a PNG format."""
on_device_file = self.path.join(self.working_directory, 'screen_capture.png')
self.execute('screencap -p {}'.format(on_device_file))
self.pull_file(on_device_file, filepath)
self.delete_file(on_device_file)
def is_screen_on(self):
"""Returns ``True`` if the device screen is currently on, ``False`` otherwise."""
output = self.execute('dumpsys power')
match = SCREEN_STATE_REGEX.search(output)
if match:
return boolean(match.group(1))
else:
raise DeviceError('Could not establish screen state.')
def ensure_screen_is_on(self):
if not self.is_screen_on():
self.execute('input keyevent 26')
if self.swipe_to_unlock:
self.perform_unlock_swipe()
    def disable_screen_lock(self):
        """
        Attempts to disable the screen lock on the device.

        .. note:: This does not always work...

        Added in version 2.1.4
        """
        # The lockscreen setting lives in the locksettings sqlite database;
        # flipping 'screenlock.disabled' requires root.
        lockdb = '/data/system/locksettings.db'
        sqlcommand = "update locksettings set value='0' where name='screenlock.disabled';"
        self.execute('{} {} "{}"'.format(self.sqlite, lockdb, sqlcommand), as_root=True)
def disable_selinux(self):
# This may be invoked from intialize() so we can't use execute() or the
# standard API for doing this.
api_level = int(adb_shell(self.adb_name, 'getprop ro.build.version.sdk',
timeout=self.default_timeout).strip())
# SELinux was added in Android 4.3 (API level 18). Trying to
# 'getenforce' in earlier versions will produce an error.
if api_level >= 18:
se_status = self.execute('getenforce', as_root=True).strip()
if se_status == 'Enforcing':
self.execute('setenforce 0', as_root=True)
def get_device_model(self):
try:
return self.getprop(prop='ro.product.device')
except KeyError:
return None
# Internal methods: do not use outside of the class.
def _update_build_properties(self, filepath, props):
try:
with open(filepath) as fh:
for line in fh:
line = re.sub(r'#.*', '', line).strip()
if not line:
continue
key, value = line.split('=', 1)
props[key] = value
except ValueError:
self.logger.warning('Could not parse build.prop.')
    def _update_versions(self, filepath, props):
        """Read a /proc/version dump from *filepath* into ``props['version']``,
        also extracting the Linux and gcc version substrings when present."""
        with open(filepath) as fh:
            text = fh.read()
            props['version'] = text
            # Drop comments before attempting to match the version line.
            text = re.sub(r'#.*', '', text).strip()
            match = re.search(r'^(Linux version .*?)\s*\((gcc version .*)\)$', text)
            if match:
                props['linux_version'] = match.group(1).strip()
                props['gcc_version'] = match.group(2).strip()
            else:
                self.logger.warning('Could not parse version string.')
def _ensure_binaries_directory_is_writable(self):
matched = []
for entry in self.list_file_systems():
if self.binaries_directory.rstrip('/').startswith(entry.mount_point):
matched.append(entry)
if matched:
entry = sorted(matched, key=lambda x: len(x.mount_point))[-1]
if 'rw' not in entry.options:
self.execute('mount -o rw,remount {} {}'.format(entry.device, entry.mount_point), as_root=True)
else:
raise DeviceError('Could not find mount point for binaries directory {}'.format(self.binaries_directory))
class _LogcatPoller(threading.Thread):
    """
    Background thread that periodically drains the device's logcat into a
    temporary file on the host, so long runs do not overflow the fixed-size
    on-device logcat buffer. All buffer access is serialized with a lock
    shared between the poller thread and the caller-facing methods.
    """
    join_timeout = 5  # seconds to wait for the thread when stopping
    def __init__(self, device, period, timeout=None):
        super(_LogcatPoller, self).__init__()
        self.adb_device = device.adb_name
        self.logger = device.logger
        self.period = period  # minimum seconds between logcat polls
        self.timeout = timeout  # timeout for individual adb commands
        self.stop_signal = threading.Event()
        self.lock = threading.RLock()
        # NOTE(review): tempfile.mktemp() is deprecated/race-prone; mkstemp()
        # would be safer, but write_log() relies on the file not existing
        # until the first poll -- confirm before changing.
        self.buffer_file = tempfile.mktemp()
        self.last_poll = 0
        self.daemon = True
        self.exc = None  # worker-thread exception, re-raised in stop()
    def run(self):
        # Poll loop: drain logcat at least every self.period seconds until
        # stop() is called; any failure is captured for re-raise in stop().
        self.logger.debug('Starting logcat polling.')
        try:
            while True:
                if self.stop_signal.is_set():
                    break
                with self.lock:
                    current_time = time.time()
                    if (current_time - self.last_poll) >= self.period:
                        self._poll()
                # Short sleep keeps stop() responsive without busy-waiting.
                time.sleep(0.5)
        except Exception:  # pylint: disable=W0703
            self.exc = WorkerThreadError(self.name, sys.exc_info())
        self.logger.debug('Logcat polling stopped.')
    def stop(self):
        # Signal the poll loop to exit and propagate any captured exception.
        self.logger.debug('Stopping logcat polling.')
        self.stop_signal.set()
        self.join(self.join_timeout)
        if self.is_alive():
            self.logger.error('Could not join logcat poller thread.')
        if self.exc:
            raise self.exc  # pylint: disable=E0702
    def clear_buffer(self):
        # Flush both the on-device logcat buffer and the host-side file.
        self.logger.debug('Clearing logcat buffer.')
        with self.lock:
            adb_shell(self.adb_device, 'logcat -c', timeout=self.timeout)
            with open(self.buffer_file, 'w') as _:  # NOQA
                pass
    def write_log(self, outfile):
        # Perform a final poll, then copy the accumulated buffer to outfile.
        self.logger.debug('Writing logbuffer to {}.'.format(outfile))
        with self.lock:
            self._poll()
            if os.path.isfile(self.buffer_file):
                shutil.copy(self.buffer_file, outfile)
            else:  # there was no logcat trace at this time
                with open(outfile, 'w') as _:  # NOQA
                    pass
    def close(self):
        # Remove the host-side buffer file; does not stop the thread.
        self.logger.debug('Closing logcat poller.')
        if os.path.isfile(self.buffer_file):
            os.remove(self.buffer_file)
    def _poll(self):
        # Append the device's current logcat to the host buffer, then flush
        # the device-side buffer so the next poll only sees new entries.
        with self.lock:
            self.last_poll = time.time()
            adb_command(self.adb_device, 'logcat -d >> {}'.format(self.buffer_file), timeout=self.timeout)
            adb_command(self.adb_device, 'logcat -c', timeout=self.timeout)
class BigLittleDevice(AndroidDevice):  # pylint: disable=W0223
    """An Android device with a big.LITTLE CPU topology; overrides the
    default scheduler to 'hmp'."""
    parameters = [
        Parameter('scheduler', default='hmp', override=True),
    ]
| |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration introducing the SubmissionSet model.

    Creates the cms_saq_submissionset table, links Submission rows to an
    optional SubmissionSet, widens the Submission uniqueness constraint to
    include the new foreign key, and adds two SubmissionSet-related fields
    to the FormNav plugin table.  ``backwards`` undoes all of the above.
    """

    def forwards(self, orm):
        # Removing unique constraint on 'Submission', fields ['question', 'user']
        db.delete_unique('cms_saq_submission', ['question', 'user_id'])

        # Adding model 'SubmissionSet'
        db.create_table('cms_saq_submissionset', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, blank=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='saq_submissions_sets', to=orm['auth.User'])),
        ))
        db.send_create_signal('cms_saq', ['SubmissionSet'])

        # Adding field 'Submission.submission_set'
        # (nullable so existing Submission rows remain valid)
        db.add_column('cms_saq_submission', 'submission_set',
                      self.gf('django.db.models.fields.related.ForeignKey')(related_name='submissions', null=True, to=orm['cms_saq.SubmissionSet']),
                      keep_default=False)

        # Adding unique constraint on 'Submission', fields ['submission_set', 'question', 'user']
        db.create_unique('cms_saq_submission', ['submission_set_id', 'question', 'user_id'])

        # Adding field 'FormNav.end_submission_set'
        db.add_column('cmsplugin_formnav', 'end_submission_set',
                      self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
                      keep_default=False)

        # Adding field 'FormNav.submission_set_tag'
        db.add_column('cmsplugin_formnav', 'submission_set_tag',
                      self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Removing unique constraint on 'Submission', fields ['submission_set', 'question', 'user']
        db.delete_unique('cms_saq_submission', ['submission_set_id', 'question', 'user_id'])

        # Deleting model 'SubmissionSet'
        db.delete_table('cms_saq_submissionset')

        # Deleting field 'Submission.submission_set'
        db.delete_column('cms_saq_submission', 'submission_set_id')

        # Adding unique constraint on 'Submission', fields ['question', 'user']
        db.create_unique('cms_saq_submission', ['question', 'user_id'])

        # Deleting field 'FormNav.end_submission_set'
        db.delete_column('cmsplugin_formnav', 'end_submission_set')

        # Deleting field 'FormNav.submission_set_tag'
        db.delete_column('cmsplugin_formnav', 'submission_set_tag')

    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in the migration history.  Auto-generated; do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 9, 11, 0, 0)'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.page': {
            'Meta': {'ordering': "('site', 'tree_id', 'lft')", 'object_name': 'Page'},
            'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'blank': 'True'}),
            'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
            'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
            'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
            'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
            'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
            'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        'cms_saq.answer': {
            'Meta': {'ordering': "('order', 'slug')", 'unique_together': "(('question', 'slug'),)", 'object_name': 'Answer'},
            'help_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['cms_saq.Question']"}),
            'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'cms_saq.bulkanswer': {
            'Meta': {'object_name': 'BulkAnswer', 'db_table': "'cmsplugin_bulkanswer'", '_ormbases': ['cms.CMSPlugin']},
            'answer_value': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'cms_saq.formnav': {
            'Meta': {'object_name': 'FormNav', 'db_table': "'cmsplugin_formnav'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'end_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'formnav_ends'", 'null': 'True', 'to': "orm['cms.Page']"}),
            'end_page_condition_question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms_saq.Question']", 'null': 'True', 'blank': 'True'}),
            'end_page_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'end_submission_set': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'next_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'formnav_nexts'", 'null': 'True', 'to': "orm['cms.Page']"}),
            'next_page_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'prev_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'formnav_prevs'", 'null': 'True', 'to': "orm['cms.Page']"}),
            'prev_page_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'submission_set_tag': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        'cms_saq.groupedanswer': {
            'Meta': {'ordering': "('group', 'order', 'slug')", 'object_name': 'GroupedAnswer', '_ormbases': ['cms_saq.Answer']},
            'answer_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms_saq.Answer']", 'unique': 'True', 'primary_key': 'True'}),
            'group': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'cms_saq.progressbar': {
            'Meta': {'object_name': 'ProgressBar', 'db_table': "'cmsplugin_progressbar'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'count_optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'cms_saq.question': {
            'Meta': {'object_name': 'Question', 'db_table': "'cmsplugin_question'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'help_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'question_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
        },
        'cms_saq.scoresection': {
            'Meta': {'ordering': "('order', 'label')", 'object_name': 'ScoreSection'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sections'", 'to': "orm['cms_saq.SectionedScoring']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'order': ('django.db.models.fields.IntegerField', [], {}),
            'tag': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'cms_saq.sectionedscoring': {
            'Meta': {'object_name': 'SectionedScoring', 'db_table': "'cmsplugin_sectionedscoring'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'})
        },
        'cms_saq.submission': {
            'Meta': {'ordering': "('submission_set', 'user', 'question')", 'unique_together': "(('question', 'user', 'submission_set'),)", 'object_name': 'Submission'},
            'answer': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'score': ('django.db.models.fields.IntegerField', [], {}),
            'submission_set': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'submissions'", 'null': 'True', 'to': "orm['cms_saq.SubmissionSet']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'saq_submissions'", 'to': "orm['auth.User']"})
        },
        'cms_saq.submissionset': {
            'Meta': {'object_name': 'SubmissionSet'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'saq_submissions_sets'", 'to': "orm['auth.User']"})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
        }
    }

    complete_apps = ['cms_saq']
| |
#!../../../../virtualenv/bin/python3
# -*- coding: utf-8 -*-
# NB: The shebang line above assumes you've installed a python virtual environment alongside your working copy of the
# <4most-4gp-scripts> git repository. It also only works if you invoke this python script from the directory where it
# is located. If these two assumptions are incorrect (e.g. you're using Conda), you can still use this script by typing
# <python mean_exposure_time_from_library.py>, but <./mean_exposure_time_from_library.py> will not work.
"""
Take a bunch of template spectra in a SpectrumLibrary, and list the exposure times needed to observe them if they
were at some particular reference magnitude.
"""
import argparse
import logging
from os import path as os_path
import numpy as np
from fourgp_fourfs import FourFS
from fourgp_speclib import SpectrumLibrarySqlite
# Locate this script and the root of the 4FS installation relative to it.
our_path = os_path.split(os_path.abspath(__file__))[0]
root_path = os_path.join(our_path, "../../../..")

# Read input parameters
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--library',
                    required=True,
                    dest="library",
                    help="The name of the spectrum library we are to read input spectra from. A subset of the stars "
                         "in the input library may optionally be selected by suffixing its name with a comma-separated "
                         "list of constraints in [] brackets. Use the syntax my_library[Teff=3000] to demand equality, "
                         "or [0<[Fe/H]<0.2] to specify a range. We do not currently support other operators like "
                         "[Teff>5000], but such ranges are easy to recast as a range, e.g. [5000<Teff<9999].")
parser.add_argument('--workspace', dest='workspace', default="",
                    help="Directory where we expect to find spectrum libraries.")
parser.add_argument('--binary-path',
                    required=False,
                    default=root_path,
                    dest="binary_path",
                    help="Specify a directory where 4FS package is installed.")
parser.add_argument('--snr-definition',
                    action="append",
                    dest="snr_definitions",
                    help="Specify a way of defining SNR, in the form 'name,minimum,maximum', meaning we calculate the "
                         "median SNR per pixel between minimum and maximum wavelengths in Angstrom.")
parser.add_argument('--snr-list',
                    required=False,
                    default="10,12,14,16,18,20,23,26,30,35,40,45,50,80,100,130,180,250",
                    dest="snr_list",
                    help="Specify a comma-separated list of the SNRs that 4FS is to degrade spectra to.")
parser.add_argument('--snr-definitions-lrs',
                    required=False,
                    default="",
                    dest="snr_definitions_lrs",
                    help="Specify the SNR definition to use for LRS. For example, 'GalDiskHR_536NM' to use the S4 "
                         "green definition of SNR. You can even specify three comma-separated definitions, e.g. "
                         "'GalDiskHR_536NM,GalDiskHR_536NM,GalDiskHR_536NM' to use different SNR metrics for the "
                         "RGB arms within 4MOST LRS, though this is a pretty weird thing to want to do.")
parser.add_argument('--snr-definitions-hrs',
                    required=False,
                    default="",
                    dest="snr_definitions_hrs",
                    help="Specify the SNR definition to use for HRS. For example, 'GalDiskHR_536NM' to use the S4 "
                         "green definition of SNR. You can even specify three comma-separated definitions, e.g. "
                         "'GalDiskHR_536NM,GalDiskHR_536NM,GalDiskHR_536NM' to use different SNR metrics for the "
                         "RGB arms within 4MOST HRS, though this is a pretty weird thing to want to do.")
parser.add_argument('--run-hrs',
                    action='store_true',
                    dest="run_hrs",
                    help="Set 4FS to produce output for 4MOST HRS [default].")
parser.add_argument('--no-run-hrs',
                    action='store_false',
                    dest="run_hrs",
                    help="Set 4FS not to produce output for 4MOST HRS. Setting this will make us run quicker.")
parser.set_defaults(run_hrs=True)
parser.add_argument('--run-lrs',
                    action='store_true',
                    dest="run_lrs",
                    help="Set 4FS to produce output for 4MOST LRS [default].")
parser.add_argument('--no-run-lrs',
                    action='store_false',
                    dest="run_lrs",
                    help="Set 4FS not to produce output for 4MOST LRS. Setting this will make us run quicker.")
parser.set_defaults(run_lrs=True)
parser.add_argument('--photometric-band',
                    required=False,
                    default="SDSS_r",
                    dest="photometric_band",
                    help="The name of the photometric band in which the magnitudes in --mag-list are specified. This "
                         "must match a band which is recognised by the pyphot python package.")
parser.add_argument('--mag-list',
                    required=False,
                    default="15",
                    dest="mag_list",
                    # Typo fixes below: "than ... we be" -> "then ... will be"
                    help="Specify a comma-separated list of the magnitudes to assume when simulating observations "
                         "of each object. If multiple magnitudes are specified, then each input spectrum will be "
                         "output multiple times, once at each magnitude.")
args = parser.parse_args()
# BUG FIX: this script previously only imported <os.path> (as os_path), so the
# call below that created the workspace directory via os.system() raised a
# NameError.  Import the full os module and use os.makedirs instead of
# shelling out to "mkdir -p".
import os

# Start logger
logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(levelname)s:%(filename)s:%(message)s',
                    datefmt='%d/%m/%Y %H:%M:%S')
logger = logging.getLogger(__name__)
logger.info("Calculating magnitudes and exposure times for templates")

# Set path to workspace where we create libraries of spectra
workspace = args.workspace if args.workspace else os_path.join(our_path, "../../../workspace")
os.makedirs(workspace, exist_ok=True)  # equivalent of "mkdir -p"

# Parse any definitions of SNR we were supplied on the command line.
# Each definition is "name,minimum,maximum" -> [name, min (float), max (float)].
if (args.snr_definitions is None) or (len(args.snr_definitions) < 1):
    snr_definitions = None
else:
    snr_definitions = []
    for snr_definition in args.snr_definitions:
        words = snr_definition.split(",")
        snr_definitions.append([words[0], float(words[1]), float(words[2])])

# Look up which user-specified definition of SNR we should use for 4MOST LRS
if len(args.snr_definitions_lrs) < 1:
    # Case 1: None was specified, so we use default
    snr_definitions_lrs = None
else:
    snr_definitions_lrs = args.snr_definitions_lrs.split(",")
    # Case 2: A single definition was supplied which we use for all three arms
    if len(snr_definitions_lrs) == 1:
        snr_definitions_lrs *= 3
    # Case 3: Three definitions were supplied, one for each arm
    assert len(snr_definitions_lrs) == 3

# Look up which user-specified definition of SNR we should use for 4MOST HRS
if len(args.snr_definitions_hrs) < 1:
    # Case 1: None was specified, so we use default
    snr_definitions_hrs = None
else:
    snr_definitions_hrs = args.snr_definitions_hrs.split(",")
    # Case 2: A single definition was supplied which we use for all three arms
    if len(snr_definitions_hrs) == 1:
        snr_definitions_hrs *= 3
    # Case 3: Three definitions were supplied, one for each arm
    assert len(snr_definitions_hrs) == 3

# Parse the list of SNRs that the user specified on the command line
snr_list = [float(item.strip()) for item in args.snr_list.split(",")]

# Parse the list of magnitudes that the user specified on the command line
mag_list = [float(item.strip()) for item in args.mag_list.split(",")]
# Initialise output data structure
output = {}  # output[magnitude][mode][snr] = list of exposure times in seconds

# Loop over all the magnitudes we are to simulate for each object
for magnitude in mag_list:

    # Instantiate 4FS wrapper
    etc_wrapper = FourFS(
        path_to_4fs=os_path.join(args.binary_path, "OpSys/ETC"),
        snr_definitions=snr_definitions,
        magnitude=magnitude,
        magnitude_unreddened=False,
        photometric_band=args.photometric_band,
        run_lrs=args.run_lrs,
        run_hrs=args.run_hrs,
        lrs_use_snr_definitions=snr_definitions_lrs,
        hrs_use_snr_definitions=snr_definitions_hrs,
        snr_list=snr_list,
        snr_per_pixel=False
    )

    # Open input SpectrumLibrary, and search for flux normalised spectra meeting our filtering constraints
    spectra = SpectrumLibrarySqlite.open_and_search(library_spec=args.library,
                                                    workspace=workspace,
                                                    extra_constraints={"continuum_normalised": 0}
                                                    )

    # Get a list of the spectrum IDs which we were returned
    input_library, input_spectra_ids, input_spectra_constraints = [spectra[i]
                                                                   for i in ("library", "items", "constraints")]

    # Loop over spectra to process
    for input_spectrum_id in input_spectra_ids:
        logger.info("Working on <{}>".format(input_spectrum_id['filename']))

        # Open Spectrum data from disk
        input_spectrum_array = input_library.open(ids=input_spectrum_id['specId'])

        # Turn SpectrumArray object into a Spectrum object
        input_spectrum = input_spectrum_array.extract_item(0)

        # Look up the unique ID of the star we've just loaded
        # Newer spectrum libraries have a uid field which is guaranteed unique; for older spectrum libraries use
        # Starname instead.

        # Work out which field we're using (uid or Starname)
        spectrum_matching_field = 'uid' if 'uid' in input_spectrum.metadata else 'Starname'

        # Look up the unique ID of this object
        object_name = input_spectrum.metadata[spectrum_matching_field]

        # Search for the continuum-normalised version of this same object (which will share the same uid / name)
        search_criteria = input_spectra_constraints.copy()
        search_criteria[spectrum_matching_field] = object_name
        search_criteria['continuum_normalised'] = 1
        continuum_normalised_spectrum_id = input_library.search(**search_criteria)

        # Check that continuum-normalised spectrum exists and is unique
        assert len(continuum_normalised_spectrum_id) == 1, "Could not find continuum-normalised spectrum."

        # Load the continuum-normalised version
        input_spectrum_continuum_normalised_arr = input_library.open(
            ids=continuum_normalised_spectrum_id[0]['specId']
        )

        # Turn the SpectrumArray we got back into a single Spectrum
        input_spectrum_continuum_normalised = input_spectrum_continuum_normalised_arr.extract_item(0)

        # Pass this spectrum to 4FS
        degraded_spectra = etc_wrapper.process_spectra(
            spectra_list=((input_spectrum, input_spectrum_continuum_normalised),)
        )

        # Loop over LRS and HRS
        for mode in degraded_spectra:
            # Loop over the spectra we simulated (there was only one!)
            for index in degraded_spectra[mode]:
                # Loop over the various SNRs we simulated
                for snr in degraded_spectra[mode][index]:
                    # Extract the exposure time returned by 4FS from the metadata associated with this Spectrum object
                    # The exposure time is recorded in seconds
                    exposure_time = degraded_spectra[mode][index][snr]["spectrum"].metadata["exposure"]

                    # Record this exposure time into a list of the times recorded for this [mag][mode][snr] combination
                    # BUG FIX: the nested dicts were previously keyed inconsistently
                    # (membership tests on <output> / <output[mode]> while assigning
                    # into <output[magnitude][mode][snr]>), which raised KeyError.
                    output.setdefault(magnitude, {}).setdefault(mode, {}).setdefault(snr, []).append(exposure_time)

# Print output
for magnitude in sorted(output.keys()):
    for mode in sorted(output[magnitude].keys()):
        for snr in sorted(output[magnitude][mode].keys()):
            # Calculate the mean exposure time, and the standard deviation of the distribution
            # BUG FIX: these lookups previously omitted the <magnitude> level of the dict.
            exposure_time_mean = np.mean(output[magnitude][mode][snr])
            exposure_time_sd = np.std(output[magnitude][mode][snr])

            # Print a row of output
            print("{mode:6s} {magnitude:6.1f} {snr:6.1f} {mean:6.3f} {std_dev:6.3f}".format(mode=mode,
                                                                                            magnitude=magnitude,
                                                                                            snr=snr,
                                                                                            mean=exposure_time_mean,
                                                                                            std_dev=exposure_time_sd
                                                                                            ))
| |
# Kubeflow Pipeline with Katib component.
# In this example you will create Katib Experiment using Bayesian optimization algorithm.
# As a Trial template you will use Kubeflow MPIJob with Horovod mnist training container.
# After that, you will compile a Kubeflow Pipeline with your Katib Experiment.
# Use Kubeflow Pipelines UI to upload the Pipeline and create the Run.
# This Experiment is similar to this: https://github.com/kubeflow/katib/blob/master/examples/v1beta1/mpijob-horovod.yaml
# Check the training container source code here: https://github.com/kubeflow/mpi-operator/tree/master/examples/horovod.
# Note: To run this example, your Kubernetes cluster should run MPIJob operator.
# Follow this guide to install MPIJob on your cluster: https://www.kubeflow.org/docs/components/training/mpi/
# Note: You have to install kfp>=1.1.1 SDK and kubeflow-katib>=0.10.1 SDK to run this example.
import kfp
import kfp.dsl as dsl
from kfp import components
from kubeflow.katib import ApiClient
from kubeflow.katib import V1beta1ExperimentSpec
from kubeflow.katib import V1beta1AlgorithmSpec
from kubeflow.katib import V1beta1AlgorithmSetting
from kubeflow.katib import V1beta1ObjectiveSpec
from kubeflow.katib import V1beta1ParameterSpec
from kubeflow.katib import V1beta1FeasibleSpace
from kubeflow.katib import V1beta1TrialTemplate
from kubeflow.katib import V1beta1TrialParameterSpec
@dsl.pipeline(
    name="Launch Katib MPIJob Experiment",
    description="An example to launch Katib Experiment with MPIJob"
)
def horovod_mnist_hpo():
    """Kubeflow Pipeline that runs a Katib hyperparameter-tuning Experiment.

    Builds a V1beta1ExperimentSpec that tunes the learning rate and number of
    training steps of a Horovod MNIST job (run as a Kubeflow MPIJob) using
    Bayesian optimization, launches it via the Katib launcher component, and
    finally prints the best hyperparameters found.
    """
    # Experiment name and namespace.
    experiment_name = "mpi-horovod-mnist"
    experiment_namespace = "anonymous"

    # Trial count specification.
    max_trial_count = 6
    max_failed_trial_count = 3
    parallel_trial_count = 2

    # Objective specification.
    objective = V1beta1ObjectiveSpec(
        type="minimize",
        goal=0.01,
        objective_metric_name="loss",
    )

    # Algorithm specification.
    algorithm = V1beta1AlgorithmSpec(
        algorithm_name="bayesianoptimization",
        algorithm_settings=[
            V1beta1AlgorithmSetting(
                name="random_state",
                value="10"
            )
        ]
    )

    # Experiment search space.
    # In this example we tune learning rate and number of training steps.
    parameters = [
        V1beta1ParameterSpec(
            name="lr",
            parameter_type="double",
            feasible_space=V1beta1FeasibleSpace(
                min="0.001",
                max="0.003"
            ),
        ),
        V1beta1ParameterSpec(
            name="num-steps",
            parameter_type="int",
            feasible_space=V1beta1FeasibleSpace(
                min="50",
                max="150",
                step="10"
            ),
        ),
    ]

    # JSON template specification for the Trial's Worker Kubeflow MPIJob.
    # One launcher pod drives two worker pods; ${trialParameters.*} tokens
    # are substituted by Katib from the trial_parameters mapping below.
    trial_spec = {
        "apiVersion": "kubeflow.org/v1",
        "kind": "MPIJob",
        "spec": {
            "slotsPerWorker": 1,
            "cleanPodPolicy": "Running",
            "mpiReplicaSpecs": {
                "Launcher": {
                    "replicas": 1,
                    "template": {
                        "metadata": {
                            "annotations": {
                                "sidecar.istio.io/inject": "false"
                            }
                        },
                        "spec": {
                            "containers": [
                                {
                                    "image": "docker.io/kubeflow/mpi-horovod-mnist",
                                    "name": "mpi-launcher",
                                    "command": [
                                        "mpirun"
                                    ],
                                    "args": [
                                        "-np",
                                        "2",
                                        "--allow-run-as-root",
                                        "-bind-to",
                                        "none",
                                        "-map-by",
                                        "slot",
                                        "-x",
                                        "LD_LIBRARY_PATH",
                                        "-x",
                                        "PATH",
                                        "-mca",
                                        "pml",
                                        "ob1",
                                        "-mca",
                                        "btl",
                                        "^openib",
                                        "python",
                                        "/examples/tensorflow_mnist.py",
                                        "--lr",
                                        "${trialParameters.learningRate}",
                                        "--num-steps",
                                        "${trialParameters.numberSteps}"
                                    ],
                                    "resources": {
                                        "limits": {
                                            "cpu": "500m",
                                            "memory": "2Gi"
                                        }
                                    }
                                }
                            ]
                        }
                    }
                },
                "Worker": {
                    "replicas": 2,
                    "template": {
                        "metadata": {
                            "annotations": {
                                "sidecar.istio.io/inject": "false"
                            }
                        },
                        "spec": {
                            "containers": [
                                {
                                    "image": "docker.io/kubeflow/mpi-horovod-mnist",
                                    "name": "mpi-worker",
                                    "resources": {
                                        "limits": {
                                            "cpu": "500m",
                                            "memory": "4Gi"
                                        }
                                    }
                                }
                            ]
                        }
                    }
                }
            }
        }
    }

    # Configure parameters for the Trial template.
    trial_template = V1beta1TrialTemplate(
        primary_pod_labels={
            "mpi-job-role": "launcher"
        },
        primary_container_name="mpi-launcher",
        success_condition='status.conditions.#(type=="Succeeded")#|#(status=="True")#',
        failure_condition='status.conditions.#(type=="Failed")#|#(status=="True")#',
        trial_parameters=[
            V1beta1TrialParameterSpec(
                name="learningRate",
                description="Learning rate for the training model",
                reference="lr"
            ),
            V1beta1TrialParameterSpec(
                name="numberSteps",
                description="Number of training steps",
                reference="num-steps"
            ),
        ],
        trial_spec=trial_spec
    )

    # Create Experiment specification.
    experiment_spec = V1beta1ExperimentSpec(
        max_trial_count=max_trial_count,
        max_failed_trial_count=max_failed_trial_count,
        parallel_trial_count=parallel_trial_count,
        objective=objective,
        algorithm=algorithm,
        parameters=parameters,
        trial_template=trial_template
    )

    # Get the Katib launcher.
    # Load component from the URL or from the file.
    katib_experiment_launcher_op = components.load_component_from_url(
        "https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/katib-launcher/component.yaml")
    # katib_experiment_launcher_op = components.load_component_from_file(
    #     "../../../components/kubeflow/katib-launcher/component.yaml"
    # )

    # Katib launcher component.
    # Experiment Spec should be serialized to a valid Kubernetes object.
    # The Experiment is deleted after the Pipeline is finished.
    op = katib_experiment_launcher_op(
        experiment_name=experiment_name,
        experiment_namespace=experiment_namespace,
        experiment_spec=ApiClient().sanitize_for_serialization(experiment_spec),
        experiment_timeout_minutes=60)

    # Output container to print the results.
    dsl.ContainerOp(
        name="best-hp",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo Best HyperParameters: %s" % op.output],
    )
if __name__ == "__main__":
    # Compile the pipeline into a package that can be uploaded to the
    # Kubeflow Pipelines UI.
    kfp.compiler.Compiler().compile(horovod_mnist_hpo, __file__ + ".tar.gz")
| |
"""
Test code for the python version of HdrHistogram.
Ported from
https://github.com/HdrHistogram/HdrHistogram (Java)
https://github.com/HdrHistogram/HdrHistogram_c (C)
Written by Alec Hothan
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import division
from __future__ import print_function
from builtins import range
import cProfile
import datetime
import os
import pytest
import zlib
import sys
from ctypes import addressof
from ctypes import c_uint8
from ctypes import c_uint16
from ctypes import c_uint32
from ctypes import c_uint64
from ctypes import sizeof
from ctypes import string_at
from hdrh.codec import HdrPayload
from hdrh.codec import HdrCookieException
from hdrh.histogram import HdrHistogram
from hdrh.log import HistogramLogWriter
from hdrh.log import HistogramLogReader
from pyhdrh import add_array
from pyhdrh import encode
from pyhdrh import decode
# histogram __init__ values
LOWEST = 1
HIGHEST = 3600 * 1000 * 1000
SIGNIFICANT = 10
TEST_VALUE_LEVEL = 4
INTERVAL = 10000
@pytest.mark.basic
def test_basic():
    """Check the bucket geometry derived from the standard test params."""
    histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
    assert(histogram.bucket_count == 22)
    assert(histogram.sub_bucket_count == 2048)
    assert(histogram.counts_len == 23552)
    assert(histogram.unit_magnitude == 0)
    assert(histogram.sub_bucket_half_count_magnitude == 10)
@pytest.mark.basic
def test_empty_histogram():
    """A freshly created histogram reports zero min/max/mean/stddev."""
    histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
    assert(histogram.get_min_value() == 0)
    assert(histogram.get_max_value() == 0)
    assert(histogram.get_mean_value() == 0)
    assert(histogram.get_stddev() == 0)
@pytest.mark.basic
def test_large_numbers():
histogram = HdrHistogram(20000000, 100000000, 17)
histogram.record_value(100000000)
histogram.record_value(20000000)
histogram.record_value(30000000)
assert(histogram.values_are_equivalent(20000000, histogram.get_value_at_percentile(50.0)))
assert(histogram.values_are_equivalent(30000000, histogram.get_value_at_percentile(83.33)))
assert(histogram.values_are_equivalent(100000000, histogram.get_value_at_percentile(83.34)))
assert(histogram.values_are_equivalent(100000000, histogram.get_value_at_percentile(99.0)))
@pytest.mark.basic
def test_record_value():
histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
histogram.record_value(TEST_VALUE_LEVEL)
assert(histogram.get_count_at_value(TEST_VALUE_LEVEL) == 1)
assert(histogram.get_total_count() == 1)
@pytest.mark.basic
def test_highest_equivalent_value():
histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
assert 8183 * 1024 + 1023 == histogram.get_highest_equivalent_value(8180 * 1024)
assert 8191 * 1024 + 1023 == histogram.get_highest_equivalent_value(8191 * 1024)
assert 8199 * 1024 + 1023 == histogram.get_highest_equivalent_value(8193 * 1024)
assert 9999 * 1024 + 1023 == histogram.get_highest_equivalent_value(9995 * 1024)
assert 10007 * 1024 + 1023 == histogram.get_highest_equivalent_value(10007 * 1024)
assert 10015 * 1024 + 1023 == histogram.get_highest_equivalent_value(10008 * 1024)
@pytest.mark.basic
def test_scaled_highest_equiv_value():
histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
assert 8183 == histogram.get_highest_equivalent_value(8180)
assert 8191 == histogram.get_highest_equivalent_value(8191)
assert 8199 == histogram.get_highest_equivalent_value(8193)
assert 9999 == histogram.get_highest_equivalent_value(9995)
assert 10007 == histogram.get_highest_equivalent_value(10007)
assert 10015 == histogram.get_highest_equivalent_value(10008)
def load_histogram():
    """Return the standard test histogram: 10,000 samples at 1000 plus
    one outlier at 100,000,000."""
    histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
    # record this value with a count of 10,000
    histogram.record_value(1000, 10000)
    histogram.record_value(100000000)
    return histogram
def load_corrected_histogram():
    """Return the standard test histogram with coordinated-omission
    correction applied (expected interval = INTERVAL)."""
    histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
    # record this value with a count of 10,000
    histogram.record_corrected_value(1000, INTERVAL, 10000)
    histogram.record_corrected_value(100000000, INTERVAL)
    return histogram
def check_percentile(hist, percentile, value, variation):
    """Assert the histogram's value at *percentile* lies within
    ``value * variation`` of the expected *value*."""
    measured = hist.get_value_at_percentile(percentile)
    tolerance = value * variation
    assert abs(measured - value) < tolerance
def check_hist_percentiles(hist, total_count, perc_value_list):
    """Check a (percentile, expected value) table plus total count and
    the standard test min (1000) / max (100M) equivalence."""
    for percentile, expected_value in perc_value_list:
        check_percentile(hist, percentile, expected_value, 0.001)
    assert hist.get_total_count() == total_count
    assert hist.values_are_equivalent(hist.get_min_value(), 1000.0)
    assert hist.values_are_equivalent(hist.get_max_value(), 100000000.0)
def check_percentiles(histogram, corrected_histogram):
    """Check both standard test histograms against known percentile tables."""
    # raw histogram: 10,000 x 1000 plus one 100M sample (10,001 total)
    check_hist_percentiles(histogram,
                           10001,
                           ((30.0, 1000.0),
                            (99.0, 1000.0),
                            (99.99, 1000.0),
                            (99.999, 100000000.0),
                            (100.0, 100000000.0)))
    # corrected histogram: omission-corrected counts sum to 20,000
    check_hist_percentiles(corrected_histogram,
                           20000,
                           ((30.0, 1000.0),
                            (50.0, 1000.0),
                            (75.0, 50000000.0),
                            (90.0, 80000000.0),
                            (99.0, 98000000.0),
                            (99.999, 100000000.0),
                            (100.0, 100000000.0)))
@pytest.mark.basic
def test_percentiles():
check_percentiles(load_histogram(), load_corrected_histogram())
@pytest.mark.iterators
def test_recorded_iterator():
hist = load_histogram()
index = 0
for item in hist.get_recorded_iterator():
count_added_in_this_bucket = item.count_added_in_this_iter_step
if index == 0:
assert(count_added_in_this_bucket == 10000)
else:
assert(count_added_in_this_bucket == 1)
index += 1
assert(index == 2)
hist = load_corrected_histogram()
index = 0
total_added_count = 0
for item in hist.get_recorded_iterator():
count_added_in_this_bucket = item.count_added_in_this_iter_step
if index == 0:
assert(count_added_in_this_bucket == 10000)
assert(item.count_at_value_iterated_to != 0)
total_added_count += count_added_in_this_bucket
index += 1
assert(total_added_count == 20000)
assert(total_added_count == hist.get_total_count())
def check_iterator_values(itr, last_index):
    """Walk a non-corrected iterator: the first step must add 10,000
    counts, the last step 1, every step in between 0, and there must be
    exactly ``last_index + 1`` steps."""
    # priority matters: step 0 wins if last_index == 0 (dict insertion order)
    expected = {last_index: 1, 0: 10000}
    steps = 0
    for step, item in enumerate(itr):
        assert item.count_added_in_this_iter_step == expected.get(step, 0)
        steps = step + 1
    assert steps - 1 == last_index
def check_corrected_iterator_values(itr, last_index):
    """Walk a corrected iterator: the first step must hold 10,001 counts
    (10,000 recorded + 1 corrected) and the grand total must be 20,000."""
    total = 0
    steps = 0
    for item in itr:
        added = item.count_added_in_this_iter_step
        if steps == 0:
            # first bucket is range [0, 10000]:
            # value 1000 count = 10000
            # value 10000 count = 1 (corrected from the 100M value with 10K interval)
            assert added == 10001
        steps += 1
        total += added
    assert steps - 1 == last_index
    assert total == 20000
@pytest.mark.iterators
def test_linear_iterator():
hist = load_histogram()
itr = hist.get_linear_iterator(100000)
check_iterator_values(itr, 999)
hist = load_corrected_histogram()
itr = hist.get_linear_iterator(10000)
check_corrected_iterator_values(itr, 9999)
@pytest.mark.iterators
def test_log_iterator():
hist = load_histogram()
itr = hist.get_log_iterator(10000, 2.0)
check_iterator_values(itr, 14)
hist = load_corrected_histogram()
itr = hist.get_log_iterator(10000, 2.0)
check_corrected_iterator_values(itr, 14)
@pytest.mark.iterators
def test_percentile_iterator():
hist = load_histogram()
# test with 5 ticks per half distance
for item in hist.get_percentile_iterator(5):
expected = hist.get_highest_equivalent_value(hist.get_value_at_percentile(item.percentile))
assert(item.value_iterated_to == expected)
@pytest.mark.iterators
def test_reset_iterator():
hist = load_corrected_histogram()
itr = hist.get_recorded_iterator()
# do a partial iteration
index = 0
total_added_count = 0
for item in itr:
count_added_in_this_bucket = item.count_added_in_this_iter_step
if index == 0:
assert(count_added_in_this_bucket == 10000)
assert(item.count_at_value_iterated_to != 0)
total_added_count += count_added_in_this_bucket
index += 1
if total_added_count >= 10000:
break
# reset iterator and do a full iteration
itr.reset()
index = 0
total_added_count = 0
for item in itr:
count_added_in_this_bucket = item.count_added_in_this_iter_step
if index == 0:
assert(count_added_in_this_bucket == 10000)
assert(item.count_at_value_iterated_to != 0)
total_added_count += count_added_in_this_bucket
index += 1
assert(total_added_count == 20000)
assert(total_added_count == hist.get_total_count())
# just run the reset method
hist.get_all_values_iterator().reset()
hist.get_linear_iterator(100000).reset()
hist.get_log_iterator(10000, 2.0).reset()
hist.get_percentile_iterator(5).reset()
@pytest.mark.basic
def test_reset():
histogram = load_histogram()
histogram.reset()
assert(histogram.get_total_count() == 0)
assert(histogram.get_value_at_percentile(99.99) == 0)
assert(histogram.get_start_time_stamp() == sys.maxsize)
assert(histogram.get_end_time_stamp() == 0)
@pytest.mark.basic
def test_invalid_significant_figures():
try:
HdrHistogram(LOWEST, HIGHEST, -1)
assert False
except ValueError:
pass
try:
HdrHistogram(LOWEST, HIGHEST, 18)
assert False
except ValueError:
pass
@pytest.mark.basic
def test_out_of_range_values():
histogram = HdrHistogram(1, 1000, 14)
assert(histogram.record_value(32767))
assert(histogram.record_value(32768) is False)
# Make up a list of values for testing purpose
VALUES_LIST = (
1000,
1000,
3000,
3000
)
@pytest.mark.basic
def test_mean_stddev():
# fill up a histogram with the values in the list
histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
for value in VALUES_LIST:
histogram.record_value(value)
assert(histogram.get_mean_value() == 2000.5)
assert(histogram.get_stddev() == 1000.5)
HDR_PAYLOAD_COUNTS = 1000
HDR_PAYLOAD_PARTIAL_COUNTS = HDR_PAYLOAD_COUNTS // 2
def fill_counts(payload, last_index, start=0):
    """Set ``counts[i] = i`` for every i in [start, last_index).

    Raw payload-level helper only; payloads backed by a histogram must
    use fill_hist_counts so min/max/total stay consistent.
    """
    counts = payload.get_counts()
    index = start
    while index < last_index:
        counts[index] = index
        index += 1
def check_counts(payload, last_index, multiplier=1, start=0):
    """Assert ``counts[i] == multiplier * i`` over [start, last_index)."""
    counts = payload.get_counts()
    assert all(counts[index] == multiplier * index
               for index in range(start, last_index))
def check_hdr_payload(counter_size):
    """Round-trip an HdrPayload through compress/decompress for one
    counter word size and verify the counters survive intact."""
    # Create an HdrPayload class with given counters count
    payload = HdrPayload(counter_size, HDR_PAYLOAD_COUNTS)
    # put some known numbers in the buckets
    fill_counts(payload, HDR_PAYLOAD_COUNTS)
    # get a compressed version of that payload
    cpayload = payload.compress(HDR_PAYLOAD_COUNTS)
    # now decompress it into a new hdr payload instance
    dpayload = HdrPayload(counter_size, compressed_payload=cpayload)
    dpayload.init_counts(HDR_PAYLOAD_COUNTS)
    # now verify that the counters are identical to the original
    check_counts(dpayload, HDR_PAYLOAD_COUNTS)
@pytest.mark.codec
def test_hdr_payload():
# Check the payload work in all 3 supported counter sizes
for counter_size in [2, 4, 8]:
check_hdr_payload(counter_size)
@pytest.mark.codec
def test_hdr_payload_exceptions():
# test invalid zlib compressed buffer
with pytest.raises(zlib.error):
HdrPayload(2, compressed_payload=b'junk data')
# unsupported word size
with pytest.raises(ValueError):
HdrPayload(1, HDR_PAYLOAD_COUNTS)
with pytest.raises(ValueError):
HdrPayload(1000, HDR_PAYLOAD_COUNTS)
# invalid cookie
payload = HdrPayload(8, HDR_PAYLOAD_COUNTS)
payload.payload.cookie = 12345
cpayload = payload.compress(HDR_PAYLOAD_COUNTS)
with pytest.raises(HdrCookieException):
HdrPayload(2, compressed_payload=cpayload)
def fill_hist_counts(histogram, last_index, start=0):
    # fill the counts of a given histogram and update the min/max/total count
    # accordingly
    for index in range(start, last_index):
        value_from_index = histogram.get_value_from_index(index)
        # record with a count equal to the index so the filled slots are
        # recognizable later (see check_hist_counts)
        histogram.record_value(value_from_index, index)
def check_hist_counts(histogram, last_index, multiplier=1, start=0):
    """Assert every per-index count in [start, last_index) equals
    ``multiplier * index``."""
    assert all(histogram.get_count_at_index(index) == multiplier * index
               for index in range(start, last_index))
# This is the max latency used by wrk2
WRK2_MAX_LATENCY = 24 * 60 * 60 * 1000000
def check_hist_encode(word_size,
                      digits,
                      expected_compressed_length,
                      fill_start_percent,
                      fill_count_percent):
    """Encode a histogram filled over a percentage range of its counters
    and check the encoded (b64) length matches the expected value."""
    histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, digits,
                             word_size=word_size)
    if fill_count_percent:
        # convert percentages of the counts array into index bounds
        fill_start_index = (fill_start_percent * histogram.counts_len) // 100
        fill_to_index = fill_start_index + (fill_count_percent * histogram.counts_len) // 100
        fill_hist_counts(histogram, fill_to_index, fill_start_index)
    b64 = histogram.encode()
    assert(len(b64) == expected_compressed_length)
# A list of call arguments to check_hdr_encode
ENCODE_ARG_LIST = (
# word size digits expected_compressed_length, fill_start%, fill_count%
# best case when all counters are zero
(8, 11, 1680, 0, 0), # V1=52 385 = size when compressing entire counts array
(8, 8, 48, 0, 0), # 126
# typical case when all counters are aggregated in a small contiguous area
(8, 11, 15560, 30, 20), # V1=16452
(8, 8, 1688, 30, 20), # V1=2096
# worst case when all counters are different
(8, 11, 76892, 0, 100), # V1=80680
(8, 8, 9340, 0, 100), # V1=10744
# worst case 32-bit and 16-bit counters
(2, 11, 76892, 0, 100), # V1=68936
(2, 8, 9340, 0, 100), # V1=9144
)
@pytest.mark.codec
def test_hist_encode():
for args in ENCODE_ARG_LIST:
check_hist_encode(*args)
def check_hist_codec_b64(word_size, b64_wrap):
    """Encode/decode round trip with and without base64 wrapping.

    Adding an all-zero encoded histogram back must leave counters at
    zero; adding a fully-filled one back must double every counter.

    Note: this is a helper called from test_hist_codec, not a collected
    test, so the stray ``@pytest.mark.codec`` decorator it used to carry
    had no effect and was removed for consistency with the other helpers
    (check_hdr_payload, check_hist_encode).
    """
    histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT,
                             b64_wrap=b64_wrap,
                             word_size=word_size)
    # encode with all zero counters
    encoded = histogram.encode()
    # add back same histogram
    histogram.decode_and_add(encoded)
    # counters should remain zero
    check_hist_counts(histogram, histogram.counts_len, multiplier=0)
    # fill up the histogram
    fill_hist_counts(histogram, histogram.counts_len)
    encoded = histogram.encode()
    histogram.decode_and_add(encoded)
    check_hist_counts(histogram, histogram.counts_len, multiplier=2)
@pytest.mark.codec
def test_hist_codec():
for word_size in [2, 4, 8]:
check_hist_codec_b64(word_size, True)
check_hist_codec_b64(word_size, False)
@pytest.mark.codec
def test_hist_codec_partial():
    """Decode-and-add a filled histogram into an empty one and verify."""
    histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT)
    partial_histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, SIGNIFICANT)
    # put some known numbers in the first half buckets
    # NOTE(review): despite the name and the comment above, half_count is
    # the FULL counts_len, so the whole array gets filled -- confirm
    # whether counts_len // 2 was intended.
    half_count = partial_histogram.counts_len
    fill_hist_counts(partial_histogram, half_count)
    encoded = partial_histogram.encode()
    histogram.decode_and_add(encoded)
    # now verify that the partial counters are identical to the original
    check_hist_counts(histogram, half_count, multiplier=1)
    check_hist_counts(histogram, histogram.counts_len, start=half_count + 1, multiplier=0)
# A list of encoded histograms as generated by the test code in HdrHistogram_c
# encoded from the standard Hdr test histograms (load_histogram())
# These are all histograms with 64-bit counters
ENCODE_SAMPLES_HDRHISTOGRAM_C = [
# standard Hdr test histogram
'HISTFAAAACl4nJNpmSzMwMBgyAABzFCaEURcm7yEwf4DROA8/4I5jNM7mJgAlWkH9g==',
# standard Hdr test corrected histogram
'HISTFAAAAP94nJNpmSzMwCByigECmKE0I4i4NnkJg/0HiMB5/gVzGD8aM/3lZ7rPyTSbjektC9N7Fqa'
'HzEzbmZi2whCEvZKRaSYj02wwiYng4tFM3lDoC2dhhwh5UyZlJlUMjClCmgpMEUUmQSZ+IBZEojFFCM'
'vQRwUxenmZ2MGQFUqz4+CTo4I2pg4dFdQylZWJkYkZCaPyMEUIydPLjMHrssFixuB12XD0HRAwMsFJg'
'kwilZHJHHKmjzp41MFDOjhYmFiQEUEmMWqGsvKBd8Fwd/Co/waTC9jYOMAIiSJRhLbKh5x9Q87Bo/YN'
'bfsoM4CPhw+IIJAkxnDXN+QcTIpyAPnGh6k='
]
@pytest.mark.codec
def test_hdr_interop():
# decode and add the encoded histograms
histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
corrected_histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
histogram.decode_and_add(ENCODE_SAMPLES_HDRHISTOGRAM_C[0])
corrected_histogram.decode_and_add(ENCODE_SAMPLES_HDRHISTOGRAM_C[1])
# check the percentiles. min, max values match
check_percentiles(histogram, corrected_histogram)
def check_cod_perf():
histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, 2)
fill_start_index = (20 * histogram.counts_len) // 100
fill_to_index = fill_start_index + (30 * histogram.counts_len) // 100
fill_hist_counts(histogram, fill_to_index, fill_start_index)
# encode 1000 times
start = datetime.datetime.now()
for _ in range(1000):
histogram.encode()
delta = datetime.datetime.now() - start
print(delta)
def check_dec_perf():
histogram = HdrHistogram(LOWEST, WRK2_MAX_LATENCY, 2)
fill_start_index = (20 * histogram.counts_len) // 100
fill_to_index = fill_start_index + (30 * histogram.counts_len) // 100
fill_hist_counts(histogram, fill_to_index, fill_start_index)
b64 = histogram.encode()
# decode and add to self 1000 times
start = datetime.datetime.now()
for _ in range(1000):
histogram.decode_and_add(b64)
delta = datetime.datetime.now() - start
print(delta)
@pytest.mark.perf
def test_cod_perf():
cProfile.runctx('check_cod_perf()', globals(), locals())
@pytest.mark.perf
def test_dec_perf():
cProfile.runctx('check_dec_perf()', globals(), locals())
def check_decoded_hist_counts(hist, multiplier):
    """Assert a log decode produced a histogram and that every count
    equals ``multiplier * index``."""
    assert hist
    check_hist_counts(hist, hist.counts_len, multiplier)
HDR_LOG_NAME = 'hdr.log'
@pytest.mark.log
def test_log():
# 3 histograms instances with various content
empty_hist = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
hist = load_histogram()
corrected_hist = load_corrected_histogram()
with open(HDR_LOG_NAME, 'w') as hdr_log:
log_writer = HistogramLogWriter(hdr_log)
log_writer.output_comment("Logged with hdrhistogram.py")
log_writer.output_log_format_version()
log_writer.output_legend()
# snapshot the 3 histograms
log_writer.output_interval_histogram(empty_hist)
log_writer.output_interval_histogram(hist)
log_writer.output_interval_histogram(corrected_hist)
log_writer.close()
# decode the log file and check the decoded histograms
log_reader = HistogramLogReader(HDR_LOG_NAME, empty_hist)
decoded_empty_hist = log_reader.get_next_interval_histogram()
check_decoded_hist_counts(decoded_empty_hist, 0)
decoded_hist = log_reader.get_next_interval_histogram()
decoded_corrected_hist = log_reader.get_next_interval_histogram()
check_percentiles(decoded_hist, decoded_corrected_hist)
assert(log_reader.get_next_interval_histogram() is None)
JHICCUP_V2_LOG_NAME = "test/jHiccup-2.0.7S.logV2.hlog"
# Test input and expected output values
JHICCUP_CHECKLISTS = [
{'target': {'histogram_count': 62,
'total_count': 48761,
'accumulated_histogram.get_value_at_percentile(99.9)': 1745879039,
'accumulated_histogram.get_max_value()': 1796210687,
'log_reader.get_start_time_sec()': 1441812279.474}},
{'range_start_time_sec': 5,
'range_end_time_sec': 20,
'target': {'histogram_count': 15,
'total_count': 11664,
'accumulated_histogram.get_value_at_percentile(99.9)': 1536163839,
'accumulated_histogram.get_max_value()': 1544552447}},
{'range_start_time_sec': 40,
'range_end_time_sec': 60,
'target': {'histogram_count': 20,
'total_count': 15830,
'accumulated_histogram.get_value_at_percentile(99.9)': 1779433471,
'accumulated_histogram.get_max_value()': 1796210687}}
]
@pytest.mark.log
def test_jHiccup_v2_log():
    """Replay a jHiccup V2 log and check aggregates per checklist entry.

    Each JHICCUP_CHECKLISTS entry holds optional time-range kwargs for
    get_next_interval_histogram() plus a 'target' dict mapping Python
    expression strings to their expected values.
    """
    accumulated_histogram = HdrHistogram(LOWEST, HIGHEST, SIGNIFICANT)
    for checklist in JHICCUP_CHECKLISTS:
        accumulated_histogram.reset()
        log_reader = HistogramLogReader(JHICCUP_V2_LOG_NAME, accumulated_histogram)
        histogram_count = 0
        total_count = 0
        # pop() leaves only the range kwargs behind in checklist
        target_numbers = checklist.pop('target')
        while 1:
            decoded_histogram = log_reader.get_next_interval_histogram(**checklist)
            if not decoded_histogram:
                break
            histogram_count += 1
            total_count += decoded_histogram.get_total_count()
            accumulated_histogram.add(decoded_histogram)
            # These logs use 8 byte counters
            assert(decoded_histogram.get_word_size() == 8)
        for statement in target_numbers:
            # eval() of test-local constant strings only (not external input)
            assert(eval(statement) == target_numbers[statement])
        log_reader.close()
@pytest.mark.log
def test_output_percentile_distribution():
histogram = load_histogram()
histogram.output_percentile_distribution(open(os.devnull, 'wb'), 1000)
ARRAY_SIZE = 10
@pytest.mark.pyhdrh
def test_add_array_errors():
with pytest.raises(TypeError):
add_array()
with pytest.raises(TypeError):
add_array(100)
with pytest.raises(TypeError):
add_array(None, None, 0, 0)
src_array = (c_uint16 * ARRAY_SIZE)()
# negative length
with pytest.raises(ValueError):
add_array(addressof(src_array), addressof(src_array), -1, sizeof(c_uint16))
# invalid word size
with pytest.raises(ValueError):
add_array(addressof(src_array), addressof(src_array), 0, 0)
def check_add_array(int_type):
src_array = (int_type * ARRAY_SIZE)()
dst_array = (int_type * ARRAY_SIZE)()
expect_added = 0
for index in range(ARRAY_SIZE):
src_array[index] = index
expect_added += index
added = add_array(addressof(dst_array), addressof(src_array), ARRAY_SIZE, sizeof(int_type))
assert(added == expect_added)
for index in range(ARRAY_SIZE):
assert(dst_array[index] == index)
# overflow
src_array[0] = -1
dst_array[0] = -1
with pytest.raises(OverflowError):
add_array(addressof(dst_array), addressof(src_array), ARRAY_SIZE, sizeof(int_type))
@pytest.mark.pyhdrh
def test_add_array():
for int_type in [c_uint16, c_uint32, c_uint64]:
check_add_array(int_type)
@pytest.mark.pyhdrh
def test_zz_encode_errors():
with pytest.raises(TypeError):
encode()
with pytest.raises(TypeError):
encode(None, None, 0, 0)
src_array = (c_uint16 * ARRAY_SIZE)()
src_array_addr = addressof(src_array)
dst_len = 9 * ARRAY_SIZE
# negative length
with pytest.raises(ValueError):
encode(src_array_addr, -1, sizeof(c_uint16), 0, dst_len)
# dest length too small
with pytest.raises(ValueError):
encode(src_array_addr, ARRAY_SIZE, 4, 0, 4)
# invalid word size
with pytest.raises(ValueError):
encode(src_array_addr, ARRAY_SIZE, 3, 0, 0)
# Null dest ptr
with pytest.raises(ValueError):
encode(src_array_addr, ARRAY_SIZE, 4, 0, dst_len)
def check_zz_encode(int_type):
src_array = (int_type * ARRAY_SIZE)()
src_array_addr = addressof(src_array)
dst_len = 9 * ARRAY_SIZE
dst_array = (c_uint8 * dst_len)()
dst_array_addr = addressof(dst_array)
res = encode(src_array_addr, ARRAY_SIZE, sizeof(int_type), dst_array_addr, dst_len)
# should be 1 byte set to 0x13 (10 zeros => value = -10, or 0x13 in zigzag
# encoding
assert(res == 1)
assert(dst_array[0] == 0x13)
# last counter set to 1
# the encoded result should be 2 bytes long
# 0x11 (9 zeros => -9 coded as 17)
# 0x02 (1 is coded as 2)
src_array[ARRAY_SIZE - 1] = 1
res = encode(src_array_addr, ARRAY_SIZE, sizeof(int_type), dst_array_addr, dst_len)
assert(res == 2)
assert(dst_array[0] == 0x11)
assert(dst_array[1] == 0x02)
# all counters set to 1, we should get a zigzag encoded of
# 10 bytes all set to 0x02 (in zigzag encoding 1 is coded as 2)
for index in range(ARRAY_SIZE):
src_array[index] = 1
res = encode(src_array_addr, ARRAY_SIZE, sizeof(int_type), dst_array_addr, dst_len)
assert(res == ARRAY_SIZE)
for index in range(ARRAY_SIZE):
assert(dst_array[index] == 2)
@pytest.mark.pyhdrh
def test_zz_encode():
for int_type in [c_uint16, c_uint32, c_uint64]:
check_zz_encode(int_type)
# Few malicious V2 encodes using ZiZag LEB128/9 bytes
# Valid large value overflows smaller size dest counter
# This is the largest positive number (zigzag odd numbers are positive)
LARGE_POSITIVE_VALUE = b'\xFE\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF'
# This is the largest negative number
LARGE_NEGATIVE_VALUE = b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF'
#
# A simple 1 at index 0, followed by a
# large enough negative value to be dangerous: -2147483648 (smallest negative signed 32 bit)
INDEX_SKIPPER_VALUE = b'\x01\x02\xFF\xFF\xFF\xFF\x0F\x02'
# Truncated end
TRUNCATED_VALUE = b'\xFF\xFF'
@pytest.mark.pyhdrh
def test_zz_decode_errors():
with pytest.raises(TypeError):
decode(None, None, 0, 0)
dst_array = (c_uint16 * ARRAY_SIZE)()
# negative array size
with pytest.raises(IndexError):
decode(b' ', 0, addressof(dst_array), -1, sizeof(c_uint16))
# invalid word size
with pytest.raises(ValueError):
decode(b' ', 0, addressof(dst_array), ARRAY_SIZE, 3)
# read index negative
with pytest.raises(IndexError):
decode(b'', -1, addressof(dst_array), ARRAY_SIZE, sizeof(c_uint16))
# Truncated end
with pytest.raises(ValueError):
decode(TRUNCATED_VALUE, 0, addressof(dst_array), ARRAY_SIZE, sizeof(c_uint16))
# Too large positive value for this counter size
with pytest.raises(OverflowError):
decode(LARGE_POSITIVE_VALUE, 0, addressof(dst_array), ARRAY_SIZE, sizeof(c_uint16))
# Negative overflow
with pytest.raises(OverflowError):
decode(LARGE_NEGATIVE_VALUE, 0, addressof(dst_array), ARRAY_SIZE, sizeof(c_uint16))
# zero count skip index out of bounds
with pytest.raises(IndexError):
decode(INDEX_SKIPPER_VALUE, 0, addressof(dst_array), ARRAY_SIZE, sizeof(c_uint16))
# read index too large => empty results
res = decode(b'BUMMER', 8, addressof(dst_array), ARRAY_SIZE, sizeof(c_uint16))
assert(res['total'] == 0)
def check_zz_identity(src_array, int_type, min_nz_index, max_nz_index, total_count, offset):
    """Encode src_array to a zigzag varint stream placed *offset* bytes
    into the buffer, decode it back, and check totals, nonzero index
    range and per-slot identity."""
    # worst case: 9 bytes per encoded 64-bit counter (LEB128-style)
    dst_len = (sizeof(int_type) + 1) * ARRAY_SIZE
    dst = (c_uint8 * (offset + dst_len))()
    varint_len = encode(addressof(src_array), ARRAY_SIZE, sizeof(int_type),
                        addressof(dst) + offset, dst_len)
    varint_string = string_at(dst, varint_len + offset)
    dst_array = (int_type * ARRAY_SIZE)()
    res = decode(varint_string, offset, addressof(dst_array), ARRAY_SIZE, sizeof(int_type))
    assert(res['total'] == total_count)
    if total_count:
        # nonzero index bounds are only meaningful when something was counted
        assert(res['min_nonzero_index'] == min_nz_index)
        assert(res['max_nonzero_index'] == max_nz_index)
    for index in range(ARRAY_SIZE):
        assert(dst_array[index] == src_array[index])
# A large positive value that can fit 16-bit signed
ZZ_COUNTER_VALUE = 30000
def check_zz_decode(int_type, hdr_len):
    """Run encode/decode identity checks for one counter type with the
    varint stream at byte offset *hdr_len*: all-zero, last-only and
    all-set arrays."""
    src_array = (int_type * ARRAY_SIZE)()
    # all-zero array: total 0, min/max nonzero indices unchecked
    check_zz_identity(src_array, int_type, 0, 0, 0, hdr_len)
    # last counter set to ZZ_COUNTER_VALUE
    # min=max=ARRAY_SIZE-1
    src_array[ARRAY_SIZE - 1] = ZZ_COUNTER_VALUE
    check_zz_identity(src_array, int_type, ARRAY_SIZE - 1,
                      ARRAY_SIZE - 1, ZZ_COUNTER_VALUE, hdr_len)
    # all counters set to ZZ_COUNTER_VALUE
    for index in range(ARRAY_SIZE):
        src_array[index] = ZZ_COUNTER_VALUE
    check_zz_identity(src_array, int_type, 0, ARRAY_SIZE - 1,
                      ZZ_COUNTER_VALUE * ARRAY_SIZE, hdr_len)
@pytest.mark.pyhdrh
def test_zz_decode():
for int_type in [c_uint16, c_uint32, c_uint64]:
for hdr_len in [0, 8]:
check_zz_decode(int_type, hdr_len)
def hex_dump(label, str):
    """Print *label*, then the bytes of *str* as colon-separated hex pairs.

    The original used ``x.encode('hex')``, which only exists on Python 2
    and raises on Python 3 -- despite this file importing print_function
    and builtins.range for py2/py3 compatibility. This version handles
    both: iterating py3 ``bytes`` yields ints, iterating a py2 ``str``
    yields 1-char strings.

    NOTE: the parameter name ``str`` shadows the builtin; kept as-is to
    preserve the call interface.
    """
    print(label)
    print(':'.join('%02x' % (x if isinstance(x, int) else ord(x))
                   for x in str))
| |
import re
import datetime
from pyramid.i18n import TranslationString as _
from colander import (
deferred,
SchemaNode,
String,
OneOf,
Range,
Sequence,
Length,
ContainsOnly,
Int,
Decimal,
Boolean,
Regex,
Email,
Date,
DateTime,
Mapping,
drop,
)
from . import registry, TypeField, TypeFieldNode
from .json import JSONList
__all__ = ['IntField', 'StringField', 'RangeField',
'RegexField', 'EmailField', 'URLField',
'EnumField', 'ChoicesField', 'DecimalField',
'DateField', 'DateTimeField', 'GroupField',
'AnnotationField']
@registry.add('int')
class IntField(TypeField):
    """Field holding a single integer value."""
    node = Int
    hint = _('An integer')
@registry.add('string')
class StringField(TypeField):
    """Field holding a string value."""
    node = String
    hint = _('A set of characters')
@registry.add('text')
class TextField(TypeField):
    """Free-form text field; schema-wise identical to StringField but
    registered under the 'text' type name."""
    node = String
    hint = _('A text')
@registry.add('annotation')
class AnnotationField(TypeField):
    """Label-only entry: its definition declares just a label and a type
    pinned to "annotation", and it is never required."""
    required = False
    @classmethod
    def definition(cls, **kwargs):
        """Return the definition schema: an open mapping (unknown keys
        preserved) with an optional 'label' and a fixed 'type'."""
        schema = SchemaNode(Mapping(unknown="preserve"))
        schema.add(SchemaNode(String(), name='label', missing=u''))
        schema.add(SchemaNode(String(), name='type',
                              validator=OneOf(["annotation"])))
        return schema
@registry.add('decimal')
class DecimalField(TypeField):
    """Field holding a decimal number."""
    node = Decimal
    hint = _('A decimal number')
@registry.add('boolean')
class BooleanField(TypeField):
    """Field holding a true/false value."""
    node = Boolean
    hint = _('True or false')
@registry.add('enum')
class EnumField(TypeField):
    """Single choice among a fixed, non-empty list of string values."""
    node = String
    hint = _('A choice among values')
    @classmethod
    def definition(cls, **kwargs):
        """Extend the base definition with a mandatory, non-empty
        'choices' list of strings."""
        schema = super(EnumField, cls).definition()
        schema.add(SchemaNode(Sequence(), SchemaNode(String()),
                              name='choices', validator=Length(min=1)))
        return schema
    @classmethod
    def validation(cls, **kwargs):
        """Validate that the submitted value is one of the choices."""
        kwargs['validator'] = OneOf(kwargs['choices'])
        return super(EnumField, cls).validation(**kwargs)
@registry.add('choices')
class ChoicesField(TypeField):
    """Multiple selections among a fixed, non-empty list of values,
    submitted as a JSON list."""
    node = JSONList
    hint = _('Some choices among values')
    @classmethod
    def definition(cls, **kwargs):
        """Extend the base definition with a mandatory, non-empty
        'choices' list of strings."""
        schema = super(ChoicesField, cls).definition()
        schema.add(SchemaNode(Sequence(), SchemaNode(String()),
                              name='choices', validator=Length(min=1)))
        return schema
    @classmethod
    def validation(cls, **kwargs):
        """Validate that every submitted value is among the choices."""
        kwargs['validator'] = ContainsOnly(kwargs['choices'])
        return super(ChoicesField, cls).validation(**kwargs)
@registry.add('range')
class RangeField(TypeField):
    """Integer constrained between 'min' and 'max' bounds."""
    node = Int
    hint = _('A number with limits')
    @classmethod
    def definition(cls, **kwargs):
        """Extend the base definition with required 'min'/'max' bounds."""
        schema = super(RangeField, cls).definition()
        schema.add(SchemaNode(Int(), name='min'))
        schema.add(SchemaNode(Int(), name='max'))
        return schema
    @classmethod
    def validation(cls, **kwargs):
        """Validate with colander.Range using the definition's bounds
        (None bound disables that side of the check)."""
        kwargs['validator'] = Range(min=kwargs.get('min'),
                                    max=kwargs.get('max'))
        return super(RangeField, cls).validation(**kwargs)
@registry.add('regex')
class RegexField(TypeField):
    """Allows to validate a field with a python regular expression."""
    node = String
    hint = _('A string matching a pattern')
    @classmethod
    def definition(cls, **kwargs):
        """Extend the base definition with a non-empty 'regex' pattern."""
        schema = super(RegexField, cls).definition()
        schema.add(SchemaNode(String(), name='regex', validator=Length(min=1)))
        return schema
    @classmethod
    def validation(cls, **kwargs):
        """Validate submitted values against the stored pattern."""
        kwargs['validator'] = Regex(kwargs['regex'])
        return super(RegexField, cls).validation(**kwargs)
@registry.add('email')
class EmailField(TypeField):
    """An email address field."""
    node = String
    hint = _('A valid email')
    @classmethod
    def validation(cls, **kwargs):
        """Validate with colander's built-in Email validator."""
        kwargs['validator'] = Email()
        return super(EmailField, cls).validation(**kwargs)
@registry.add('url')
class URLField(TypeField):
    """A URL field."""
    node = String
    hint = _('A valid URL')
    @classmethod
    def validation(cls, **kwargs):
        """Validate with a regex accepting http/https/ftp/ftps URLs
        (domains, localhost, IPv4 and IPv6 hosts, optional port/path)."""
        # This one comes from Django
        # https://github.com/django/django/blob/273b96/
        # django/core/validators.py#L45-L52
        urlpattern = re.compile(
            r'^(?:http|ftp)s?://' # http:// or https://
            r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'
            r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
            r'localhost|' # localhost...
            r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
            r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
            r'(?::\d+)?' # optional port
            r'(?:/?|[/?]\S+)$', re.IGNORECASE)
        kwargs['validator'] = Regex(urlpattern, msg="Invalid URL")
        return super(URLField, cls).validation(**kwargs)
class AutoNowMixin(object):
    """Mixin to share ``autonow`` mechanism for both date and datetime fields.
    """
    # default for the 'autonow' definition flag
    autonow = False
    @classmethod
    def definition(cls, **kwargs):
        """Add an optional boolean 'autonow' node to the base definition."""
        schema = super(AutoNowMixin, cls).definition()
        schema.add(SchemaNode(Boolean(), name='autonow',
                              missing=cls.autonow))
        return schema
    @classmethod
    def validation(cls, **kwargs):
        """When autonow is enabled, default missing values to the
        subclass's ``auto_value``.

        ``auto_value`` is a colander ``deferred``, hence the trailing
        bind() call to resolve it on the returned schema.
        """
        autonow = kwargs.get('autonow', cls.autonow)
        if autonow:
            kwargs['missing'] = cls.auto_value
        return super(AutoNowMixin, cls).validation(**kwargs).bind()
@registry.add('date')
class DateField(AutoNowMixin, TypeField):
    """A date field (ISO_8601, yyyy-mm-dd)."""
    node = Date
    hint = _('A date (yyyy-mm-dd)')
    @deferred
    def auto_value(node, kw):
        # default used when autonow is enabled and the value is missing
        return datetime.date.today()
@registry.add('datetime')
class DateTimeField(AutoNowMixin, TypeField):
    """A date time field (ISO_8601, yyyy-mm-ddTHH:MMZ)."""
    node = DateTime
    hint = _('A date with time (yyyy-mm-ddTHH:MM)')
    @deferred
    def auto_value(node, kw):
        # default used when autonow is enabled and the value is missing
        # NOTE(review): naive local time, not UTC -- confirm intent given
        # the 'Z' suffix mentioned in the class docstring.
        return datetime.datetime.now()
@registry.add('group')
class GroupField(TypeField):
    """A named group of sub-fields; its validation flattens the children
    into the root model definition node."""
    @classmethod
    def definition(cls, **kwargs):
        """Definition schema: keeps 'type', adds an optional
        'description' and a non-empty list of nested field definitions."""
        schema = super(GroupField, cls).definition(**kwargs)
        # Keep the ``type`` node only
        schema.children = [c for c in schema.children
                           if c.name not in ('hint', 'name', 'required')]
        schema.add(SchemaNode(String(), name='description', missing=drop))
        schema.add(SchemaNode(Sequence(), SchemaNode(TypeFieldNode()),
                              name='fields', validator=Length(min=1)))
        return schema
    @classmethod
    def validation(cls, **kwargs):
        """Attach each sub-field's validation node to the root schema.

        Mutates the 'root' node passed via kwargs, then returns a
        droppable no-op node for the group itself.
        """
        rootnode = kwargs.pop('root')
        # Add the group fields to the model definition node
        for field in kwargs['fields']:
            field['root'] = rootnode
            fieldtype = field.pop('type')
            rootnode.add(registry.validation(fieldtype, **field))
        # Ignore the group validation itself
        return SchemaNode(String(), missing=drop)
| |
##################################################
#
# GP4_Test.py - Base class for development tests
#
##################################################
import sys, unittest
sys.path.append("/home/gwatson/Work/GP4/src")
try:
from GP4.GP4_CompilerHelp import compile_string
from GP4.GP4_Runtime import Runtime
from GP4.GP4_Execute import run_control_function
from GP4.GP4_Utilities import *
import GP4.GP4_Exceptions
except ImportError, err:
print "Unable to load GP4 libs. sys.path is:"
for p in sys.path: print "\t",p
print err
## Compile and run a GP4 program provided as a string.
# @param program : String. The program.
# @param debug : Integer. Debug flags
# @return the P4 object
def simple_test(program, debug=0):
    ''' Given a string (GP4 program) in program, compile and run it.

    @param program : String. GP4 source text.
    @param debug   : Integer. Debug flags (not referenced in this function).
    @return the compiled P4 object; falsy on syntax error.
    '''
    p4 = compile_string( program=program )
    # compile_string returns a falsy value when compilation failed.
    if not p4:
        print "Hmmm. Syntax error?"
    else:
        print p4
    return p4
## Create P4 object from program and a runtime to go with it.
# @param program : String. The program.
# @param debug : Integer. Debug flags
# @return ( P4 object, runtime object )
def create_P4_and_runtime(program, debug=0):
    ''' Compile a GP4 program string and build a Runtime around it.

    The compiled object is self-consistency checked before returning.
    @param program : String. The program.
    @param debug   : Integer. Debug flags, forwarded to compilation.
    @return ( P4 object, runtime object )
    '''
    compiled_p4 = simple_test(program, debug)
    rt = Runtime(compiled_p4)
    compiled_p4.check_self_consistent()
    return (compiled_p4, rt)
## Compile and run a GP4 program provided as a string.
# @param program : String. The program.
# @param pkt : [ byte ] i.e. list of integers
# @param init_state : String. Name of initial parser state
# @param init_ctrl : String. Name of initial control function. If None then dont execute
# @param debug : Integer. Debug flags
# @return (p4, err, bytes_used) : (err !=None if error), bytes_used = number of bytes consumed from header.
def parse_and_run_test(program, pkt, init_state='start', init_ctrl='', debug=0):
    ''' Compile program, parse one packet and optionally run a control function.

    @param program : String. The program.
    @param pkt : [ byte ] i.e. list of integers
    @param init_state : String. Name of initial parser state
    @param init_ctrl : String. Name of initial control function; '' = skip.
    @param debug : Integer. Debug flags
    @return (p4, err, bytes_used): err is '' on success.
    NOTE(review): calls sys.exit(1) on compile failure instead of returning.
    '''
    p4 = compile_string( program=program )
    if not p4:
        print "Hmmm. Syntax error?"
        sys.exit(1)
    runtime = Runtime(p4)
    p4.check_self_consistent()
    err, bytes_used = runtime.parse_packet(pkt, init_state)
    if err:
        return (p4, err, bytes_used)
    # Only execute the control function when a name was given.
    if init_ctrl: run_control_function(p4, pkt, init_ctrl )
    return (p4, '', bytes_used )
## Compile and run a sequence of GP4 Runtime commands provided as strings.
# @param program : String. The program.
# @param setup_cmds : [ runtime_cmds ]. Things to do before parsing the first packet.
# @param pkts : [ [ byte ] ] i.e. list of list of integers
# @param init_state : String. Name of initial parser state
# @param init_ctrl : String. Name of initial control function. If None then dont execute
# @param debug : Integer. Debug flags
# @return (p4, err, bytes_used) : (err !=None if error), bytes_used = number of bytes consumed from header.
def setup_tables_parse_and_run_test( program, setup_cmds=None, pkts=None,
                                     init_state='start', init_ctrl='', debug=0):
    ''' Compile program, run runtime setup commands, then parse packets.

    @param program    : String. The program.
    @param setup_cmds : [ runtime_cmds ] run before the first packet, or None.
    @param pkts       : [ [ byte ] ] i.e. list of list of integers, or None.
    @param init_state : String. Name of initial parser state.
    @param init_ctrl  : String. Control function run per packet; '' = skip.
    @param debug      : Integer. Debug flags.
    @return (p4, err, bytes_used): err != '' on error; bytes_used is the
            total number of header bytes consumed over all packets.
    '''
    # Fix: avoid shared mutable default arguments ([]); normalize None here.
    if setup_cmds is None: setup_cmds = []
    if pkts is None: pkts = []
    # Fix: the caller's debug flag was previously discarded (hard-coded 0).
    p4, runtime = create_P4_and_runtime(program, debug=debug)
    for cmd in setup_cmds:
        runtime.run_cmd(cmd)
    total_bytes_used = 0
    for pkt in pkts:
        err, bytes_used = runtime.parse_packet(pkt, init_state)
        if err:
            return (p4, err, bytes_used)
        if init_ctrl: run_control_function(p4, pkt, init_ctrl )
        total_bytes_used += bytes_used
    return (p4, '', total_bytes_used )
## Compile and run a sequence of GP4 Runtime commands provided as strings.
# @param p4 : p4 object
# @param runtime : runtime object
# @param setup_cmds : [ runtime_cmds ].
# @return None
def run_cmds( p4, runtime, setup_cmds=None ):
    ''' Run each runtime command in setup_cmds against the runtime.

    @param p4         : p4 object (unused here; kept for interface compatibility).
    @param runtime    : runtime object providing run_cmd().
    @param setup_cmds : [ runtime_cmds ] or None.
    @return None
    '''
    # Fix: avoid a shared mutable default argument ([]).
    if setup_cmds is None: setup_cmds = []
    for cmd in setup_cmds:
        runtime.run_cmd(cmd)
## Given P4 and runtime, process a bunch of packets.
# @param p4 : p4 object
# @param runtime : runtime object
# @param pkts : [ [ byte ] ] i.e. list of list of integers
# @param init_state : String. Name of initial parser state
# @param init_ctrl : String. Name of initial control function. If None then dont execute
# @param debug : Integer. Debug flags
# @return ( err, bytes_used, pkts_out) : (err !=None if error), bytes_used = number of bytes consumed from header. pkts_out = [ [ byte ] ]
def process_pkts(p4, runtime, pkts=None, init_state='start', init_ctrl='', debug=0):
    ''' Parse, optionally run a control function on, and deparse packets.

    @param p4         : p4 object, used for control execution and deparsing.
    @param runtime    : runtime object used to parse each packet.
    @param pkts       : [ [ byte ] ] i.e. list of list of integers, or None.
    @param init_state : String. Name of initial parser state.
    @param init_ctrl  : String. Control function name; '' = do not execute.
    @param debug      : Integer. Debug flags (not referenced in this function).
    @return (err, bytes_used, pkts_out): err != '' on error; bytes_used is
            total header bytes consumed; pkts_out = [ [ byte ] ] deparsed.
    '''
    # Fix: avoid a shared mutable default argument ([]).
    if pkts is None: pkts = []
    total_bytes_used = 0
    pkts_out = []
    for pkt in pkts:
        err, bytes_used = runtime.parse_packet(pkt, init_state)
        if err:
            # Stop at the first parse error; report what was produced so far.
            return (err, bytes_used, pkts_out)
        if init_ctrl:
            run_control_function( p4, pkt, init_ctrl )
        pkt_out = p4.deparse_packet(pkt, bytes_used)
        pkts_out.append(pkt_out)
        total_bytes_used += bytes_used
    return ('', total_bytes_used, pkts_out )
class GP4_Test(unittest.TestCase):
    """Base class for GP4 development tests.

    Provides assertion helpers for checking parsed header field values,
    header validity, table properties and deparsed output packets.
    """
    def setUp(self): pass
    def tearDown(self):pass
    ## Check that the specified field has the specified value
    # @param self : test
    # @param p4 : p4 object
    # @param field_ref : String. e.g. 'L2_hdr.DA' or 'vlan[3].my_field'
    # @param val : Integer: expected value or "invalid"
    # @returns None: will assert a failure
    def check_field(self, p4, field_ref, val):
        # extract index, if any
        hdr_name, hdr_index ,field_name = get_hdr_hdr_index_field_name_from_string(field_ref)
        # val == 'invalid' means the header instance must not be valid at all.
        if val == 'invalid':
            self.assert_(not p4.check_hdr_inst_is_valid(hdr_name, hdr_index),
                         "Expected hdr '%s' to be invalid, but was valid." % hdr_name)
            return
        # Now get the actual header object from P4.
        hdr_i = p4.get_or_create_hdr_inst(hdr_name, hdr_index)
        self.assert_( hdr_i,"Unable to find header from field ref:" + field_ref)
        act_val = hdr_i.get_field_value(field_name)
        # Both None: caller explicitly expected "no value" — that is a pass.
        if act_val == None and val == None: return
        self.assert_( act_val != None,
            "Field %s returned value None: incorrect field name perhaps?" % field_ref )
        self.assert_( act_val == val, "Expected field %s to have value 0x%x but saw 0x%x" %
                     ( field_ref, val, act_val ) )
    ## Check that the specified header has the specified value.
    # @param self : test
    # @param p4 : p4 object
    # @param hdr_ref : String. e.g. 'L2_hdr' or 'vlan[3]'
    # @param val : "invalid"
    # @returns None: will assert a failure
    def check_header(self, p4, hdr_ref, val):
        # extract index, if any
        hdr_name, hdr_index = get_hdr_hdr_index_from_string(hdr_ref)
        if val == 'invalid':
            self.assert_(not p4.check_hdr_inst_is_valid(hdr_name, hdr_index),
                         "Expected hdr '%s' to be invalid, but was valid." % hdr_name)
            return
        # dont know how to process any val other than 'invalid'
        self.assert_( False,
            "check_header: given val of %s but dont know how to check that against a hdr." %
            str(val) )
    ## Check that the specified table has the specified props
    # @param self : test
    # @param p4 : p4 object
    # @param table_name : String.
    # @param min_size : Integer: expected min_size
    # @param max_size : Integer: expected max_size
    # @returns None: will assert a failure
    def check_table(self, p4, table_name, min_size=None, max_size=None):
        tbl = p4.get_table(table_name)
        self.assert_( tbl,"Table '%s' is not defined." % table_name)
        # Only check the size bounds the caller actually supplied.
        if min_size != None:
            self.assert_( tbl.min_size == min_size,
                "Table '%s' min_size is %s but expected %s." % (table_name, `tbl.min_size`, `min_size`))
        if max_size != None:
            self.assert_( tbl.max_size == max_size,
                "Table '%s' max_size is %s but expected %s." % (table_name, `tbl.max_size`, `max_size`))
    ## Check that the exp_pkts_out matches actual pkts out
    # @param self : test
    # @param exp_pkts_out : [ [ byte ] ] - expected packet sequence
    # @param pkts_out : [ [ byte ] ] - actual packet sequence
    # @returns None: will assert a failure
    def check_pkts_out(self, exp_pkts_out, pkts_out):
        """ Check each byte then check lengths """
        for ix, exp_pkt in enumerate(exp_pkts_out):
            if ix < len(pkts_out): # check it
                pkt = pkts_out[ix]
                # Byte-by-byte comparison up to the shorter length ...
                for bix, byte in enumerate(exp_pkt):
                    if bix < len(pkt): # check it
                        self.assert_( byte == pkt[bix],
                            "Pkt %d: byte offset %d expected 0x%x but saw 0x%x" %
                            (ix, bix, byte, pkt[bix]) )
                # ... then the per-packet length itself.
                self.assert_(len(exp_pkt) == len(pkt),
                    "Pkt number %d: expected len %d but saw len %d" %
                    (ix, len(exp_pkt), len(pkt)) )
        # Finally, the number of packets must match.
        self.assert_(len(exp_pkts_out) == len(pkts_out),
            "Expected to see %d packets coming out but saw %d." %
            (len(exp_pkts_out), len(pkts_out)) )
| |
"""Standalone Authenticator."""
import argparse
import collections
import logging
import socket
import threading

import OpenSSL
import six
import zope.component
import zope.interface

from acme import challenges
from acme import standalone as acme_standalone

from certbot import errors
from certbot import interfaces
from certbot.plugins import common
from certbot.plugins import util
logger = logging.getLogger(__name__)
class ServerManager(object):
    """Standalone servers manager.
    Manager for `ACMEServer` and `ACMETLSServer` instances.
    `certs` and `http_01_resources` correspond to
    `acme.crypto_util.SSLSocket.certs` and
    `acme.crypto_util.SSLSocket.http_01_resources` respectively. All
    created servers share the same certificates and resources, so if
    you're running both TLS and non-TLS instances, HTTP01 handlers
    will serve the same URLs!
    """
    # Pairs a running server with the thread driving its serve_forever loop.
    _Instance = collections.namedtuple("_Instance", "server thread")
    def __init__(self, certs, http_01_resources):
        self._instances = {}
        self.certs = certs
        self.http_01_resources = http_01_resources
    def run(self, port, challenge_type):
        """Run ACME server on specified ``port``.
        This method is idempotent, i.e. all calls with the same pair of
        ``(port, challenge_type)`` will reuse the same server.
        :param int port: Port to run the server on.
        :param challenge_type: Subclass of `acme.challenges.Challenge`,
            either `acme.challenge.HTTP01` or `acme.challenges.TLSSNI01`.
        :returns: Server instance.
        :rtype: ACMEServerMixin
        """
        assert challenge_type in (challenges.TLSSNI01, challenges.HTTP01)
        if port in self._instances:
            return self._instances[port].server
        address = ("", port)
        try:
            if challenge_type is challenges.TLSSNI01:
                server = acme_standalone.TLSSNI01Server(address, self.certs)
            else:  # challenges.HTTP01
                server = acme_standalone.HTTP01Server(
                    address, self.http_01_resources)
        except socket.error as error:
            raise errors.StandaloneBindError(error, port)
        thread = threading.Thread(
            # pylint: disable=no-member
            target=server.serve_forever)
        thread.start()
        # if port == 0, then random free port on OS is taken
        # pylint: disable=no-member
        # NOTE(review): the instance is keyed by the *real* bound port, so a
        # later call with port=0 will not find it and will start another
        # server — confirm this is intended.
        real_port = server.socket.getsockname()[1]
        self._instances[real_port] = self._Instance(server, thread)
        return server
    def stop(self, port):
        """Stop ACME server running on the specified ``port``.
        :param int port:
        """
        instance = self._instances[port]
        logger.debug("Stopping server at %s:%d...",
                     *instance.server.socket.getsockname()[:2])
        instance.server.shutdown()
        # Not calling server_close causes problems when renewing multiple
        # certs with `certbot renew` using TLSSNI01 and PyOpenSSL 0.13
        instance.server.server_close()
        # Wait for the serve_forever thread to terminate before dropping
        # the bookkeeping entry.
        instance.thread.join()
        del self._instances[port]
    def running(self):
        """Return all running instances.
        Once the server is stopped using `stop`, it will not be
        returned.
        :returns: Mapping from ``port`` to ``server``.
        :rtype: dict
        """
        return dict((port, instance.server) for port, instance
                    in six.iteritems(self._instances))
SUPPORTED_CHALLENGES = [challenges.TLSSNI01, challenges.HTTP01]
def supported_challenges_validator(data):
    """Validate a comma-separated challenge list for the `argparse` parser.

    Intended as the ``type=`` callable of ``add_argument``; the original
    string is returned unchanged when every named challenge is both a
    known challenge type and supported by this plugin.
    """
    requested = data.split(",")
    # First reject names that are not registered challenge types at all.
    unknown = [name for name in requested
               if name not in challenges.Challenge.TYPES]
    if unknown:
        raise argparse.ArgumentTypeError(
            "Unrecognized challenges: {0}".format(", ".join(unknown)))
    # Then reject valid challenge types this plugin cannot serve.
    supported = set(chall.typ for chall in SUPPORTED_CHALLENGES)
    unsupported = set(requested) - supported
    if unsupported:
        raise argparse.ArgumentTypeError(
            "Plugin does not support the following (valid) "
            "challenges: {0}".format(", ".join(unsupported)))
    return data
@zope.interface.implementer(interfaces.IAuthenticator)
@zope.interface.provider(interfaces.IPluginFactory)
class Authenticator(common.Plugin):
    """Standalone Authenticator.
    This authenticator creates its own ephemeral TCP listener on the
    necessary port in order to respond to incoming tls-sni-01 and http-01
    challenges from the certificate authority. Therefore, it does not
    rely on any existing server program.
    """
    description = "Automatically use a temporary webserver"
    def __init__(self, *args, **kwargs):
        super(Authenticator, self).__init__(*args, **kwargs)
        # one self-signed key for all tls-sni-01 certificates
        self.key = OpenSSL.crypto.PKey()
        self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
        # Maps each running server to the set of achalls it currently serves.
        self.served = collections.defaultdict(set)
        # Stuff below is shared across threads (i.e. servers read
        # values, main thread writes). Due to the nature of CPython's
        # GIL, the operations are safe, c.f.
        # https://docs.python.org/2/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe
        self.certs = {}
        self.http_01_resources = set()
        self.servers = ServerManager(self.certs, self.http_01_resources)
    @classmethod
    def add_parser_arguments(cls, add):
        # Registers the plugin's CLI flag; the validator rejects unknown or
        # unsupported challenge names.
        add("supported-challenges",
            help="Supported challenges. Preferred in the order they are listed.",
            type=supported_challenges_validator,
            default=",".join(chall.typ for chall in SUPPORTED_CHALLENGES))
    @property
    def supported_challenges(self):
        """Challenges supported by this plugin."""
        return [challenges.Challenge.TYPES[name] for name in
                self.conf("supported-challenges").split(",")]
    @property
    def _necessary_ports(self):
        # Ports this plugin would need to bind, derived from the configured
        # challenge types.
        necessary_ports = set()
        if challenges.HTTP01 in self.supported_challenges:
            necessary_ports.add(self.config.http01_port)
        if challenges.TLSSNI01 in self.supported_challenges:
            necessary_ports.add(self.config.tls_sni_01_port)
        return necessary_ports
    def more_info(self):  # pylint: disable=missing-docstring
        return("This authenticator creates its own ephemeral TCP listener "
               "on the necessary port in order to respond to incoming "
               "tls-sni-01 and http-01 challenges from the certificate "
               "authority. Therefore, it does not rely on any existing "
               "server program.")
    def prepare(self):  # pylint: disable=missing-docstring
        pass
    def get_chall_pref(self, domain):
        # pylint: disable=unused-argument,missing-docstring
        return self.supported_challenges
    def perform(self, achalls):  # pylint: disable=missing-docstring
        renewer = self.config.verb == "renew"
        # Fail fast if some other process already holds a needed port.
        if any(util.already_listening(port, renewer) for port in self._necessary_ports):
            raise errors.MisconfigurationError(
                "At least one of the (possibly) required ports is "
                "already taken.")
        try:
            return self.perform2(achalls)
        except errors.StandaloneBindError as error:
            # NOTE(review): ``zope.component`` is referenced here but only
            # ``zope.interface`` is imported in this module — this branch
            # raises NameError unless zope.component was imported elsewhere
            # first; ``import zope.component`` should be added at module top.
            display = zope.component.getUtility(interfaces.IDisplay)
            if error.socket_error.errno == socket.errno.EACCES:
                display.notification(
                    "Could not bind TCP port {0} because you don't have "
                    "the appropriate permissions (for example, you "
                    "aren't running this program as "
                    "root).".format(error.port))
            elif error.socket_error.errno == socket.errno.EADDRINUSE:
                display.notification(
                    "Could not bind TCP port {0} because it is already in "
                    "use by another process on this system (such as a web "
                    "server). Please stop the program in question and then "
                    "try again.".format(error.port))
            else:
                raise  # XXX: How to handle unknown errors in binding?
    def perform2(self, achalls):
        """Perform achallenges without IDisplay interaction."""
        responses = []
        for achall in achalls:
            if isinstance(achall.chall, challenges.HTTP01):
                server = self.servers.run(
                    self.config.http01_port, challenges.HTTP01)
                response, validation = achall.response_and_validation()
                self.http_01_resources.add(
                    acme_standalone.HTTP01RequestHandler.HTTP01Resource(
                        chall=achall.chall, response=response,
                        validation=validation))
            else:  # tls-sni-01
                server = self.servers.run(
                    self.config.tls_sni_01_port, challenges.TLSSNI01)
                response, (cert, _) = achall.response_and_validation(
                    cert_key=self.key)
                self.certs[response.z_domain] = (self.key, cert)
            # Remember which server serves this achall so cleanup() can
            # shut the server down once nothing is left on it.
            self.served[server].add(achall)
            responses.append(response)
        return responses
    def cleanup(self, achalls):  # pylint: disable=missing-docstring
        # reduce self.served and close servers if none challenges are served
        for server, server_achalls in self.served.items():
            for achall in achalls:
                if achall in server_achalls:
                    server_achalls.remove(achall)
        for port, server in six.iteritems(self.servers.running()):
            if not self.served[server]:
                self.servers.stop(port)
| |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing wrapper classes around meta-descriptors.
This module contains dataclasses which wrap the descriptor protos
defined in google/protobuf/descriptor.proto (which are descriptors that
describe descriptors).
"""
import dataclasses
import re
import os
from collections import defaultdict
from google.api import field_behavior_pb2
from google.api import resource_pb2
from google.api import client_pb2
from google.api import annotations_pb2
from google.longrunning import operations_pb2
from google.protobuf import descriptor_pb2
from google.protobuf.descriptor_pb2 import FieldDescriptorProto
from src.comparator.resource_database import ResourceDatabase
from typing import Dict, Sequence, Optional, Tuple, cast, List
COMMON_PACKAGES = [
"google.longrunning",
"google.cloud",
"google.cloud.location",
"google.protobuf",
"google.type",
"google.rpc",
"google.api",
]
def _get_source_code_line(source_code_locations, path):
if not source_code_locations or path not in source_code_locations:
return -1
# The line number in `span` is zero-based, +1 to get the actual line number in .proto file.
return source_code_locations[path].span[0] + 1
class WithLocation:
    """Wrap a raw descriptor attribute together with its source location."""
    def __init__(self, value, source_code_locations, path, proto_file_name=None):
        self.value = value
        self.path = path
        self.source_code_locations = source_code_locations
        self.proto_file_name = proto_file_name
    @property
    def source_code_line(self):
        """1-based line of the wrapped value in its .proto file, or -1 if unknown."""
        locations = self.source_code_locations
        if locations and self.path in locations:
            # ``span`` line numbers are zero-based; convert to one-based.
            return locations[self.path].span[0] + 1
        return -1
@dataclasses.dataclass(frozen=True)
class EnumValue:
    """Description of an enum value.
    enum_value_pb: the descriptor of EnumValue.
    proto_file_name: the proto file where the EnumValue exists.
    source_code_locations: the dictionary that contains all the source_code_info in the file_descriptor_set.
    path: the path to the EnumValue, by querying the above dictionary using the path,
    we can get the location information.
    nested_path: names of the enclosing declarations leading to this value.
    """
    enum_value_pb: descriptor_pb2.EnumValueDescriptorProto
    proto_file_name: str
    source_code_locations: Dict[Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location]
    path: Tuple[int]
    nested_path: List[str]
    def __getattr__(self, name):
        # Delegate unknown attribute access to the wrapped descriptor proto.
        return getattr(self.enum_value_pb, name)
    @property
    def source_code_line(self):
        """Return the start line number of EnumValue definition in the proto file."""
        return _get_source_code_line(self.source_code_locations, self.path)
@dataclasses.dataclass(frozen=True)
class Enum:
    """Description of an enum.
    enum_pb: the descriptor of Enum.
    proto_file_name: the proto file where the Enum exists.
    source_code_locations: the dictionary that contains all the source_code_info in the file_descriptor_set.
    path: the path to the Enum, by querying the above dictionary using the path,
    we can get the location information.
    full_name: fully-qualified name of the enum.
    nested_path: names of the enclosing declarations leading to this enum.
    """
    enum_pb: descriptor_pb2.EnumDescriptorProto
    proto_file_name: str
    source_code_locations: Dict[Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location]
    path: Tuple[int, ...]
    full_name: str
    nested_path: List[str]
    def __getattr__(self, name):
        # Fall back to the wrapped descriptor for any unknown attribute.
        return getattr(self.enum_pb, name)
    @property
    def values(self) -> Dict[int, EnumValue]:
        """Return EnumValues in this Enum, keyed by their numeric value."""
        return {
            value_pb.number: EnumValue(
                enum_value_pb=value_pb,
                proto_file_name=self.proto_file_name,
                source_code_locations=self.source_code_locations,
                # EnumDescriptorProto.value has field number 2, so the
                # location path of the i-th value is ``path + (2, i)``.
                path=self.path + (2, index),
                nested_path=self.nested_path + [value_pb.name],
            )
            for index, value_pb in enumerate(self.enum_pb.value)
        }
    @property
    def source_code_line(self):
        """Return the start line number of Enum definition in the proto file."""
        return _get_source_code_line(self.source_code_locations, self.path)
class Field:
    """Description of a field.
    field_pb: the descriptor of Field.
    proto_file_name: the proto file where the Field exists.
    source_code_locations: the dictionary that contains all the source_code_info in the file_descriptor_set.
    path: the path to the Field, by querying the above dictionary using the path,
    we can get the location information.
    resource_database: global resource database that contains all file-level resource definitions
    and message-level resource options.
    message_resource: message-level resource definition.
    api_version: the version of the API definition files.
    map_entry: type of the field if it is a map.
    oneof_name: the oneof name that the field belongs to if it is in any oneof.
    nested_path: names of the enclosing declarations leading to this field.
    """
    def __init__(
        self,
        field_pb: FieldDescriptorProto,
        proto_file_name: str,
        source_code_locations: Dict[
            Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location
        ],
        path: Tuple[int],
        resource_database: Optional[ResourceDatabase] = None,
        message_resource: Optional[WithLocation] = None,
        api_version: Optional[str] = None,
        map_entry=None,
        oneof_name: Optional[str] = None,
        nested_path: Optional[List[str]] = None,
    ):
        self.field_pb = field_pb
        self.proto_file_name = proto_file_name
        self.source_code_locations = source_code_locations
        self.path = path
        # We need the resource database information to determine if the removal or change
        # of the resource_reference annotation is breaking or not.
        self.resource_database = resource_database
        self.message_resource = message_resource
        self.api_version = api_version
        self.map_entry = map_entry
        self.oneof_name = oneof_name
        # Fix: ``nested_path`` previously used a mutable default argument
        # (``[]``), which is shared across all calls; normalize None instead.
        self.nested_path = nested_path if nested_path is not None else []
    def __getattr__(self, name):
        # Delegate unknown attribute access to the wrapped FieldDescriptorProto.
        return getattr(self.field_pb, name)
    @property
    def name(self):
        """Return the name of the field."""
        return self.field_pb.name
    @property
    def number(self):
        """Return the number of the field."""
        return self.field_pb.number
    @property
    def repeated(self) -> WithLocation:
        """Return True if this is a repeated field, False otherwise.
        Returns:
            bool: Whether this field is repeated.
        """
        # For proto3, only LABEL_REPEATED is explicitly specified which has a path.
        label_repeated = (
            FieldDescriptorProto().Label.Name(self.field_pb.label) == "LABEL_REPEATED"
        )
        # FieldDescriptorProto.label has field number 4.
        return WithLocation(
            label_repeated,
            self.source_code_locations,
            self.path + (4,) if label_repeated else self.path,
        )
    @property
    def required(self):
        """Return True if this field is required, False otherwise.
        Returns:
            bool: Whether this field is required in field_behavior annotation.
        """
        required = (
            field_behavior_pb2.FieldBehavior.Value("REQUIRED")
            in self.field_pb.options.Extensions[field_behavior_pb2.field_behavior]
        )
        # fmt: off
        return WithLocation(
            required,
            self.source_code_locations,
            # FieldOption has field number 8, field_behavior has field
            # number 1052. One field can have multiple behaviors and
            # required attribute has index 0.
            self.path + (8, 1052, 0),
        )
        # fmt: on
    @property
    def proto_type(self):
        """Return the proto type constant e.g. `enum`"""
        return WithLocation(
            FieldDescriptorProto()
            .Type.Name(self.field_pb.type)[len("TYPE_") :]
            .lower(),
            self.source_code_locations,
            # FieldDescriptorProto.type has field number 5.
            self.path + (5,),
        )
    @property
    def is_primitive_type(self):
        """Return true if the proto_type is primitive python type like `string`"""
        NON_PRIMITIVE_TYPE = ["enum", "message", "group"]
        return self.proto_type.value not in NON_PRIMITIVE_TYPE
    @property
    def is_map_type(self):
        """Return the map-entry wrapper (truthy) if the field is a map, else None."""
        return self.map_entry
    @property
    def type_name(self):
        """Return the type_name if the proto_type is not primitive, return `None` otherwise.
        For message and enum types, this is the name of full type like `.tutorial.example.Enum`"""
        return (
            None
            if self.is_primitive_type
            else WithLocation(
                # FieldDescriptorProto.type_name has field number 6.
                self.field_pb.type_name,
                self.source_code_locations,
                self.path + (6,),
            )
        )
    @property
    def map_entry_type(self):
        """Return {'key': type, 'value': type} for a map field, else None."""
        # Get the key and value type for the map entry.
        def get_type(name):
            field = self.map_entry[name]
            return (
                field.proto_type.value
                if field.is_primitive_type
                else field.type_name.value
            )
        if self.is_map_type:
            return {name: get_type(name) for name in ("key", "value")}
        return None
    @property
    def oneof(self) -> bool:
        """Return if the field is in oneof"""
        return self.field_pb.HasField("oneof_index")
    @property
    def proto3_optional(self) -> bool:
        """Return if the field is proto3_optional"""
        proto3_optional = self.field_pb.proto3_optional
        # A proto3-optional field is represented as a synthetic oneof member.
        if not self.oneof and proto3_optional:
            raise TypeError(
                "When proto3_optional is true, this field must belong to a oneof."
            )
        return proto3_optional
    @property
    def resource_reference(self) -> Optional[WithLocation]:
        """Return the resource_reference annotation of the field if any"""
        resource_ref = self.field_pb.options.Extensions[resource_pb2.resource_reference]
        if not resource_ref.type and not resource_ref.child_type:
            return None
        # FieldDescriptorProto.options has field number 8. And `resource_reference` takes field number 1055.
        # If the reference uses `type`, the field number is 1,
        # if the reference uses `child_type`, the field number is 2.
        resource_ref_path = (
            self.path + (8, 1055, 1) if resource_ref.type else self.path + (8, 1055, 2)
        )
        # In some proto definitions, the reference `type` and `child_type` share
        # the same field number 1055.
        if (
            not self.source_code_locations
            or resource_ref_path not in self.source_code_locations
        ):
            resource_ref_path = self.path + (8, 1055)
        return WithLocation(resource_ref, self.source_code_locations, resource_ref_path)
    @property
    def child_type(self) -> bool:
        """Return True if the resource_reference has child_type, False otherwise"""
        resource_ref = self.field_pb.options.Extensions[resource_pb2.resource_reference]
        # Idiom fix: the comparison already yields a bool.
        return len(resource_ref.child_type) > 0
    @property
    def source_code_line(self):
        """Return the start line number of Field definition in the proto file."""
        return _get_source_code_line(self.source_code_locations, self.path)
@dataclasses.dataclass(frozen=True)
class Oneof:
    """Description of a oneof (wraps OneofDescriptorProto)."""
    oneof_pb: descriptor_pb2.OneofDescriptorProto
    def __getattr__(self, name):
        # Delegate unknown attribute access to the wrapped descriptor proto.
        return getattr(self.oneof_pb, name)
class Message:
    """Description of a message (defined with the ``message`` keyword).
    message_pb: the descriptor of Message.
    proto_file_name: the proto file where the Message exists.
    source_code_locations: the dictionary that contains all the source_code_info in the file_descriptor_set.
    path: the path to the Message, by querying the above dictionary using the path,
    we can get the location information.
    resource_database: global resource database that contains all file-level resource definitions
    and message-level resource options.
    api_version: the version of the API definition files.
    full_name: fully-qualified name of the message.
    nested_path: names of the enclosing declarations leading to this message.
    """
    def __init__(
        self,
        message_pb: descriptor_pb2.DescriptorProto,
        proto_file_name: str,
        source_code_locations: Dict[
            Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location
        ],
        path: Tuple[int, ...],
        resource_database: Optional[ResourceDatabase] = None,
        api_version: Optional[str] = None,
        full_name: Optional[str] = None,
        nested_path: Optional[List[str]] = None,
    ):
        self.message_pb = message_pb
        self.proto_file_name = proto_file_name
        self.source_code_locations = source_code_locations
        self.path = path
        self.resource_database = resource_database
        self.api_version = api_version
        self.full_name = full_name
        # Fix: ``nested_path`` previously used a mutable default argument
        # (``[]``), which is shared across all calls; normalize None instead.
        self.nested_path = nested_path if nested_path is not None else []
    def __getattr__(self, name):
        # Delegate unknown attribute access to the wrapped DescriptorProto.
        return getattr(self.message_pb, name)
    @property
    def name(self) -> str:
        """Return the name of this message."""
        return self.message_pb.name
    @property
    def fields(self) -> Dict[int, Field]:
        """Return fields in this message.
        Returns:
            Dict[int, Field]: Field is identified by number.
        """
        fields_map = {}
        for i, field in enumerate(self.message_pb.field):
            # Convert field name to pascal case.
            # The auto-generated nested message uses the transformed
            # name of the field (name `first_field` is converted to `FirstFieldEntry`)
            is_oneof = bool(self.oneofs and field.HasField("oneof_index"))
            # `oneof_index` gives the index of a oneof in the containing type's oneof_decl
            # list. This field is a member of that oneof.
            oneof_name = (
                list(self.oneofs.keys())[field.oneof_index] if is_oneof else None
            )
            field_map_entry_name = (
                field.name.replace("_", " ").title().replace(" ", "") + "Entry"
            )
            nested_path = self.nested_path + [field.name]
            map_entry = (
                self.map_entries[field_map_entry_name]
                if field_map_entry_name in self.map_entries
                else None
            )
            fields_map[field.number] = Field(
                field_pb=field,
                proto_file_name=self.proto_file_name,
                source_code_locations=self.source_code_locations,
                # DescriptorProto.field has field number 2.
                path=self.path + (2, i),
                resource_database=self.resource_database,
                message_resource=self.resource,
                api_version=self.api_version,
                map_entry=map_entry,
                oneof_name=oneof_name,
                nested_path=nested_path,
            )
        return fields_map
    @property
    def oneofs(self) -> Dict[str, Oneof]:
        """Return a dictionary of wrapped oneofs for the given message."""
        return {
            oneof_pb.name: Oneof(oneof_pb) for oneof_pb in self.message_pb.oneof_decl
        }
    @property
    def nested_messages(self) -> Dict[str, "Message"]:
        """Return the nested messages in the message. Message is identified by name."""
        nested_messages_map = {}
        for i, message in enumerate(self.message_pb.nested_type):
            # Exclude the auto-generated map_entries message, since
            # the generated message does not have real source code location.
            # Including those messages in the comparator will fail the source code
            # information extraction.
            if message.options.map_entry:
                continue
            nested_path = self.nested_path + ["message " + message.name + " {"]
            nested_messages_map[message.name] = Message(
                message_pb=message,
                proto_file_name=self.proto_file_name,
                source_code_locations=self.source_code_locations,
                # DescriptorProto.nested_type has field number 3.
                # So we append (3, nested_message_index) to the path.
                path=self.path
                + (
                    3,
                    i,
                ),
                resource_database=self.resource_database,
                api_version=self.api_version,
                full_name=self.full_name + "." + message.name,
                nested_path=nested_path,
            )
        return nested_messages_map
    @property
    def map_entries(self) -> Dict[str, Dict[str, Field]]:
        """Return key/value Field pairs of auto-generated map entry messages."""
        # If the nested message is auto-generated map entry for the maps field,
        # the message name is field_name + 'Entry', and it has two nested fields (key, value).
        #
        # For maps fields:
        #   map<KeyType, ValueType> map_field = 1;
        # The parsed descriptor looks like:
        #   message MapFieldEntry {
        #       option map_entry = true;
        #       optional KeyType key = 1;
        #       optional ValueType value = 2;
        #   }
        #   repeated MapFieldEntry map_field = 1;
        map_entries = {}
        for message in self.message_pb.nested_type:
            if message.options.map_entry:
                fields = {field.name: field for field in message.field}
                if not {"key", "value"} <= fields.keys():
                    raise TypeError(
                        "The auto-generated map entry message should have key and value fields."
                    )
                map_entries[message.name] = {
                    "key": Field(
                        field_pb=fields["key"],
                        source_code_locations=self.source_code_locations,
                        proto_file_name=self.proto_file_name,
                        path=self.path,
                    ),
                    "value": Field(
                        field_pb=fields["value"],
                        source_code_locations=self.source_code_locations,
                        proto_file_name=self.proto_file_name,
                        path=self.path,
                    ),
                }
        return map_entries
    @property
    def nested_enums(self) -> Dict[str, Enum]:
        """Return the nested enums in the message. Enum is identified by name."""
        nested_enum_map = {}
        for i, enum in enumerate(self.message_pb.enum_type):
            nested_path = self.nested_path + ["enum " + enum.name + " {"]
            nested_enum_map[enum.name] = Enum(
                enum_pb=enum,
                proto_file_name=self.proto_file_name,
                source_code_locations=self.source_code_locations,
                # DescriptorProto.enum_type has field number 4.
                # So we append (4, nested_enum_index) to the path.
                path=self.path + (4, i),
                full_name=self.full_name + "." + enum.name,
                nested_path=nested_path,
            )
        return nested_enum_map
    @property
    def resource(self) -> Optional[WithLocation]:
        """If this message describes a resource, return the resource."""
        resource = self.message_pb.options.Extensions[resource_pb2.resource]
        if not resource.type or not resource.pattern:
            return None
        return WithLocation(
            resource,
            self.source_code_locations,
            # MessageOptions has field number 7 and resource options
            # take the field number 1053.
            self.path + (7, 1053),
            self.proto_file_name,
        )
    @property
    def source_code_line(self):
        """Return the start line number of Message definition in the proto file."""
        return _get_source_code_line(self.source_code_locations, self.path)
class Method:
    """Description of a method (defined with the ``rpc`` keyword).

    method_pb: the descriptor of the Method.
    messages_map: the map that contains all messages defined in the API definition files and
        the dependencies. The key is the message name, and the value is the Message class.
    proto_file_name: the proto file where the Method exists.
    source_code_locations: the dictionary that contains all the source_code_info in the file_descriptor_set.
    path: the path to the Method; by querying the above dictionary using the path,
        we can get the location information.
    """

    def __init__(
        self,
        method_pb: descriptor_pb2.MethodDescriptorProto,
        messages_map: Dict[str, Message],
        proto_file_name: str,
        source_code_locations: Dict[
            Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location
        ],
        path: Tuple[int, ...],
    ):
        self.method_pb = method_pb
        self.messages_map = messages_map
        self.proto_file_name = proto_file_name
        self.source_code_locations = source_code_locations
        self.path = path

    @property
    def name(self):
        """Return the name of this method."""
        return self.method_pb.name

    @property
    def input(self):
        """Return the shortened input type of a method (with source location)."""
        # MethodDescriptorProto.input_type has field number 2.
        return WithLocation(
            self.method_pb.input_type, self.source_code_locations, self.path + (2,)
        )

    @property
    def output(self):
        """Return the shortened output type of a method (with source location)."""
        # MethodDescriptorProto.output_type has field number 3.
        return WithLocation(
            self.method_pb.output_type, self.source_code_locations, self.path + (3,)
        )

    @property
    def longrunning(self) -> bool:
        """Return True if this is a long-running method (output is an LRO)."""
        return self.method_pb.output_type.endswith(".google.longrunning.Operation")

    @property
    def client_streaming(self) -> WithLocation:
        """Return True (wrapped with location) if this is a client-streaming method."""
        # MethodDescriptorProto.client_streaming has field number 5.
        return WithLocation(
            self.method_pb.client_streaming,
            self.source_code_locations,
            self.path + (5,),
        )

    @property
    def server_streaming(self) -> WithLocation:
        """Return True (wrapped with location) if this is a server-streaming method."""
        # MethodDescriptorProto.server_streaming has field number 6.
        return WithLocation(
            self.method_pb.server_streaming,
            self.source_code_locations,
            self.path + (6,),
        )

    @property
    def paged_result_field(self) -> Optional[FieldDescriptorProto]:
        """Return the response pagination field if the method is paginated."""
        # (AIP 158) The response must not be a streaming response for a paginated method.
        if self.server_streaming.value:
            return None
        # If the output type is `google.longrunning.Operation`, the method is not paginated.
        if self.longrunning:
            return None
        if not self.messages_map or self.output.value not in self.messages_map:
            return None
        # API should provide a `string next_page_token` field in the response message.
        # API should provide `int page_size` and `string page_token` fields in the request message.
        # If the request field lacks any of the expected pagination fields,
        # then the method is not paginated.
        # Short message name e.g. .example.v1.FooRequest -> FooRequest
        response_message = self.messages_map[self.output.value]
        request_message = self.messages_map[self.input.value]
        response_fields_map = {f.name: f for f in response_message.fields.values()}
        request_fields_map = {f.name: f for f in request_message.fields.values()}
        for page_field in (
            (request_fields_map, "int32", "page_size"),
            (request_fields_map, "string", "page_token"),
            (response_fields_map, "string", "next_page_token"),
        ):
            field = page_field[0].get(page_field[2], None)
            if not field or field.proto_type.value != page_field[1]:
                return None
        # Return the first repeated field.
        # The field containing pagination results should be the first
        # field in the message and have a field number of 1.
        for field in response_fields_map.values():
            if field.repeated.value and field.number == 1:
                return field  # pytype: disable=bad-return-type  # bind-properties
        return None

    # fmt: off
    @property
    def lro_annotation(self):
        """Return the LRO operation_info annotation defined for this method."""
        # Skip the operations.proto because the `GetOperation` does not have LRO annotations.
        # Removing this condition will fail the service-annotation test in the CLI integration test.
        # NOTE(review): unlike the `longrunning` property, this check omits the
        # leading '.' in the suffix — confirm whether that difference is intended.
        if not self.output.value.endswith("google.longrunning.Operation") or self.proto_file_name == "google/longrunning/operations.proto":
            return None
        op = self.method_pb.options.Extensions[operations_pb2.operation_info]
        if not op.response_type or not op.metadata_type:
            return None
        lro_annotation = {
            "response_type": op.response_type,
            "metadata_type": op.metadata_type,
        }
        # MethodDescriptorProto.method_options has field number 4,
        # and MethodOptions.extensions[operation_info] has field number 1049.
        return WithLocation(
            lro_annotation,
            self.source_code_locations,
            self.path + (4, 1049),
        )

    @property
    def method_signatures(self) -> WithLocation:
        """Return the flattened list of signature fields defined for this method."""
        signatures = self.method_pb.options.Extensions[client_pb2.method_signature]
        # Each signature is a comma-separated field list; flatten and strip them.
        fields = [
            field.strip() for sig in signatures for field in sig.split(",") if field
        ]
        # MethodDescriptorProto.method_options has field number 4,
        # and MethodOptions.extensions[method_signature] has field number 1051.
        return WithLocation(
            fields,
            self.source_code_locations,
            self.path + (4, 1051, 0),
        )

    @property
    def http_annotation(self):
        """Return the http annotation defined for this method.

        The example return value is:
        {'http_method': 'post', 'http_uri': '/v1/example:foo', 'http_body': '*'}

        The wrapped value is `None` if no http annotation exists.
        """
        http = self.method_pb.options.Extensions[annotations_pb2.http]
        potential_verbs = {
            "get": http.get,
            "put": http.put,
            "post": http.post,
            "delete": http.delete,
            "patch": http.patch,
            "custom": http.custom.path,
        }
        # Pick the first verb whose URI value is set.
        http_annotation = next(
            (
                {"http_method": verb, "http_uri": value, "http_body": http.body}
                for verb, value in potential_verbs.items()
                if value
            ),
            None,
        )
        # MethodDescriptorProto.method_options has field number 4,
        # and MethodOptions.extensions[http_annotation] has field number 72295728.
        return WithLocation(
            http_annotation,
            self.source_code_locations,
            self.path + (4, 72295728,)
        )
    # fmt: on

    @property
    def source_code_line(self):
        """Return the start line number of the method definition in the proto file."""
        return _get_source_code_line(self.source_code_locations, self.path)
class Service:
    """Description of a service (defined with the ``service`` keyword).

    service_pb: the descriptor of the service.
    messages_map: the map that contains all messages defined in the API definition files and
        the dependencies. The key is the message name, and the value is the Message class.
    proto_file_name: the proto file where the Service exists.
    source_code_locations: the dictionary that contains all the source_code_info in the file_descriptor_set.
    path: the path to the Service; by querying the above dictionary using the path,
        we can get the location information.
    api_version: the version of the API definition files.
    """

    def __init__(
        self,
        service_pb: descriptor_pb2.ServiceDescriptorProto,
        messages_map: Dict[str, Message],
        proto_file_name: str,
        source_code_locations: Dict[
            Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location
        ],
        path: Tuple[int, ...],
        api_version: Optional[str] = None,
    ):
        self.service_pb = service_pb
        self.messages_map = messages_map
        self.proto_file_name = proto_file_name
        self.source_code_locations = source_code_locations
        self.path = path
        self.api_version = api_version

    @property
    def name(self):
        """Return the name of the service."""
        return self.service_pb.name

    @property
    def methods(self) -> Dict[str, Method]:
        """Return the methods defined in the service. Method is identified by name."""
        # fmt: off
        return {
            method.name: Method(
                method,
                self.messages_map,
                self.proto_file_name,
                self.source_code_locations,
                # ServiceDescriptorProto.method has field number 2.
                # So we append (2, method_index) to the path.
                self.path + (2, i,),
            )
            for i, method in enumerate(self.service_pb.method)
        }
        # fmt: on

    @property
    def host(self) -> Optional[WithLocation]:
        """Return the hostname for this service, if specified.

        Returns:
            str: The hostname, with no protocol and no trailing ``/``.
        """
        if not self.service_pb.options.Extensions[client_pb2.default_host]:
            return None
        default_host = self.service_pb.options.Extensions[client_pb2.default_host]
        return WithLocation(
            value=default_host,
            source_code_locations=self.source_code_locations,
            # ServiceOptions has field number 3, and the default
            # host option has field number 1049.
            path=self.path + (3, 1049),
            proto_file_name=self.proto_file_name,
        )

    @property
    def oauth_scopes(self) -> Optional[Sequence[WithLocation]]:
        """Return a sequence of oauth scopes, if applicable.

        Returns:
            Sequence[str]: A sequence of OAuth scopes.
        """
        # fmt: off
        oauth_scopes = []
        # The option value is a single comma-separated string of scopes.
        for scope in self.service_pb.options.Extensions[client_pb2.oauth_scopes].split(","):
            if scope:
                oauth_scopes.append(
                    WithLocation(
                        scope.strip(),
                        self.source_code_locations,
                        # ServiceOptions has field number 3, and the
                        # oauth_scopes option has field number 1050.
                        self.path + (3, 1050),
                    )
                )
        return oauth_scopes
        # fmt: on

    @property
    def source_code_line(self):
        """Return the start line number of the service definition in the proto file."""
        return _get_source_code_line(self.source_code_locations, self.path)
class FileSet:
    """Description of a file_set.

    file_set_pb: The FileDescriptorSet object that is obtained by the proto compiler.
    """

    def __init__(
        self,
        file_set_pb: descriptor_pb2.FileDescriptorSet,
    ):
        # The default value for every language package option is a dict
        # whose key is the option str, and value is the WithLocation object with
        # source code information.
        self.file_set_pb = file_set_pb
        # Create source code location map; key is the file name, value is the
        # source code information of every field.
        source_code_locations_map = self._get_source_code_locations_map()
        # Get the root package from the API definition files.
        self.root_package = self.get_root_package(self.file_set_pb)
        # Get the API version (e.g. v1, v1p1, v2beta1) from the root package name.
        version = r"(?P<version>v[0-9]+(p[0-9]+)?((alpha|beta)[0-9]*)?)"
        search_version = re.search(version, self.root_package)
        self.api_version = search_version.group() if search_version else None
        # Get API definition files. This helps us to compare only the definition files
        # and imported dependency information.
        self.definition_files = [
            f for f in file_set_pb.file if f.package.startswith(self.root_package)
        ]
        # Register all resources in the database.
        self.resources_database = self._get_resource_database(
            file_set_pb.file, source_code_locations_map
        )
        # Create global messages/enums map to have all messages/enums registered from the file
        # set including the nested messages/enums, since they could also be referenced.
        # Key is the full name of the message/enum and value is the Message/Enum object.
        self._get_global_info_map(source_code_locations_map)
        # Get all **used** information for comparison.
        self.packaging_options_map = defaultdict(dict)
        self.services_map: Dict[str, Service] = {}
        self.enums_map: Dict[str, Enum] = {}
        self.messages_map: Dict[str, Message] = {}
        # Register all resources in the API definition files in a separate database,
        # so that we can avoid comparing redundant resources defined in dependencies.
        self.used_resources_database = self._get_resource_database(
            self.definition_files, source_code_locations_map
        )
        path = ()
        for fd in self.definition_files:
            source_code_locations = (
                source_code_locations_map[fd.name]
                if source_code_locations_map
                else None
            )
            # Create packaging options map and duplicate the per-language rules for namespaces.
            # FileDescriptorProto.options has field number 8.
            self._get_packaging_options_map(
                fd.options, fd.name, source_code_locations, path + (8,)
            )
            # Create the services map.
            for i, service in enumerate(fd.service):
                # fmt: off
                service_wrapper = Service(
                    service_pb=service,
                    messages_map=self.global_messages_map,
                    proto_file_name=fd.name,
                    source_code_locations=source_code_locations,
                    # FileDescriptorProto.service has field number 6.
                    path=path + (6, i,),
                    api_version=self.api_version,
                )
                # fmt: on
                self.services_map[service.name] = service_wrapper
                # Add referenced message types for methods into the messages map.
                for method in service_wrapper.methods.values():
                    self.messages_map[method.input.value] = self.global_messages_map[
                        method.input.value
                    ]
                    self.messages_map[method.output.value] = self.global_messages_map[
                        method.output.value
                    ]
            # All first-level enums defined in the definition files should
            # be added to the enums map.
            self.enums_map.update(
                (
                    self._get_full_name(fd.package, enum.name),
                    self.global_enums_map[self._get_full_name(fd.package, enum.name)],
                )
                for enum in fd.enum_type
            )
            # Add first-level messages to a stack for iteration.
            # We need to look at the nested fields for the used message types.
            message_stack = []
            for message in fd.message_type:
                message_full_name = self._get_full_name(fd.package, message.name)
                self.messages_map[message_full_name] = self.global_messages_map[
                    message_full_name
                ]
                message_stack.append(self.messages_map[message_full_name])
            while message_stack:
                message = message_stack.pop()
                for _, field in message.fields.items():
                    # If the field is a map type, the message type is auto-generated.
                    if field.is_map_type:
                        for _, entry_type in field.map_entry_type.items():
                            self._register_field(entry_type)
                    elif not field.is_primitive_type:
                        # If the field is not a map and not a primitive type, add the
                        # referenced type to the map.
                        self._register_field(field.type_name.value)
                message_stack.extend(list(message.nested_messages.values()))

    def _register_field(self, register_type):
        """Register the referenced type (message or enum full name) for comparison."""
        # The referenced type could be a message or an enum.
        # If the parent message type is already registered, we will skip
        # the child type to avoid duplicate comparison.
        parent_type = "."
        for segment in register_type.split("."):
            if parent_type != ".":
                parent_type = parent_type + "."
            parent_type = parent_type + segment
            if parent_type in self.messages_map:
                return
        if register_type in self.global_messages_map:
            self.messages_map[register_type] = self.global_messages_map[register_type]
        elif register_type in self.global_enums_map:
            self.enums_map[register_type] = self.global_enums_map[register_type]

    def _get_global_info_map(self, source_code_locations_map):
        """Populate global_messages_map / global_enums_map from every file, including nested types."""
        self.global_messages_map = {}
        self.global_enums_map = {}
        for fd in self.file_set_pb.file:
            source_code_locations = (
                source_code_locations_map[fd.name]
                if source_code_locations_map
                else None
            )
            # Register first-level enums.
            # fmt: off
            for i, enum in enumerate(fd.enum_type):
                full_name = self._get_full_name(fd.package, enum.name)
                self.global_enums_map[full_name] = Enum(
                    enum_pb=enum,
                    proto_file_name=fd.name,
                    source_code_locations=source_code_locations,
                    # FileDescriptorProto.enum_type has field number 5.
                    path=(5, i),
                    full_name=full_name,
                    nested_path=["enum " + enum.name + " {"],
                )
            # Register first-level messages.
            message_stack = [
                Message(
                    message_pb=message,
                    proto_file_name=fd.name,
                    source_code_locations=source_code_locations,
                    # FileDescriptorProto.message_type has field number 4.
                    path=(4, i),
                    resource_database=self.resources_database,
                    api_version=self.api_version,
                    # `.package.outer_message.nested_message`
                    full_name=self._get_full_name(fd.package, message.name),
                    nested_path=["message " + message.name + " {"],
                )
                for i, message in enumerate(fd.message_type)
            ]
            # NOTE(review): `names` is never used below — candidate for removal.
            names = [m.name for m in message_stack]
            # fmt: on
            # Iterate for nested messages and enums.
            while message_stack:
                message = message_stack.pop()
                self.global_messages_map[message.full_name] = message
                message_stack.extend(message.nested_messages.values())
                self.global_enums_map.update(
                    (enum.full_name, enum) for enum in message.nested_enums.values()
                )

    @staticmethod
    def get_root_package(file_set_pb: descriptor_pb2.FileDescriptorSet) -> str:
        """
        Return the package name of the API being checked.

        In this code we don't have an access to the "file_to_generate" boolean flag.
        We only have parsed proto files in a FileDescriptorSet.
        So the idea is to find which files are not listed as imports of any other
        proto files (are "roots" of the dependency tree) and take the common prefix
        of their packages (ignoring known common protos such as
        google/cloud/common_resources.proto).
        """
        all_files = set()
        imported_files = set()
        files_by_name = dict()
        for file in file_set_pb.file:
            files_by_name[file.name] = file
        # Breadth-first walk over the dependency graph starting from every file.
        queue = [descriptor_proto for descriptor_proto in file_set_pb.file]
        while len(queue) > 0:
            descriptor_proto = queue.pop(0)
            all_files.add(descriptor_proto.name)
            files_by_name[descriptor_proto.name] = descriptor_proto
            for dep in descriptor_proto.dependency:
                imported_files.add(dep)
                if dep not in all_files and dep in files_by_name:
                    queue.append(files_by_name[dep])
        # Files never imported by anyone are the roots of the dependency tree.
        never_imported = [
            files_by_name[filename] for filename in all_files.difference(imported_files)
        ]
        api_definition_protos = [
            file for file in never_imported if file.package not in COMMON_PACKAGES
        ]
        if len(api_definition_protos) == 0:
            return ""
        return os.path.commonprefix([file.package for file in api_definition_protos])

    def _get_source_code_locations_map(
        self,
    ) -> Dict[str, Dict[Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location]]:
        """Return a per-file map from location path tuple to SourceCodeInfo.Location."""
        source_code_locations_map = {}
        for fd in self.file_set_pb.file:
            if not fd.source_code_info:
                continue
            # Iterate over the source_code_info and place it into a dictionary.
            #
            # The comments in protocol buffers are sorted by a concept called
            # the "path", which is a sequence of integers described in more
            # detail below; this code simply shifts from a list to a dict,
            # with tuples of paths as the dictionary keys.
            source_code_locations = {
                tuple(location.path): location
                for location in fd.source_code_info.location
            }
            source_code_locations_map[fd.name] = source_code_locations
        return source_code_locations_map

    def _get_resource_database(self, files, source_code_locations_map):
        """Build a ResourceDatabase from file-level and message-level resource definitions."""
        resources_database = ResourceDatabase()
        for fd in files:
            source_code_locations = (
                source_code_locations_map[fd.name]
                if source_code_locations_map
                else None
            )
            # Register file-level resource definitions in the database.
            for i, resource in enumerate(
                fd.options.Extensions[resource_pb2.resource_definition]
            ):
                # The file option has field number 8, resource definition has
                # field number 1053, and the index of the resource should be
                # appended to the resource path.
                resource_path = (8, 1053, i)
                resources_database.register_resource(
                    WithLocation(
                        resource, source_code_locations, resource_path, fd.name
                    )
                )
            # Register message-level resource definitions in the database.
            # Put first-layer messages in a stack and iterate them for nested messages.
            message_stack = [
                # The messages in a file have field number 4, and the index of the message
                # should be appended to the resource path. Message option has field
                # number 7, and resource option has field number 1053.
                WithLocation(message, source_code_locations, (4, i, 7, 1053), fd.name)
                for i, message in enumerate(fd.message_type)
            ]
            while message_stack:
                message_with_location = message_stack.pop()
                message = message_with_location.value
                resource = message.options.Extensions[resource_pb2.resource]
                # Only register complete resources (both type and pattern set).
                if resource.type and resource.pattern:
                    resources_database.register_resource(
                        WithLocation(
                            resource,
                            source_code_locations,
                            message_with_location.path,
                            fd.name,
                        )
                    )
                for i, nested_message in enumerate(message.nested_type):
                    # Nested messages have field number 3, and the index of the
                    # nested message is appended to the resource path.
                    # fmt: off
                    resource_path = message_with_location.path + (3,i,)
                    message_stack.append(
                        WithLocation(
                            nested_message,
                            source_code_locations,
                            resource_path,
                            fd.name,
                        )
                    )
                    # fmt: on
        return resources_database

    def _get_packaging_options_map(
        self,
        file_options: descriptor_pb2.FileOptions,
        proto_file_name: str,
        source_code_locations: Dict[
            Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location
        ],
        path: Tuple[int, ...],
    ):
        """Record every non-empty per-language packaging option with its source location."""
        # Minor version updates are allowed, for example
        # `java_package = "com.pubsub.v1"` updated to `java_package = "com.pubsub.v1beta1"`.
        # But an update between two stable versions (e.g. v1 to v2) is not permitted.
        # Values are the FileOptions field numbers of each option.
        packaging_options_path = {
            "java_package": (1,),
            "java_outer_classname": (8,),
            "java_multiple_files": (10,),
            "csharp_namespace": (37,),
            "go_package": (11,),
            "swift_prefix": (39,),
            "php_namespace": (41,),
            "php_metadata_namespace": (44,),
            "php_class_prefix": (40,),
            "ruby_package": (45,),
        }
        # Register every packaging option that is set (non-empty string),
        # keyed first by option name, then by option value.
        for option in packaging_options_path.keys():
            if getattr(file_options, option) != "":
                self.packaging_options_map[option][
                    getattr(file_options, option)
                ] = WithLocation(
                    getattr(file_options, option),
                    source_code_locations,
                    path + packaging_options_path[option],
                    proto_file_name,
                )

    def _get_full_name(self, package_name, name) -> str:
        """Return the fully-qualified proto name: '.<package>.<name>'."""
        return "." + package_name + "." + name
| |
# -*- coding: utf-8 -*-
from __future__ import print_function
import pytest
from pandas.compat import range, lrange
import numpy as np
from pandas import DataFrame, Series, Index, MultiIndex
from pandas.util.testing import assert_frame_equal
import pandas.util.testing as tm
from pandas.tests.frame.common import TestData
# Column add, remove, delete.
class TestDataFrameMutateColumns(TestData):
    """Tests for DataFrame column add / overwrite / remove / delete behavior."""

    def test_assign(self):
        # Basic assign: new column from existing columns.
        df = DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]})
        original = df.copy()
        result = df.assign(C=df.B / df.A)
        expected = df.copy()
        expected['C'] = [4, 2.5, 2]
        assert_frame_equal(result, expected)

        # lambda syntax
        result = df.assign(C=lambda x: x.B / x.A)
        assert_frame_equal(result, expected)

        # original is unmodified
        assert_frame_equal(df, original)

        # Non-Series array-like
        result = df.assign(C=[4, 2.5, 2])
        assert_frame_equal(result, expected)
        # original is unmodified
        assert_frame_equal(df, original)

        result = df.assign(B=df.B / df.A)
        expected = expected.drop('B', axis=1).rename(columns={'C': 'B'})
        assert_frame_equal(result, expected)

        # overwrite an existing column
        result = df.assign(A=df.A + df.B)
        expected = df.copy()
        expected['A'] = [5, 7, 9]
        assert_frame_equal(result, expected)

        # lambda
        result = df.assign(A=lambda x: x.A + x.B)
        assert_frame_equal(result, expected)

    def test_assign_multiple(self):
        # Several kwargs in one assign call.
        df = DataFrame([[1, 4], [2, 5], [3, 6]], columns=['A', 'B'])
        result = df.assign(C=[7, 8, 9], D=df.A, E=lambda x: x.B)
        expected = DataFrame([[1, 4, 7, 1, 4], [2, 5, 8, 2, 5],
                              [3, 6, 9, 3, 6]], columns=list('ABCDE'))
        assert_frame_equal(result, expected)

    def test_assign_alphabetical(self):
        # GH 9818: keyword arguments are applied in alphabetical order,
        # so argument order in the call does not matter.
        df = DataFrame([[1, 2], [3, 4]], columns=['A', 'B'])
        result = df.assign(D=df.A + df.B, C=df.A - df.B)
        expected = DataFrame([[1, 2, -1, 3], [3, 4, -1, 7]],
                             columns=list('ABCD'))
        assert_frame_equal(result, expected)
        result = df.assign(C=df.A - df.B, D=df.A + df.B)
        assert_frame_equal(result, expected)

    def test_assign_bad(self):
        df = DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]})

        # non-keyword argument
        with pytest.raises(TypeError):
            df.assign(lambda x: x.A)
        # referencing a column that does not exist yet
        with pytest.raises(AttributeError):
            df.assign(C=df.A, D=df.A + df.C)
        with pytest.raises(KeyError):
            df.assign(C=lambda df: df.A, D=lambda df: df['A'] + df['C'])
        with pytest.raises(KeyError):
            df.assign(C=df.A, D=lambda x: x['A'] + x['C'])

    def test_insert_error_msmgs(self):
        # GH 7432: assigning a frame with a duplicate index should raise
        # a descriptive error.
        df = DataFrame({'foo': ['a', 'b', 'c'], 'bar': [
            1, 2, 3], 'baz': ['d', 'e', 'f']}).set_index('foo')
        s = DataFrame({'foo': ['a', 'b', 'c', 'a'], 'fiz': [
            'g', 'h', 'i', 'j']}).set_index('foo')
        msg = 'cannot reindex from a duplicate axis'
        with tm.assert_raises_regex(ValueError, msg):
            df['newcol'] = s

        # GH 4107, more descriptive error message
        df = DataFrame(np.random.randint(0, 2, (4, 4)),
                       columns=['a', 'b', 'c', 'd'])
        msg = 'incompatible index of inserted column with frame index'
        with tm.assert_raises_regex(TypeError, msg):
            df['gr'] = df.groupby(['b', 'c']).count()

    def test_insert_benchmark(self):
        # from the vb_suite/frame_methods/frame_insert_columns
        N = 10
        K = 5
        df = DataFrame(index=lrange(N))
        new_col = np.random.randn(N)
        for i in range(K):
            df[i] = new_col
        expected = DataFrame(np.repeat(new_col, K).reshape(N, K),
                             index=lrange(N))
        assert_frame_equal(df, expected)

    def test_insert(self):
        df = DataFrame(np.random.randn(5, 3), index=np.arange(5),
                       columns=['c', 'b', 'a'])

        df.insert(0, 'foo', df['a'])
        tm.assert_index_equal(df.columns, Index(['foo', 'c', 'b', 'a']))
        tm.assert_series_equal(df['a'], df['foo'], check_names=False)

        df.insert(2, 'bar', df['c'])
        tm.assert_index_equal(df.columns,
                              Index(['foo', 'c', 'bar', 'b', 'a']))
        tm.assert_almost_equal(df['c'], df['bar'], check_names=False)

        # diff dtype
        # new item
        df['x'] = df['a'].astype('float32')
        result = Series(dict(float64=5, float32=1))
        assert (df.get_dtype_counts() == result).all()

        # replacing current (in different block)
        df['a'] = df['a'].astype('float32')
        result = Series(dict(float64=4, float32=2))
        assert (df.get_dtype_counts() == result).all()

        df['y'] = df['a'].astype('int32')
        result = Series(dict(float64=4, float32=2, int32=1))
        assert (df.get_dtype_counts() == result).all()

        # inserting a duplicate column name raises
        with tm.assert_raises_regex(ValueError, 'already exists'):
            df.insert(1, 'a', df['b'])
        pytest.raises(ValueError, df.insert, 1, 'c', df['b'])

        df.columns.name = 'some_name'
        # preserve columns name field
        df.insert(0, 'baz', df['c'])
        assert df.columns.name == 'some_name'

        # GH 13522: assigning the index, then overwriting
        df = DataFrame(index=['A', 'B', 'C'])
        df['X'] = df.index
        df['X'] = ['x', 'y', 'z']
        exp = DataFrame(data={'X': ['x', 'y', 'z']}, index=['A', 'B', 'C'])
        assert_frame_equal(df, exp)

    def test_delitem(self):
        del self.frame['A']
        assert 'A' not in self.frame

    def test_delitem_multiindex(self):
        midx = MultiIndex.from_product([['A', 'B'], [1, 2]])
        df = DataFrame(np.random.randn(4, 4), columns=midx)
        assert len(df.columns) == 4
        assert ('A', ) in df.columns
        assert 'A' in df.columns

        result = df['A']
        assert isinstance(result, DataFrame)
        del df['A']

        assert len(df.columns) == 2

        # A still in the levels, BUT get a KeyError if trying
        # to delete
        assert ('A', ) not in df.columns
        with pytest.raises(KeyError):
            del df[('A',)]

        # xref: https://github.com/pandas-dev/pandas/issues/2770
        # the 'A' is STILL in the columns!
        assert 'A' in df.columns
        with pytest.raises(KeyError):
            del df['A']

    def test_pop(self):
        self.frame.columns.name = 'baz'

        self.frame.pop('A')
        assert 'A' not in self.frame

        self.frame['foo'] = 'bar'
        self.frame.pop('foo')
        assert 'foo' not in self.frame
        # TODO assert self.frame.columns.name == 'baz'

        # gh-10912: inplace ops cause caching issue
        a = DataFrame([[1, 2, 3], [4, 5, 6]], columns=[
            'A', 'B', 'C'], index=['X', 'Y'])
        b = a.pop('B')
        b += 1

        # original frame
        expected = DataFrame([[1, 3], [4, 6]], columns=[
            'A', 'C'], index=['X', 'Y'])
        tm.assert_frame_equal(a, expected)

        # result
        expected = Series([2, 5], index=['X', 'Y'], name='B') + 1
        tm.assert_series_equal(b, expected)

    def test_pop_non_unique_cols(self):
        # Popping a duplicated label returns a DataFrame of all matches.
        df = DataFrame({0: [0, 1], 1: [0, 1], 2: [4, 5]})
        df.columns = ["a", "b", "a"]

        res = df.pop("a")
        assert type(res) == DataFrame
        assert len(res) == 2
        assert len(df.columns) == 1
        assert "b" in df.columns
        assert "a" not in df.columns
        assert len(df.index) == 2

    def test_insert_column_bug_4032(self):
        # GH4032, inserting a column and renaming causing errors
        df = DataFrame({'b': [1.1, 2.2]})
        df = df.rename(columns={})
        df.insert(0, 'a', [1, 2])

        result = df.rename(columns={})
        str(result)
        expected = DataFrame([[1, 1.1], [2, 2.2]], columns=['a', 'b'])
        assert_frame_equal(result, expected)

        df.insert(0, 'c', [1.3, 2.3])
        result = df.rename(columns={})
        str(result)
        expected = DataFrame([[1.3, 1, 1.1], [2.3, 2, 2.2]],
                             columns=['c', 'a', 'b'])
        assert_frame_equal(result, expected)
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nicira Networks, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Dan Wendlandt, Nicira, Inc
#
from abc import abstractmethod
from oslo.config import cfg
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import base
from neutron.common import exceptions as qexception
from neutron import manager
from neutron import quota
# L3 Exceptions
class RouterNotFound(qexception.NotFound):
    """Raised when the requested router id does not exist."""
    message = _("Router %(router_id)s could not be found")
class RouterInUse(qexception.InUse):
    """Raised when deleting a router that still has active ports."""
    message = _("Router %(router_id)s still has active ports")
class RouterInterfaceNotFound(qexception.NotFound):
    """Raised when the router has no interface with the given port id."""
    message = _("Router %(router_id)s does not have "
                "an interface with id %(port_id)s")
class RouterInterfaceNotFoundForSubnet(qexception.NotFound):
    """Raised when the router has no interface on the given subnet."""
    message = _("Router %(router_id)s has no interface "
                "on subnet %(subnet_id)s")
class RouterInterfaceInUseByFloatingIP(qexception.InUse):
    """Raised when deleting a router interface still required by floating IPs."""
    message = _("Router interface for subnet %(subnet_id)s on router "
                "%(router_id)s cannot be deleted, as it is required "
                "by one or more floating IPs.")
class FloatingIPNotFound(qexception.NotFound):
    """Raised when the requested floating IP id does not exist."""
    message = _("Floating IP %(floatingip_id)s could not be found")
class ExternalGatewayForFloatingIPNotFound(qexception.NotFound):
    """Raised when no route exists between the port's subnet and the external network."""
    message = _("External network %(external_network_id)s is not reachable "
                "from subnet %(subnet_id)s. Therefore, cannot associate "
                "Port %(port_id)s with a Floating IP.")
class FloatingIPPortAlreadyAssociated(qexception.InUse):
    """Raised when the fixed IP of a port already has a floating IP on that network."""
    message = _("Cannot associate floating IP %(floating_ip_address)s "
                "(%(fip_id)s) with port %(port_id)s "
                "using fixed IP %(fixed_ip)s, as that fixed IP already "
                "has a floating IP on external network %(net_id)s.")
class L3PortInUse(qexception.InUse):
    """Raised when deleting a port owned by the L3 service through the port API."""
    message = _("Port %(port_id)s has owner %(device_owner)s and therefore"
                " cannot be deleted directly via the port API.")
class ExternalNetworkInUse(qexception.InUse):
    """Raised when un-marking a network as external while gateway ports exist."""
    message = _("External network %(net_id)s cannot be updated to be made "
                "non-external, since it has existing gateway ports")
class RouterExternalGatewayInUseByFloatingIp(qexception.InUse):
    """Raised when changing a router gateway still required by floating IPs."""
    message = _("Gateway cannot be updated for router %(router_id)s, since a "
                "gateway to external network %(net_id)s is required by one or "
                "more floating IPs.")
# Resource collection name constants.
ROUTERS = 'routers'
EXTERNAL_GW_INFO = 'external_gateway_info'

# Attribute maps for the 'routers' and 'floatingips' REST resources:
# per-attribute rules for POST/PUT permission, validation, defaults,
# and visibility, consumed by the v2 API controller.
RESOURCE_ATTRIBUTE_MAP = {
    ROUTERS: {
        'id': {'allow_post': False, 'allow_put': False,
               'validate': {'type:uuid': None},
               'is_visible': True,
               'primary_key': True},
        'name': {'allow_post': True, 'allow_put': True,
                 'validate': {'type:string': None},
                 'is_visible': True, 'default': ''},
        'admin_state_up': {'allow_post': True, 'allow_put': True,
                           'default': True,
                           'convert_to': attr.convert_to_boolean,
                           'is_visible': True},
        'status': {'allow_post': False, 'allow_put': False,
                   'is_visible': True},
        'tenant_id': {'allow_post': True, 'allow_put': False,
                      'required_by_policy': True,
                      'validate': {'type:string': None},
                      'is_visible': True},
        EXTERNAL_GW_INFO: {'allow_post': True, 'allow_put': True,
                           'is_visible': True, 'default': None,
                           'enforce_policy': True}
    },
    'floatingips': {
        'id': {'allow_post': False, 'allow_put': False,
               'validate': {'type:uuid': None},
               'is_visible': True,
               'primary_key': True},
        'floating_ip_address': {'allow_post': False, 'allow_put': False,
                                'validate': {'type:ip_address_or_none': None},
                                'is_visible': True},
        'floating_network_id': {'allow_post': True, 'allow_put': False,
                                'validate': {'type:uuid': None},
                                'is_visible': True},
        'router_id': {'allow_post': False, 'allow_put': False,
                      'validate': {'type:uuid_or_none': None},
                      'is_visible': True, 'default': None},
        'port_id': {'allow_post': True, 'allow_put': True,
                    'validate': {'type:uuid_or_none': None},
                    'is_visible': True, 'default': None},
        'fixed_ip_address': {'allow_post': True, 'allow_put': True,
                             'validate': {'type:ip_address_or_none': None},
                             'is_visible': True, 'default': None},
        'tenant_id': {'allow_post': True, 'allow_put': False,
                      'required_by_policy': True,
                      'validate': {'type:string': None},
                      'is_visible': True}
    },
}

# Attribute added to the 'networks' resource to mark a network as external.
EXTERNAL = 'router:external'
EXTENDED_ATTRIBUTES_2_0 = {
    'networks': {EXTERNAL: {'allow_post': True,
                            'allow_put': True,
                            'default': attr.ATTR_NOT_SPECIFIED,
                            'is_visible': True,
                            'convert_to': attr.convert_to_boolean,
                            'enforce_policy': True,
                            'required_by_policy': True}}}

# Per-tenant quota knobs for L3 resources, registered in the [QUOTAS] group.
l3_quota_opts = [
    cfg.IntOpt('quota_router',
               default=10,
               help=_('Number of routers allowed per tenant, -1 for '
                      'unlimited')),
    cfg.IntOpt('quota_floatingip',
               default=50,
               help=_('Number of floating IPs allowed per tenant, '
                      '-1 for unlimited')),
]
cfg.CONF.register_opts(l3_quota_opts, 'QUOTAS')
class L3(extensions.ExtensionDescriptor):
    """API extension descriptor for the L3 router extension.

    Exposes the 'routers' and 'floatingips' resources and extends the
    'networks' resource with the router:external attribute.
    """

    @classmethod
    def get_name(cls):
        return "Neutron L3 Router"

    @classmethod
    def get_alias(cls):
        return "router"

    @classmethod
    def get_description(cls):
        return ("Router abstraction for basic L3 forwarding"
                " between L2 Neutron networks and access to external"
                " networks via a NAT gateway.")

    @classmethod
    def get_namespace(cls):
        return "http://docs.openstack.org/ext/neutron/router/api/v1.0"

    @classmethod
    def get_updated(cls):
        return "2012-07-20T10:00:00-00:00"

    @classmethod
    def get_resources(cls):
        """Returns Ext Resources."""
        # Teach the attribute layer the plural -> singular mapping.
        my_plurals = [(key, key[:-1]) for key in RESOURCE_ATTRIBUTE_MAP.keys()]
        attr.PLURALS.update(dict(my_plurals))
        exts = []
        plugin = manager.NeutronManager.get_plugin()
        for resource_name in ['router', 'floatingip']:
            collection_name = resource_name + "s"
            params = RESOURCE_ATTRIBUTE_MAP.get(collection_name, dict())
            member_actions = {}
            if resource_name == 'router':
                # Interface add/remove are PUT member actions on a router.
                member_actions = {'add_router_interface': 'PUT',
                                  'remove_router_interface': 'PUT'}
            quota.QUOTAS.register_resource_by_name(resource_name)
            controller = base.create_resource(
                collection_name, resource_name, plugin, params,
                member_actions=member_actions,
                allow_pagination=cfg.CONF.allow_pagination,
                allow_sorting=cfg.CONF.allow_sorting)
            ex = extensions.ResourceExtension(collection_name,
                                              controller,
                                              member_actions=member_actions,
                                              attr_map=params)
            exts.append(ex)
        return exts

    def update_attributes_map(self, attributes):
        super(L3, self).update_attributes_map(
            attributes, extension_attrs_map=RESOURCE_ATTRIBUTE_MAP)

    def get_extended_resources(self, version):
        """Return the attribute map contributed by this extension."""
        if version == "2.0":
            # FIX: dict(a.items() + b.items()) only works on Python 2, where
            # items() returns concatenatable lists. Copy-and-update has the
            # same result (RESOURCE_ATTRIBUTE_MAP wins on key collisions) and
            # works on both Python 2 and 3.
            extended = dict(EXTENDED_ATTRIBUTES_2_0)
            extended.update(RESOURCE_ATTRIBUTE_MAP)
            return extended
        else:
            return {}
class RouterPluginBase(object):
    """Abstract base class defining the router / floating-IP API that an L3
    plugin implementation must provide.

    Count methods are optional; the defaults raise NotImplementedError.
    """

    @abstractmethod
    def create_router(self, context, router):
        pass

    @abstractmethod
    def update_router(self, context, id, router):
        pass

    @abstractmethod
    def get_router(self, context, id, fields=None):
        pass

    @abstractmethod
    def delete_router(self, context, id):
        pass

    @abstractmethod
    def get_routers(self, context, filters=None, fields=None,
                    sorts=None, limit=None, marker=None, page_reverse=False):
        pass

    @abstractmethod
    def add_router_interface(self, context, router_id, interface_info):
        pass

    @abstractmethod
    def remove_router_interface(self, context, router_id, interface_info):
        pass

    @abstractmethod
    def create_floatingip(self, context, floatingip):
        pass

    @abstractmethod
    def update_floatingip(self, context, id, floatingip):
        pass

    @abstractmethod
    def get_floatingip(self, context, id, fields=None):
        pass

    @abstractmethod
    def delete_floatingip(self, context, id):
        pass

    @abstractmethod
    def get_floatingips(self, context, filters=None, fields=None,
                        sorts=None, limit=None, marker=None,
                        page_reverse=False):
        pass

    def get_routers_count(self, context, filters=None):
        # BUG FIX: the original raised qexception.NotImplementedError, but
        # NotImplementedError is a Python builtin, not an attribute of the
        # neutron exceptions module -- the lookup itself raised
        # AttributeError instead of the intended exception.
        raise NotImplementedError()

    def get_floatingips_count(self, context, filters=None):
        # See get_routers_count for why the builtin is raised directly.
        raise NotImplementedError()
| |
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
from neuroanalysis.miesnwb import MiesNwb
from ..signal import SignalBlock
from .sweep_view import SweepView
from .analyzer_view import AnalyzerView
from ...util.merge_lists import merge_lists
class MiesNwbExplorer(QtGui.QSplitter):
    """Widget for listing and selecting recordings in a MIES-generated NWB file.
    """
    # Emitted with the current selection list whenever the sweep selection changes.
    selection_changed = QtCore.Signal(object)
    # Emitted with the list of checked channel indices whenever it changes.
    channels_changed = QtCore.Signal(object)
    # Emitted (with self) whenever a sweep's check box is toggled.
    check_state_changed = QtCore.Signal(object)

    def __init__(self, nwb=None):
        QtGui.QSplitter.__init__(self)
        self.setOrientation(QtCore.Qt.Vertical)
        self._nwb = None
        # channel index -> bool; remembers check state across selection changes.
        self._channel_selection = {}
        # Top pane: sweep tree on the left, channel list on the right.
        self._sel_box = QtGui.QWidget()
        self._sel_box_layout = QtGui.QHBoxLayout()
        self._sel_box_layout.setContentsMargins(0, 0, 0, 0)
        self._sel_box.setLayout(self._sel_box_layout)
        self.addWidget(self._sel_box)
        self.sweep_tree = QtGui.QTreeWidget()
        columns = ['ID', 'Stim Name', 'Clamp Mode', 'Holding V', 'Holding I']
        self.sweep_tree.setColumnCount(len(columns))
        self.sweep_tree.setHeaderLabels(columns)
        self.sweep_tree.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
        self._sel_box_layout.addWidget(self.sweep_tree)
        self.channel_list = QtGui.QListWidget()
        self.channel_list.setMaximumWidth(50)
        self._sel_box_layout.addWidget(self.channel_list)
        self.channel_list.itemChanged.connect(self._channel_list_changed)
        # Bottom pane: metadata tree for the currently selected sweep.
        self.meta_tree = QtGui.QTreeWidget()
        self.addWidget(self.meta_tree)
        self.set_nwb(nwb)
        self.sweep_tree.itemSelectionChanged.connect(self._selection_changed)
        self.sweep_tree.itemChanged.connect(self._tree_item_changed)

    def set_nwb(self, nwb):
        """Set (or clear, with None) the NWB file shown by this explorer."""
        self._nwb = nwb
        self._channel_selection = {}
        self.update_sweep_tree()

    def update_sweep_tree(self):
        """Rebuild the sweep tree from the current NWB contents."""
        self.sweep_tree.clear()
        if self._nwb is None:
            return
        for i,sweep in enumerate(self._nwb.contents):
            recs = sweep.recordings
            stim = recs[0].stimulus
            stim_name = '' if stim is None else stim.description
            # Build one-character/one-field-per-recording summary strings.
            modes = ''
            V_holdings = ''
            I_holdings = ''
            for rec in sweep.recordings:
                if not hasattr(rec, 'clamp_mode'):
                    # Recording without clamp info; show placeholders.
                    modes += "-"
                    V_holdings += "-"
                    I_holdings += "-"
                    continue
                if rec.clamp_mode == 'vc':
                    modes += 'V'
                else:
                    modes += 'I'
                hp = rec.rounded_holding_potential
                if hp is not None:
                    # hp is scaled by 1000 for display (V -> mV, presumably).
                    V_holdings += '%d '% (int(hp*1000))
                else:
                    V_holdings += '?? '
                hc = rec.holding_current
                if hc is not None:
                    # hc is scaled by 1e12 for display (A -> pA, presumably).
                    I_holdings += '%d '% (int(hc*1e12))
                else:
                    I_holdings += '?? '
            item = QtGui.QTreeWidgetItem([str(i), stim_name, modes, V_holdings, I_holdings])
            item.setCheckState(0, QtCore.Qt.Unchecked)
            # NOTE: this shadows QTreeWidgetItem.data(); selection() and
            # checked_items() read this attribute back directly.
            item.data = sweep
            self.sweep_tree.addTopLevelItem(item)
        self.sweep_tree.header().resizeSections(QtGui.QHeaderView.ResizeToContents)

    def selection(self):
        """Return a list of selected groups and/or sweeps.
        """
        items = self.sweep_tree.selectedItems()
        selection = []
        for item in items:
            # Skip children whose parent is already selected.
            if item.parent() in items:
                continue
            selection.append(item.data)
        return selection

    def checked_items(self, _root=None):
        """Return a list of items that have been checked.
        """
        if _root is None:
            _root = self.sweep_tree.invisibleRootItem()
        checked = []
        if _root.checkState(0) == QtCore.Qt.Checked:
            checked.append(_root.data)
        # Recurse depth-first through all children.
        for i in range(_root.childCount()):
            checked.extend(self.checked_items(_root.child(i)))
        return checked

    def selected_channels(self):
        """Return the list of channel indices currently checked in the
        channel list."""
        chans = []
        for i in range(self.channel_list.count()):
            item = self.channel_list.item(i)
            if item.checkState() == QtCore.Qt.Checked:
                chans.append(item.channel_index)
        return chans

    def _update_channel_list(self):
        # Rebuild the channel list from the union of channels (devices) in
        # the current selection. itemChanged is disconnected while we
        # repopulate so programmatic changes don't emit channels_changed.
        self.channel_list.itemChanged.disconnect(self._channel_list_changed)
        try:
            # clear channel list
            while self.channel_list.count() > 0:
                self.channel_list.takeItem(0)
            # bail out if nothing is selected
            sel = self.selection()
            if len(sel) == 0:
                return
            # get a list of all channels across all selected items
            channels = []
            for item in sel:
                #if isinstance(item, SweepGroup):
                    #if len(item.sweeps) == 0:
                        #continue
                    #item = item.sweeps[0]
                channels.extend(item.devices)
            channels = list(set(channels))
            channels.sort()
            # add new items to the channel list, all selected
            for ch in channels:
                item = QtGui.QListWidgetItem(str(ch))
                item.channel_index = ch
                self._channel_selection.setdefault(ch, True)
                # restore previous check state, if any.
                checkstate = QtCore.Qt.Checked if self._channel_selection.setdefault(ch, True) else QtCore.Qt.Unchecked
                item.setCheckState(checkstate)
                self.channel_list.addItem(item)
        finally:
            self.channel_list.itemChanged.connect(self._channel_list_changed)

    def _selection_changed(self):
        # Show per-device metadata only when exactly one sweep is selected;
        # then refresh the channel list and notify listeners.
        sel = self.selection()
        if len(sel) == 1:
            sweep = sel[0]
            self.meta_tree.setColumnCount(len(sweep.devices)+1)
            self.meta_tree.setHeaderLabels([""] + [str(dev) for dev in sweep.devices])
            self.meta_tree.clear()
            self._populate_meta_tree([dev.all_meta for dev in sweep.recordings], self.meta_tree.invisibleRootItem())
            for i in range(self.meta_tree.columnCount()):
                self.meta_tree.resizeColumnToContents(i)
        else:
            self.meta_tree.clear()
        self._update_channel_list()
        self.selection_changed.emit(sel)

    def _populate_meta_tree(self, meta, root):
        # *meta* is a list of metadata dicts, one per column; merge their
        # keys so all columns share one row per key.
        keys = list(meta[0].keys())
        for m in meta[1:]:
            keys = merge_lists(keys, list(m.keys()))
        for k in keys:
            vals = [m.get(k) for m in meta]
            if isinstance(vals[0], dict):
                # Nested mapping: add a parent row and recurse.
                item = QtGui.QTreeWidgetItem([k] + [''] * len(meta))
                self._populate_meta_tree(vals, item)
            else:
                item = QtGui.QTreeWidgetItem([k] + [str(v) for v in vals])
            root.addChild(item)

    def _tree_item_changed(self, item, col):
        # Column 0 holds the check box; ignore changes to other columns.
        if col != 0:
            return
        self.check_state_changed.emit(self)

    def _channel_list_changed(self, item):
        self.channels_changed.emit(self.selected_channels())
        # Remember the state so it survives selection changes.
        self._channel_selection[item.channel_index] = item.checkState() == QtCore.Qt.Checked
class MiesNwbViewer(QtGui.QWidget):
    """Combination of a MiesNwvExplorer for selecting sweeps and a tab widget
    containing multiple views, each performing a different analysis.
    """
    # Emitted (with self) when the active analyzer tab changes.
    analyzer_changed = QtCore.Signal(object)

    def __init__(self, nwb=None):
        QtGui.QWidget.__init__(self)
        self.nwb = nwb
        self.layout = QtGui.QGridLayout()
        self.layout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(self.layout)
        # Left side: explorer / parameter tree / reload button stacked
        # vertically; right side: the analyzer tab widget.
        self.hsplit = QtGui.QSplitter()
        self.hsplit.setOrientation(QtCore.Qt.Horizontal)
        self.layout.addWidget(self.hsplit, 0, 0)
        self.vsplit = QtGui.QSplitter()
        self.vsplit.setOrientation(QtCore.Qt.Vertical)
        self.hsplit.addWidget(self.vsplit)
        self.explorer = MiesNwbExplorer(self.nwb)
        self.explorer.selection_changed.connect(self.data_selection_changed)
        self.explorer.channels_changed.connect(self.data_selection_changed)
        self.vsplit.addWidget(self.explorer)
        self.ptree = pg.parametertree.ParameterTree()
        self.vsplit.addWidget(self.ptree)
        self.tabs = QtGui.QTabWidget()
        self.hsplit.addWidget(self.tabs)
        self.reload_btn = QtGui.QPushButton("Reload views")
        self.reload_btn.clicked.connect(self.reload_views)
        self.vsplit.addWidget(self.reload_btn)
        self.views = []
        self.create_views()
        self.resize(1400, 800)
        self.hsplit.setSizes([600, 800])
        # Sync the parameter tree with the initial tab before hooking up
        # the currentChanged signal.
        self.tab_changed()
        self.tabs.currentChanged.connect(self.tab_changed)

    def set_nwb(self, nwb):
        """Load a new NWB file into the viewer."""
        self.nwb = nwb
        self.explorer.set_nwb(nwb)

    def data_selection_changed(self):
        # Forward the current sweep/channel selection to the active view.
        sweeps = self.selected_sweeps()
        chans = self.selected_channels()
        with pg.BusyCursor():
            self.tabs.currentWidget().data_selected(sweeps, chans)

    def tab_changed(self):
        # Show the newly active view's parameters and feed it the current
        # selection; clear the tree when no tab is active.
        w = self.tabs.currentWidget()
        if w is None:
            self.ptree.clear()
            return
        self.ptree.setParameters(w.params, showTop=False)
        sweeps = self.selected_sweeps()
        chans = self.selected_channels()
        w.data_selected(sweeps, chans)
        self.analyzer_changed.emit(self)

    def selected_analyzer(self):
        """Return the currently active analyzer view widget."""
        return self.tabs.currentWidget()

    def selected_sweeps(self, selection=None):
        """Return the sweeps in *selection* (defaults to the explorer's
        current selection)."""
        if selection is None:
            selection = self.explorer.selection()
        sweeps = []
        for item in selection:
            #if isinstance(item, SweepGroup):
                #sweeps.extend(item.sweeps)
            #else:
            sweeps.append(item)
        return sweeps

    def checked_sweeps(self):
        """Return all sweeps whose check box is checked in the explorer."""
        selection = self.explorer.checked_items()
        sweeps = []
        for item in selection:
            #if isinstance(item, SweepGroup):
                #sweeps.extend(item.sweeps)
            #else:
            sweeps.append(item)
        return sweeps

    def selected_channels(self):
        """Return the channel indices checked in the explorer."""
        return self.explorer.selected_channels()

    def reload_views(self):
        """Remove all existing views, reload their source code, and create new
        views.
        """
        self.clear_views()
        pg.reload.reloadAll(debug=True)
        self.create_views()

    def clear_views(self):
        # Block tab_changed while tearing down so we don't call into
        # half-destroyed views.
        with SignalBlock(self.tabs.currentChanged, self.tab_changed):
            self.tabs.clear()
            self.views = []

    def create_views(self):
        """(Re)create one analyzer tab per known view class."""
        self.clear_views()
        self.views = [
            ('Sweep', SweepView(self)),
            ('Sandbox', AnalyzerView(self)),
        ]
        for name, view in self.views:
            self.tabs.addTab(view, name)
class AnalysisView(QtGui.QWidget):
    """Example skeleton for an analysis view.
    """
    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        # Views must have self.params
        # This implements the controls that are unique to this view.
        self.params = pg.parametertree.Parameter(name='params', type='group', children=[
            {'name': 'lowpass', 'type': 'float', 'value': 0, 'limits': [0, None], 'step': 1},
            {'name': 'average', 'type': 'bool', 'value': False},
        ])
        # BUG FIX: the original connected to self._update_analysis, which does
        # not exist (the method below is named update_analysis), raising
        # AttributeError on construction.
        self.params.sigTreeStateChanged.connect(self.update_analysis)

    def data_selected(self, sweeps, channels):
        """Called when the user selects a different set of sweeps.
        """
        self.sweeps = sweeps
        self.channels = channels
        self.update_analysis()

    def update_analysis(self, param=None, changes=None):
        """Called when the user changes control parameters.

        BUG FIX: *param* and *changes* now default to None so the no-argument
        call from data_selected() works as well as the two-argument signal
        callback from sigTreeStateChanged.
        """
        pass
if __name__ == '__main__':
    import sys
    from pprint import pprint

    # Open the pyqtgraph debug console so exceptions can be inspected.
    pg.dbg()

    # Usage: python <script> <nwb_file>
    filename = sys.argv[1]
    nwb = MiesNwb(filename)

    # sweeps = nwb.sweeps()
    # traces = sweeps[0].traces()
    # # pprint(traces[0].meta())
    # groups = nwb.sweep_groups()
    # for i,g in enumerate(groups):
    #     print "--------", i, g
    #     print g.describe()
    # d = groups[7].data()
    # print d.shape

    app = pg.mkQApp()
    w = MiesNwbViewer(nwb)
    w.show()
    # w.show_group(7)
| |
import itertools
from collections import namedtuple
from terml.nodes import Term, Tag, coerceToTerm
# Python 2/3 compatibility: Python 3 has no ``basestring``; alias it to
# ``str`` so the isinstance() checks below work on both versions.
try:
    basestring
except NameError:
    basestring = str
class QTerm(namedtuple("QTerm", "functor data args span")):
    """
    A quasiterm, representing a template or pattern for a term tree.
    """

    @property
    def tag(self):
        # The term's tag is carried by its functor.
        return self.functor.tag

    def _substitute(self, map):
        # Substitute into the functor, then into each argument; argument
        # substitutions may each expand to several terms, so flatten them.
        candidate = self.functor._substitute(map)[0]
        args = tuple(itertools.chain.from_iterable(a._substitute(map) for a in self.args))
        term = Term(candidate.tag, candidate.data, args, self.span)
        return [term]

    def substitute(self, map):
        """
        Fill $-holes with named values.

        @param map: A mapping of names to values to be inserted into
        the term tree.
        """
        return self._substitute(map)[0]

    def match(self, specimen, substitutionArgs=()):
        """
        Search a term tree for matches to this pattern. Returns a
        mapping of names to matched values.

        @param specimen: A term tree to extract values from.
        @raises TypeError: when the specimen does not match this pattern.
        """
        bindings = {}
        if self._match(substitutionArgs, [specimen], bindings, (), 1) == 1:
            return bindings
        raise TypeError("%r doesn't match %r" % (self, specimen))

    def _reserve(self):
        # A quasiterm always consumes exactly one specimen.
        return 1

    def _match(self, args, specimens, bindings, index, max):
        """Match against the first specimen. Returns 1 on success, -1 on
        failure."""
        if not specimens:
            return -1
        spec = self._coerce(specimens[0])
        if spec is None:
            return -1
        matches = self.functor._match(args, [spec.withoutArgs()], bindings, index, 1)
        # BUG FIX: functor matchers signal failure with -1, which is truthy,
        # so the original ``if not matches`` never detected failure and
        # matching proceeded even when the functor (e.g. its data) differed.
        if matches <= 0:
            return -1
        if matches > 1:
            raise TypeError("Functor may only match 0 or 1 specimen")
        num = matchArgs(self.args, spec.args, args, bindings, index, len(spec.args))
        # Succeed only if every specimen argument was consumed.
        if len(spec.args) == num:
            if max >= 1:
                return 1
        return -1

    def _coerce(self, spec):
        # Coerce a raw specimen into a Term comparable with this pattern;
        # returns None when the specimen cannot possibly match.
        if isinstance(spec, Term):
            newf = coerceToQuasiMatch(spec.withoutArgs(),
                                      self.functor.isFunctorHole,
                                      self.tag)
            if newf is None:
                return None
            return Term(newf.asFunctor(), None, spec.args, None)
        else:
            return coerceToQuasiMatch(spec, self.functor.isFunctorHole,
                                      self.tag)

    def __eq__(self, other):
        # The span is deliberately excluded from equality.
        return ( self.functor, self.data, self.args
                 ) == (other.functor, other.data, other.args)

    def asFunctor(self):
        """Use this term as a functor (only legal when it has no args)."""
        if self.args:
            raise ValueError("Terms with args can't be used as functors")
        else:
            return self.functor
class QFunctor(namedtuple("QFunctor", "tag data span")):
    """A quasi-term functor: the tag/data pair at the head of a term."""

    # Plain functors never occupy a functor-hole position.
    isFunctorHole = False

    def _reserve(self):
        # A functor consumes exactly one specimen.
        return 1

    @property
    def name(self):
        """The functor's name, delegated to its tag."""
        return self.tag.name

    def _unparse(self, indentLevel=0):
        return self.tag._unparse(indentLevel)

    def _substitute(self, map):
        # A functor substitutes to a single argument-less term.
        return [Term(self.tag, self.data, None, self.span)]

    def _match(self, args, specimens, bindings, index, max):
        """Match against the head of *specimens*: 1 on success, -1 on failure."""
        if specimens:
            candidate = coerceToQuasiMatch(specimens[0], False, self.tag)
            data_ok = (candidate is not None
                       and (self.data is None or self.data == candidate.data))
            if data_ok and max >= 1:
                return 1
        return -1

    def asFunctor(self):
        """A functor is already a functor."""
        return self
def matchArgs(quasiArglist, specimenArglist, args, bindings, index, max):
    """Match a list of quasi-term argument patterns against specimen args.

    Returns the total number of specimens consumed, or -1 when any
    pattern fails to match.
    """
    remaining = specimenArglist
    reserved = [pattern._reserve() for pattern in quasiArglist]
    consumed = 0
    for position, pattern in enumerate(quasiArglist):
        # Hold back enough specimens for the patterns still to come.
        budget = max - sum(reserved[position + 1:])
        count = pattern._match(args, remaining, bindings, index, budget)
        if count == -1:
            return -1
        remaining = remaining[count:]
        max -= count
        consumed += count
    return consumed
def coerceToQuasiMatch(val, isFunctorHole, tag):
    """Coerce *val* into a Term suitable for quasi-term matching.

    Returns the coerced Term, or None when *val* cannot be coerced or its
    tag disagrees with the required *tag*.
    """
    if isFunctorHole:
        # Functor positions only accept argument-less, "functor-like" values.
        if val is None:
            result = Term(Tag("null"), None, None, None)
        elif isinstance(val, Term):
            if len(val.args) != 0:
                # A term with arguments cannot stand in for a bare functor.
                return None
            else:
                result = val
        elif isinstance(val, basestring):
            # Strings become bare tags.
            result = Term(Tag(val), None, None, None)
        elif isinstance(val, bool):
            result = Term(Tag(["false", "true"][val]), None, None, None)
        else:
            return None
    else:
        result = coerceToTerm(val)
    if tag is not None and result.tag != tag:
        return None
    return result
class _Hole(namedtuple("_Hole", "tag name isFunctorHole")):
    """Base class for quasi-term holes ($-value holes and @-pattern holes)."""

    def _reserve(self):
        # A hole always consumes exactly one specimen.
        return 1

    def __repr__(self):
        # _unparse is provided by the subclasses (ValueHole / PatternHole).
        return "term('%s')" % (self._unparse(4).replace("'", "\\'"))

    def match(self, specimen, substitutionArgs=()):
        """Match *specimen* against this hole and return the resulting
        bindings; raises TypeError when the match fails."""
        bindings = {}
        if self._match(substitutionArgs, [specimen], bindings, (), 1) != -1:
            return bindings
        raise TypeError("%r doesn't match %r" % (self, specimen))
def _multiget(args, holenum, index, repeat):
result = args[holenum]
for i in index:
if not isinstance(result, list):
return result
result = result[i]
return result
def _multiput(bindings, holenum, index, newval):
bits = bindings
dest = holenum
for it in index:
next = bits[dest]
if next is None:
next = {}
bits[dest] = next
bits = next
dest = it
result = None
if dest in bits:
result = bits[dest]
bits[dest] = newval
return result
class ValueHole(_Hole):
    """A $-hole: filled in with a concrete named value on substitution."""

    def _unparse(self, indentLevel=0):
        return "${%s}" % (self.name,)

    def _substitute(self, map):
        # Look up the replacement by name and coerce it to a matchable term.
        replacement = map[self.name]
        coerced = coerceToQuasiMatch(replacement, self.isFunctorHole, self.tag)
        if coerced is None:
            raise TypeError("%r doesn't match %r" % (replacement, self))
        return [coerced]

    def asFunctor(self):
        # Promote to a functor-position hole when not already one.
        if self.isFunctorHole:
            return self
        return ValueHole(self.tag, self.name, True)
class PatternHole(_Hole):
    """An @-hole: captures the matched specimen into the bindings."""

    def _unparse(self, indentLevel=0):
        if self.tag:
            return "%s@{%s}" % (self.tag.name, self.name)
        else:
            return "@{%s}" % (self.name,)

    def _match(self, args, specimens, bindings, index, max):
        # Capture the first specimen under this hole's name.
        # Returns 1 on success, -1 on failure.
        if not specimens:
            return -1
        spec = coerceToQuasiMatch(specimens[0], self.isFunctorHole, self.tag)
        if spec is None:
            return -1
        oldval = _multiput(bindings, self.name, index, spec)
        # NOTE(review): this succeeds when there was no previous binding OR
        # the previous binding *differs* from the new one -- confirm the
        # intended semantics for repeated bindings of the same hole.
        if oldval is None or oldval != spec:
            if max >= 1:
                return 1
        return -1

    def asFunctor(self):
        # Promote to a functor-position hole when not already one.
        if self.isFunctorHole:
            return self
        else:
            return PatternHole(self.tag, self.name, True)
class QSome(namedtuple("_QSome", "value quant")):
    """A quantified quasi-term argument (value plus a quantifier string)."""

    def _reserve(self):
        # Only the '+' quantifier requires at least one specimen.
        return 1 if self.quant == "+" else 0
| |
#!/usr/bin/python
#
# Copyright 2016 Andreas Moser <grrrrrrrrr@surfsup.at>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple bindings for Hyperscan."""
import hyperscan_lib
class Hyperscan(object):
    """The Hyperscan class.

    Thin cffi wrapper around the Hyperscan regex engine supporting block
    scanning (ScanBlock) and streaming scans (OpenStream / StreamScan /
    CloseStream).
    """

    def __init__(self, patterns=None, flags=None, mode=None):
        """Init.

        Args:
          patterns: A list of patterns to scan for.
          flags: A list of Hyperscan flags, one for each pattern. If not passed, a
            default of HS_FLAG_DOTALL is used for every pattern.
          mode: The scanning mode, HS_MODE_BLOCK or HS_MODE_STREAM. Defaults to
            HS_MODE_BLOCK.

        Raises:
          ValueError: Arguments could not be used.
        """
        self._ffi, self._hs = hyperscan_lib.InitHyperscanLib()
        if not patterns:
            raise ValueError("Must give some patterns to scan for!")
        if flags and len(patterns) != len(flags):
            raise ValueError("Need one flags entry for each pattern!")
        if not flags:
            # Default every pattern to HS_FLAG_DOTALL.
            flags = [self._hs.HS_FLAG_DOTALL] * len(patterns)
        if mode is None:
            mode = self._hs.HS_MODE_BLOCK
        self.patterns = patterns
        self.flags = flags
        self.mode = mode
        self._CompilePatterns(self.patterns, self.flags, self.mode)

    def _CompilePatterns(self, patterns, flags, mode):
        """Compiles the patterns/flags given into a database."""
        # Free any previously compiled database; _database_p does not exist
        # on the first call, hence the AttributeError guard.
        try:
            res = self._hs.hs_free_database(self._database_p[0])
            if res != self._hs.HS_SUCCESS:
                raise RuntimeError("Unable to free database (%d)!" % res)
        except AttributeError:
            pass
        # Marshal patterns and flags into C arrays for the cffi call.
        cffi_patterns = [self._ffi.new("char []", pattern) for pattern in patterns]
        cffi_array = self._ffi.new("char *[]", cffi_patterns)
        cffi_flags = self._ffi.new("int []", flags)
        cffi_flags_p = self._ffi.cast("unsigned int *", cffi_flags)
        database_p = self._ffi.new("hs_database_t **")
        compile_error_p = self._ffi.new("hs_compile_error_t **")
        # The casts of 0 produce NULL pointers (no per-pattern ids, no
        # platform info).
        res = self._hs.hs_compile_multi(cffi_array, cffi_flags_p,
                                        self._ffi.cast("unsigned int *", 0),
                                        len(patterns), mode,
                                        self._ffi.cast("hs_platform_info_t *", 0),
                                        database_p, compile_error_p)
        if res != self._hs.HS_SUCCESS:
            msg = "Compile error: %s" % self._ffi.string(compile_error_p[0].message)
            self._hs.hs_free_compile_error(compile_error_p[0])
            raise RuntimeError(msg)
        self._database_p = database_p

    def _AllocateScratch(self):
        # Allocate per-scan scratch space for the current database.
        scratch_pp = self._ffi.new("hs_scratch_t **")
        res = self._hs.hs_alloc_scratch(self._database_p[0], scratch_pp)
        if res != self._hs.HS_SUCCESS:
            raise RuntimeError("Error while allocating scratch!")
        return scratch_pp

    def _FreeScratch(self, scratch_p):
        # Release scratch space allocated by _AllocateScratch.
        res = self._hs.hs_free_scratch(scratch_p[0])
        if res != self._hs.HS_SUCCESS:
            raise RuntimeError("Error while freeing scratch!")
        return True

    def _EnsureMode(self, mode):
        # The database is compiled for a specific mode; recompile when the
        # requested mode differs from the current one.
        if self.mode != mode:
            self.mode = mode
            self._CompilePatterns(self.patterns, self.flags, self.mode)

    def ScanBlock(self, data, callback=None):
        """Scans a single block of data for the patterns."""
        self._EnsureMode(self._hs.HS_MODE_BLOCK)
        scratch_p = self._AllocateScratch()
        hits = []
        @self._ffi.callback(
            "int(unsigned int id, unsigned long long from, unsigned long long to, "
            "unsigned int flags, void *ctx)")
        def _MatchCallback(pat_id, from_off, to_off, flags, ctx):
            # Record that at least one match occurred.
            if not hits:
                hits.append(True)
            if callback:
                ret = callback(pat_id, from_off, to_off, flags, ctx)
                # NOTE(review): 'long' exists only on Python 2 -- confirm the
                # intended runtime before using this under Python 3.
                if isinstance(ret, (int, long)):
                    return ret
            return 0
        self._hs.hs_scan(self._database_p[0], data, len(data), 0, scratch_p[0],
                         _MatchCallback, self._ffi.cast("void *", 0))
        res = self._hs.hs_free_scratch(scratch_p[0])
        if res != self._hs.HS_SUCCESS:
            raise RuntimeError("Error freeing scratch (%d)!" % res)
        return bool(hits)

    def OpenStream(self, callback):
        """Opens a stream for scanning."""
        # Only one open stream per instance is supported.
        try:
            _ = self._stream_p
            raise RuntimeError("There is already an open stream.")
        except AttributeError:
            pass
        self._EnsureMode(self._hs.HS_MODE_STREAM)
        scratch_p = self._AllocateScratch()
        stream_p = self._ffi.new("hs_stream_t **")
        res = self._hs.hs_open_stream(self._database_p[0], 0, stream_p)
        if res != self._hs.HS_SUCCESS:
            raise RuntimeError("Could not open stream (%d)!" % res)
        @self._ffi.callback(
            "int(unsigned int id, unsigned long long from, unsigned long long to, "
            "unsigned int flags, void *ctx)")
        def _MatchCallback(pat_id, from_off, to_off, flags, ctx):
            if callback:
                ret = callback(pat_id, from_off, to_off, flags, ctx)
                # NOTE(review): 'long' exists only on Python 2.
                if isinstance(ret, (int, long)):
                    return ret
            return 0
        # Keep references on self: the cffi callback object must stay alive
        # for as long as the stream can fire it.
        self._stream_callback = _MatchCallback
        self._stream_p = stream_p
        self._scratch_p = scratch_p
        return self

    def StreamScan(self, data):
        # Scan *data* as the next chunk of the currently open stream.
        try:
            self._stream_p
        except AttributeError:
            raise RuntimeError("Stream has not been started yet.")
        res = self._hs.hs_scan_stream(self._stream_p[0], data, len(data), 0,
                                      self._scratch_p[0], self._stream_callback,
                                      self._ffi.cast("void *", 0))
        if res != self._hs.HS_SUCCESS:
            raise RuntimeError("Error while scanning (%d)!" % res)

    def CloseStream(self):
        """Closes the stream opened by OpenStream."""
        try:
            self._stream_p
        except AttributeError:
            raise RuntimeError("Stream has not been started yet.")
        res = self._hs.hs_close_stream(self._stream_p[0], self._scratch_p[0],
                                       self._stream_callback,
                                       self._ffi.cast("void *", 0))
        if res != self._hs.HS_SUCCESS:
            raise RuntimeError("Error while closing stream (%d)!" % res)
        self._FreeScratch(self._scratch_p)
        # Drop stream state so a new stream can be opened later.
        del self._scratch_p
        del self._stream_p
        del self._stream_callback

    def __del__(self):
        # Best-effort cleanup; _database_p may never have been created when
        # __init__ failed early.
        try:
            self._hs.hs_free_database(self._database_p[0])
        except AttributeError:
            pass

    def __enter__(self):
        return self

    def __exit__(self, unused_type, unused_value, unused_traceback):
        self.CloseStream()
| |
r"""wamp is a module that provide classes that extend any
WAMP related class for the purpose of vtkWeb.
"""
import inspect, types, string, random, logging, six, json, re, base64
from threading import Timer
from twisted.web import resource
from twisted.python import log
from twisted.internet import reactor
from twisted.internet import defer
from twisted.internet.defer import Deferred, returnValue
from autobahn import wamp
from autobahn import util
from autobahn.wamp import types
from autobahn.wamp import auth
from autobahn.wamp import register as exportRpc
from autobahn.twisted.wamp import ApplicationSession, RouterSession
from autobahn.twisted.websocket import WampWebSocketServerFactory
from autobahn.twisted.websocket import WampWebSocketServerProtocol
from autobahn.twisted.websocket import WebSocketServerProtocol
from vtk.web import protocols
try:
from vtkWebCore import vtkWebApplication
except:
from vtkWebCorePython import vtkWebApplication
# =============================================================================
# 32-character random alphanumeric string generated once at import time.
salt = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(32))
# Process-wide vtkWebApplication singleton; created lazily in
# ServerProtocol.initApplication().
application = None
# Process-wide image delivery protocol instance, shared by all sessions.
imageCapture = None
# =============================================================================
#
# Base class for vtkWeb WampServerProtocol
#
# =============================================================================
class ServerProtocol(ApplicationSession):
    """
    Defines the core server protocol for vtkWeb. Adds support to
    marshall/unmarshall RPC callbacks that involve ServerManager proxies as
    arguments or return values.

    Applications typically don't use this class directly, since it doesn't
    register any RPC callbacks that are required for basic web-applications with
    interactive visualizations. For that, use vtkWebServerProtocol.
    """

    def __init__(self, config):
        ApplicationSession.__init__(self, config)
        # vtkWeb protocol objects attached via registerVtkWebProtocol().
        self.vtkWebProtocols = []
        self.authdb = None
        self.secret = None
        self.Application = self.initApplication()
        self.initialize()
        # Init Binary WebSocket image renderer
        global imageCapture
        imageCapture = protocols.vtkWebViewPortImageDelivery()
        imageCapture.setApplication(self.Application)

    def setAuthDB(self, db):
        # Attach the authentication database and push any previously stored
        # secret into it so the 'vtkweb' key stays current.
        self.authdb = db
        if self.secret:
            self.authdb.updateKey('vtkweb', self.secret)

    def initialize(self):
        """
        Let the sub class define what they need to do to properly initialize
        themselves.
        """
        pass

    def initApplication(self):
        """
        Let subclass optionally initialize a custom application in lieu
        of the default vtkWebApplication.
        """
        # Uses the module-level singleton; created only on first call.
        global application
        if not application:
            application = vtkWebApplication()
        return application

    def onJoin(self, details):
        # Register RPC endpoints for this session and every attached protocol.
        ApplicationSession.onJoin(self, details)
        self.register(self)
        for protocol in self.vtkWebProtocols:
            self.register(protocol)

    def setApplication(self, application):
        self.Application = application
        # Init Binary WebSocket image renderer
        global imageCapture
        imageCapture.setApplication(self.Application)

    def registerVtkWebProtocol(self, protocol):
        """Attach a vtkWeb protocol object and bind it to this server."""
        protocol.coreServer = self
        protocol.setApplication(self.Application)
        self.vtkWebProtocols.append(protocol)

    def getVtkWebProtocols(self):
        """Return the list of attached vtkWeb protocol objects."""
        return self.vtkWebProtocols

    def updateSecret(self, newSecret):
        """Store the shared secret and propagate it to the auth DB."""
        self.secret = newSecret
        if self.authdb:
            self.authdb.updateKey('vtkweb', self.secret)

    @exportRpc("application.exit")
    def exit(self):
        """RPC callback to exit"""
        reactor.stop()

    @exportRpc("application.exit.later")
    def exitLater(self, secondsLater=60):
        """RPC callback to exit after a short delay"""
        reactor.callLater(secondsLater, reactor.stop)
# =============================================================================
#
# Base class for vtkWeb WampServerFactory
#
# =============================================================================
class TimeoutWampWebSocketServerFactory(WampWebSocketServerFactory):
    """
    TimeoutWampWebSocketServerFactory is WampWebSocketServerFactory subclass
    that adds support to close the web-server after a timeout when the last
    connected client drops.

    Currently, the protocol must call connectionMade() and connectionLost() methods
    to notify the factory that the connection was started/closed.

    If the connection count drops to zero, then the reap timer
    is started which will end the process if no other connections are made in
    the timeout interval.
    """

    def __init__(self, factory, *args, **kwargs):
        """*kwargs* must contain 'timeout' (seconds); it is consumed here
        and not forwarded to the base class."""
        self._connection_count = 0
        # IDIOM FIX: pop() reads and removes 'timeout' in one step, replacing
        # the original read-then-del sequence.
        self._timeout = kwargs.pop('timeout')
        # Start the reaper immediately so the process still exits after the
        # timeout even if no client ever connects.
        self._reaper = reactor.callLater(self._timeout, lambda: reactor.stop())
        WampWebSocketServerFactory.__init__(self, factory, *args, **kwargs)
        # NOTE(review): this assigns on the *base class*, affecting every
        # WampWebSocketServerFactory in the process; kept as-is to preserve
        # existing behavior.
        WampWebSocketServerFactory.protocol = TimeoutWampWebSocketServerProtocol

    def connectionMade(self):
        """Called by the protocol when a client connects; cancels the reaper."""
        if self._reaper:
            log.msg("Client has reconnected, cancelling reaper", logLevel=logging.DEBUG)
            self._reaper.cancel()
            self._reaper = None
        self._connection_count += 1
        log.msg("on_connect: connection count = %s" % self._connection_count, logLevel=logging.DEBUG)

    def connectionLost(self, reason):
        """Called by the protocol when a client disconnects; restarts the
        reap timer when the last client drops."""
        if self._connection_count > 0:
            self._connection_count -= 1
        log.msg("connection_lost: connection count = %s" % self._connection_count, logLevel=logging.DEBUG)
        if self._connection_count == 0 and not self._reaper:
            log.msg("Starting timer, process will terminate in: %ssec" % self._timeout, logLevel=logging.DEBUG)
            self._reaper = reactor.callLater(self._timeout, lambda: reactor.stop())
# =============================================================================
class TimeoutWampWebSocketServerProtocol(WampWebSocketServerProtocol):
    """WebSocket protocol that reports connection open/close events to its
    factory so the factory can manage its shutdown timer."""

    def connectionMade(self):
        super(TimeoutWampWebSocketServerProtocol, self).connectionMade()
        self.factory.connectionMade()

    def connectionLost(self, reason):
        super(TimeoutWampWebSocketServerProtocol, self).connectionLost(reason)
        self.factory.connectionLost(reason)
# =============================================================================
class AuthDb:
    """
    An in-memory-only user database of a single user.
    """
    # PBKDF2 parameters shared with the WAMP-CRA challenge.
    AUTHEXTRA = {'salt': 'salt123', 'keylen': 32, 'iterations': 1000}

    def __init__(self):
        # Seed the database with the single built-in 'vtkweb' user.
        derived = auth.derive_key("vtkweb-secret", self.AUTHEXTRA['salt'])
        self._creds = {'vtkweb': derived}

    def get(self, authid):
        ## we return a deferred to simulate an asynchronous lookup
        key = self._creds.get(authid, None)
        return defer.succeed(key)

    def updateKey(self, id, newKey):
        """Replace the derived key stored for *id*."""
        self._creds[id] = auth.derive_key(newKey, self.AUTHEXTRA['salt'])
# =============================================================================
class PendingAuth:
    """
    Used for tracking pending authentications.
    """

    def __init__(self, key, session, authid, authrole, authmethod, authprovider):
        self.authid = authid
        self.authrole = authrole
        self.authmethod = authmethod
        self.authprovider = authprovider
        self.session = session
        self.timestamp = util.utcnow()
        self.nonce = util.newid()
        # The challenge is a JSON document that the client must sign.
        self.challenge = json.dumps({
            'authid': self.authid,
            'authrole': self.authrole,
            'authmethod': self.authmethod,
            'authprovider': self.authprovider,
            'session': self.session,
            'nonce': self.nonce,
            'timestamp': self.timestamp
        })
        # Expected signature, computed server-side from the user's key.
        self.signature = auth.compute_wcs(key, self.challenge)
# =============================================================================
class CustomWampCraRouterSession(RouterSession):
    """
    A custom router session that authenticates via WAMP-CRA.
    """

    def __init__(self, routerFactory):
        """
        Constructor.
        """
        RouterSession.__init__(self, routerFactory)

    @defer.inlineCallbacks
    def onHello(self, realm, details):
        """
        Callback fired when client wants to attach session.
        """
        self._pending_auth = None
        if details.authmethods:
            for authmethod in details.authmethods:
                if authmethod == u"wampcra":
                    authdb = self.factory.authdb
                    ## lookup user in user DB
                    key = yield authdb.get(details.authid)
                    ## if user found ..
                    if key:
                        ## setup pending auth
                        self._pending_auth = PendingAuth(key, details.pending_session,
                                                         details.authid, "user", authmethod, "authdb")
                        ## send challenge to client
                        extra = { 'challenge': self._pending_auth.challenge }
                        ## when using salted passwords, provide the client with
                        ## the salt and then PBKDF2 parameters used
                        # CONSISTENCY FIX: take the PBKDF2 parameters from the
                        # auth DB instead of hard-coding 1000/32 here, so they
                        # cannot drift from the values used to derive the
                        # stored keys (AuthDb.AUTHEXTRA).
                        extra['salt'] = authdb.AUTHEXTRA['salt']
                        extra['iterations'] = authdb.AUTHEXTRA['iterations']
                        extra['keylen'] = authdb.AUTHEXTRA['keylen']
                        defer.returnValue(types.Challenge('wampcra', extra))
        ## deny client
        defer.returnValue(types.Deny())

    def onAuthenticate(self, signature, extra):
        """
        Callback fired when a client responds to an authentication challenge.
        """
        ## if there is a pending auth, and the signature provided by client matches ..
        if self._pending_auth and signature == self._pending_auth.signature:
            ## accept the client
            return types.Accept(authid = self._pending_auth.authid,
                                authrole = self._pending_auth.authrole,
                                authmethod = self._pending_auth.authmethod,
                                authprovider = self._pending_auth.authprovider)
        ## deny client
        return types.Deny()
# =============================================================================
# Simple web server endpoint handling POST requests to execute rpc methods
# =============================================================================
class HttpRpcResource(resource.Resource, object):
    """Twisted web resource that maps POSTed JSON RPC calls onto the
    WAMP-decorated methods of a vtkWeb server protocol."""

    def __init__(self, serverProtocol, endpointRootPath):
        super(HttpRpcResource, self).__init__()
        self.functionMap = {}
        # Matches "<root>/<method>" and captures the method-name segment.
        self.urlMatcher = re.compile(endpointRootPath.strip('/') + '/([^/]+)')

        # Register every endpoint method, including the ones on the server
        # protocol itself (so e.g. the exit methods get "registered").
        allProtocols = serverProtocol.getVtkWebProtocols()
        allProtocols.append(serverProtocol)
        isCallable = lambda m: inspect.ismethod(m) or inspect.isfunction(m)
        for protocolObject in allProtocols:
            for _name, proc in inspect.getmembers(protocolObject.__class__, isCallable):
                if "_wampuris" in proc.__dict__:
                    pattern = proc.__dict__["_wampuris"][0]
                    if pattern.is_endpoint():
                        self.functionMap[pattern.uri()] = (protocolObject, proc)

    def extractRpcMethod(self, path):
        """Return the method-name portion of *path*, or None if no match."""
        match = self.urlMatcher.search(path)
        return match.group(1) if match else None

    def getChild(self, path, request):
        # This resource handles the whole subtree itself.
        return self

    def render_POST(self, request):
        """Decode the JSON payload, dispatch to the mapped RPC method and
        return the JSON-encoded result."""
        payload = json.loads(request.content.getvalue())
        rpcArgs = payload['args']
        methodName = self.extractRpcMethod(request.path)
        handler, method = self.functionMap[methodName]
        return json.dumps(method(handler, *rpcArgs))
# =============================================================================
# Binary WebSocket image push protocol
# =============================================================================
class ImagePushBinaryWebSocketServerProtocol(WebSocketServerProtocol):
    """WebSocket protocol that pushes rendered view images to the client.

    Clients register views (by id) via JSON messages; the server then runs a
    render loop, sending a JSON metadata frame followed by a binary image
    frame whenever a registered, enabled view has new content.
    """

    def onOpen(self):
        # imageCapture is a module-level helper installed by server setup.
        global imageCapture
        self.helper = imageCapture
        self.app = imageCapture.getApplication()
        self.deltaT = 0.015          # seconds between render-loop iterations
        self.viewToCapture = {}      # view_id -> capture settings dict
        self.renderLoop = False      # True while the periodic loop is armed

    def onMessage(self, msg, isBinary):
        """Register a view or update its capture settings from a JSON request."""
        request = json.loads(msg)
        if 'view_id' in request:
            viewId = str(request['view_id'])
            if viewId not in self.viewToCapture:
                self.viewToCapture[viewId] = { 'quality': 100, 'enabled': True, 'view': self.helper.getView(viewId), 'view_id': viewId, 'mtime': 0 }
            # Overlay any fields the client provided (quality, enabled, ...)
            objToUpdate = self.viewToCapture[viewId]
            for key in request:
                objToUpdate[key] = request[key]
            # Trigger new render loop if needed
            self.startRenderLoop()

    def onClose(self, wasClean, code, reason):
        self.viewToCapture = {}
        self.renderLoop = False

    def connectionLost(self, reason):
        # BUG FIX: this previously reset viewToCapture to a *list* ([]),
        # inconsistent with onClose(); processNextRender() would crash on
        # .iteritems() if a queued callLater fired afterwards. Use a dict.
        self.viewToCapture = {}
        self.renderLoop = False

    def startRenderLoop(self):
        """Arm the periodic render loop unless it is already running."""
        if self.renderLoop:
            return
        self.renderLoop = True
        reactor.callLater(self.deltaT, self.processNextRender)

    def processNextRender(self):
        """Render every enabled view once; re-arm while any view is enabled."""
        keepGoing = False
        for k, v in self.viewToCapture.iteritems():
            if v['enabled']:
                keepGoing = True
                view = v['view']
                if hasattr(view,'SMProxy'):
                    view = view.SMProxy
                quality = v['quality']
                mtime = v['mtime']
                # Only produces an image if the view changed since mtime.
                base64Image = self.app.StillRenderToString(view, mtime, quality)
                if base64Image:
                    v['mtime'] = self.app.GetLastStillRenderToStringMTime()
                    meta = {
                        'size': self.app.GetLastStillRenderImageSize(),
                        'id': k
                    }
                    # Metadata as text frame, then the raw image as binary.
                    self.sendMessage(json.dumps(meta), False)
                    self.sendMessage(base64.standard_b64decode(base64Image), True)
        self.renderLoop = keepGoing
        if self.renderLoop:
            reactor.callLater(self.deltaT, self.processNextRender)
| |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""
from base64 import b64encode
from binascii import hexlify, unhexlify
from decimal import Decimal, ROUND_DOWN
import json
import logging
import os
import random
import re
from subprocess import CalledProcessError
import time
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
logger = logging.getLogger("TestFramework.utils")
# Assert functions
##################
def assert_fee_amount(fee, tx_size, fee_per_kB):
    """Assert that *fee* is in the expected range for a tx of *tx_size* bytes."""
    expected = tx_size * fee_per_kB / 1000
    if fee < expected:
        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(expected)))
    # allow the wallet's estimation to be at most 2 bytes off
    if fee > (tx_size + 2) * fee_per_kB / 1000:
        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(expected)))
def assert_equal(thing1, thing2, *args):
    """Assert that all supplied values compare equal to the first one."""
    values = (thing1, thing2) + args
    if any(thing1 != other for other in values[1:]):
        raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in values))
def assert_greater_than(thing1, thing2):
    """Assert thing1 is strictly greater than thing2."""
    if thing1 <= thing2:
        raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))
def assert_greater_than_or_equal(thing1, thing2):
    """Assert thing1 >= thing2."""
    if thing1 < thing2:
        raise AssertionError("%s < %s" % (str(thing1), str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
    """Assert that fun(*args, **kwds) raises *exc*; message is not checked."""
    assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
    """Assert fun raises *exc*; when *message* is given, also check that it
    occurs in the JSON-RPC style error payload (e.error['message'])."""
    try:
        fun(*args, **kwds)
    except exc as err:
        # Expected type; optionally verify the message substring.
        if message is not None and message not in err.error['message']:
            raise AssertionError("Expected substring not found:" + err.error['message'])
    except Exception as err:
        raise AssertionError("Unexpected exception raised: " + type(err).__name__)
    else:
        raise AssertionError("No exception raised")
def assert_raises_process_error(returncode, output, fun, *args, **kwds):
    """Execute a process and asserts the process return code and output.

    Calls function `fun` with arguments `args` and `kwds`. Catches a
    CalledProcessError and verifies that the return code and output are as
    expected. Throws AssertionError if no CalledProcessError was raised or
    if the return code and output are not as expected.

    Args:
        returncode (int): the process return code.
        output (string): [a substring of] the process output.
        fun (function): the function to call. This should execute a process.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    try:
        fun(*args, **kwds)
    except CalledProcessError as err:
        if returncode != err.returncode:
            raise AssertionError("Unexpected returncode %i" % err.returncode)
        if output not in err.output:
            raise AssertionError("Expected substring not found:" + err.output)
    else:
        raise AssertionError("No exception raised")
def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
    """Run an RPC and verify that a specific JSONRPC exception code and message is raised.

    Calls function `fun` with arguments `args` and `kwds`. Catches a
    JSONRPCException and verifies that the error code and message are as
    expected. Throws AssertionError if no JSONRPCException was raised or if
    the error code/message are not as expected.

    Args:
        code (int), optional: the error code returned by the RPC call (defined
            in src/rpc/protocol.h). Set to None if checking the error code is not required.
        message (string), optional: [a substring of] the error string returned by the
            RPC call. Set to None if checking the error string is not required.
        fun (function): the function to call. This should be the name of an RPC.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    try:
        fun(*args, **kwds)
    except JSONRPCException as err:
        # Expected exception; verify whichever fields the caller asked for.
        if (code is not None) and (code != err.error["code"]):
            raise AssertionError("Unexpected JSONRPC error code %i" % err.error["code"])
        if (message is not None) and (message not in err.error['message']):
            raise AssertionError("Expected substring not found:" + err.error['message'])
    except Exception as err:
        raise AssertionError("Unexpected exception raised: " + type(err).__name__)
    else:
        raise AssertionError("No exception raised")
def assert_is_hex_string(string):
    """Assert that *string* parses as a base-16 integer."""
    try:
        int(string, 16)
    except Exception as err:
        raise AssertionError(
            "Couldn't interpret %r as hexadecimal; raised: %s" % (string, err))
def assert_is_hash_string(string, length=64):
    """Assert *string* looks like a lowercase-hex hash of *length* characters.

    Pass a falsy *length* to skip the length check.
    """
    if not isinstance(string, str):
        raise AssertionError("Expected a string, got type %r" % type(string))
    if length and len(string) != length:
        raise AssertionError(
            "String of length %d expected; got %d" % (length, len(string)))
    if not re.match('[abcdef0-9]+$', string):
        raise AssertionError(
            "String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find=False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find:
        # When asserting absence there is nothing to compare against.
        assert_equal(expected, {})
    hits = 0
    for item in object_array:
        # Evaluate every to_match key (a missing key raises KeyError).
        checks = [item[key] == value for key, value in to_match.items()]
        if not all(checks):
            continue
        if should_not_find:
            hits += 1
        for key, value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
        hits += 1
    if hits == 0 and not should_not_find:
        raise AssertionError("No objects matched %s" % (str(to_match)))
    if hits > 0 and should_not_find:
        raise AssertionError("Objects were found %s" % (str(to_match)))
# Utility functions
###################
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    value = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(value)))
    if int(round_tripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
    """Return the number of bytes encoded by *hex_string*."""
    return len(bytes.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
    """Return the lowercase hex encoding of *byte_str*."""
    encoded = hexlify(byte_str)
    return encoded.decode('ascii')
def hex_str_to_bytes(hex_str):
    """Decode a hex string into raw bytes."""
    return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
    """Return the base64 encoding of *string* (UTF-8) as an ASCII str."""
    raw = string.encode('utf-8')
    return b64encode(raw).decode('ascii')
def satoshi_round(amount):
    """Round *amount* down to the nearest satoshi (8 decimal places)."""
    one_satoshi = Decimal('0.00000001')
    return Decimal(amount).quantize(one_satoshi, rounding=ROUND_DOWN)
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
    """Poll *predicate* every 50ms until it is true, up to *attempts* tries
    or *timeout* seconds (default: 60s when neither limit is given).
    Optionally hold *lock* while evaluating the predicate."""
    if attempts == float('inf') and timeout == float('inf'):
        timeout = 60
    deadline = timeout + time.time()
    attempt = 0
    while attempt < attempts and time.time() < deadline:
        if lock:
            with lock:
                if predicate():
                    return
        else:
            if predicate():
                return
        attempt += 1
        time.sleep(0.05)

    # Report which limit was exhausted via an AssertionError.
    assert_greater_than(attempts, attempt)
    assert_greater_than(deadline, time.time())
    raise RuntimeError('Unreachable')
# RPC/P2P connection constants and functions
############################################
# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
class PortSeed:
    """Process-wide seed used to derive non-colliding p2p/rpc port numbers."""
    # Must be initialized with a unique integer for each process
    n = None
def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None):
    """
    Args:
        url (str): URL of the RPC server to call
        node_number (int): the node number (or id) that this calls to
    Kwargs:
        timeout (int): HTTP timeout in seconds
        coveragedir (str): directory for RPC-coverage logging, if any
    Returns:
        AuthServiceProxy. convenience object for making RPC calls.
    """
    kwargs = {}
    if timeout is not None:
        kwargs['timeout'] = timeout

    proxy = AuthServiceProxy(url, **kwargs)
    proxy.url = url  # store URL on proxy for info

    if coveragedir:
        coverage_logfile = coverage.get_filename(coveragedir, node_number)
    else:
        coverage_logfile = None
    return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
    """Return the p2p listen port for node *n*, offset by the process seed."""
    assert(n <= MAX_NODES)
    offset = (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
    return PORT_MIN + n + offset
def rpc_port(n):
    """Return the RPC port for node *n* (one PORT_RANGE above the p2p ports)."""
    offset = (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
    return PORT_MIN + PORT_RANGE + n + offset
def rpc_url(datadir, i, rpchost=None):
    """Build the authenticated RPC URL for node *i*; *rpchost* may override
    the default host (and optionally the port, as "host:port")."""
    rpc_u, rpc_p = get_auth_cookie(datadir)
    host = '127.0.0.1'
    port = rpc_port(i)
    if rpchost:
        parts = rpchost.split(':')
        if len(parts) == 2:
            host, port = parts
        else:
            host = rpchost
    return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
# Node functions
################
def initialize_datadir(dirname, n):
    """Create node *n*'s datadir (if missing) and write a regtest bitcoin.conf."""
    datadir = os.path.join(dirname, "node" + str(n))
    if not os.path.isdir(datadir):
        os.makedirs(datadir)
    conf_lines = [
        "regtest=1\n",
        "port=" + str(p2p_port(n)) + "\n",
        "rpcport=" + str(rpc_port(n)) + "\n",
        "listenonion=0\n",
    ]
    with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
        f.writelines(conf_lines)
    return datadir
def get_datadir_path(dirname, n):
    """Return the datadir path for node *n* without creating it."""
    return os.path.join(dirname, "node" + str(n))
def get_auth_cookie(datadir):
    """Return (user, password) for RPC auth.

    Reads rpcuser/rpcpassword from bitcoin.conf when present; the regtest
    .cookie file, when present, overrides both. Raises ValueError if no
    credentials are found.
    """
    user = None
    password = None
    conf_path = os.path.join(datadir, "bitcoin.conf")
    if os.path.isfile(conf_path):
        with open(conf_path, 'r', encoding='utf8') as f:
            for line in f:
                if line.startswith("rpcuser="):
                    assert user is None  # Ensure that there is only one rpcuser line
                    user = line.split("=")[1].strip("\n")
                if line.startswith("rpcpassword="):
                    assert password is None  # Ensure that there is only one rpcpassword line
                    password = line.split("=")[1].strip("\n")
    cookie_path = os.path.join(datadir, "regtest", ".cookie")
    if os.path.isfile(cookie_path):
        with open(cookie_path, 'r') as f:
            parts = f.read().split(':')
        user = parts[0]
        password = parts[1]
    if user is None or password is None:
        raise ValueError("No RPC credentials")
    return user, password
def log_filename(dirname, n_node, logname):
    """Return the path of *logname* inside node *n_node*'s regtest dir."""
    return os.path.join(dirname, "node" + str(n_node), "regtest", logname)
def get_bip9_status(node, key):
    """Return the bip9_softforks entry for *key* from *node*'s chain info."""
    return node.getblockchaininfo()['bip9_softforks'][key]
def set_node_times(nodes, t):
    """Set mock time *t* on every node in *nodes*."""
    for node in nodes:
        node.setmocktime(t)
def disconnect_nodes(from_connection, node_num):
    """Disconnect every peer of *from_connection* tagged "testnode<node_num>"
    in its subver, then wait (up to ~5s) for the disconnects to complete."""
    def matching_ids():
        # Test nodes advertise "testnode<n>" in their subver string.
        return [peer['id'] for peer in from_connection.getpeerinfo()
                if "testnode%d" % node_num in peer['subver']]

    for peer_id in matching_ids():
        from_connection.disconnectnode(nodeid=peer_id)

    for _ in range(50):
        if not matching_ids():
            break
        time.sleep(0.1)
    else:
        raise AssertionError("timed out waiting for disconnect")
def connect_nodes(from_connection, node_num):
    """Connect *from_connection* to node *node_num* and wait for the
    version handshake to finish."""
    ip_port = "127.0.0.1:" + str(p2p_port(node_num))
    from_connection.addnode(ip_port, "onetry")
    # poll until version handshake complete to avoid race conditions
    # with transaction relaying
    while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
        time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
    """Connect nodes[a] and nodes[b] to each other (both directions)."""
    connect_nodes(nodes[a], b)
    connect_nodes(nodes[b], a)
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same tip.

    sync_blocks needs to be called with an rpc_connections set that has least
    one node already synced to the latest, stable tip, otherwise there's a
    chance it might return before all nodes are stably synced.
    """
    # Use getblockcount() instead of waitforblockheight() to determine the
    # initial max height because the two RPCs look at different internal global
    # variables (chainActive vs latestBlock) and the former gets updated
    # earlier.
    maxheight = max(x.getblockcount() for x in rpc_connections)
    deadline = time.time() + timeout
    while time.time() <= deadline:
        tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
        if all(t["height"] == maxheight for t in tips):
            # Heights agree; the hashes must agree too or we are forked.
            if all(t["hash"] == tips[0]["hash"] for t in tips):
                return
            raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
                                 "".join("\n  {!r}".format(tip) for tip in tips)))
    raise AssertionError("Block sync to height {} timed out:{}".format(
                         maxheight, "".join("\n  {!r}".format(tip) for tip in tips)))
def sync_chain(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same best block
    """
    while timeout > 0:
        hashes = [x.getbestblockhash() for x in rpc_connections]
        if hashes == [hashes[0]] * len(hashes):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Chain sync failed: Best block hashes don't match")
def sync_mempools(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while timeout > 0:
        # Compare everyone against node 0's mempool.
        reference = set(rpc_connections[0].getrawmempool())
        matches = 1 + sum(1 for conn in rpc_connections[1:]
                          if set(conn.getrawmempool()) == reference)
        if matches == len(rpc_connections):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Mempool sync failed")
# Transaction/Block functions
#############################
def find_output(node, txid, amount):
    """
    Return index to output of txid with value amount
    Raises exception if there is none.
    """
    txdata = node.getrawtransaction(txid, 1)
    for index, output in enumerate(txdata["vout"]):
        if output["value"] == amount:
            return index
    raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
    """
    Return a random set of unspent txouts that are enough to pay amount_needed
    """
    assert(confirmations_required >= 0)
    utxo = from_node.listunspent(confirmations_required)
    random.shuffle(utxo)
    inputs = []
    total_in = Decimal("0.00000000")
    # Pop utxos until we have collected enough value (or run out).
    while total_in < amount_needed and utxo:
        txout = utxo.pop()
        total_in += txout["amount"]
        inputs.append({"txid": txout["txid"], "vout": txout["vout"], "address": txout["address"]})
    if total_in < amount_needed:
        raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
    return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
    """
    Create change output(s), return them
    """
    outputs = {}
    spent = amount_out + fee
    change = amount_in - spent
    if change > spent * 2:
        # Create an extra change output to break up big inputs
        change_address = from_node.getnewaddress()
        # Split change in two, being careful of rounding:
        outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
        change = amount_in - spent - outputs[change_address]
    if change > 0:
        outputs[from_node.getnewaddress()] = change
    return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random transaction.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    from_node = random.choice(nodes)
    to_node = random.choice(nodes)
    # Randomize the fee within [min_fee, min_fee + fee_variants*fee_increment].
    fee = min_fee + fee_increment * random.randint(0, fee_variants)

    (total_in, inputs) = gather_inputs(from_node, amount + fee)
    outputs = make_change(from_node, total_in, amount, fee)
    outputs[to_node.getnewaddress()] = float(amount)

    rawtx = from_node.createrawtransaction(inputs, outputs)
    signresult = from_node.signrawtransaction(rawtx)
    txid = from_node.sendrawtransaction(signresult["hex"], True)
    return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
    """Ensure *node* has at least *count* confirmed utxos and return them.

    Pass in a fee that is sufficient for relay and mining new transactions.
    """
    # Mine enough blocks to have spendable coinbase outputs.
    to_generate = int(0.5 * count) + 101
    while to_generate > 0:
        node.generate(min(25, to_generate))
        to_generate -= 25
    utxos = node.listunspent()
    iterations = count - len(utxos)
    addr1 = node.getnewaddress()
    addr2 = node.getnewaddress()
    if iterations <= 0:
        return utxos
    # Split one utxo into two per iteration until we have enough.
    for _ in range(iterations):
        utxo = utxos.pop()
        inputs = [{"txid": utxo["txid"], "vout": utxo["vout"]}]
        send_value = utxo['amount'] - fee
        outputs = {addr1: satoshi_round(send_value / 2),
                   addr2: satoshi_round(send_value / 2)}
        raw_tx = node.createrawtransaction(inputs, outputs)
        signed_tx = node.signrawtransaction(raw_tx)["hex"]
        node.sendrawtransaction(signed_tx)

    # Confirm everything still sitting in the mempool.
    while (node.getmempoolinfo()['size'] > 0):
        node.generate(1)

    utxos = node.listunspent()
    assert(len(utxos) >= count)
    return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
    """Return a hex blob of 128 large OP_RETURN txouts that can be appended
    to a transaction to make it large (helper for constructing large
    transactions, so fewer fit into each block)."""
    # One script_pubkey: OP_RETURN OP_PUSH2 512, followed by 512 0x01 bytes.
    script_pubkey = "6a4d0200" + "01" * 512
    # One serialized txout: 8-byte zero value, compact-size script length
    # (0x0204 little-endian = 516), then the script itself.
    txout = "0000000000000000" + "fd0402" + script_pubkey
    # "81" leads the blob; presumably the varint output count (129 =
    # these 128 plus the change output spliced in by the caller) -- see
    # create_lots_of_big_transactions.
    return "81" + txout * 128
def create_tx(node, coinbase, to_address, amount):
    """Build and sign a 1-in/1-out tx spending output 0 of *coinbase*;
    returns the signed hex."""
    inputs = [{"txid": coinbase, "vout": 0}]
    rawtx = node.createrawtransaction(inputs, {to_address: amount})
    signresult = node.signrawtransaction(rawtx)
    assert_equal(signresult["complete"], True)
    return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
    """Spend *num* utxos, splicing *txouts* into each raw transaction to
    make it large (see gen_return_txouts above). Returns the txids."""
    addr = node.getnewaddress()
    txids = []
    for _ in range(num):
        utxo = utxos.pop()
        inputs = [{"txid": utxo["txid"], "vout": utxo["vout"]}]
        outputs = {addr: satoshi_round(utxo['amount'] - fee)}
        rawtx = node.createrawtransaction(inputs, outputs)
        # Splice the big txouts into the serialized tx at hex offset 92,
        # dropping the two hex chars there (presumably the original 1-byte
        # output count, replaced by the "81" prefix in txouts).
        padded = rawtx[0:92] + txouts + rawtx[94:]
        signresult = node.signrawtransaction(padded, None, None, "NONE")
        txid = node.sendrawtransaction(signresult["hex"], True)
        txids.append(txid)
    return txids
def mine_large_block(node, utxos=None):
    """Mine one block stuffed with 14 large transactions.

    generate a 66k transaction, and 14 of them is close to the 1MB block
    limit.
    """
    num = 14
    big_txouts = gen_return_txouts()
    utxos = utxos if utxos is not None else []
    if len(utxos) < num:
        # Not enough utxos handed in; fall back to whatever the node has.
        utxos.clear()
        utxos.extend(node.listunspent())
    fee = 100 * node.getnetworkinfo()["relayfee"]
    create_lots_of_big_transactions(node, big_txouts, utxos, num, fee=fee)
    node.generate(1)
| |
# -*- coding: utf-8 -*-
import flask
from functools import wraps
import logging
import database
import importscan
import attacks
#-----------------------------------------------------------------------------
# WEB SERVER
#-----------------------------------------------------------------------------
# Module-level Flask application; the route handlers below register on it.
app = flask.Flask(__name__)
def get_project_db(pid):
    """
    Get our project database record; 404s when the id is unknown.
    """
    project = database.ProjectDatabase().get_project(pid)
    if project is None:
        flask.abort(404)
    return project
@app.route("/")
def index():
    """Render the landing page."""
    return flask.render_template('index.html')
@app.route("/about")
def about():
    """Render the about page."""
    return flask.render_template('about.html')
@app.route("/project/<pid>/hosts")
def hosts(pid):
    """
    Show summary information about all imported hosts.
    """
    project = get_project_db(pid)
    db = database.ScanDatabase(project['dbfile'])
    return flask.render_template('hosts.html', pid=pid, name=project['name'],
                                 hosts=db.get_summary(), unique=db.get_unique())
@app.route('/project/<pid>/host/<ip>', methods=['GET', 'POST'])
def host(pid, ip):
    """
    Display all the information about a host; a POST updates the host note
    before rendering.
    """
    project = get_project_db(pid)
    db = database.ScanDatabase(project['dbfile'])
    if flask.request.method == 'POST':
        db.hostdb.update_host_note(ip, flask.request.form['note'])
    data = db.get_host_details(ip)
    if data is None:
        flask.abort(404)
    # Group item notes by "port/protocol". (The original if/else here had
    # byte-identical branches; setdefault does the same thing in one step.)
    details = {}
    for item in data['items']:
        key = "{0}/{1}".format(item['port'], item['protocol'])
        details.setdefault(key, []).append(item['note'])
    # Sort the port/protocol keys numerically by port.
    keys = sorted(details.keys(), key=lambda x: int(x.split('/')[0]))
    return flask.render_template('host.html', pid=pid, host=ip,
                                 details=details, keys=keys, note=data['note'],
                                 name=project['name'])
@app.route('/project/<pid>/host/notes')
def host_notes(pid):
    """
    Display all host notes.
    """
    project = get_project_db(pid)
    db = database.ScanDatabase(project['dbfile'])
    return flask.render_template('notes.html', pid=pid,
                                 notes=db.hostdb.get_host_notes(),
                                 name=project['name'])
@app.route('/project/<pid>/item/<item_id>')
def item(pid, item_id):
    """
    Get all the information about an item; 404s when the item is unknown.
    """
    project = get_project_db(pid)
    db = database.ScanDatabase(project['dbfile'])
    record = db.itemdb.get_item(item_id)
    if record is None:
        flask.abort(404)
    return flask.render_template('item.html', pid=pid, item=record,
                                 name=project['name'])
@app.route('/project/<pid>/attack/<aid>', methods=['GET', 'POST'])
def get_attack(pid, aid):
    """
    Show the hosts possibly vulnerable to an attack; a POST updates the
    attack note first.
    """
    project = get_project_db(pid)
    db = database.ScanDatabase(project['dbfile'])
    if flask.request.method == 'POST':
        db.attackdb.update_attack_note(aid, flask.request.form['note'])
    attack = db.attackdb.get_attack(aid)
    if attack is None:
        flask.abort(404)
    # Items are stored as a comma-separated list of colon-joined fields.
    items = [entry.split(':') for entry in attack['items'].split(',')]
    return flask.render_template('attack.html', pid=pid, attack=attack,
                                 items=items, name=project['name'])
@app.route('/project/<pid>/import', methods=['GET', 'POST'])
def import_scan(pid):
    """
    GET: show the import page. POST: import uploaded scan files into the
    database associated with the pid, then recompute attacks.
    """
    project = get_project_db(pid)
    db = database.ScanDatabase(project['dbfile'])
    if flask.request.method == 'GET':
        return flask.render_template('import.html', pid=pid,
                                     files=db.importdb.get_imported_files(),
                                     name=project['name'])
    importer = importscan.Import(project['dbfile'])
    for scan in flask.request.files.getlist("scans[]"):
        # Record the filename only when the scan parsed successfully.
        if importer.import_scan(scan.read()) is True:
            db.importdb.add_import_file(scan.filename)
    # Re-run attack detection over the freshly imported data.
    attacks.Attack(project['dbfile']).find_attacks()
    return flask.redirect(flask.url_for('get_project', pid=pid))
@app.route('/project/<pid>/attack/notes')
def attack_notes(pid):
    """
    Display all attack notes.
    """
    project = get_project_db(pid)
    db = database.ScanDatabase(project['dbfile'])
    return flask.render_template('notes.html', pid=pid,
                                 notes=db.attackdb.get_attack_notes(),
                                 name=project['name'])
@app.route('/projects', methods=['GET', 'POST'])
def projects():
    """
    List all projects (a POST creates a new project first).
    """
    pdb = database.ProjectDatabase()
    if flask.request.method == 'POST':
        pdb.create_project(flask.request.form['project_name'])
    project_list = pdb.get_projects()
    # Per-project scan statistics, keyed by project id.
    stats = {}
    for project in project_list:
        stats[project['id']] = database.ScanDatabase(project['dbfile']).get_stats()
    return flask.render_template('projects.html', projects=project_list, stats=stats)
@app.route('/project/<pid>')
def get_project(pid):
    """
    Get a project, including the list of hosts attacks.
    """
    project = get_project_db(pid)
    db = database.ScanDatabase(project['dbfile'])
    # Named to avoid shadowing the imported 'attacks' module.
    attack_list = db.attackdb.get_attacks()
    return flask.render_template('project.html', pid=pid, note=project['note'],
                                 name=project['name'], attacks=attack_list)
@app.route('/project/<pid>/notes', methods=['GET', 'POST'])
def project_notes(pid):
    """
    Display the project note; a POST updates it and redirects back to the
    project page.
    """
    pdb = database.ProjectDatabase()
    project = get_project_db(pid)
    if flask.request.method != 'POST':
        return flask.render_template('project_notes.html', pid=pid,
                                     name=project['name'], note=project['note'])
    pdb.update_project_note(pid, flask.request.form['note'])
    return flask.redirect(flask.url_for('get_project', pid=pid))
@app.route('/project/<pid>/delete')
def delete_project(pid):
    """
    Delete the specified project, then return to the project list.
    """
    pdb = database.ProjectDatabase()
    # Return value intentionally ignored (was bound to an unused variable).
    pdb.delete_project(pid)
    return flask.redirect(flask.url_for('projects'))
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page."""
    return flask.render_template('404.html'), 404
@app.errorhandler(500)
def inernal_error(e):
    # NOTE(review): function name has a typo ("internal"); only the decorator
    # references it, so renaming would be safe -- left as-is here.
    """Render the custom 500 page."""
    return flask.render_template('500.html'), 500
| |
'''
@author: shylent
'''
from tftp.datagram import (ACKDatagram, ERRORDatagram, OP_DATA, OP_ERROR, ERR_ILLEGAL_OP,
ERR_DISK_FULL, OP_ACK, DATADatagram, ERR_NOT_DEFINED,)
from tftp.util import SequentialCall
from twisted.internet import reactor
from twisted.internet.defer import maybeDeferred
from twisted.internet.protocol import DatagramProtocol
from twisted.python import log
MAX_BLOCK_SIZE = 8192
class WriteSession(DatagramProtocol):
    """Represents a transfer, during which we write to a local file. If we are a
    server, this means, that we received a WRQ (write request). If we are a client,
    this means, that we have requested a read from a remote server.
    @cvar block_size: Expected block size. If a data chunk is received and its length
    is less, than C{block_size}, it is assumed that that data chunk is the last in the
    transfer. Default: 512 (as per U{RFC1350<http://tools.ietf.org/html/rfc1350>})
    @type block_size: C{int}.
    @cvar timeout: An iterable, that yields timeout values for every subsequent
    ACKDatagram, that we've sent, that is not followed by the next data chunk.
    When (if) the iterable is exhausted, the transfer is considered failed.
    @type timeout: any iterable
    @ivar started: whether or not this protocol has started
    @type started: C{bool}
    """
    block_size = 512
    timeout = (1, 3, 7)
    # Expected transfer size ('tsize' option), when negotiated; None if unknown.
    tsize = None
    def __init__(self, writer, _clock=None):
        # Backend object the received data is handed to; must provide
        # write(), finish() and cancel().
        self.writer = writer
        # Number of the last successfully accepted block (0 before any data).
        self.blocknum = 0
        # True once the final (short) data block has been written.
        self.completed = False
        self.started = False
        # Active SequentialCall re-sending the last ACK, or None.
        self.timeout_watchdog = None
        # _clock is injectable for tests; defaults to the global reactor.
        if _clock is None:
            self._clock = reactor
        else:
            self._clock = _clock
    def cancel(self):
        """Cancel this session, discard any data, that was collected
        and give up the connector.
        """
        # Stop any pending ACK-retry timer before tearing down.
        if self.timeout_watchdog is not None and self.timeout_watchdog.active():
            self.timeout_watchdog.cancel()
        self.writer.cancel()
        self.transport.stopListening()
    def startProtocol(self):
        """Mark the session as started once the transport is connected."""
        self.started = True
    def connectionRefused(self):
        """ICMP port unreachable: abandon the transfer unless it already
        completed successfully."""
        if not self.completed:
            self.writer.cancel()
        self.transport.stopListening()
    def datagramReceived(self, datagram):
        """Dispatch an incoming (already parsed) TFTP datagram by opcode."""
        if datagram.opcode == OP_DATA:
            return self.tftp_DATA(datagram)
        elif datagram.opcode == OP_ERROR:
            log.msg("Got error: %s" % datagram)
            self.cancel()
    def tftp_DATA(self, datagram):
        """Handle incoming DATA TFTP datagram
        @type datagram: L{DATADatagram}
        """
        next_blocknum = self.blocknum + 1
        # A stale (duplicate) block: just re-ACK it so the peer moves on.
        if datagram.blocknum < next_blocknum:
            self.transport.write(ACKDatagram(datagram.blocknum).to_wire())
        elif datagram.blocknum == next_blocknum:
            # Data after the final short block is a protocol violation.
            if self.completed:
                self.transport.write(ERRORDatagram.from_code(
                    ERR_ILLEGAL_OP, "Transfer already finished").to_wire())
            else:
                return self.nextBlock(datagram)
        else:
            # A block from the future: the peer is out of sync.
            self.transport.write(ERRORDatagram.from_code(
                ERR_ILLEGAL_OP, "Block number mismatch").to_wire())
    def nextBlock(self, datagram):
        """Handle fresh data, attempt to write it to backend
        @type datagram: L{DATADatagram}
        """
        # New data arrived, so the previous ACK no longer needs re-sending.
        if self.timeout_watchdog is not None and self.timeout_watchdog.active():
            self.timeout_watchdog.cancel()
        self.blocknum += 1
        # writer.write may be synchronous or return a Deferred.
        d = maybeDeferred(self.writer.write, datagram.data)
        d.addCallbacks(callback=self.blockWriteSuccess, callbackArgs=[datagram, ],
                       errback=self.blockWriteFailure)
        return d
    def blockWriteSuccess(self, ign, datagram):
        """The write was successful, respond with ACK for current block number
        If this is the last chunk (received data length < block size), the protocol
        will keep running until the end of current timeout period, so we can respond
        to any duplicates.
        @type datagram: L{DATADatagram}
        """
        bytes = ACKDatagram(datagram.blocknum).to_wire()
        # Re-send the ACK at each timeout value; the final timeout value is
        # reserved for the terminal timedOut() call.
        self.timeout_watchdog = SequentialCall.run(self.timeout[:-1],
            callable=self.sendData, callable_args=[bytes, ],
            on_timeout=lambda: self._clock.callLater(self.timeout[-1], self.timedOut),
            run_now=True,
            _clock=self._clock
        )
        # A short block signals the end of the transfer.
        if len(datagram.data) < self.block_size:
            self.completed = True
            self.writer.finish()
            # TODO: If self.tsize is not None, compare it with the actual
            # count of bytes written. Log if there's a mismatch. Should it
            # also emit an error datagram?
    def blockWriteFailure(self, failure):
        """Write failed"""
        log.err(failure)
        # Report the (assumed storage-related) failure to the peer and abort.
        self.transport.write(ERRORDatagram.from_code(ERR_DISK_FULL).to_wire())
        self.cancel()
    def timedOut(self):
        """Called when the protocol has timed out. Let the backend know, if the
        the transfer was successful.
        """
        if not self.completed:
            log.msg("Timed out while waiting for next block")
            self.writer.cancel()
        else:
            log.msg("Timed out after a successful transfer")
        self.transport.stopListening()
    def sendData(self, bytes):
        """Send data to the remote peer
        @param bytes: bytes to send
        @type bytes: C{str}
        """
        self.transport.write(bytes)
class ReadSession(DatagramProtocol):
    """Represents a transfer, during which we read from a local file
    (and write to the network). If we are a server, this means, that we've received
    a RRQ (read request). If we are a client, this means that we've requested to
    write to a remote server.
    @cvar block_size: The data will be sent in chunks of this size. If we send
    a chunk with the size < C{block_size}, the transfer will end.
    Default: 512 (as per U{RFC1350<http://tools.ietf.org/html/rfc1350>})
    @type block_size: C{int}
    @cvar timeout: An iterable, that yields timeout values for every subsequent
    unacknowledged DATADatagram, that we've sent. When (if) the iterable is exhausted,
    the transfer is considered failed.
    @type timeout: any iterable
    @ivar started: whether or not this protocol has started
    @type started: C{bool}
    """
    block_size = 512
    timeout = (1, 3, 7)
    def __init__(self, reader, _clock=None):
        # reader: backend object exposing read(size)/finish().
        # _clock is injectable for tests; defaults to the global reactor.
        self.reader = reader
        self.blocknum = 0
        self.started = False
        self.completed = False
        self.timeout_watchdog = None
        if _clock is None:
            self._clock = reactor
        else:
            self._clock = _clock
    def cancel(self):
        """Tell the reader to give up the resources. Stop the timeout cycle
        and disconnect the transport.
        """
        self.reader.finish()
        if self.timeout_watchdog is not None and self.timeout_watchdog.active():
            self.timeout_watchdog.cancel()
        self.transport.stopListening()
    def startProtocol(self):
        # Mark the session live once the transport is connected.
        self.started = True
    def connectionRefused(self):
        # NOTE(review): ReadSession defines no finish() method - this looks
        # like it should be self.cancel(); confirm against the class
        # hierarchy before relying on it.
        self.finish()
    def datagramReceived(self, datagram):
        # Dispatch on opcode: ACKs drive the transfer forward, errors
        # abort it; any other opcode is silently ignored.
        if datagram.opcode == OP_ACK:
            return self.tftp_ACK(datagram)
        elif datagram.opcode == OP_ERROR:
            log.msg("Got error: %s" % datagram)
            self.cancel()
    def tftp_ACK(self, datagram):
        """Handle the incoming ACK TFTP datagram.
        @type datagram: L{ACKDatagram}
        """
        # A stale ACK is logged but NOT answered with a retransmit - this
        # avoids the "Sorcerer's Apprentice" duplication loop.
        if datagram.blocknum < self.blocknum:
            log.msg("Duplicate ACK for blocknum %s" % datagram.blocknum)
        elif datagram.blocknum == self.blocknum:
            if self.timeout_watchdog is not None and self.timeout_watchdog.active():
                self.timeout_watchdog.cancel()
            if self.completed:
                log.msg("Final ACK received, transfer successful")
                self.cancel()
            else:
                return self.nextBlock()
        else:
            # ACK for a block we have not sent yet - protocol violation.
            self.transport.write(ERRORDatagram.from_code(
                ERR_ILLEGAL_OP, "Block number mismatch").to_wire())
    def nextBlock(self):
        """ACK datagram for the previous block has been received. Attempt to read
        the next block, that will be sent.
        """
        self.blocknum += 1
        # The backend read may be synchronous or return a Deferred.
        d = maybeDeferred(self.reader.read, self.block_size)
        d.addCallbacks(callback=self.dataFromReader, errback=self.readFailed)
        return d
    def dataFromReader(self, data):
        """Got data from the reader. Send it to the network and start the timeout
        cycle.
        """
        # reached maximum number of blocks. Rolling over
        # (TFTP block numbers are 16-bit; wrap to keep long transfers going)
        if self.blocknum == 65536:
            self.blocknum = 0
        # A short read means the backend is exhausted - this is the final
        # block of the transfer.
        if len(data) < self.block_size:
            self.completed = True
        bytes = DATADatagram(self.blocknum, data).to_wire()
        # Send now and retransmit on each interval in timeout[:-1]; after
        # the final interval, schedule timedOut() to end the session.
        self.timeout_watchdog = SequentialCall.run(self.timeout[:-1],
            callable=self.sendData, callable_args=[bytes, ],
            on_timeout=lambda: self._clock.callLater(self.timeout[-1], self.timedOut),
            run_now=True,
            _clock=self._clock
        )
    def readFailed(self, fail):
        """The reader reported an error. Notify the remote end and cancel the transfer"""
        log.err(fail)
        self.transport.write(ERRORDatagram.from_code(ERR_NOT_DEFINED, "Read failed").to_wire())
        self.cancel()
    def timedOut(self):
        """Timeout iterable has been exhausted. End the transfer"""
        log.msg("Session timed out, last wait was %s seconds long" % self.timeout[-1])
        self.cancel()
    def sendData(self, bytes):
        """Send data to the remote peer
        @param bytes: bytes to send
        @type bytes: C{str}
        """
        self.transport.write(bytes)
| |
#!/usr/bin/env python
import os
import math
import pdb,glob
import numpy as np
from scipy.io import loadmat,savemat
from scipy.sparse import csr_matrix
import matplotlib.pyplot as plt
from scipy.interpolate import spline
from scipy.interpolate import interp1d
from scipy.interpolate import InterpolatedUnivariateSpline
from sklearn.cluster import KMeans
from mpl_toolkits.mplot3d import Axes3D
from sets import Set
from warpTrj2parallel import loadWarpMtx
import statsmodels.api as sm
import cv2
import pdb
# from DataPathclass import *
# DataPathobj = DataPath(dataSource,VideoIndex)
# from parameterClass import *
# Parameterobj = parameter(dataSource,VideoIndex)
def warpTrj_using_Mtx(x_mtx,y_mtx,warpingMtx,limitX,limitY):
    'warp the trj and save to warpped'
    # Apply a perspective (homography) transform to every trajectory point.
    # x_mtx/y_mtx are (num_trj, trunclen) matrices where 0 marks "no
    # observation".  Relies on the module-global Parameterobj.trunclen to
    # reshape the flattened point list back into trajectories.
    xyTupleMtx = np.zeros((x_mtx.shape[0],x_mtx.shape[1],2))
    xyTupleMtx[:,:,0] = np.array(x_mtx,dtype='float32') #first dim is X!
    xyTupleMtx[:,:,1] = np.array(y_mtx,dtype='float32')
    # cv2.perspectiveTransform expects shape (1, N, 2) float32.
    warpped_xyTupleMtx = cv2.perspectiveTransform(np.array([xyTupleMtx.reshape((-1,2))],dtype='float32'), np.array(warpingMtx,dtype='float32'))[0,:,:].reshape((-1,Parameterobj.trunclen,2))
    # warpped_x_mtx = np.int16(warpped_xyTupleMtx[:,:,0])
    # warpped_y_mtx = np.int16(warpped_xyTupleMtx[:,:,1])
    warpped_x_mtx = warpped_xyTupleMtx[:,:,0]
    warpped_y_mtx = warpped_xyTupleMtx[:,:,1]
    """how to deal with out of range?????"""
    # Clamp warped coordinates into [0, limitX] x [0, limitY].
    # NOTE(review): clamping to limitX/limitY themselves is suspicious if
    # these are image dimensions (valid pixel indices end at limit-1) -
    # confirm the intended meaning of limitX/limitY.
    warpped_x_mtx[warpped_x_mtx<0] = 0
    warpped_y_mtx[warpped_y_mtx<0] = 0
    warpped_x_mtx[warpped_x_mtx>=limitX] = limitX
    warpped_y_mtx[warpped_y_mtx>=limitY] = limitY
    # Re-zero the padding entries so "no observation" survives the warp.
    warpped_x_mtx[x_mtx==0]=0
    warpped_y_mtx[y_mtx==0]=0
    return warpped_x_mtx,warpped_y_mtx
def filteringCriterion(xk,yk,xspd,yspd):
    """Decide whether a single trajectory is worth keeping.

    A trajectory passes when it is long enough, not mostly stationary,
    fast enough at its peak, has net displacement in both x and y, and
    spans a large enough spatial range.  All thresholds come from the
    module-global Parameterobj.
    """
    # Guard clause: short-lived tracks are rejected outright.
    if not (len(xk) > Parameterobj.livelong_thresh):
        return False
    abs_speed = np.abs(xspd) + np.abs(yspd)
    # Fewer than transth near-stationary samples (speed < 3 px/frame).
    not_stationary = sum(abs_speed < 3) < Parameterobj.transth
    fast_enough = np.max(abs_speed) > Parameterobj.minspdth
    has_net_motion = (np.abs(np.sum(xspd)) >= 1e-2) and (np.abs(np.sum(yspd)) >= 1e-2)
    span_th = Parameterobj.loc_change
    wide_enough = (np.max(xk) - np.min(xk) >= span_th) or (np.max(yk) - np.min(yk) >= span_th)
    return bool(not_stationary and fast_enough and has_net_motion and wide_enough)
def polyFitTrj(x,y,t,goodTrj):
    """Fit a degree-2 polynomial y = f(x) to each selected trajectory.

    @param x, y: (num_trj, trunclen) coordinate matrices
    @param t: frame-index matrix; entries equal to np.max(t) are padding
    @param goodTrj: iterable of row indices to fit
    @return: (len(goodTrj), 3) array of np.polyfit coefficients,
        highest order first
    """
    # Bug fix: `stuffer` was referenced here but never defined in this
    # scope (it is a local variable of filtering()), so any call raised
    # NameError.  Derive the padding marker the same way filtering() does.
    stuffer = np.max(t)
    p3 = [] #polynomial coefficients, order 2 (one row per trajectory)
    for kk in goodTrj:
        valid = t[kk,:]!=stuffer
        p3.append(np.polyfit(x[kk,:][valid], y[kk,:][valid], 2))
    # Historical outlier rejection on the coefficients (Gaussian fit per
    # coefficient, drop tails) was already disabled; keep returning every
    # fit.  Note: this keeps some very ziggy/jumpy trajectories.
    return np.array(p3)
def filtering(x,y,xspd_mtx,yspd_mtx,t):
    """Return the row indices of trajectories accepted by
    filteringCriterion, as an integer ndarray.

    Entries of t equal to its global maximum are padding ("no
    observation"); only observed samples are fed to the criterion.
    """
    pad = np.max(t)
    keep = []
    for row in range(x.shape[0]):
        observed = t[row, :] != pad
        xs = x[row, :][observed]
        ys = y[row, :][observed]
        # Drop the first speed sample of each track (no predecessor).
        vx = xspd_mtx[row, :][observed][1:]
        vy = yspd_mtx[row, :][observed][1:]
        if filteringCriterion(xs, ys, vx, vy):
            keep.append(row)
    return np.array(keep)
# extrapolate original trj
def extraPolate(xk, yk, start_Y=100, end_Y=500):
    """Linearly inter/extrapolate a trajectory over a fixed y-range and
    plot the result.

    Bug fix: start_Y and end_Y used to be read as globals, but they are
    defined only as locals of fit_extrapolate_main(), so calling this
    function raised NameError.  They are now parameters defaulting to the
    values fit_extrapolate_main uses (Canal video start/end regions), which
    keeps the call signature backward compatible.

    @param xk, yk: 1-D coordinate arrays of one trajectory
    @param start_Y, end_Y: y-range (pixel rows) to sample, one per pixel
    """
    # positions to inter/extrapolate, one sample per pixel row
    y_extraPosistion = range(start_Y, end_Y, 1)
    # spline order: 1 linear, 2 quadratic, 3 cubic ...
    order = 1
    # A k=1 spline interpolates inside the data range and extrapolates
    # linearly outside of it.
    spline = InterpolatedUnivariateSpline(yk, xk, k=order)
    x_extraPosistion = spline(y_extraPosistion)
    plt.plot(x_extraPosistion, y_extraPosistion)
    plt.draw()
    plt.show()
def smooth(xk, yk):
    """Spatially resample one trajectory.

    First linearly interpolates y over x at half-pixel resolution, then
    resamples that dense curve back to len(xk) points on a uniform x
    grid.  Returns (x_smooth, y_smooth), both of length len(xk).
    """
    dense_x = np.arange(xk.min(), xk.max(), 0.5)
    to_dense = interp1d(xk, yk, kind='linear', axis=-1, copy=True,
                        bounds_error=True, fill_value=np.nan,
                        assume_sorted=False)
    dense_y = to_dense(dense_x)
    # Uniform grid with the original sample count over the dense range.
    even_x = np.linspace(dense_x.min(), dense_x.max(), len(xk))
    back = interp1d(dense_x, dense_y, kind='slinear', axis=-1, copy=True,
                    bounds_error=True, fill_value=np.nan,
                    assume_sorted=False)
    even_y = back(even_x)
    return even_x, even_y
# k-means on polynomial coefs
def kmeansPolyCoeff(p3):
    # Exploratory clustering of the per-trajectory polynomial coefficients
    # with three KMeans configurations; plots the first 1000 coefficient
    # triplets in 3-D colored by cluster label.  Visualization-only: it
    # returns nothing.
    np.random.seed(5)  # deterministic clustering across runs
    estimators = {'k_means_20': KMeans(n_clusters=20),
                  'k_means_8': KMeans(n_clusters=8),
                  'k_means_bad_init': KMeans(n_clusters=30, n_init=1,
                                             init='random')}
    fignum = 1
    for name, est in estimators.items():
        est.fit(p3)
        labels = est.labels_
        fig = plt.figure(fignum, figsize=(4, 3))
        plt.clf()
        plt.title(str(name))
        ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=48, azim=134)
        plt.cla()
        # NOTE(review): np.float is removed in NumPy >= 1.24 - this line
        # needs float/np.float64 on modern NumPy; confirm target version.
        ax.scatter(p3[:1000, 0], p3[:1000, 1], p3[:1000, 2], c=labels[:1000].astype(np.float))
        """plot the raw trjs not the coefficients. see the mean center trjs, what are they look like"""
        # ax.w_xaxis.set_ticklabels([])
        # ax.w_yaxis.set_ticklabels([])
        # ax.w_zaxis.set_ticklabels([])
        fignum = fignum + 1
def readData(matfile):
    """Load one truncation .mat file and densify its track matrices.

    Returns (x, y, t, raw) where x/y/t are dense ndarrays built from the
    'xtracks'/'ytracks'/'Ttracks' entries and raw is the full loadmat()
    dictionary.
    """
    raw = loadmat(matfile)

    def _dense(key):
        # Entries may be stored sparse; round-trip through CSR to get a
        # dense ndarray either way.
        entry = raw[key]
        return csr_matrix(entry, shape=entry.shape).toarray()

    return _dense('xtracks'), _dense('ytracks'), _dense('Ttracks'), raw
def lowessSmooth(xk,yk):
    # Temporal smoothing: LOWESS-fit x(t) and y(t) separately against the
    # sample index, returning (xk_smooth, yk_smooth) of the same length.
    # The frac window adapts so at least ~2 neighbors are used.
    ## fit (x,y) # use smooth() func to do spatial smooth
    # lowessXY = sm.nonparametric.lowess(yk, xk, frac=0.1)
    # plt.figure()
    # plt.plot(xk, yk, '+')
    # plt.plot(lowessXY[:, 0], lowessXY[:, 1])
    # plt.show()
    #fit x(t) and y(t) seperately
    lowessX = sm.nonparametric.lowess(xk,range(len(xk)), frac=max(2.0/len(xk),0.1))
    # plt.figure('smooth X(t)')
    # plt.plot(range(len(xk)), xk, '+')
    # plt.plot(lowessX[:, 0], lowessX[:, 1])
    # plt.show()
    xk_smooth = lowessX[:, 1]  # column 1 holds the fitted values
    lowessY = sm.nonparametric.lowess(yk,range(len(yk)), frac=max(2.0/len(xk),0.1))
    # plt.figure('smooth Y(t)')
    # plt.plot(range(len(yk)), yk, '+')
    # plt.plot(lowessY[:, 0], lowessY[:, 1])
    # plt.show()
    yk_smooth = lowessY[:, 1]
    # if np.sum(np.isnan(xk_smooth))>0:
    #     print 'X nan!!'
    # if np.sum(np.isnan(yk_smooth))>0:
    #     print 'Y nan!!'
    """lowess returns nan and does not warn if there are too few neighbors!"""
    # Fall back to the raw samples wherever LOWESS produced NaN.
    xk_smooth[np.isnan(xk_smooth)] = xk[np.isnan(xk_smooth)]
    yk_smooth[np.isnan(yk_smooth)] = yk[np.isnan(yk_smooth)]
    return xk_smooth, yk_smooth
def getSpdMtx(dataMtx_withnan):
    """Frame-to-frame speed along each row.

    The first column (which has no predecessor) is padded with NaN before
    differencing; every NaN in the result - padding and any NaN input -
    is then zeroed out.
    """
    num_rows = dataMtx_withnan.shape[0]
    first_col = np.ones((num_rows, 1)) * np.nan
    spd = np.hstack((first_col, np.diff(dataMtx_withnan)))
    spd[np.isnan(spd)] = 0  # change every nan to 0
    return spd
def getSmoothMtx(x,y,t):
    # For every trajectory row, compute a spatially smoothed (x over y via
    # interp1d) and a temporally smoothed (LOWESS over frame index)
    # version, then derive speed matrices from the temporal smooth.
    # Rows failing the length/span prefilter or filteringCriterion stay
    # all-zero.  Depends on module globals Parameterobj and the sibling
    # functions smooth(), lowessSmooth(), getSpdMtx(), filteringCriterion().
    x_spatial_smooth_mtx = np.zeros(x.shape)
    y_spatial_smooth_mtx = np.zeros(y.shape)
    # Temporal matrices start as NaN so getSpdMtx can zero untouched rows.
    x_time_smooth_mtx = np.ones(x.shape)*np.nan
    y_time_smooth_mtx = np.ones(y.shape)*np.nan
    for kk in range(0,x.shape[0],1):
        # print 'processing', kk, 'th trj'
        # xk = x[kk,:][x[kk,:]!=0]
        # yk = y[kk,:][y[kk,:]!=0]d
        # Entries of t equal to its global max are padding.
        stuffer=np.max(t)
        xk = x[kk,:][t[kk,:]!=stuffer] #use t to indicate
        yk = y[kk,:][t[kk,:]!=stuffer]
        if len(xk)>Parameterobj.livelong_thresh and (min(xk.max()-xk.min(), yk.max()-yk.min())>Parameterobj.loc_change): # range span >=2 pixels # loger than 5, otherwise all zero out
            if len(xk)!=len(yk):
                pdb.set_trace()
            """since trjs are too many, pre-filter out bad ones first before smoothing!!"""
            """prefiltering using not very precise speed before smooth"""
            # Rough speeds via diff are good enough for the prefilter.
            if not filteringCriterion(xk,yk,np.diff(xk),np.diff(yk)):
                continue
            x_spatial_smooth, y_spatial_smooth = smooth(xk, yk)
            x_time_smooth, y_time_smooth = lowessSmooth(xk, yk)
            # x_spatial_smooth_mtx[kk,:][x[kk,:]!=0]=x_spatial_smooth
            # y_spatial_smooth_mtx[kk,:][y[kk,:]!=0]=y_spatial_smooth
            # x_time_smooth_mtx[kk,:][x[kk,:]!=0]=x_time_smooth
            # y_time_smooth_mtx[kk,:][y[kk,:]!=0]=y_time_smooth
            # Scatter the smoothed samples back into the observed slots.
            x_spatial_smooth_mtx[kk,:][t[kk,:]!=stuffer]=x_spatial_smooth
            y_spatial_smooth_mtx[kk,:][t[kk,:]!=stuffer]=y_spatial_smooth
            x_time_smooth_mtx[kk,:][t[kk,:]!=stuffer]=x_time_smooth
            y_time_smooth_mtx[kk,:][t[kk,:]!=stuffer]=y_time_smooth
    # Speeds are computed while padding is still NaN, so padded slots
    # contribute 0 speed.
    xspd_smooth_mtx = getSpdMtx(x_time_smooth_mtx)
    yspd_smooth_mtx = getSpdMtx(y_time_smooth_mtx)
    x_time_smooth_mtx[np.isnan(x_time_smooth_mtx)]=0 # change nan back to zero for sparsity
    y_time_smooth_mtx[np.isnan(y_time_smooth_mtx)]=0
    return x_spatial_smooth_mtx,y_spatial_smooth_mtx,x_time_smooth_mtx,y_time_smooth_mtx, xspd_smooth_mtx,yspd_smooth_mtx
def plotTrj(x,y,t,p3=[],Trjchoice=[]):
    # Debug visualization: plot the degree-2 polynomial fit (and an
    # extended version of it) for each chosen trajectory row.
    # NOTE: contains pdb.set_trace() breakpoints - interactive use only.
    # NOTE(review): p3/Trjchoice use mutable default arguments; they are
    # only read here, never mutated, so this is safe but fragile.
    if Trjchoice==[]:
        Trjchoice=range(x.shape[0])
    plt.ion()
    plt.figure()
    for ii in range(0,len(Trjchoice),1):
        kk = Trjchoice[ii]
        # xk = x[kk,:][x[kk,:]!=0]
        # yk = y[kk,:][y[kk,:]!=0]
        # Entries of t equal to its global max are padding.
        stuffer=np.max(t)
        xk = x[kk,:][t[kk,:]!=stuffer] #use t to indicate
        yk = y[kk,:][t[kk,:]!=stuffer]
        if len(xk)>=Parameterobj.livelong_thresh and (min(xk.max()-xk.min(), yk.max()-yk.min())>2): # range span >=2 pixels
            # plt.plot(xk)
            # plt.plot(yk)
            # plt.plot(xk, yk)
            # extraPolate(xk, yk)
            '''1'''
            # Evaluate the fitted quadratic over the observed x range...
            x_fit = np.linspace(xk.min(), xk.max(), 200)
            # y_fit = pow(x_fit,3)*p3[ii,0] + pow(x_fit,2)*p3[ii,1] + pow(x_fit,1)*p3[ii,2]+ p3[ii,3]
            y_fit = pow(x_fit,2)*p3[ii,0]+pow(x_fit,1)*p3[ii,1]+ p3[ii,2]
            # ...and over a range extended by 50% on both sides (capped
            # to [0, 700]).
            x_range = xk.max()-xk.min()
            x_fit_extra = np.linspace(max(0,xk.min()-x_range*0.50), min(xk.max()+x_range*0.50,700), 200)
            # y_fit_extra = pow(x_fit_extra,3)*p3[ii,0] + pow(x_fit_extra,2)*p3[ii,1] + pow(x_fit_extra,1)*p3[ii,2]+ p3[ii,3]
            y_fit_extra = pow(x_fit_extra,2)*p3[ii,0]+pow(x_fit_extra,1)*p3[ii,1]+ p3[ii,2]
            # '''2'''
            # y_fit = np.linspace(yk.min(), yk.max(), 200)
            # x_fit = pow(y_fit,3)*p3[ii,0] + pow(y_fit,2)*p3[ii,1] + pow(y_fit,1)*p3[ii,2]+ p3[ii,3]
            # plt.plot(x_fit_extra, y_fit_extra,'r')
            # plt.plot(x_fit, y_fit,'g')
            plt.plot(x_fit, y_fit)
            plt.draw()
            pdb.set_trace()
    plt.show()
    pdb.set_trace()
def saveSmoothMat(x_smooth_mtx,y_smooth_mtx,xspd_smooth_mtx,yspd_smooth_mtx,goodTrj,ptstrj,matfile,p3 = None):
    # Keep only the good trajectory rows, rebuild a sparse result dict
    # (plus optional perspective-warped variants), and save it under
    # DataPathobj.smoothpath with the same file name as the input.
    # Depends on module globals Parameterobj and DataPathobj.
    print "saving smooth new trj:", matfile
    """only keep the goodTrj, delete all bad ones"""
    ptstrjNew = {}
    # NOTE(review): astype() returns a NEW array and the result is
    # discarded here, so this line has no effect; goodTrj is used as-is
    # below (it already comes in as an integer index array).
    goodTrj.astype(int)
    # Store positions/speeds sparse (CSR) to keep the .mat small.
    ptstrjNew['xtracks'] = csr_matrix(x_smooth_mtx[goodTrj,:])
    ptstrjNew['ytracks'] = csr_matrix(y_smooth_mtx[goodTrj,:])
    ptstrjNew['Ttracks'] = ptstrj['Ttracks'][goodTrj,:]
    ptstrjNew['trjID'] = ptstrj['trjID'][:,goodTrj]
    ptstrjNew['Huetracks'] = ptstrj['Huetracks'][goodTrj,:]
    if Parameterobj.useSBS:
        # Carry over foreground-blob associations when background
        # subtraction was enabled upstream.
        ptstrjNew['fg_blob_index'] = ptstrj['fg_blob_index'][goodTrj,:]
        ptstrjNew['fg_blob_center_X'] = ptstrj['fg_blob_center_X'][goodTrj,:]
        ptstrjNew['fg_blob_center_Y'] = ptstrj['fg_blob_center_Y'][goodTrj,:]
    # ptstrjNew['polyfitCoef'] = p3
    ptstrjNew['xspd'] = csr_matrix(xspd_smooth_mtx[goodTrj,:])
    ptstrjNew['yspd'] = csr_matrix(yspd_smooth_mtx[goodTrj,:])
    # Net direction per trajectory: True means non-negative overall drift.
    ptstrjNew['Xdir'] = np.sum(xspd_smooth_mtx[goodTrj,:],1)>=0
    ptstrjNew['Ydir'] = np.sum(yspd_smooth_mtx[goodTrj,:],1)>=0
    if Parameterobj.useWarpped:
        # Also store the perspective-transformed (ground-plane) versions.
        _, _, warpingMtx, limitX, limitY = loadWarpMtx()
        warpped_x_mtx,warpped_y_mtx = warpTrj_using_Mtx(x_smooth_mtx[goodTrj,:],y_smooth_mtx[goodTrj,:],warpingMtx,limitX, limitY)
        ptstrjNew['xtracks_warpped'] = csr_matrix(warpped_x_mtx)
        ptstrjNew['ytracks_warpped'] = csr_matrix(warpped_y_mtx)
        warpped_xspd_mtx = getSpdMtx(warpped_x_mtx)
        warpped_yspd_mtx = getSpdMtx(warpped_y_mtx)
        ptstrjNew['xspd_warpped'] = csr_matrix(warpped_xspd_mtx)
        ptstrjNew['yspd_warpped'] = csr_matrix(warpped_yspd_mtx)
        ptstrjNew['Xdir_warpped'] = np.sum(warpped_xspd_mtx,1)>=0
        ptstrjNew['Ydir_warpped'] = np.sum(warpped_yspd_mtx,1)>=0
    # plt.figure()
    # ax1 = plt.subplot2grid((1,3),(0, 0))
    # ax2 = plt.subplot2grid((1,3),(0, 1))
    # ax3 = plt.subplot2grid((1,3),(0, 2))
    """visualize before and after warping"""
    # if Parameterobj.useWarpped:
    #     # bkg = cv2.imread('/media/My Book/DOT Video/2015-06-20_08h/frames2/00000000.jpg')
    #     # im = plt.imshow(bkg[:,:,::-1])
    #     for ii in range(len(goodTrj)):
    #         print ii
    #         xraw = x_smooth_mtx[goodTrj,:][ii,:]
    #         yraw = y_smooth_mtx[goodTrj,:][ii,:]
    #         start = min(np.min(np.where(xraw!=0)[0]),np.min(np.where(yraw!=0)[0]))
    #         end = max(np.max(np.where(xraw!=0)[0]),np.max(np.where(yraw!=0)[0]))
    #         xraw = xraw[start:end+1]
    #         yraw = yraw[start:end+1]
    #         xnew = warpped_x_mtx[ii,:][start:end+1]
    #         ynew = warpped_y_mtx[ii,:][start:end+1]
    #         plt.subplot(121)
    #         plt.axis('off')
    #         plt.plot(xraw,yraw,color = 'red',linewidth=2)
    #         plt.title('tracklets before perspective transformation', fontsize=10)
    #         plt.subplot(122)
    #         plt.ylim(700,0) ## flip the Y axis
    #         plt.plot(xnew,ynew,color = 'black',linewidth=2)
    #         plt.title('tracklets after perspective transformation', fontsize=10)
    #         plt.draw()
    #         plt.axis('off')
    # parentPath = os.path.dirname(matfile)
    # smoothPath = os.path.join(parentPath,'smooth/')
    # if not os.path.exists(smoothPath):
    #     os.mkdir(smoothPath)
    # onlyFileName = matfile[len(parentPath)+1:]
    # Mirror the input file name into the smooth output directory.
    onlyFileName = matfile[len(DataPathobj.kltpath):]
    savename = os.path.join(DataPathobj.smoothpath,onlyFileName)
    savemat(savename,ptstrjNew)
# if __name__ == '__main__':
def fit_extrapolate_main(dataSource,VideoIndex):
    # Driver: for every unprocessed klt .mat file, load the trajectories,
    # smooth them (spatially + temporally), filter out bad ones, and save
    # the result into the smooth output directory.  Installs DataPathobj
    # and Parameterobj as module globals for the helper functions above.
    # define start and end regions
    #Canal video's dimensions:
    # """(528, 704, 3)
    # start :<=100,
    # end: >=500,"""
    import DataPathclass
    global DataPathobj
    DataPathobj = DataPathclass.DataPath(dataSource,VideoIndex)
    import parameterClass
    global Parameterobj
    Parameterobj = parameterClass.parameter(dataSource,VideoIndex)
    # NOTE(review): these are locals, but extraPolate() reads start_Y/end_Y
    # as globals - calling extraPolate would raise NameError; confirm.
    start_Y = 100;
    end_Y = 500;
    # matfilepath = '/Users/Chenge/Desktop/testklt/'
    matfilepath = DataPathobj.kltpath
    matfiles = sorted(glob.glob(matfilepath + '*.mat'))
    # matfiles = sorted(glob.glob(matfilepath + 'klt_*.mat'))
    # matfiles = sorted(glob.glob(matfilepath + 'sim*.mat'))
    start_position = 0
    matfiles = matfiles[start_position:]
    # Resume support: chunk numbers already present in the smooth output
    # directory (last three digits of the file name) are skipped.
    existingFiles = sorted(glob.glob(DataPathobj.smoothpath+'*.mat'))
    existingFileNames = []
    for jj in range(len(existingFiles)):
        existingFileNames.append(int(existingFiles[jj][-7:-4]))
    # for matidx,matfile in enumerate(matfiles):
    for matidx in range(len(matfiles)):
        if (matidx+1) in existingFileNames:
            print "alredy processed ", str(matidx+1)
            continue
        matfile = matfiles[matidx]
        # "if consecutive points are similar to each other, merge them, using one to represent"
        # didn't do this, smooth and resample instead
        print "reading data", matfile
        x,y,t,ptstrj = readData(matfile)
        print "get spatial and temporal smooth matrix"
        x_spatial_smooth_mtx,y_spatial_smooth_mtx,x_time_smooth_mtx,y_time_smooth_mtx, xspd_smooth_mtx,yspd_smooth_mtx = getSmoothMtx(x,y,t)
        """delete all-zero rows"""
        # Rows that stayed all-zero were rejected inside getSmoothMtx.
        good_index_before_filtering = np.where(np.sum(x_spatial_smooth_mtx,1)!=0)
        x_spatial_smooth_mtx = x_spatial_smooth_mtx[good_index_before_filtering,:][0,:,:]
        y_spatial_smooth_mtx = y_spatial_smooth_mtx[good_index_before_filtering,:][0,:,:]
        x_time_smooth_mtx = x_time_smooth_mtx[good_index_before_filtering,:][0,:,:]
        y_time_smooth_mtx = y_time_smooth_mtx[good_index_before_filtering,:][0,:,:]
        xspd_smooth_mtx = xspd_smooth_mtx[good_index_before_filtering,:][0,:,:]
        yspd_smooth_mtx = yspd_smooth_mtx[good_index_before_filtering,:][0,:,:]
        t = t[good_index_before_filtering,:][0,:,:]
        # plotTrj(x_smooth_mtx,y_smooth_mtx)
        print "filtering out bad trajectories"
        goodTrj = filtering(x_spatial_smooth_mtx,y_spatial_smooth_mtx,xspd_smooth_mtx,yspd_smooth_mtx,t)
        # kmeansPolyCoeff(p3)
        # plotTrj(x_spatial_smooth_mtx,y_spatial_smooth_mtx,t,Trjchoice = goodTrj)
        print "saving=======!!"
        saveSmoothMat(x_time_smooth_mtx,y_time_smooth_mtx,xspd_smooth_mtx,yspd_smooth_mtx,goodTrj,ptstrj,matfile)
| |
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011, 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The main component of OpenFlow controller.
- Handle connections from switches
- Generate and route events to appropriate entities like Ryu applications
"""
import contextlib
from ryu import cfg
import logging
from ryu.lib import hub
from ryu.lib.hub import StreamServer
import traceback
import random
import ssl
from socket import IPPROTO_TCP, TCP_NODELAY
import warnings
import ryu.base.app_manager
from ryu.ofproto import ofproto_common
from ryu.ofproto import ofproto_parser
from ryu.ofproto import ofproto_protocol
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import nx_match
from ryu.controller import handler
from ryu.controller import ofp_event
from ryu.lib.dpid import dpid_to_str
LOG = logging.getLogger('ryu.controller.controller')
CONF = cfg.CONF
# Command-line options: where the controller listens (plain TCP and SSL)
# and the TLS material for the optional SSL listener.  ca-certs, when
# given, additionally enables client-certificate verification.
CONF.register_cli_opts([
    cfg.StrOpt('ofp-listen-host', default='', help='openflow listen host'),
    cfg.IntOpt('ofp-tcp-listen-port', default=ofproto_common.OFP_TCP_PORT,
               help='openflow tcp listen port'),
    cfg.IntOpt('ofp-ssl-listen-port', default=ofproto_common.OFP_SSL_PORT,
               help='openflow ssl listen port'),
    cfg.StrOpt('ctl-privkey', default=None, help='controller private key'),
    cfg.StrOpt('ctl-cert', default=None, help='controller certificate'),
    cfg.StrOpt('ca-certs', default=None, help='CA certificates')
])
class OpenFlowController(object):
    """Accepts switch connections and runs one Datapath per connection.

    Listens with SSL when both ctl-privkey and ctl-cert are configured
    (adding client-certificate verification when ca-certs is set),
    otherwise with plain TCP.
    """
    def __init__(self):
        super(OpenFlowController, self).__init__()

    # entry point
    def __call__(self):
        # LOG.debug('call')
        self.server_loop()

    def server_loop(self):
        """Build the listening StreamServer and serve forever.

        Refactored: the two SSL branches previously duplicated the whole
        StreamServer construction; the optional client-verification
        arguments are now folded into one kwargs dict.
        """
        if CONF.ctl_privkey is not None and CONF.ctl_cert is not None:
            ssl_kwargs = dict(keyfile=CONF.ctl_privkey,
                              certfile=CONF.ctl_cert,
                              ssl_version=ssl.PROTOCOL_TLSv1)
            if CONF.ca_certs is not None:
                # Require and verify client (switch) certificates.
                ssl_kwargs.update(cert_reqs=ssl.CERT_REQUIRED,
                                  ca_certs=CONF.ca_certs)
            server = StreamServer((CONF.ofp_listen_host,
                                   CONF.ofp_ssl_listen_port),
                                  datapath_connection_factory,
                                  **ssl_kwargs)
        else:
            server = StreamServer((CONF.ofp_listen_host,
                                   CONF.ofp_tcp_listen_port),
                                  datapath_connection_factory)
        # LOG.debug('loop')
        server.serve_forever()
def _deactivate(method):
def deactivate(self):
try:
method(self)
finally:
self.is_active = False
return deactivate
class Datapath(ofproto_protocol.ProtocolDesc):
    # One connected OpenFlow switch.  Owns the socket, a bounded send
    # queue, and the receive loop that parses messages and dispatches
    # them as ryu events through the 'ofp_event' brick.
    def __init__(self, socket, address):
        super(Datapath, self).__init__()
        self.socket = socket
        # Disable Nagle's algorithm: control messages are small and
        # latency-sensitive.
        self.socket.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)
        self.address = address
        self.is_active = True
        # The limit is arbitrary. We need to limit queue size to
        # prevent it from eating memory up
        self.send_q = hub.Queue(16)
        # Start transaction ids at a random point so xids differ between
        # reconnects.
        self.xid = random.randint(0, self.ofproto.MAX_XID)
        self.id = None  # datapath_id is unknown yet
        self._ports = None
        self.flow_format = ofproto_v1_0.NXFF_OPENFLOW10
        self.ofp_brick = ryu.base.app_manager.lookup_service_brick('ofp_event')
        self.set_state(handler.HANDSHAKE_DISPATCHER)
    def _get_ports(self):
        # Reading Datapath#ports on OpenFlow >= 1.3 (0x04) is deprecated;
        # emit a warning pointing at the supported alternatives.
        if (self.ofproto_parser is not None and
            self.ofproto_parser.ofproto.OFP_VERSION >= 0x04):
            message = (
                'Datapath#ports is kept for compatibility with the previous '
                'openflow versions (< 1.3). '
                'This not be updated by EventOFPPortStatus message. '
                'If you want to be updated, you can use '
                '\'ryu.controller.dpset\' or \'ryu.topology.switches\'.'
            )
            warnings.warn(message, stacklevel=2)
        return self._ports
    def _set_ports(self, ports):
        self._ports = ports
    # To show warning when Datapath#ports is read
    ports = property(_get_ports, _set_ports)
    def close(self):
        # Drive the state machine to DEAD; observers are notified via
        # EventOFPStateChange.
        self.set_state(handler.DEAD_DISPATCHER)
    def set_state(self, state):
        # Record the new dispatcher state and announce the transition.
        self.state = state
        ev = ofp_event.EventOFPStateChange(self)
        ev.state = state
        self.ofp_brick.send_event_to_observers(ev, state)
    # Low level socket handling layer
    @_deactivate
    def _recv_loop(self):
        # Accumulate bytes until a full OpenFlow message is available,
        # parse it, and hand the event to the brick and its handlers.
        buf = bytearray()
        required_len = ofproto_common.OFP_HEADER_SIZE
        count = 0
        while self.is_active:
            ret = self.socket.recv(required_len)
            if len(ret) == 0:
                # Peer closed the connection.
                self.is_active = False
                break
            buf += ret
            while len(buf) >= required_len:
                # Header gives the total message length; wait for the rest.
                (version, msg_type, msg_len, xid) = ofproto_parser.header(buf)
                required_len = msg_len
                if len(buf) < required_len:
                    break
                msg = ofproto_parser.msg(self,
                                         version, msg_type, msg_len, xid, buf)
                # LOG.debug('queue msg %s cls %s', msg, msg.__class__)
                if msg:
                    ev = ofp_event.ofp_msg_to_ev(msg)
                    self.ofp_brick.send_event_to_observers(ev, self.state)
                    # Invoke only the handlers registered for the current
                    # dispatcher state.
                    dispatchers = lambda x: x.callers[ev.__class__].dispatchers
                    handlers = [handler for handler in
                                self.ofp_brick.get_handlers(ev) if
                                self.state in dispatchers(handler)]
                    for handler in handlers:
                        handler(ev)
                buf = buf[required_len:]
                required_len = ofproto_common.OFP_HEADER_SIZE
                # We need to schedule other greenlets. Otherwise, ryu
                # can't accept new switches or handle the existing
                # switches. The limit is arbitrary. We need the better
                # approach in the future.
                count += 1
                if count > 2048:
                    count = 0
                    hub.sleep(0)
    @_deactivate
    def _send_loop(self):
        # Drain the send queue to the socket until deactivated.
        try:
            while self.is_active:
                buf = self.send_q.get()
                self.socket.sendall(buf)
        finally:
            q = self.send_q
            # first, clear self.send_q to prevent new references.
            self.send_q = None
            # there might be threads currently blocking in send_q.put().
            # unblock them by draining the queue.
            try:
                while q.get(block=False):
                    pass
            except hub.QueueEmpty:
                pass
    def send(self, buf):
        # Silently drops the message once send_q has been torn down.
        if self.send_q:
            self.send_q.put(buf)
    def set_xid(self, msg):
        # Allocate the next transaction id (wrapping at MAX_XID) and
        # stamp it onto the message.
        self.xid += 1
        self.xid &= self.ofproto.MAX_XID
        msg.set_xid(self.xid)
        return self.xid
    def send_msg(self, msg):
        # Serialize and enqueue a parser message object, assigning an xid
        # if the caller did not set one.
        assert isinstance(msg, self.ofproto_parser.MsgBase)
        if msg.xid is None:
            self.set_xid(msg)
        msg.serialize()
        # LOG.debug('send_msg %s', msg)
        self.send(msg.buf)
    def serve(self):
        # Run the send loop in its own greenlet, greet the switch with
        # HELLO, then block in the receive loop until the connection ends.
        send_thr = hub.spawn(self._send_loop)
        # send hello message immediately
        hello = self.ofproto_parser.OFPHello(self)
        self.send_msg(hello)
        try:
            self._recv_loop()
        finally:
            hub.kill(send_thr)
            hub.joinall([send_thr])
    #
    # Utility methods for convenience
    #
    def send_packet_out(self, buffer_id=0xffffffff, in_port=None,
                        actions=None, data=None):
        # Convenience wrapper around OFPPacketOut; 0xffffffff means "no
        # buffered packet" per OpenFlow 1.0.
        if in_port is None:
            in_port = self.ofproto.OFPP_NONE
        packet_out = self.ofproto_parser.OFPPacketOut(
            self, buffer_id, in_port, actions, data)
        self.send_msg(packet_out)
    def send_flow_mod(self, rule, cookie, command, idle_timeout, hard_timeout,
                      priority=None, buffer_id=0xffffffff,
                      out_port=None, flags=0, actions=None):
        # Send a flow-mod in whichever flow format the rule requires,
        # upgrading the connection's flow format first if needed.
        if priority is None:
            priority = self.ofproto.OFP_DEFAULT_PRIORITY
        if out_port is None:
            out_port = self.ofproto.OFPP_NONE
        flow_format = rule.flow_format()
        assert (flow_format == ofproto_v1_0.NXFF_OPENFLOW10 or
                flow_format == ofproto_v1_0.NXFF_NXM)
        if self.flow_format < flow_format:
            self.send_nxt_set_flow_format(flow_format)
        if flow_format == ofproto_v1_0.NXFF_OPENFLOW10:
            match_tuple = rule.match_tuple()
            match = self.ofproto_parser.OFPMatch(*match_tuple)
            flow_mod = self.ofproto_parser.OFPFlowMod(
                self, match, cookie, command, idle_timeout, hard_timeout,
                priority, buffer_id, out_port, flags, actions)
        else:
            flow_mod = self.ofproto_parser.NXTFlowMod(
                self, cookie, command, idle_timeout, hard_timeout,
                priority, buffer_id, out_port, flags, rule, actions)
        self.send_msg(flow_mod)
    def send_flow_del(self, rule, cookie, out_port=None):
        # Delete flows matching the rule (and out_port, when given).
        self.send_flow_mod(rule=rule, cookie=cookie,
                           command=self.ofproto.OFPFC_DELETE,
                           idle_timeout=0, hard_timeout=0, priority=0,
                           out_port=out_port)
    def send_delete_all_flows(self):
        # An empty ClsRule matches everything - wipe the flow table.
        rule = nx_match.ClsRule()
        self.send_flow_mod(
            rule=rule, cookie=0, command=self.ofproto.OFPFC_DELETE,
            idle_timeout=0, hard_timeout=0, priority=0, buffer_id=0,
            out_port=self.ofproto.OFPP_NONE, flags=0, actions=None)
    def send_barrier(self):
        # Barrier: switch must finish all preceding messages first.
        barrier_request = self.ofproto_parser.OFPBarrierRequest(self)
        self.send_msg(barrier_request)
    def send_nxt_set_flow_format(self, flow_format):
        # Switch the Nicira flow format for this connection; the barrier
        # ensures the change is applied before subsequent flow-mods.
        assert (flow_format == ofproto_v1_0.NXFF_OPENFLOW10 or
                flow_format == ofproto_v1_0.NXFF_NXM)
        if self.flow_format == flow_format:
            # Nothing to do
            return
        self.flow_format = flow_format
        set_format = self.ofproto_parser.NXTSetFlowFormat(self, flow_format)
        # FIXME: If NXT_SET_FLOW_FORMAT or NXFF_NXM is not supported by
        # the switch then an error message will be received. It may be
        # handled by setting self.flow_format to
        # ofproto_v1_0.NXFF_OPENFLOW10 but currently isn't.
        self.send_msg(set_format)
        self.send_barrier()
    def is_reserved_port(self, port_no):
        # Port numbers above OFPP_MAX are reserved (IN_PORT, FLOOD, ...).
        return port_no > self.ofproto.OFPP_MAX
def datapath_connection_factory(socket, address):
    # StreamServer callback: run one Datapath per accepted switch
    # connection; closing() guarantees the socket is released.
    LOG.debug('connected socket:%s address:%s', socket, address)
    with contextlib.closing(Datapath(socket, address)) as datapath:
        try:
            datapath.serve()
        # NOTE: the bare except is deliberate - every failure (including
        # parser errors from malformed packets) is logged with the dpid
        # and then re-raised, so nothing is swallowed.
        except:
            # Something went wrong.
            # Especially malicious switch can send malformed packet,
            # the parser raise exception.
            # Can we do anything more graceful?
            if datapath.id is None:
                dpid_str = "%s" % datapath.id
            else:
                dpid_str = dpid_to_str(datapath.id)
            LOG.error("Error in the datapath %s from %s", dpid_str, address)
            raise
| |
import datetime
from django.template import Template
from django.forms.formsets import formset_factory
from material import Layout, Row, Column, Fieldset, Span2, Span3, Span5, Span6, Span10
from material.fields import FormSetField
from . import demo as forms
# (id, country name) pairs for country <select> widgets in the demo forms.
# The leading ('', 'Country') entry acts as the placeholder option.
# Fix: entry 74 was garbled as 'France, skypolitan'; restored to the
# conventional 'France, Metropolitan' label.
COUNTRY_CHOICES = (
    ('', 'Country'), (244, 'Aaland Islands'), (1, 'Afghanistan'), (2, 'Albania'), (3, 'Algeria'),
    (4, 'American Samoa'), (5, 'Andorra'), (6, 'Angola'), (7, 'Anguilla'), (8, 'Antarctica'),
    (9, 'Antigua and Barbuda'), (10, 'Argentina'), (11, 'Armenia'), (12, 'Aruba'), (13, 'Australia'),
    (14, 'Austria'), (15, 'Azerbaijan'), (16, 'Bahamas'), (17, 'Bahrain'), (18, 'Bangladesh'),
    (19, 'Barbados'), (20, 'Belarus'), (21, 'Belgium'), (22, 'Belize'), (23, 'Benin'),
    (24, 'Bermuda'), (25, 'Bhutan'), (26, 'Bolivia'), (245, 'Bonaire, Sint Eustatius and Saba'),
    (27, 'Bosnia and Herzegovina'), (28, 'Botswana'), (29, 'Bouvet Island'), (30, 'Brazil'),
    (31, 'British Indian Ocean Territory'), (32, 'Brunei Darussalam'),
    (33, 'Bulgaria'), (34, 'Burkina Faso'), (35, 'Burundi'), (36, 'Cambodia'), (37, 'Cameroon'),
    (38, 'Canada'), (251, 'Canary Islands'), (39, 'Cape Verde'), (40, 'Cayman Islands'), (41, 'Central African Republic'),
    (42, 'Chad'), (43, 'Chile'), (44, 'China'), (45, 'Christmas Island'), (46, 'Cocos (Keeling) Islands'),
    (47, 'Colombia'), (48, 'Comoros'), (49, 'Congo'), (50, 'Cook Islands'), (51, 'Costa Rica'),
    (52, "Cote D'Ivoire"), (53, 'Croatia'), (54, 'Cuba'), (246, 'Curacao'), (55, 'Cyprus'),
    (56, 'Czech Republic'), (237, 'Democratic Republic of Congo'), (57, 'Denmark'), (58, 'Djibouti'), (59, 'Dominica'),
    (60, 'Dominican Republic'), (61, 'East Timor'), (62, 'Ecuador'), (63, 'Egypt'), (64, 'El Salvador'),
    (65, 'Equatorial Guinea'), (66, 'Eritrea'), (67, 'Estonia'), (68, 'Ethiopia'), (69, 'Falkland Islands (Malvinas)'),
    (70, 'Faroe Islands'), (71, 'Fiji'), (72, 'Finland'), (74, 'France, Metropolitan'), (75, 'French Guiana'),
    (76, 'French Polynesia'), (77, 'French Southern Territories'), (126, 'FYROM'), (78, 'Gabon'), (79, 'Gambia'),
    (80, 'Georgia'), (81, 'Germany'), (82, 'Ghana'), (83, 'Gibraltar'), (84, 'Greece'),
    (85, 'Greenland'), (86, 'Grenada'), (87, 'Guadeloupe'), (88, 'Guam'), (89, 'Guatemala'),
    (241, 'Guernsey'), (90, 'Guinea'), (91, 'Guinea-Bissau'), (92, 'Guyana'), (93, 'Haiti'),
    (94, 'Heard and Mc Donald Islands'), (95, 'Honduras'), (96, 'Hong Kong'), (97, 'Hungary'), (98, 'Iceland'),
    (99, 'India'), (100, 'Indonesia'), (101, 'Iran (Islamic Republic of)'), (102, 'Iraq'), (103, 'Ireland'),
    (104, 'Israel'), (105, 'Italy'), (106, 'Jamaica'), (107, 'Japan'), (240, 'Jersey'),
    (108, 'Jordan'), (109, 'Kazakhstan'), (110, 'Kenya'), (111, 'Kiribati'), (113, 'Korea, Republic of'),
    (114, 'Kuwait'), (115, 'Kyrgyzstan'), (116, "Lao People's Democratic Republic"), (117, 'Latvia'), (118, 'Lebanon'),
    (119, 'Lesotho'), (120, 'Liberia'), (121, 'Libyan Arab Jamahiriya'), (122, 'Liechtenstein'), (123, 'Lithuania'),
    (124, 'Luxembourg'), (125, 'Macau'), (127, 'Madagascar'), (128, 'Malawi'), (129, 'Malaysia'),
    (130, 'Maldives'), (131, 'Mali'), (132, 'Malta'), (133, 'Marshall Islands'), (134, 'Martinique'),
    (135, 'Mauritania'), (136, 'Mauritius'), (137, 'Mayotte'), (138, 'Mexico'), (139, 'Micronesia, Federated States of'),
    (140, 'Moldova, Republic of'), (141, 'Monaco'), (142, 'Mongolia'), (242, 'Montenegro'), (143, 'Montserrat'),
    (144, 'Morocco'), (145, 'Mozambique'), (146, 'Myanmar'), (147, 'Namibia'), (148, 'Nauru'),
    (149, 'Nepal'), (150, 'Netherlands'), (151, 'Netherlands Antilles'), (152, 'New Caledonia'), (153, 'New Zealand'),
    (154, 'Nicaragua'), (155, 'Niger'), (156, 'Nigeria'), (157, 'Niue'), (158, 'Norfolk Island'),
    (112, 'North Korea'), (159, 'Northern Mariana Islands'), (160, 'Norway'), (161, 'Oman'), (162, 'Pakistan'),
    (163, 'Palau'), (247, 'Palestinian Territory, Occupied'), (164, 'Panama'), (165, 'Papua New Guinea'), (166, 'Paraguay'),
    (167, 'Peru'), (168, 'Philippines'), (169, 'Pitcairn'), (170, 'Poland'), (171, 'Portugal'),
    (172, 'Puerto Rico'), (173, 'Qatar'), (174, 'Reunion'), (175, 'Romania'), (176, 'Russian Federation'),
    (177, 'Rwanda'), (178, 'Saint Kitts and Nevis'), (179, 'Saint Lucia'), (180, 'Saint Vincent and the Grenadines'),
    (181, 'Samoa'), (182, 'San Marino'), (183, 'Sao Tome and Principe'), (184, 'Saudi Arabia'), (185, 'Senegal'),
    (243, 'Serbia'), (186, 'Seychelles'), (187, 'Sierra Leone'), (188, 'Singapore'), (189, 'Slovak Republic'),
    (190, 'Slovenia'), (191, 'Solomon Islands'), (192, 'Somalia'), (193, 'South Africa'),
    (194, 'South Georgia & South Sandwich Islands'), (248, 'South Sudan'), (195, 'Spain'), (196, 'Sri Lanka'),
    (249, 'St. Barthelemy'), (197, 'St. Helena'), (250, 'St. Martin (French part)'), (198, 'St. Pierre and Miquelon'),
    (199, 'Sudan'), (200, 'Suriname'), (201, 'Svalbard and Jan Mayen Islands'), (202, 'Swaziland'),
    (203, 'Sweden'), (204, 'Switzerland'), (205, 'Syrian Arab Republic'), (206, 'Taiwan'), (207, 'Tajikistan'),
    (208, 'Tanzania, United Republic of'), (209, 'Thailand'), (210, 'Togo'), (211, 'Tokelau'), (212, 'Tonga'),
    (213, 'Trinidad and Tobago'), (214, 'Tunisia'), (215, 'Turkey'), (216, 'Turkmenistan'),
    (217, 'Turks and Caicos Islands'), (218, 'Tuvalu'), (219, 'Uganda'), (220, 'Ukraine'), (221, 'United Arab Emirates'),
    (222, 'United Kingdom'), (223, 'United States'), (224, 'United States Minor Outlying Islands'), (225, 'Uruguay'),
    (226, 'Uzbekistan'), (227, 'Vanuatu'), (228, 'Vatican City State (Holy See)'), (229, 'Venezuela'), (230, 'Viet Nam'),
    (231, 'Virgin Islands (British)'), (232, 'Virgin Islands (U.S.)'), (233, 'Wallis and Futuna Islands'),
    (234, 'Western Sahara'), (235, 'Yemen'), (238, 'Zambia'), (239, 'Zimbabwe'),
)
# (code, label) pairs for the pre-procedure questionnaire checkboxes.
# Label typo fixes: "weak eye contact lenses" -> "wear contact lenses",
# "devise" -> "device" (twice), "bridework" -> "bridgework".
# The X0n codes are what gets submitted/stored, so they are unchanged.
QUESTION_CHOICES = (
    ('X01', 'I have a history of problems with anesthesia'),
    ('X02', 'I smoke'),
    ('X03', 'I have been addicted to recreational drugs'),
    ('X04', 'I wear contact lenses or glasses'),
    ('X05', 'I have an implantable device'),
    ('X06', 'Blood has been donated for this procedure by a family member'),
    ('X07', 'I consume alcohol on a regular basis'),
    ('X08', 'I have teeth and mouth considerations such as loose teeth, caps, bridgework, banding, and dentures'),
    ('X09', 'I have a vascular access device'),
)
# (code, label) pairs for the cardiovascular-risk checkbox group.
# Label typo fixes: "heat beats" -> "heart beats",
# "Angiosplasy" -> "Angioplasty". The Rnn codes are unchanged.
CARDIOVASCULAR_RISK_CHOICES = (
    ('R01', 'Heart Attack'),
    ('R02', 'Angina'),
    ('R03', 'Congestive Heart Failure'),
    ('R04', 'Previous heart surgery'),
    ('R05', 'Heart Murmur'),
    ('R06', 'Mitral Valve Prolapse'),
    ('R07', 'Internal Defibrillator'),
    ('R08', 'Paralysis'),
    ('R09', 'Kidney Disease'),
    ('R10', 'High Blood Pressure'),
    ('R11', 'Fast or irregular heart beats'),
    ('R12', 'Previous Angioplasty'),
    ('R13', 'Valvular Heart Disorder'),
    ('R14', 'Aortic Stenosis'),
    ('R15', 'Pacemaker'),
    ('R16', 'Stroke'),
    ('R17', 'Insulin Dependent Diabetes'),
    ('R18', 'Shortness of Breath'),
)
# (code, label) pairs for the sleep-apnea risk checkbox group.
# Label typo fixes: Pheumonia -> Pneumonia, Diagnsed -> Diagnosed,
# Hepatits -> Hepatitis, "temporomand" -> "temporomandibular",
# Grasping -> Gasping, Desease -> Disease, Reflus -> Reflux,
# Athritis -> Arthritis. The Ann codes (and the constant's name,
# referenced elsewhere in this module) are unchanged.
APNIA_RISK_CHOICES = (
    ('A01', 'Loud Snoring'),
    ('A02', 'Choking while asleep'),
    ('A03', 'Emphysema'),
    ('A04', 'Pneumonia'),
    ('A05', 'Bleeding Disorder'),
    ('A06', 'Aids or HIV'),
    ('A07', 'Jaundice'),
    ('A08', 'Seizure Disorder'),
    ('A09', 'Thyroid Trouble'),
    ('A10', 'Joint Replacement'),
    ('A11', 'Prostate problems'),
    ('A12', 'Downs Syndrome'),
    ('A13', 'Excessive Daytime Sleepiness'),
    ('A14', 'Diagnosed Sleep Apnea'),
    ('A15', 'Asthma'),
    ('A16', 'TB'),
    ('A17', 'Bruise Easy'),
    ('A18', 'Hepatitis'),
    ('A19', 'Hiatal Hernia'),
    ('A20', 'Migraine Headaches'),
    ('A21', 'TMJ (temporomandibular joint problem)'),
    ('A22', 'Kidney Problems'),
    ('A23', 'Steroid Use'),
    ('A24', 'Witnessed Gasping'),
    ('A25', 'Bronchitis'),
    ('A26', 'Wheezing'),
    ('A27', 'Cystic Fibrosis'),
    ('A28', 'Anemia'),
    ('A29', 'Liver Disease'),
    ('A30', 'Reflux'),
    ('A31', 'Cancer'),
    ('A32', 'Arthritis'),
    ('A33', 'Bladder Problems'),
    ('A34', 'Cortisone Use'),
)
class LoginForm(forms.Form):
    """Demo login form: e-mail, password and a "keep me logged in" flag.

    ``clean()`` demonstrates a form-level validation error by rejecting
    one hard-coded address.
    """
    email = forms.EmailField()
    password = forms.CharField(widget=forms.PasswordInput)
    keep_logged = forms.BooleanField(required=False, label="Keep me logged in")
    title = "Login form"
    template = Template("""
    {% form %}
    {% part form.email prefix %}<i class="mdi-communication-email prefix"></i>{% endpart %}
    {% part form.password prefix %}<i class="mdi-action-lock prefix"></i>{% endpart %}
    {% part form.keep_logged add_group_class %}right-align{% endpart %}
    {% endform %}
    """)
    buttons = Template("""
        <button class="waves-effect waves-teal btn-flat">Register</button>
        <button class="waves-effect waves-light btn" type="submit">Login</button>
    """)

    def clean(self):
        """Run the normal field cleaning, then reject the blocked demo user."""
        data = super(LoginForm, self).clean()
        if data.get('email') == 'john@doe.com':
            raise forms.ValidationError('John, come on. You are blocked.')
class RegistrationForm(forms.Form):
    """Demo sign-up form showing a two-column row and a nested fieldset.

    Fix: the fieldset legend read 'Pesonal details'; corrected to
    'Personal details' (display text only, no field/data change).
    """
    username = forms.CharField()
    email = forms.EmailField(label="Email Address")
    password = forms.CharField(widget=forms.PasswordInput)
    password_confirm = forms.CharField(widget=forms.PasswordInput, label="Confirm password")
    first_name = forms.CharField(required=False)
    last_name = forms.CharField(required=False)
    gender = forms.ChoiceField(choices=((None, ''), ('F', 'Female'), ('M', 'Male'), ('O', 'Other')))
    receive_news = forms.BooleanField(required=False, label='I want to receive news and special offers')
    agree_toc = forms.BooleanField(required=True, label='I agree with the Terms and Conditions')
    # Layout controls rendering only; field order above controls validation order.
    layout = Layout('username', 'email',
                    Row('password', 'password_confirm'),
                    Fieldset('Personal details',
                             Row('first_name', 'last_name'),
                             'gender', 'receive_news', 'agree_toc'))
    template = Template("""
    {% form %}
    {% part form.username prefix %}<i class="mdi-action-account-box prefix"></i>{% endpart %}
    {% part form.email prefix %}<i class="mdi-communication-email prefix"></i>{% endpart %}
    {% part form.password prefix %}<i class="mdi-action-lock-open prefix"></i>{% endpart %}
    {% endform %}
    """)
    buttons = Template("""
        <button class="waves-effect waves-light btn" type="submit">Submit</button>
    """)
    title = "Registration form"
class ContactForm(forms.Form):
    """Demo contact form: icon-prefixed inputs plus a 'send me a copy' flag."""
    name = forms.CharField()
    email = forms.EmailField()
    subject = forms.CharField()
    message = forms.CharField(widget=forms.Textarea)
    send_copy = forms.BooleanField(required=False,
                                   label="Send a copy to my e-mail address")
    # Per-field template overrides add Material Design icon prefixes.
    template = Template("""
    {% form %}
    {% part form.name prefix %}<i class="mdi-action-account-box prefix"></i>{% endpart %}
    {% part form.email prefix %}<i class="mdi-communication-email prefix"></i>{% endpart %}
    {% part form.subject prefix %}<i class="mdi-action-announcement prefix"></i>{% endpart %}
    {% part form.message prefix %}<i class="mdi-communication-message prefix"></i>{% endpart %}
    {% part form.send_copy add_group_class %}right-align{% endpart %}
    {% endform %}
    """)
    # Rendering order/grouping; validation order still follows field declarations.
    layout = Layout(Row('name', 'email'), 'subject', 'message', 'send_copy')
    buttons = Template("""
        <button class="btn btn-primary pull-right" type="submit">Send message</button>
    """)
    title = "Contact form"
class OrderForm(forms.Form):
    """Demo service-order form with choice rows, date range and a file upload."""
    name = forms.CharField()
    company = forms.CharField()
    email = forms.EmailField()
    phone = forms.CharField()
    # The None-valued first choice doubles as the select's placeholder text.
    interest = forms.ChoiceField(choices=((None, 'Interested in'), ('D', 'Design'), ('C', 'Development'),
                                          ('I', 'Illustration'), ('B', 'Branding'), ('V', 'Video')))
    budget = forms.ChoiceField(choices=((None, 'Budget'), ('S', 'Less than $5000'), ('M', '$5000-$10000'),
                                        ('L', '$10000-$20000'), ('XL', 'More than $20000')))
    start_date = forms.DateField(label="Expected start date")
    finish_date = forms.DateField(label="Expected finish date")
    attachment = forms.FileField(label="Include some file...")
    message = forms.CharField(widget=forms.Textarea)
    layout = Layout('name', 'company', 'email', 'phone',
                    Row('interest', 'budget'),
                    Row('start_date', 'finish_date'),
                    'attachment', 'message')
    template = Template("""
    {% form %}
    {% part form.name prefix %}<i class="mdi-action-account-box prefix"></i>{% endpart %}
    {% part form.company prefix %}<i class="mdi-communication-business prefix"></i>{% endpart %}
    {% part form.email prefix %}<i class="mdi-communication-email prefix"></i>{% endpart %}
    {% part form.phone prefix %}<i class="mdi-communication-call prefix"></i>{% endpart %}
    {% endform %}
    """)
    buttons = Template("""
        <button class="btn btn-primary pull-right" type="submit">Submit request</button>
    """)
    title = "Order services"
class CheckoutForm(forms.Form):
    """Demo checkout form: contact/address fields plus a card-details fieldset.

    Fix: the ``css`` string was missing the closing ``}`` of its
    ``@media`` block, producing invalid CSS.
    """
    first_name = forms.CharField()
    last_name = forms.CharField()
    email = forms.EmailField()
    phone = forms.CharField()
    country = forms.ChoiceField(choices=COUNTRY_CHOICES)
    city = forms.CharField()
    post_code = forms.IntegerField()
    address = forms.CharField()
    additional_info = forms.CharField(widget=forms.Textarea)
    card_type = forms.ChoiceField(choices=(('V', 'Visa'), ('M', 'MasterCard'), ('P', 'Paypal')), widget=forms.RadioSelect)
    card_holder = forms.CharField(label="Name on card")
    card_number = forms.CharField(label="Card number")
    card_ccv2 = forms.IntegerField(label="CVV2")
    card_exp_month = forms.ChoiceField(choices=((1, 'January'), (2, 'February'), (3, 'March'),
                                                (4, 'April'), (5, 'May'), (6, 'June'),
                                                (7, 'July'), (8, 'August'), (9, 'September'),
                                                (10, 'October'), (11, 'November'), (12, 'December')))
    card_exp_year = forms.IntegerField(label="Year")
    layout = Layout(
        Row('first_name', 'last_name'),
        Row('email', 'phone'),
        Row(Span5('country'), Span5('city'), Span2('post_code')),
        'address',
        'additional_info',
        Fieldset('Card Details',
                 Row(Column('card_type', span_columns=4),
                     Column('card_holder',
                            Row(Span10('card_number'), Span2('card_ccv2')),
                            Row('card_exp_month', 'card_exp_year'),
                            span_columns=8))))
    template = Template("""
    {% form %}
    {% part form.first_name prefix %}<i class="mdi-action-account-box prefix"></i>{% endpart %}
    {% part form.last_name prefix %}<i class="mdi-action-account-box prefix"></i>{% endpart %}
    {% part form.email prefix %}<i class="mdi-communication-email prefix"></i>{% endpart %}
    {% part form.phone prefix %}<i class="mdi-communication-call prefix"></i>{% endpart %}
    {% part form.card_type label %}{% endpart %}
    {% endform %}
    """)
    buttons = Template("""
        <button class="btn btn-primary pull-right" type="submit">Submit request</button>
    """)
    title = "Checkout form"
    # Extra page CSS; offsets the radio group on wide screens.
    css = """
    @media only screen and (min-width : 601px) {
        #id_card_type_container {
            margin-top: 40px;
            margin-left: 50px;
        }
    }
    """
class CommentForm(forms.Form):
    """Demo blog-comment form: author info plus the comment body."""
    name = forms.CharField()
    email = forms.EmailField()
    website = forms.URLField()
    comment = forms.CharField(widget=forms.Textarea)
    layout = Layout(Row('name', 'email'),
                    'website', 'comment')
    # Per-field template overrides add Material Design icon prefixes.
    template = Template("""
    {% form %}
    {% part form.name prefix %}<i class="mdi-action-account-box prefix"></i>{% endpart %}
    {% part form.email prefix %}<i class="mdi-communication-email prefix"></i>{% endpart %}
    {% part form.website prefix %}<i class="mdi-action-wallet-travel prefix"></i>{% endpart %}
    {% part form.comment prefix %}<i class="mdi-communication-chat prefix"></i>{% endpart %}
    {% endform %}
    """)
    buttons = Template("""
        <button class="btn btn-primary pull-right" type="submit">Add comment</button>
    """)
    title = "Comment form"
class BankForm(forms.Form):
    """Large demo form: a bank-account application with many fieldsets.

    The bare triple-quoted strings between field groups are no-op section
    markers kept from the original source.
    NOTE(review): several field names are misspelled ('flat_bulding',
    'montly_income', 'martial_status'); they are left as-is because field
    names are the submitted POST keys and renaming would break callers.
    """
    branch_name = forms.CharField()
    """ Personal Details """
    person_title = forms.ChoiceField(choices=(('Mr', 'Mr.'), ('Mrs.', 'Mrs.'), ('Ms.', 'Ms.')), label='Title')
    full_name = forms.CharField()
    date_of_birth = forms.DateField()
    email = forms.EmailField()
    parent_name = forms.CharField(label='In case of a minor please provide details')
    nationality = forms.ChoiceField(choices=COUNTRY_CHOICES)
    mobile_no = forms.CharField()
    existing_bank_account = forms.CharField()
    partner_name = forms.CharField(label='Name of father/spouse')
    """ Residential address """
    flat_bulding = forms.CharField(label='Flat no. and bldg. name')
    road_no = forms.CharField(label='Road no./name')
    area_and_landmark = forms.CharField(label='Area and landmark')
    telephone_residence = forms.CharField()
    city = forms.CharField()
    office = forms.CharField()
    fax = forms.CharField()
    pin_code = forms.CharField()
    """ Mailing Address """
    mailing_company_details = forms.CharField(label="Company name and department/ Flat no. and bldg. name")
    mailing_road_no = forms.CharField(label='Road no./name')
    mailing_area_and_landmark = forms.CharField(label='Area and landmark')
    mailing_city = forms.CharField(label='City')
    mailing_mobile = forms.CharField(label='Mobile No.')
    mailing_telephone_residence = forms.CharField(label='Telephone Residence')
    mailing_office = forms.CharField(label='Office')
    mailing_fax = forms.CharField(label='Fax')
    mailing_pin_code = forms.CharField(label='Pin Code')
    mailing_email = forms.EmailField(label='E-mail')
    """ Details of Introduction by Existing Customer """
    introducer_name = forms.CharField(label='Customer Name')
    introducer_account_no = forms.CharField(label='Account No.')
    introducer_signature = forms.CharField(label="Introducer's signature")
    """ Account Details """
    account_type = forms.ChoiceField(
        choices=(('S', 'Savings'), ('C', 'Current'), ('F', 'Fixed deposits')),
        label='Choice of account',
        widget=forms.RadioSelect)
    account_mode = forms.ChoiceField(
        choices=(('CS', 'Cash'), ('CQ', 'Cheque'), ('NF', 'NEFT')),
        label='Mode of funding',
        widget=forms.RadioSelect)
    account_amount = forms.FloatField(label='Amount')
    """ Details of Fixed Deposit """
    deposit_type = forms.ChoiceField(
        choices=(('O', 'Ordinary'), ('C', 'Cumulative')),
        label='Types of deposit',
        widget=forms.RadioSelect)
    deposit_mode = forms.ChoiceField(
        choices=(('CS', 'Cash'), ('CQ', 'Cheque'), ('NF', 'NEFT')),
        label='Mode of funding',
        widget=forms.RadioSelect)
    deposit_amount = forms.FloatField(label='Amount')
    deposit_no = forms.CharField(label='No. of deposits')
    deposit_individual_amount = forms.FloatField(label='Individual Deposit Amount')
    """ Personal Details """
    occupation = forms.ChoiceField(
        choices=(('NE', 'Non-executive'), ('HW', 'Housewife'), ('RT', 'Retired'),
                 ('ST', 'Student'), ('OT', 'Other'), ('UN', 'Unemployed')),
        widget=forms.RadioSelect)
    job_title = forms.CharField()
    department = forms.CharField()
    nature_of_business = forms.CharField()
    education = forms.ChoiceField(
        choices=(('UG', 'Under graduate'), ('GR', 'Graduate'), ('OT', 'Others')),
        widget=forms.RadioSelect)
    montly_income = forms.ChoiceField(
        choices=(('000', 'Zero Income'), ('L10', 'Less than $10,000'), ('G10', '$10,000+')),
        widget=forms.RadioSelect)
    martial_status = forms.ChoiceField(
        choices=(('M', 'Married'), ('S', 'Single')),
        widget=forms.RadioSelect)
    spouse_name = forms.CharField()
    """ Other existing bank accounts, if any """
    other_account1 = forms.CharField(label='Name of the Bank / branch')
    other_account2 = forms.CharField(label='Name of the Bank / branch')
    """ Reason for Account opening """
    reason = forms.CharField(label="Please specify", widget=forms.Textarea)
    """ Terms And Conditions """
    terms_accepted = forms.BooleanField(
        label="I/We confirm having read and understood the account rules of The Banking Corporation Limited"
        " ('the Bank'), and hereby agree to be bound by the terms and conditions and amendments governing the"
        " account(s) issued by the Bank from time-to-time.")
    # Rendering structure only; validation order follows the declarations above.
    layout = Layout(
        Fieldset("Please open an account at",
                 'branch_name'),
        Fieldset("Personal Details (Sole/First Accountholder/Minor)",
                 Row(Span2('person_title'), Span10('full_name')),
                 Row(Column('date_of_birth',
                            'email',
                            'parent_name'),
                     Column('nationality',
                            Row('mobile_no', 'existing_bank_account'),
                            'partner_name'))),
        Fieldset('Residential address',
                 Row('flat_bulding', 'road_no'),
                 Row(Span10('area_and_landmark'), Span2('city')),
                 Row('telephone_residence', 'office', 'fax', 'pin_code')),
        Fieldset("Mailing Address (If different from the First Accountholder's address)",
                 'mailing_company_details',
                 Row('mailing_road_no', 'mailing_area_and_landmark', 'mailing_city', 'mailing_mobile'),
                 Row('mailing_telephone_residence', 'mailing_office', 'mailing_fax', 'mailing_pin_code'),
                 'mailing_email'),
        Fieldset("Details of Introduction by Existing Customer (If applicable)",
                 Row('introducer_name', 'introducer_account_no'),
                 'introducer_signature'),
        Fieldset("Account Details",
                 Row('account_type', 'account_mode'),
                 'account_amount'),
        Fieldset("Details of Fixed Deposit",
                 Row('deposit_type', 'deposit_mode'),
                 Row(Span6('deposit_amount'), Span3('deposit_no'), Span3('deposit_individual_amount'))),
        Fieldset("Personal Details",
                 Row('occupation', 'education', 'montly_income'),
                 'job_title',
                 Row('department', 'nature_of_business'),
                 Row('martial_status', 'spouse_name')),
        Fieldset("Other existing bank accounts, if any",
                 Row('other_account1', 'other_account2')),
        Fieldset("Reason for Account opening",
                 'reason'),
        Fieldset("Terms And Conditions",
                 'terms_accepted')
    )
    # Render the radio groups inline instead of stacked.
    template = Template("""
    {% form %}
    {% part form.account_type add_group_class %}inline{% endpart %}
    {% part form.account_mode add_group_class %}inline{% endpart %}
    {% part form.deposit_type add_group_class %}inline{% endpart %}
    {% part form.deposit_mode add_group_class %}inline{% endpart %}
    {% part form.martial_status add_group_class %}inline{% endpart %}
    {% endform %}
    """)
    buttons = Template("""
        <button class="btn btn-primary pull-right" type="submit">Save application</button>
    """)
    title = "Personal Bank Account Initial Application"
    css = """
    .section h5 {
        font-size: 1.2rem;
        padding-bottom: 0.2rem;
        border-bottom: 3px solid black;
    }
    """
    blockclass = "col s12 m12 l9 offset-l1"
blockclass = "col s12 m12 l9 offset-l1"
class WizardForm1(forms.Form):
    """First step of the demo form wizard: message subject and sender."""
    subject = forms.CharField(max_length=100)
    sender = forms.EmailField()
class WizardForm2(forms.Form):
    """Second step of the demo form wizard: the message body."""
    message = forms.CharField(widget=forms.Textarea)
class HospitalRegistrationForm(forms.Form):
    """Demo patient-registration form with checkbox groups and a formset.

    Fixes (display text only, no field/data change): 'Predictores' ->
    'Predictors', 'sleep Apnia' -> 'Sleep Apnea', 'Emergence Numbers' ->
    'Emergency Numbers'.
    NOTE(review): the nested class is probably meant to be
    'EmergencyContactForm' and 'apnia_risks' meant 'apnea'; both names
    are kept because external code may reference them.
    """
    class EmergencyContractForm(forms.Form):
        # One emergency-contact row; rendered twice via formset_factory(extra=2).
        name = forms.CharField()
        relationship = forms.CharField()
        daytime_phone = forms.CharField()
        evening_phone = forms.CharField(required=False)
    registration_date = forms.DateField(initial=datetime.date.today)
    full_name = forms.CharField()
    birth_date = forms.DateField()
    height = forms.IntegerField(help_text='cm')
    weight = forms.IntegerField(help_text='kg')
    primary_care_physician = forms.CharField()
    date_of_last_appointment = forms.DateField()
    home_phone = forms.CharField()
    work_phone = forms.CharField(required=False)
    procedural_questions = forms.MultipleChoiceField(
        widget=forms.CheckboxSelectMultiple, required=False,
        choices=QUESTION_CHOICES)
    cardiovascular_risks = forms.MultipleChoiceField(
        widget=forms.CheckboxSelectMultiple, required=False,
        choices=CARDIOVASCULAR_RISK_CHOICES)
    apnia_risks = forms.MultipleChoiceField(
        widget=forms.CheckboxSelectMultiple, required=False,
        choices=APNIA_RISK_CHOICES)
    emergency_numbers = FormSetField(formset_factory(EmergencyContractForm, extra=2, can_delete=True))
    layout = Layout(Row(Column('full_name', 'birth_date',
                               Row('height', 'weight'), span_columns=3), 'registration_date'),
                    Row(Span3('primary_care_physician'), 'date_of_last_appointment'),
                    Row('home_phone', 'work_phone'),
                    Fieldset('Procedural Questions', 'procedural_questions'),
                    Fieldset('Clinical Predictors of Cardiovascular Risk', 'cardiovascular_risks'),
                    Fieldset('Clinical Predictors of Sleep Apnea Risk', 'apnia_risks'),
                    Fieldset('Emergency Numbers', 'emergency_numbers'))
    template = Template("""
    {% form %}
    {% part form.registration_date prefix %}<i class="mdi-editor-insert-invitation prefix"></i>{% endpart %}
    {% part form.date_of_last_appointment prefix %}<i class="mdi-editor-insert-invitation prefix"></i>{% endpart %}
    {% part form.primary_care_physician prefix %}<i class="mdi-action-face-unlock prefix"></i>{% endpart %}
    {% part form.home_phone prefix %}<i class="mdi-communication-call prefix"></i>{% endpart %}
    {% part form.work_phone prefix %}<i class="mdi-communication-call prefix"></i>{% endpart %}
    {% part form.procedural_questions label %}{% endpart %}
    {% part form.cardiovascular_risks label %}{% endpart %}
    {% part form.cardiovascular_risks columns %}2{% endpart %}
    {% part form.apnia_risks label %}{% endpart %}
    {% part form.apnia_risks columns %}3{% endpart %}
    {% part form.emergency_numbers label %}{% endpart %}
    {% endform %}
    """)
    buttons = Template("""
        <button class="btn btn-primary pull-right" type="submit">Registration</button>
    """)
    title = "Hospital registration form"
    css = """
    .section h5 {
        font-size: 1.2rem;
        padding-bottom: 0.2rem;
        border-bottom: 3px solid black;
    }
    """
    blockclass = "col s12 m12 l9 offset-l1"
| |
"""Tests for the Abode config flow."""
from unittest.mock import patch
from abodepy.exceptions import AbodeAuthenticationException
from abodepy.helpers.errors import MFA_CODE_REQUIRED
from homeassistant import data_entry_flow
from homeassistant.components.abode import config_flow
from homeassistant.components.abode.const import DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.const import (
CONF_PASSWORD,
CONF_USERNAME,
HTTP_BAD_REQUEST,
HTTP_INTERNAL_SERVER_ERROR,
)
from tests.common import MockConfigEntry
# Abode-specific option key used in config entry data (not a core HA const).
CONF_POLLING = "polling"
async def test_show_form(hass):
    """Test that the form is served with no input."""
    flow = config_flow.AbodeFlowHandler()
    flow.hass = hass
    # With no user input the handler should re-render the initial form.
    result = await flow.async_step_user(user_input=None)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"
async def test_one_config_allowed(hass):
    """Test that only one Abode configuration is allowed."""
    flow = config_flow.AbodeFlowHandler()
    flow.hass = hass
    # Pre-seed an existing entry so every further attempt must abort.
    MockConfigEntry(
        domain=DOMAIN,
        data={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"},
    ).add_to_hass(hass)
    # The interactive (user) flow aborts immediately.
    step_user_result = await flow.async_step_user()
    assert step_user_result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert step_user_result["reason"] == "single_instance_allowed"
    conf = {
        CONF_USERNAME: "user@email.com",
        CONF_PASSWORD: "password",
        CONF_POLLING: False,
    }
    # The YAML import flow aborts for the same reason.
    import_config_result = await flow.async_step_import(conf)
    assert import_config_result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert import_config_result["reason"] == "single_instance_allowed"
async def test_invalid_credentials(hass):
    """Test that invalid credentials throws an error."""
    conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}
    flow = config_flow.AbodeFlowHandler()
    flow.hass = hass
    # An HTTP 400 from the abodepy client is reported as bad credentials.
    with patch(
        "homeassistant.components.abode.config_flow.Abode",
        side_effect=AbodeAuthenticationException((HTTP_BAD_REQUEST, "auth error")),
    ):
        result = await flow.async_step_user(user_input=conf)
        assert result["errors"] == {"base": "invalid_auth"}
async def test_connection_error(hass):
    """Test other than invalid credentials throws an error."""
    conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}
    flow = config_flow.AbodeFlowHandler()
    flow.hass = hass
    # A non-auth failure (here HTTP 500) maps to a connection error.
    with patch(
        "homeassistant.components.abode.config_flow.Abode",
        side_effect=AbodeAuthenticationException(
            (HTTP_INTERNAL_SERVER_ERROR, "connection error")
        ),
    ):
        result = await flow.async_step_user(user_input=conf)
        assert result["errors"] == {"base": "cannot_connect"}
async def test_step_import(hass):
    """Test that the import step works."""
    conf = {
        CONF_USERNAME: "user@email.com",
        CONF_PASSWORD: "password",
        CONF_POLLING: False,
    }
    # Patch out the real Abode client so no network I/O happens.
    with patch("homeassistant.components.abode.config_flow.Abode"), patch(
        "abodepy.UTILS"
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
        )
        # A YAML import creates an entry titled with the username.
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert result["title"] == "user@email.com"
        assert result["data"] == {
            CONF_USERNAME: "user@email.com",
            CONF_PASSWORD: "password",
            CONF_POLLING: False,
        }
async def test_step_user(hass):
    """Test that the user step works."""
    conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}
    # Patch out the real Abode client so no network I/O happens.
    with patch("homeassistant.components.abode.config_flow.Abode"), patch(
        "abodepy.UTILS"
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_USER}, data=conf
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert result["title"] == "user@email.com"
        # Polling was not supplied, so the entry stores the False default.
        assert result["data"] == {
            CONF_USERNAME: "user@email.com",
            CONF_PASSWORD: "password",
            CONF_POLLING: False,
        }
async def test_step_mfa(hass):
    """Test that the MFA step works."""
    conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}
    # Stage 1: login raises MFA_CODE_REQUIRED, so the mfa form is shown.
    with patch(
        "homeassistant.components.abode.config_flow.Abode",
        side_effect=AbodeAuthenticationException(MFA_CODE_REQUIRED),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_USER}, data=conf
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["step_id"] == "mfa"
    # Stage 2: a rejected code re-renders the form with an error.
    with patch(
        "homeassistant.components.abode.config_flow.Abode",
        side_effect=AbodeAuthenticationException((HTTP_BAD_REQUEST, "invalid mfa")),
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input={"mfa_code": "123456"}
        )
        assert result["errors"] == {"base": "invalid_mfa_code"}
    # Stage 3: a successful login after MFA creates the entry.
    with patch("homeassistant.components.abode.config_flow.Abode"), patch(
        "abodepy.UTILS"
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input={"mfa_code": "123456"}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert result["title"] == "user@email.com"
        assert result["data"] == {
            CONF_USERNAME: "user@email.com",
            CONF_PASSWORD: "password",
            CONF_POLLING: False,
        }
async def test_step_reauth(hass):
    """Test the reauth flow."""
    conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}
    # Reauth requires an existing entry matching the unique id.
    MockConfigEntry(
        domain=DOMAIN,
        unique_id="user@email.com",
        data=conf,
    ).add_to_hass(hass)
    with patch("homeassistant.components.abode.config_flow.Abode"), patch(
        "abodepy.UTILS"
    ):
        # Starting a reauth flow shows the confirmation form first.
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": "reauth"},
            data=conf,
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["step_id"] == "reauth_confirm"
        # Confirming updates the entry (no duplicate) and aborts successfully.
        with patch("homeassistant.config_entries.ConfigEntries.async_reload"):
            result = await hass.config_entries.flow.async_configure(
                result["flow_id"],
                user_input=conf,
            )
        assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
        assert result["reason"] == "reauth_successful"
    assert len(hass.config_entries.async_entries()) == 1
| |
from pandac.PandaModules import *
from toontown.toonbase.ToonBaseGlobal import *
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import *
from direct.task import Task
from math import *
from direct.distributed.ClockDelta import *
from toontown.golf import GolfGlobals
from pandac.PandaModules import LineSegs
# Presumably the delay (seconds) before the scoreboard auto-hides;
# its usage is outside this excerpt -- TODO confirm.
AUTO_HIDE_TIMEOUT = 3
class GolfScoreBoard:
    """In-game golf scorecard UI (DirectGui) built for a golf course object."""
    # Shared DirectNotify logging category for all scoreboard instances.
    notify = directNotify.newCategory('GolfScoreBoard')
def __init__(self, golfCourse):
self.golfCourse = golfCourse
self.numPlayas = len(golfCourse.avIdList)
self.avIdList = golfCourse.avIdList
self.playaTags = []
self.scoreTags = []
self.totalTags = []
self.scoreLabels = []
self.holeLabels = []
self.parLabels = []
self.numExited = 0
self.setup()
def setup(self):
self.scoreboard = DirectFrame(parent=aspect2d, relief=None, geom=DGG.getDefaultDialogGeom(), geom_color=ToontownGlobals.GlobalDialogColor, geom_scale=(1.9, 1, 1.05), pos=(0, 0, 0.375))
self.lines = LineSegs()
self.lines.setColor(0, 0, 0, 1)
self.lines.setThickness(2)
guiModel = loader.loadModel('phase_6/models/golf/golf_gui')
highlight = loader.loadModel('phase_6/models/golf/headPanel')
self.maximizeB = DirectButton(parent=base.a2dBottomRight, pos=(-0.15, 0, 0.15), relief=None, state=DGG.NORMAL, image=(guiModel.find('**/score_card_icon'), guiModel.find('**/score_card_icon_rollover'), guiModel.find('**/score_card_icon_rollover')), image_scale=(0.2, 1, 0.2), command=self.showBoard)
self.vertOffset = 0.13
self.playaTop = 0.12
horzOffset = 0.12
holeTop = 0.3
self.vCenter = 0.025
totScore = 0
totPar = 0
self.lineVStart = -0.465
self.lineHStart = 0.17
self.lineHorOffset = 0.13
self.lineVertOffset = 0.125
self.lineVCenter = 0.025
buttons = loader.loadModel('phase_3/models/gui/dialog_box_buttons_gui')
self.minimizeB = DirectButton(parent=self.scoreboard, pos=(0, 0, self.lineHStart - 0.59), relief=None, state=DGG.NORMAL, image=(buttons.find('**/CloseBtn_UP'), buttons.find('**/CloseBtn_DN'), buttons.find('**/CloseBtn_Rllvr')), image_scale=(1, 1, 1), command=self.hideBoard, extraArgs=[None])
self.exitCourseB = DirectButton(parent=self.scoreboard, pos=(0, 0, self.lineHStart - 0.59), relief=None, state=DGG.NORMAL, image=(buttons.find('**/CloseBtn_UP'), buttons.find('**/CloseBtn_DN'), buttons.find('**/CloseBtn_Rllvr')), image_scale=(1, 1, 1), text=TTLocalizer.GolfExitCourse, text_scale=0.04, text_pos=TTLocalizer.GSBexitCourseBPos, command=self.exitCourse)
self.exitCourseB.hide()
self.highlightCur = DirectLabel(parent=self.scoreboard, relief=None, pos=(-0.003, 0, 0.038), image=highlight, image_scale=(1.82, 1, 0.135))
self.titleBar = DirectLabel(parent=self.scoreboard, relief=None, pos=(-0.003, 0, 0.166), color=(0.7, 0.7, 0.7, 0.3), image=highlight, image_scale=(1.82, 1, 0.195))
self.titleBar.show()
self.highlightCur.show()
buttons.removeNode()
guiModel.removeNode()
title = GolfGlobals.getCourseName(self.golfCourse.courseId) + ' - ' + GolfGlobals.getHoleName(self.golfCourse.holeIds[self.golfCourse.curHoleIndex])
self.titleLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(0, 0, holeTop + 0.1), text_align=TextNode.ACenter, text=title, text_scale=TTLocalizer.GSBtitleLabel, text_font=ToontownGlobals.getSignFont(), text_fg=(0, 0.5, 0.125, 1))
self.playaLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(self.lineVStart - 0.23, 0, holeTop), text_align=TextNode.ACenter, text=TTLocalizer.GolfHole, text_font=ToontownGlobals.getMinnieFont(), text_scale=0.05)
for holeLIndex in xrange(self.golfCourse.numHoles):
holeLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(self.lineVStart + 0.055 + horzOffset * holeLIndex, 0, holeTop), text_align=TextNode.ACenter, text='%s' % (holeLIndex + 1), text_scale=0.05)
self.holeLabels.append(holeLabel)
self.totalLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(self.lineVStart + 0.1 + horzOffset * 9.5, 0, holeTop), text_align=TextNode.ACenter, text=TTLocalizer.GolfTotal, text_font=ToontownGlobals.getMinnieFont(), text_scale=0.05)
self.parTitleLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(self.lineVStart - 0.23, 0, holeTop - 0.1), text_align=TextNode.ACenter, text=TTLocalizer.GolfPar, text_font=ToontownGlobals.getMinnieFont(), text_scale=0.05)
for parHoleIndex in xrange(self.golfCourse.numHoles):
parLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(self.lineVStart + 0.055 + horzOffset * parHoleIndex, 0, holeTop - 0.1), text_align=TextNode.ACenter, text='%s' % GolfGlobals.HoleInfo[self.golfCourse.holeIds[parHoleIndex]]['par'], text_scale=0.05, text_wordwrap=10)
totPar = totPar + GolfGlobals.HoleInfo[self.golfCourse.holeIds[parHoleIndex]]['par']
self.parLabels.append(parLabel)
parLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(self.lineVStart + 0.1 + horzOffset * 9.5, 0, holeTop - 0.1), text_align=TextNode.ACenter, text='%s' % totPar, text_scale=0.05, text_wordwrap=10)
self.parLabels.append(parLabel)
vert = 0.0
self.numPlayas = len(self.golfCourse.avIdList)
for playaIndex in xrange(self.numPlayas):
name = TTLocalizer.GolfUnknownPlayer
av = base.cr.doId2do.get(self.golfCourse.avIdList[playaIndex])
if av:
name = av.getName()
playaLabel = DirectLabel(parent=self.scoreboard, relief=None, text_align=TextNode.ACenter, text=name, text_scale=0.05, text_wordwrap=9)
self.playaTags.append(playaLabel)
textN = playaLabel.component(playaLabel.components()[0])
if type(textN) == OnscreenText:
try:
if textN.textNode.getWordwrappedWtext() != name:
vert = self.playaTop - self.vertOffset * playaIndex
else:
vert = self.playaTop - self.vertOffset * playaIndex - self.vCenter
except:
vert = self.playaTop - self.vertOffset * playaIndex
self.playaTags[playaIndex].setPos(self.lineVStart - 0.23, 0, vert)
self.notify.debug('self.text height = %f' % self.playaTags[playaIndex].getHeight())
holeIndex = 0
for holeIndex in xrange(self.golfCourse.numHoles):
holeLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(self.lineVStart + 0.055 + horzOffset * holeIndex, 0, self.playaTop - self.vertOffset * playaIndex - self.vCenter), text_align=TextNode.ACenter, text='-', text_scale=0.05, text_wordwrap=10)
self.scoreTags.append(holeLabel)
holeLabel = DirectLabel(parent=self.scoreboard, relief=None, pos=(self.lineVStart + 0.1 + horzOffset * 9.5, 0, self.playaTop - self.vertOffset * playaIndex - self.vCenter), text_align=TextNode.ACenter, text='-', text_scale=0.05, text_wordwrap=10)
self.totalTags.append(holeLabel)
self.lines.moveTo(self.lineVStart - 0.45, 0, self.lineHStart + 0.19)
self.lines.drawTo(self.lineVStart + 11 * self.lineVertOffset, 0, self.lineHStart + 0.19)
self.lines.moveTo(self.lineVStart - 0.45, 0, self.lineHStart + 0.09)
self.lines.drawTo(self.lineVStart + 11 * self.lineVertOffset, 0, self.lineHStart + 0.09)
self.lines.moveTo(self.lineVStart - 0.45, 0, self.lineHStart)
self.lines.drawTo(self.lineVStart + 11 * self.lineVertOffset, 0, self.lineHStart)
self.lines.moveTo(self.lineVStart - 0.45, 0, self.lineHStart + 0.19)
self.lines.drawTo(self.lineVStart - 0.45, 0, self.lineHStart - 4 * 0.13)
self.lines.moveTo(self.lineVStart, 0, self.lineHStart + 0.19)
self.lines.drawTo(self.lineVStart, 0, self.lineHStart - 4 * 0.13)
for x in xrange(4):
self.lines.moveTo(self.lineVStart - 0.45, 0, self.lineHStart - (x + 1) * self.lineHorOffset)
self.lines.drawTo(self.lineVStart + 11 * self.lineVertOffset + 0.005, 0, self.lineHStart - (x + 1) * self.lineHorOffset)
for y in xrange(10):
self.lines.moveTo(self.lineVStart + y * self.lineVertOffset, 0, self.lineHStart + 0.19)
self.lines.drawTo(self.lineVStart + y * self.lineVertOffset, 0, self.lineHStart - 4 * 0.13)
self.lines.moveTo(self.lineVStart + 11 * self.lineVertOffset, 0, self.lineHStart + 0.19)
self.lines.drawTo(self.lineVStart + 11 * self.lineVertOffset, 0, self.lineHStart - 4 * 0.13)
self.scoreboard.attachNewNode(self.lines.create())
self.hide()
return
def getScoreLabel(self, avIdorIndex, holeNum):
index = None
if avIdorIndex < 100:
index = avIdorIndex
else:
for playaIndex in xrange(self.numPlayas):
if self.golfCourse.avIdList[playaIndex] == avIdorIndex:
index = playaIndex
return self.scoreTags[index * self.golfCourse.numHoles + holeNum]
def update(self):
self.showBoard()
taskMgr.doMethodLater(AUTO_HIDE_TIMEOUT, self.hideBoard, 'hide score board')
def hideBoard(self, task):
self.hide()
def hide(self):
self.scoreboard.hide()
self.maximizeB.show()
def showBoardFinal(self, task = None):
self.exitCourseB.show()
self.minimizeB.hide()
self.showBoard()
def showBoard(self, task = None):
scoreDict = self.golfCourse.scores
x = 0
currentGolfer = self.golfCourse.getCurGolfer()
for playaIndex in xrange(self.numPlayas):
if self.golfCourse.isGameDone():
self.playaTags[playaIndex].setColor(0, 0, 0, 1)
elif currentGolfer == self.golfCourse.avIdList[playaIndex]:
self.highlightCur.setColor(*GolfGlobals.PlayerColors[playaIndex])
self.highlightCur.setAlphaScale(0.4)
self.highlightCur.setPos(-0.003, 0, 0.038 - playaIndex * (self.lineVertOffset + 0.005))
self.highlightCur.show()
else:
self.playaTags[playaIndex].setColor(0, 0, 0, 1)
for avId in self.avIdList:
holeIndex = 0
totScore = 0
playerExited = False
for y in xrange(len(self.golfCourse.exitedAvIdList)):
if self.golfCourse.exitedAvIdList[y] == avId:
self.playaTags[x].setColor(0.7, 0.7, 0.7, 1)
holeIndex = 0
for holeIndex in xrange(self.golfCourse.numHoles):
self.getScoreLabel(self.avIdList[x], holeIndex).setColor(0.7, 0.7, 0.7, 1)
self.totalTags[x].setColor(0.7, 0.7, 0.7, 1)
playerExited = True
if playerExited == False:
for holeIndex in xrange(self.golfCourse.numHoles):
if holeIndex <= self.golfCourse.curHoleIndex:
self.getScoreLabel(avId, holeIndex)['text'] = '%s' % scoreDict[avId][holeIndex]
totScore = totScore + scoreDict[avId][holeIndex]
if self.golfCourse.isGameDone() == False:
if holeIndex == self.golfCourse.curHoleIndex:
self.getScoreLabel(avId, holeIndex).setColor(1, 0, 0, 1)
self.holeLabels[holeIndex].setColor(1, 0, 0, 1)
self.parLabels[holeIndex].setColor(1, 0, 0, 1)
title = GolfGlobals.getCourseName(self.golfCourse.courseId) + ' - ' + GolfGlobals.getHoleName(self.golfCourse.holeIds[self.golfCourse.curHoleIndex])
self.titleLabel['text'] = title
else:
self.getScoreLabel(avId, holeIndex).setColor(0, 0, 0, 1)
self.holeLabels[holeIndex].setColor(0, 0, 0, 1)
self.parLabels[holeIndex].setColor(0, 0, 0, 1)
self.totalTags[x]['text'] = '%s' % totScore
if self.golfCourse.isGameDone():
self.getScoreLabel(avId, self.golfCourse.numHoles - 1).setColor(0, 0, 0, 1)
self.totalTags[x].setColor(1, 0, 0, 1)
x = x + 1
y = 0
if self.golfCourse.isGameDone():
self.parLabels[self.golfCourse.numHoles - 1].setColor(0, 0, 0, 1)
self.holeLabels[self.golfCourse.numHoles - 1].setColor(0, 0, 0, 1)
self.parLabels[self.golfCourse.numHoles].setColor(1, 0, 0, 1)
self.totalLabel.setColor(1, 0, 0, 1)
self.scoreboard.show()
self.maximizeB.hide()
def exitCourse(self):
course = self.golfCourse
self.delete()
course.exitEarly()
def delete(self):
if self.maximizeB:
self.maximizeB.destroy()
self.maximizeB = None
if self.scoreboard:
self.scoreboard.destroy()
self.scoreboard = None
self.golfCourse = None
taskMgr.remove('hide score board')
return
| |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """Stable out-of-place partition: return (matching, non_matching) lists."""
    matching: List[Any] = []
    non_matching: List[Any] = []
    for item in iterator:
        if predicate(item):
            matching.append(item)
        else:
            non_matching.append(item)
    return matching, non_matching
class notebooksCallTransformer(cst.CSTTransformer):
    """Rewrites flattened notebooks-client calls into the request-object form.

    For each known client method, positional/keyword arguments that used to be
    individual parameters are folded into a single ``request={...}`` dict;
    the call-control parameters in CTRL_PARAMS are left as plain kwargs.
    """
    # Parameters that control the RPC itself and must stay outside the request dict.
    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
    # Ordered parameter names of each method's old flattened signature.
    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
        'create_environment': ('parent', 'environment_id', 'environment', ),
        'create_execution': ('parent', 'execution_id', 'execution', ),
        'create_instance': ('parent', 'instance_id', 'instance', ),
        'create_runtime': ('parent', 'runtime_id', 'runtime', ),
        'create_schedule': ('parent', 'schedule_id', 'schedule', ),
        'delete_environment': ('name', ),
        'delete_execution': ('name', ),
        'delete_instance': ('name', ),
        'delete_runtime': ('name', ),
        'delete_schedule': ('name', ),
        'get_environment': ('name', ),
        'get_execution': ('name', ),
        'get_instance': ('name', ),
        'get_instance_health': ('name', ),
        'get_runtime': ('name', ),
        'get_schedule': ('name', ),
        'is_instance_upgradeable': ('notebook_instance', ),
        'list_environments': ('parent', 'page_size', 'page_token', ),
        'list_executions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
        'list_instances': ('parent', 'page_size', 'page_token', ),
        'list_runtimes': ('parent', 'page_size', 'page_token', ),
        'list_schedules': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
        'register_instance': ('parent', 'instance_id', ),
        'report_instance_info': ('name', 'vm_id', 'metadata', ),
        'report_runtime_event': ('name', 'vm_id', 'event', ),
        'reset_instance': ('name', ),
        'reset_runtime': ('name', ),
        'rollback_instance': ('name', 'target_snapshot', ),
        'set_instance_accelerator': ('name', 'type_', 'core_count', ),
        'set_instance_labels': ('name', 'labels', ),
        'set_instance_machine_type': ('name', 'machine_type', ),
        'start_instance': ('name', ),
        'start_runtime': ('name', ),
        'stop_instance': ('name', ),
        'stop_runtime': ('name', ),
        'switch_runtime': ('name', 'machine_type', 'accelerator_config', ),
        'trigger_schedule': ('name', ),
        'update_instance_config': ('name', 'config', ),
        'update_shielded_instance_config': ('name', 'shielded_instance_config', ),
        'upgrade_instance': ('name', ),
        'upgrade_instance_internal': ('name', 'vm_id', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rebuild a matched call as ``method(request={...}, <ctrl kwargs>)``."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate call-control kwargs from request-field kwargs.
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positionals beyond the flattened parameter list must have been the
        # control params passed positionally; turn them into keywords.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )
def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=notebooksCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Walks every ``.py`` file under *in_dir*, runs *transformer* over its CST,
    and writes the rewritten source to the mirrored path under *out_dir*.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        # Read/write explicitly as UTF-8: Python source defaults to UTF-8
        # (PEP 3120), whereas open() without an encoding uses the locale's
        # preferred encoding and would corrupt non-ASCII sources under e.g.
        # a C/POSIX locale.
        with open(fpath, 'r', encoding='utf-8') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w', encoding='utf-8') as f:
            f.write(updated.code)
if __name__ == '__main__':
    # Command-line driver: validate the two directories, then fix the tree.
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the notebooks client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d', '--input-directory', required=True, dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o', '--output-directory', required=True, dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    cli_args = parser.parse_args()

    src_root = pathlib.Path(cli_args.input_dir)
    dst_root = pathlib.Path(cli_args.output_dir)

    # Both directories must already exist, and the destination must be empty.
    if not src_root.is_dir():
        print(
            f"input directory '{src_root}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)
    if not dst_root.is_dir():
        print(
            f"output directory '{dst_root}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)
    if os.listdir(dst_root):
        print(
            f"output directory '{dst_root}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(src_root, dst_root)
| |
#
# Copyright (c) 2013 Canonical Ltd.
#
# This file is part of: SST (selenium-simple-test)
# https://launchpad.net/selenium-simple-test
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from future import standard_library
standard_library.install_aliases()
from builtins import range
from io import StringIO
import os
import signal
import unittest
import testtools
from sst import (
browsers,
concurrency,
results,
runtests,
tests,
)
class TestConcurrentSuite(testtools.TestCase):
    """Check result collection when tests run through a forked ConcurrentTestSuite."""

    def run_test_concurrently(self, test, success):
        """Fork-run a single test across two workers and return the result object."""
        result = results.TextTestResult(StringIO(), verbosity=0)
        concurrent_suite = testtools.ConcurrentTestSuite(
            unittest.TestSuite([test]),
            concurrency.fork_for_tests(2),
        )
        result.startTestRun()
        concurrent_suite.run(result)
        result.stopTestRun()
        self.assertEqual(success, result.wasSuccessful())
        self.assertEqual(1, result.testsRun)
        return result

    def test_pass(self):
        result = self.run_test_concurrently(tests.get_case('pass'), True)
        self.assertEqual(0, len(result.errors))
        self.assertEqual(0, len(result.failures))

    def test_fail(self):
        result = self.run_test_concurrently(tests.get_case('fail'), False)
        self.assertEqual(0, len(result.errors))
        self.assertEqual(1, len(result.failures))

    def test_error(self):
        result = self.run_test_concurrently(tests.get_case('error'), False)
        self.assertEqual(1, len(result.errors))
        self.assertEqual(0, len(result.failures))

    def _check_skip(self, case_name, expected_reason, expected_id):
        """Shared assertions for the two skip variants."""
        result = self.run_test_concurrently(tests.get_case(case_name), True)
        self.assertEqual(0, len(result.errors))
        self.assertEqual(0, len(result.failures))
        reasons = result.skip_reasons
        self.assertEqual(1, len(list(reasons.keys())))
        reason, skipped = list(reasons.items())[0]
        self.assertEqual(expected_reason, reason)
        self.assertEqual(1, len(skipped))
        self.assertEqual(expected_id, skipped[0].id())

    def test_skip(self):
        self._check_skip('skip', '', 'sst.tests.Test.test_skip')

    def test_skip_reason(self):
        self._check_skip('skip_reason', 'Because',
                         'sst.tests.Test.test_skip_reason')

    def test_expected_failure(self):
        result = self.run_test_concurrently(
            tests.get_case('expected_failure'), True)
        self.assertEqual(0, len(result.errors))
        self.assertEqual(0, len(result.failures))
        self.assertEqual(1, len(result.expectedFailures))

    def test_unexpected_success(self):
        result = self.run_test_concurrently(
            tests.get_case('unexpected_success'), False)
        self.assertEqual(0, len(result.errors))
        self.assertEqual(0, len(result.failures))
        self.assertEqual(1, len(result.unexpectedSuccesses))

    def test_killed(self):
        class Killed(unittest.TestCase):
            # The child worker kills itself hard; the parent must report an error.
            def test_killed(self):
                os.kill(os.getpid(), signal.SIGKILL)

        result = self.run_test_concurrently(Killed('test_killed'), False)
        self.assertEqual(1, len(result.errors))
        self.assertEqual(0, len(result.failures))
class TestConcurrentRunTests(tests.ImportingLocalFilesTest):
    """Smoke integration tests at runtests level."""

    def test_pass(self):
        # Two modules with one passing test each; the fixture text below is
        # parsed by write_tree_from_desc, so its exact layout (including the
        # embedded method indentation) is significant -- do not reflow it.
        tests.write_tree_from_desc('''dir: t
file: t/__init__.py
from sst import loaders
discover = loaders.discoverRegularTests
file: t/test_conc1.py
import unittest
class Test1(unittest.TestCase):
    def test_pass_1(self):
        self.assertTrue(True)
file: t/test_conc2.py
import unittest
class Test2(unittest.TestCase):
    def test_pass_2(self):
        self.assertTrue(True)
''')
        out = StringIO()
        # Run both modules across 2 worker processes.
        runtests.runtests(
            ['^t'], 'no results directory used', out,
            concurrency_num=2,
            browser_factory=browsers.FirefoxFactory(),
        )
        output = out.getvalue()
        self.assertIn('Ran 2 tests', output)
        self.assertIn('OK', output)
        self.assertNotIn('FAIL', output)

    def test_fail(self):
        # Same shape as test_pass but both tests fail; expect two tracebacks.
        tests.write_tree_from_desc('''dir: t
file: t/__init__.py
from sst import loaders
discover = loaders.discoverRegularTests
file: t/test_fail1.py
import unittest
class TestFail1(unittest.TestCase):
    def test_fail_1(self):
        self.assertTrue(False)
file: t/test_fail2.py
import unittest
class TestFail2(unittest.TestCase):
    def test_fail_2(self):
        self.assertTrue(False)
''')
        out = StringIO()
        runtests.runtests(
            ['^t'], 'no results directory used', out,
            concurrency_num=2,
            browser_factory=browsers.FirefoxFactory(),
        )
        output = out.getvalue()
        self.assertIn('Ran 2 tests', output)
        self.assertEqual(output.count('Traceback (most recent call last):'), 2)
        self.assertIn('FAILED (failures=2)', output)
class PartitionTestCase(testtools.TestCase):
    """Check how concurrency.partition_tests distributes an 8-test suite."""

    def setUp(self):
        super(PartitionTestCase, self).setUp()
        self.suite = unittest.TestSuite()
        self.suite.addTests([tests.get_case('pass') for _ in range(8)])

    def _partition_sizes(self, count):
        """Partition the 8-test suite into *count* groups; return each group's size."""
        return [len(group)
                for group in concurrency.partition_tests(self.suite, count)]

    def test_partition_even_groups(self):
        self.assertEqual([2, 2, 2, 2], self._partition_sizes(4))

    def test_partition_one_in_each(self):
        sizes = self._partition_sizes(8)
        self.assertEqual(8, len(sizes))
        self.assertEqual(1, sizes[0])

    def test_partition_all_in_one(self):
        self.assertEqual([8], self._partition_sizes(1))

    def test_partition_uneven(self):
        # The remainder spreads over the leading groups.
        self.assertEqual([3, 3, 2], self._partition_sizes(3))
| |
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License. A copy of the License
# is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import mxnet as mx
import numpy as np
import pytest
import sockeye.constants as C
import sockeye.coverage
import sockeye.rnn_attention
from test.common import gaussian_vector, integer_vector
attention_types = [C.ATT_BILINEAR, C.ATT_DOT, C.ATT_LOC, C.ATT_MLP]  # variants exercised by the parametrized generic attention test
def test_att_bilinear():
    """Bilinear attention takes its hidden size from the decoder query."""
    att_config = sockeye.rnn_attention.AttentionConfig(
        type=C.ATT_BILINEAR,
        num_hidden=None,
        input_previous_word=True,
        source_num_hidden=None,
        query_num_hidden=6,
        layer_normalization=False,
        config_coverage=None)
    att = sockeye.rnn_attention.get_attention(att_config, max_seq_len=None)

    assert type(att) is sockeye.rnn_attention.BilinearAttention
    # input_previous_word=True is not honoured by the bilinear variant.
    assert not att._input_previous_word
    assert att.num_hidden == 6
def test_att_dot():
    """Unscaled dot attention projects source and query down to num_hidden."""
    att_config = sockeye.rnn_attention.AttentionConfig(
        type=C.ATT_DOT,
        num_hidden=2,
        input_previous_word=True,
        source_num_hidden=4,
        query_num_hidden=6,
        layer_normalization=False,
        config_coverage=None,
        is_scaled=False)
    att = sockeye.rnn_attention.get_attention(att_config, max_seq_len=None)

    assert type(att) is sockeye.rnn_attention.DotAttention
    assert att._input_previous_word
    assert att.project_source
    assert att.project_query
    assert att.num_hidden == 2
    assert att.is_scaled is False
    assert not att.scale
def test_att_dot_scaled():
    """Scaled dot attention uses a 1/sqrt(num_hidden) scaling factor."""
    att_config = sockeye.rnn_attention.AttentionConfig(
        type=C.ATT_DOT,
        num_hidden=16,
        input_previous_word=True,
        source_num_hidden=None,
        query_num_hidden=None,
        layer_normalization=False,
        config_coverage=None,
        is_scaled=True)
    att = sockeye.rnn_attention.get_attention(att_config, max_seq_len=None)

    assert type(att) is sockeye.rnn_attention.DotAttention
    assert att._input_previous_word
    assert att.project_source
    assert att.project_query
    assert att.num_hidden == 16
    assert att.is_scaled is True
    assert att.scale == 0.25  # 1 / sqrt(16)
def test_att_mh_dot():
    """Multi-head dot attention splits source_num_hidden evenly over the heads."""
    att_config = sockeye.rnn_attention.AttentionConfig(
        type=C.ATT_MH_DOT,
        num_hidden=None,
        input_previous_word=True,
        source_num_hidden=8,
        query_num_hidden=None,
        layer_normalization=False,
        config_coverage=None,
        num_heads=2)
    att = sockeye.rnn_attention.get_attention(att_config, max_seq_len=None)

    assert type(att) is sockeye.rnn_attention.MultiHeadDotAttention
    assert att._input_previous_word
    assert att.num_hidden == 8
    assert att.heads == 2
    assert att.num_hidden_per_head == 4  # 8 hidden / 2 heads
def test_att_fixed():
    """ATT_FIXED resolves to attention on the encoder's last state."""
    att_config = sockeye.rnn_attention.AttentionConfig(
        type=C.ATT_FIXED,
        num_hidden=None,
        input_previous_word=True,
        source_num_hidden=None,
        query_num_hidden=None,
        layer_normalization=False,
        config_coverage=None)
    att = sockeye.rnn_attention.get_attention(att_config, max_seq_len=None)

    assert type(att) is sockeye.rnn_attention.EncoderLastStateAttention
    assert att._input_previous_word
def test_att_loc():
    """Location attention records the maximum source sequence length."""
    att_config = sockeye.rnn_attention.AttentionConfig(
        type=C.ATT_LOC,
        num_hidden=None,
        input_previous_word=True,
        source_num_hidden=None,
        query_num_hidden=None,
        layer_normalization=False,
        config_coverage=None)
    att = sockeye.rnn_attention.get_attention(att_config, max_seq_len=10)

    assert type(att) is sockeye.rnn_attention.LocationAttention
    assert att._input_previous_word
    assert att.max_source_seq_len == 10
def test_att_mlp():
    """MLP attention without coverage keeps a 1-dim dynamic source and no coverage model."""
    att_config = sockeye.rnn_attention.AttentionConfig(
        type=C.ATT_MLP,
        num_hidden=16,
        input_previous_word=True,
        source_num_hidden=None,
        query_num_hidden=None,
        layer_normalization=True,
        config_coverage=None)
    att = sockeye.rnn_attention.get_attention(att_config, max_seq_len=10)

    assert type(att) is sockeye.rnn_attention.MlpAttention
    assert att._input_previous_word
    assert att.attention_num_hidden == 16
    assert att.dynamic_source_num_hidden == 1
    assert att._ln
    assert not att.coverage
def test_att_cov():
    """ATT_COV wires an ActivationCoverage model into MLP attention."""
    cov_config = sockeye.coverage.CoverageConfig(type='tanh', num_hidden=5, layer_normalization=True)
    att_config = sockeye.rnn_attention.AttentionConfig(
        type=C.ATT_COV,
        num_hidden=16,
        input_previous_word=True,
        source_num_hidden=None,
        query_num_hidden=None,
        layer_normalization=True,
        config_coverage=cov_config)
    att = sockeye.rnn_attention.get_attention(att_config, max_seq_len=10)

    assert type(att) is sockeye.rnn_attention.MlpCovAttention
    assert att._input_previous_word
    assert att.attention_num_hidden == 16
    # Dynamic-source width follows the coverage model's num_hidden.
    assert att.dynamic_source_num_hidden == 5
    assert att._ln
    assert type(att.coverage) is sockeye.coverage.ActivationCoverage
@pytest.mark.parametrize("attention_type", attention_types)
def test_attention(attention_type,
                   batch_size=1,
                   encoder_num_hidden=2,
                   decoder_num_hidden=2):
    """Run one attention step for each type and check masking and the context.

    The single input sequence has real length 2 (of max 3) and its first two
    encoder states are identical, so the expected context is that shared state
    and the third position must receive zero attention weight.
    """
    # source: (batch_size, seq_len, encoder_num_hidden)
    source = mx.sym.Variable("source")
    # source_length: (batch_size,)
    source_length = mx.sym.Variable("source_length")
    source_seq_len = 3

    config_attention = sockeye.rnn_attention.AttentionConfig(type=attention_type,
                                                             num_hidden=2,
                                                             input_previous_word=False,
                                                             source_num_hidden=2,
                                                             query_num_hidden=2,
                                                             layer_normalization=False,
                                                             config_coverage=None)
    attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=source_seq_len)

    # Build the symbolic graph for a single attention step (t=0).
    attention_state = attention.get_initial_state(source_length, source_seq_len)
    attention_func = attention.on(source, source_length, source_seq_len)
    attention_input = attention.make_input(0, mx.sym.Variable("word_vec_prev"), mx.sym.Variable("decoder_state"))
    attention_state = attention_func(attention_input, attention_state)
    sym = mx.sym.Group([attention_state.context, attention_state.probs])

    executor = sym.simple_bind(ctx=mx.cpu(),
                               source=(batch_size, source_seq_len, encoder_num_hidden),
                               source_length=(batch_size,),
                               decoder_state=(batch_size, decoder_num_hidden))

    # TODO: test for other inputs (that are not equal at each source position)
    executor.arg_dict["source"][:] = np.asarray([[[1., 2.], [1., 2.], [3., 4.]]])
    executor.arg_dict["source_length"][:] = np.asarray([2.0])
    executor.arg_dict["decoder_state"][:] = np.asarray([[5, 6]])
    exec_output = executor.forward()
    context_result = exec_output[0].asnumpy()
    attention_prob_result = exec_output[1].asnumpy()

    # expecting uniform attention_weights of 0.5: 0.5 * seq1 + 0.5 * seq2
    assert np.isclose(context_result, np.asarray([[1., 2.]])).all()
    # equal attention to first two and no attention to third
    assert np.isclose(attention_prob_result, np.asarray([[0.5, 0.5, 0.]])).all()
coverage_cases = [("gru", 10), ("tanh", 4), ("count", 1), ("sigmoid", 1), ("relu", 30)]  # (coverage type, coverage num_hidden) pairs
@pytest.mark.parametrize("attention_coverage_type,attention_coverage_num_hidden", coverage_cases)
def test_coverage_attention(attention_coverage_type,
                            attention_coverage_num_hidden,
                            batch_size=3,
                            encoder_num_hidden=2,
                            decoder_num_hidden=2):
    """Run one step of coverage attention; check output shapes and length masking."""
    # source: (batch_size, seq_len, encoder_num_hidden)
    source = mx.sym.Variable("source")
    # source_length: (batch_size, )
    source_length = mx.sym.Variable("source_length")
    source_seq_len = 10

    config_coverage = sockeye.coverage.CoverageConfig(type=attention_coverage_type,
                                                      num_hidden=attention_coverage_num_hidden,
                                                      layer_normalization=False)
    config_attention = sockeye.rnn_attention.AttentionConfig(type="coverage",
                                                             num_hidden=5,
                                                             input_previous_word=False,
                                                             source_num_hidden=encoder_num_hidden,
                                                             query_num_hidden=decoder_num_hidden,
                                                             layer_normalization=False,
                                                             config_coverage=config_coverage)
    attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=source_seq_len)

    # Build the symbolic graph for a single attention step (t=0).
    attention_state = attention.get_initial_state(source_length, source_seq_len)
    attention_func = attention.on(source, source_length, source_seq_len)
    attention_input = attention.make_input(0, mx.sym.Variable("word_vec_prev"), mx.sym.Variable("decoder_state"))
    attention_state = attention_func(attention_input, attention_state)
    sym = mx.sym.Group([attention_state.context, attention_state.probs, attention_state.dynamic_source])

    source_shape = (batch_size, source_seq_len, encoder_num_hidden)
    source_length_shape = (batch_size,)
    decoder_state_shape = (batch_size, decoder_num_hidden)
    executor = sym.simple_bind(ctx=mx.cpu(),
                               source=source_shape,
                               source_length=source_length_shape,
                               decoder_state=decoder_state_shape)

    # Random inputs; only the per-sequence lengths matter for the masking check below.
    source_length_vector = integer_vector(shape=source_length_shape, max_value=source_seq_len)
    executor.arg_dict["source"][:] = gaussian_vector(shape=source_shape)
    executor.arg_dict["source_length"][:] = source_length_vector
    executor.arg_dict["decoder_state"][:] = gaussian_vector(shape=decoder_state_shape)
    exec_output = executor.forward()
    context_result = exec_output[0].asnumpy()
    attention_prob_result = exec_output[1].asnumpy()
    dynamic_source_result = exec_output[2].asnumpy()

    # Expectation: the first step attends uniformly over each sequence's real length.
    expected_probs = (1. / source_length_vector).reshape((batch_size, 1))

    assert context_result.shape == (batch_size, encoder_num_hidden)
    assert attention_prob_result.shape == (batch_size, source_seq_len)
    assert dynamic_source_result.shape == (batch_size, source_seq_len, attention_coverage_num_hidden)
    # Exactly source_length positions per row carry the uniform probability.
    assert (np.sum(np.isclose(attention_prob_result, expected_probs), axis=1) == source_length_vector).all()
def test_last_state_attention(batch_size=1,
                              encoder_num_hidden=2):
    """
    EncoderLastStateAttention is a bit different from other attention mechanisms as it doesn't take a query argument
    and doesn't return a probability distribution over the inputs (aka alignment).

    :param batch_size: Number of sequences in the batch.
    :param encoder_num_hidden: Hidden size of the encoder states; must match
        the width of the hard-coded input data below (2).
    """
    # source: (batch_size, seq_len, encoder_num_hidden)
    source = mx.sym.Variable("source")
    # source_length: (batch_size,)
    source_length = mx.sym.Variable("source_length")
    source_seq_len = 3
    # Consistency fix: source_num_hidden was hard-coded to 2 although the
    # source shape bound below uses the encoder_num_hidden parameter; keep
    # the two in sync. query_num_hidden is presumably unused for "fixed"
    # attention (no query is taken) and keeps its original value.
    config_attention = sockeye.rnn_attention.AttentionConfig(type="fixed",
                                                             num_hidden=0,
                                                             input_previous_word=False,
                                                             source_num_hidden=encoder_num_hidden,
                                                             query_num_hidden=2,
                                                             layer_normalization=False,
                                                             config_coverage=None)
    attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=source_seq_len)
    attention_state = attention.get_initial_state(source_length, source_seq_len)
    attention_func = attention.on(source, source_length, source_seq_len)
    attention_input = attention.make_input(0, mx.sym.Variable("word_vec_prev"), mx.sym.Variable("decoder_state"))
    attention_state = attention_func(attention_input, attention_state)
    sym = mx.sym.Group([attention_state.context, attention_state.probs])
    executor = sym.simple_bind(ctx=mx.cpu(),
                               source=(batch_size, source_seq_len, encoder_num_hidden),
                               source_length=(batch_size,))
    # TODO: test for other inputs (that are not equal at each source position)
    executor.arg_dict["source"][:] = np.asarray([[[1., 2.], [1., 2.], [3., 4.]]])
    executor.arg_dict["source_length"][:] = np.asarray([2.0])
    exec_output = executor.forward()
    context_result = exec_output[0].asnumpy()
    attention_prob_result = exec_output[1].asnumpy()
    # source_length is 2, so the "last" state is the second position: [1., 2.]
    # expecting attention on last state based on source_length
    assert np.isclose(context_result, np.asarray([[1., 2.]])).all()
    assert np.isclose(attention_prob_result, np.asarray([[0., 1.0, 0.]])).all()
def test_get_context_and_attention_probs():
    """With all-zero scores, attention must be uniform over unmasked positions."""
    source_sym = mx.sym.Variable('source')
    length_sym = mx.sym.Variable('source_length')
    scores_sym = mx.sym.Variable('scores')
    context, att_probs = sockeye.rnn_attention.get_context_and_attention_probs(
        source_sym,
        length_sym,
        scores_sym,
        C.DTYPE_FP32)
    grouped = mx.sym.Group([context, att_probs])
    # the grouped symbol must depend on exactly the three variables above
    assert len(grouped.list_arguments()) == 3

    batch_size, seq_len, num_hidden = 32, 50, 100
    # random inputs
    source_nd = mx.nd.random_normal(shape=(batch_size, seq_len, num_hidden))
    source_length_np = np.random.randint(1, seq_len + 1, (batch_size,))
    source_length_nd = mx.nd.array(source_length_np)
    scores_nd = mx.nd.zeros((batch_size, seq_len, 1))

    in_shapes, out_shapes, _ = grouped.infer_shape(source=source_nd.shape,
                                                   source_length=source_length_nd.shape,
                                                   scores=scores_nd.shape)
    assert in_shapes == [(batch_size, seq_len, num_hidden), (batch_size, seq_len, 1), (batch_size,)]
    assert out_shapes == [(batch_size, num_hidden), (batch_size, seq_len)]

    _, probs = grouped.eval(source=source_nd,
                            source_length=source_length_nd,
                            scores=scores_nd)
    # zero scores -> every unmasked position gets probability 1/length
    expected_probs = (1. / source_length_nd).reshape((batch_size, 1)).asnumpy()
    assert (np.sum(np.isclose(probs.asnumpy(), expected_probs), axis=1) == source_length_np).all()
| |
# -*- coding: utf-8 -*-
'''
Generate Pillar data from Django models through the Django ORM
:maintainer: Micah Hausler <micah.hausler@gmail.com>
:maturity: new
Configuring the django_orm ext_pillar
=====================================
To use this module, your Django project must be on the salt master server with
database access. This assumes you are using virtualenv with all the project's
requirements installed.
.. code-block:: yaml
ext_pillar:
- django_orm:
pillar_name: my_application
project_path: /path/to/project/
settings_module: my_application.settings
env_file: /path/to/env/file.sh
# Optional: If your project is not using the system python,
# add your virtualenv path below.
env: /path/to/virtualenv/
django_app:
# Required: the app that is included in INSTALLED_APPS
my_application.clients:
# Required: the model name
Client:
# Required: model field to use as the key in the rendered
# Pillar. Must be unique; must also be included in the
# ``fields`` list below.
name: shortname
# Optional:
# See Django's QuerySet documentation for how to use .filter()
filter: {'kw': 'args'}
# Required: a list of field names
# List items will be used as arguments to the .values() method.
# See Django's QuerySet documentation for how to use .values()
fields:
- field_1
- field_2
This would return pillar data that would look like
.. code-block:: yaml
my_application:
my_application.clients:
Client:
client_1:
field_1: data_from_field_1
field_2: data_from_field_2
client_2:
field_1: data_from_field_1
field_2: data_from_field_2
As another example, data from multiple database tables can be fetched using
Django's regular lookup syntax. Note, using ManyToManyFields will not currently
work since the return from values() changes if a ManyToMany is present.
.. code-block:: yaml
ext_pillar:
- django_orm:
pillar_name: djangotutorial
project_path: /path/to/mysite
settings_module: mysite.settings
django_app:
mysite.polls:
Choices:
name: poll__question
fields:
- poll__question
- poll__id
- choice_text
- votes
Module Documentation
====================
'''
import logging
import os
import sys
import salt.exceptions
HAS_VIRTUALENV = False
try:
import virtualenv
HAS_VIRTUALENV = True
except ImportError:
pass
log = logging.getLogger(__name__)
def __virtual__():
    '''
    Always load; expose this external pillar to Salt as ``django_orm``.
    '''
    return 'django_orm'
def ext_pillar(minion_id,
               pillar,
               pillar_name,
               project_path,
               settings_module,
               django_app,
               env=None,
               env_file=None,
               *args,
               **kwargs):
    '''
    Connect to a Django database through the ORM and retrieve model fields

    :type pillar_name: str
    :param pillar_name: The name of the pillar to be returned
    :type project_path: str
    :param project_path: The full path to your Django project (the directory
        manage.py is in)
    :type settings_module: str
    :param settings_module: The settings module for your project. This can be
        found in your manage.py file
    :type django_app: str
    :param django_app: A dictionary containing your apps, models, and fields
    :type env: str
    :param env: The full path to the virtualenv for your Django project
    :type env_file: str
    :param env_file: An optional bash file that sets up your environment. The
        file is run in a subprocess and the changed variables are then added
    '''
    if not os.path.isdir(project_path):
        log.error('Django project dir: {0!r} not a directory!'.format(
            project_path))
        return {}
    if HAS_VIRTUALENV and env is not None and os.path.isdir(env):
        for path in virtualenv.path_locations(env):
            if not os.path.isdir(path):
                log.error('Virtualenv {0} not a directory!'.format(path))
                return {}
        # load the virtualenv first
        sys.path.insert(0,
                        os.path.join(
                            virtualenv.path_locations(env)[1],
                            'site-packages'))
    # load the django project
    sys.path.append(project_path)
    os.environ['DJANGO_SETTINGS_MODULE'] = settings_module

    if env_file is not None:
        import subprocess

        # Snapshot the current environment so we can diff against the one
        # produced after sourcing env_file.
        # NOTE(review): parsing `env` output line-by-line breaks on
        # multi-line variable values -- assumed not to occur here.
        base_env = {}
        proc = subprocess.Popen(['bash', '-c', 'env'], stdout=subprocess.PIPE)
        for line in proc.stdout:
            (key, _, value) = line.partition('=')
            base_env[key] = value

        command = ['bash', '-c', 'source {0} && env'.format(env_file)]
        proc = subprocess.Popen(command, stdout=subprocess.PIPE)
        for line in proc.stdout:
            (key, _, value) = line.partition('=')
            # only add a key if it is different or doesn't already exist
            if key not in base_env or base_env[key] != value:
                os.environ[key] = value.rstrip('\n')
                log.debug('Adding {0} = {1} to Django environment'.format(
                    key,
                    value.rstrip('\n')))

    try:
        from django.db.models.loading import get_model

        django_pillar = {}
        for proj_app, models in django_app.items():
            _, _, app = proj_app.rpartition('.')
            django_pillar[app] = {}
            for model_name, model_meta in models.items():
                model_orm = get_model(app, model_name)
                if model_orm is None:
                    # Bug fix: the model name and app were previously passed
                    # to format() in the wrong order, producing a misleading
                    # error message.
                    raise salt.exceptions.SaltException(
                        "Django model '{0}' not found in app '{1}'."
                        .format(model_name, app))

                pillar_for_model = django_pillar[app][model_orm.__name__] = {}
                name_field = model_meta['name']
                fields = model_meta['fields']

                if 'filter' in model_meta:
                    qs = (model_orm.objects
                          .filter(**model_meta['filter'])
                          .values(*fields))
                else:
                    qs = model_orm.objects.values(*fields)

                for model in qs:
                    # Check that the human-friendly name given is valid (will
                    # be able to pick up a value from the query) and unique
                    # (since we're using it as the key in a dictionary)
                    if name_field not in model:
                        raise salt.exceptions.SaltException(
                            "Name '{0}' not found in returned fields.".format(
                                name_field))
                    if model[name_field] in pillar_for_model:
                        # Bug fix: report the field name and the duplicated
                        # value (the message used to repeat the value twice).
                        raise salt.exceptions.SaltException(
                            "Value for '{0}' is not unique: {1}".format(
                                name_field, model[name_field]))
                    pillar_for_model[model[name_field]] = model

        return {pillar_name: django_pillar}
    except ImportError as e:
        # str(e) works on Python 2 and 3; the old e.message is py2-only.
        log.error('Failed to import library: {0}'.format(e))
        return {}
    except Exception as e:
        log.error('Failed on Error: {0}'.format(e))
        log.debug('django_orm traceback', exc_info=True)
        return {}
| |
# This file consists of testing for functions that generate messages/strings
# and convert data into another format
import sys
sys.path.append('../..')
# Libraries
# Standard library
import unittest
from unittest.mock import patch
from random import randint
# Local
import helper_random as r
from helper_mock import (generate_lab_mock,
generate_experiment_mock,
generate_observation_mock,
generate_user_mock,
generate_class_mock)
from lemur.utility_generate_and_convert import (check_existence,
generate_err_msg,
generate_lab_id,
generate_experiment_id,
generate_observation_id,
generate_class_id,
generate_user_name,
decompose_lab_id,
decompose_class_id,
serialize_lab_list,
serialize_experiment_list,
serialize_user_list,
serialize_class_list,
tranlate_term_code_to_semester,
cleanup_class_data,
pack_labinfo_sent_from_client,
change_observation_organization)
from lemur.utility_find_and_get import (get_lab,
get_experiment,
get_user,
get_role,
get_power,
get_class,
get_all_lab,
get_all_experiment,
get_all_user,
get_all_class,
find_all_observations_for_labs)
# This file consists of functions that generate strings and convert data format
class UnitTestUtilityGenerateAndConvert(unittest.TestCase):
    """Unit tests for the string-generation and data-format-conversion
    helpers in ``lemur.utility_generate_and_convert``.
    """

    # this is automatically called for us when we run the test
    def setUp(self):
        pass

    # tidy up after a test has been run
    def tearDown(self):
        pass

    def test_check_existence(self):
        """Present keys yield an empty message; missing keys are reported."""
        key1 = 'key1'
        key2 = 'key2'
        key3 = 'key3'
        form = {key1: 'value1', key2: 'value2'}
        success_msg = ''
        err_msg = key3 + ' is not defined\n'
        self.assertEqual(check_existence(form, key1, key2), success_msg)
        self.assertEqual(check_existence(form, key1, key3), err_msg)

    def test_generate_err_msg(self):
        """The error message names both mismatched name/value pairs."""
        name1 = 'name1'
        value1 = 'value1'
        name2 = 'name2'
        value2 = 'value2'
        expected_msg = (name1+':'+value1+' and '+name2 +
                        ':'+value2+' are different')
        self.assertEqual(generate_err_msg(name1, value1, name2, value2),
                         expected_msg)

    def test_generate_lab_id(self):
        """Lab ids have the form '<lab_name>:<class_id>'."""
        lab_name = r.randlength_word()
        class_id = r.rand_classid()
        expected_lab_id = lab_name + ':' + class_id
        self.assertEqual(generate_lab_id(lab_name, class_id),
                         expected_lab_id)

    def test_generate_experiment_id(self):
        """Experiment ids have the form '<lab_id>:<experiment_name>'."""
        lab_id = r.randlength_word()
        experiment_name = r.randlength_word()
        expected_experiment_id = lab_id+':'+experiment_name
        self.assertEqual(generate_experiment_id(lab_id, experiment_name),
                         expected_experiment_id)

    def test_generate_observation_id(self):
        """Observation ids have the form '<experiment_id>:<student_name>'."""
        experiment_id = r.randlength_word()
        student_name = r.randlength_word()
        expected_observation_id = experiment_id+':'+student_name
        self.assertEqual(generate_observation_id(experiment_id, student_name),
                         expected_observation_id)

    def test_generate_class_id(self):
        """Class ids have the form '<class_name>_<class_time>'."""
        class_name = r.randlength_word()
        class_time = r.rand_classtime()
        expected_class_id = class_name+'_'+class_time
        self.assertEqual(generate_class_id(class_name, class_time),
                         expected_class_id)

    def test_generate_user_name(self):
        """User names are '<first_name> <last_name>'."""
        first_name = r.randlength_word()
        last_name = r.randlength_word()
        self.assertEqual(first_name+' '+last_name,
                         generate_user_name(first_name, last_name))

    def test_decompose_lab_id(self):
        """decompose_lab_id inverts generate_lab_id."""
        lab_name = r.randlength_word()
        class_id = r.rand_classid()
        lab_id = generate_lab_id(lab_name, class_id)
        lab_info = decompose_lab_id(lab_id)
        self.assertEqual(lab_info['lab_name'], lab_name)
        self.assertEqual(lab_info['class_id'], class_id)

    def test_decompose_class_id(self):
        """decompose_class_id inverts generate_class_id."""
        class_name = r.randlength_word()
        class_time = r.randlength_word()
        class_id = generate_class_id(class_name, class_time)
        class_info = decompose_class_id(class_id)
        self.assertEqual(class_info['class_name'], class_name)
        self.assertEqual(class_info['class_time'], class_time)

    # Get all objects of one class and then serialize them into
    # a list of python dictionary
    @patch('test_unit_utility_generate_and_convert.r.create_lab')
    def test_serialize_lab_list(self, create_lab_mock):
        """Each serialized lab mirrors the attributes of its mock."""
        create_lab_mock.return_value = generate_lab_mock()
        lab_mock_list = [create_lab_mock() for _ in range(r.rand_round())]
        lab_list_serialized = serialize_lab_list(lab_mock_list)
        for i, serialized in enumerate(lab_list_serialized):
            self.assertEqual(lab_mock_list[i].name, serialized['lab_name'])
            self.assertEqual(lab_mock_list[i].description, serialized['description'])
            self.assertEqual(lab_mock_list[i].status, serialized['status'])
            # 'experiments' carries a count, not the experiment objects
            self.assertEqual(len(lab_mock_list[i].experiments), serialized['experiments'])

    @patch('test_unit_utility_generate_and_convert.r.create_experiment')
    def test_serialize_experiment_list(self, create_experiment_mock):
        """Each serialized experiment mirrors the attributes of its mock."""
        create_experiment_mock.return_value = generate_experiment_mock()
        experiment_mock_list = [create_experiment_mock()
                                for _ in range(r.rand_round())]
        experiment_list_serialized = serialize_experiment_list(experiment_mock_list)
        for i, serialized in enumerate(experiment_list_serialized):
            self.assertEqual(experiment_mock_list[i].name, serialized['experiment_name'])
            self.assertEqual(experiment_mock_list[i].description, serialized['description'])
            self.assertEqual(experiment_mock_list[i].order, serialized['order'])
            self.assertEqual(experiment_mock_list[i].value_type, serialized['value_type'])
            self.assertEqual(experiment_mock_list[i].value_range, serialized['value_range'])
            self.assertEqual(experiment_mock_list[i].value_candidates,
                             serialized['value_candidates'])

    @patch('test_unit_utility_generate_and_convert.r.create_user')
    def test_serialize_user_list(self, create_user_mock):
        """Each serialized user mirrors the attributes of its mock."""
        create_user_mock.return_value = generate_user_mock()
        user_mock_list = [create_user_mock() for _ in range(r.rand_round())]
        user_list_serialized = serialize_user_list(user_mock_list)
        for i, serialized in enumerate(user_list_serialized):
            self.assertEqual(user_mock_list[i].id, serialized['username'])
            self.assertEqual(user_mock_list[i].name, serialized['name'])

    @patch('test_unit_utility_generate_and_convert.r.create_class')
    def test_serialize_class_list(self, create_class_mock):
        """Each serialized class mirrors the attributes of its mock."""
        create_class_mock.return_value = generate_class_mock()
        class_mock_list = [create_class_mock() for _ in range(r.rand_round())]
        class_list_serialized = serialize_class_list(class_mock_list)
        for i, serialized in enumerate(class_list_serialized):
            self.assertEqual(class_mock_list[i].id, serialized['id'])
            self.assertEqual(class_mock_list[i].name, serialized['name'])
            self.assertEqual(class_mock_list[i].time, serialized['time'])

    def test_change_observation_organization(self):
        """Regrouping by student yields one group per observation row."""
        observations_group_by_experiment_name = r.rand_observations_group_by_experiment_name()
        observations_group_by_student = change_observation_organization(observations_group_by_experiment_name)
        self.assertEqual(len(observations_group_by_student), len(observations_group_by_experiment_name[0]['observations']))

    # Convert a term code into a semester name
    # e.g. 201701 -> FALL2017
    def test_tranlate_term_code_to_semester(self):
        """Term codes map to semester names.

        NOTE(review): '201701' maps to FALL of the *previous* year
        (FALL2016) per the helper's convention -- confirm intended.
        """
        term_code = '201701'
        term_code2 = '203103'
        semester = 'FALL2016'
        semester2 = 'SPRING2031'
        self.assertEqual(tranlate_term_code_to_semester(term_code), semester)
        self.assertEqual(tranlate_term_code_to_semester(term_code2), semester2)

    def test_cleanup_class_data(self):
        """Non Lab/Lecture sections (e.g. Ind. study) are filtered out."""
        # A snippet from real data
        class_data = [{'course_id': '11069', 'term_code': '201701', 'subject': 'BIOL', 'course_number': '331', 'section': 'F', 'section_type': 'Lecture', 'instructors': [{"username": "prof1", "last_name": "Prof", "first_name": "One"}]},
                      {'course_id': '10236', 'term_code': '201701', 'subject': 'BIOL', 'course_number': '101', 'section': 'F22', 'section_type': 'Lab', 'instructors': [{"username": "prof2", "last_name": "Prof", "first_name": "Two"}]},
                      {'course_id': '10447', 'term_code': '201701', 'subject': 'BIOL', 'course_number': '101', 'section': 'F01', 'section_type': 'Lab Lecture', 'instructors': [{"username": "prof3", "last_name": "Prof", "first_name": "Three"},
                                                                                                                                                                               {"username": "prof4", "last_name": "Prof", "first_name": "Four"},
                                                                                                                                                                               {"username": "prof5", "last_name": "Prof", "first_name": "Five"}]},
                      {'course_id': '10010', 'term_code': '201701', 'subject': 'BIOL', 'course_number': '470', 'section': 'YJS', 'section_type': 'Ind. study', 'instructors': [{"username": "prof6", "last_name": "Prof", "first_name": "Six"}]}
                      ]
        cleaned_class_data = cleanup_class_data(class_data)
        course_numbers = [c['course_number'] for c in cleaned_class_data]
        # 470 should have been removed
        self.assertNotIn('470', course_numbers)

    # This doesn't check all the variables passed into lab_info
    def test_pack_labinfo_sent_from_client(self):
        """The packed lab_info has one experiment slot per lab question."""
        # Typo fix: variable was previously named 'clinet_form'.
        client_form = {'labName': r.randlength_word(),
                       'classId': r.rand_classid(),
                       'professorName': r.randlength_word(),
                       'labDescription': r.randlength_word(),
                       'labQuestions': randint(1, 100)
                       }
        lab_info, err_msg = pack_labinfo_sent_from_client(client_form)
        self.assertEqual(len(lab_info['experiments']), client_form['labQuestions'], err_msg)
# Allow running this test module directly (e.g. `python <this file>`).
if __name__ == '__main__':
    unittest.main()
| |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
from neutron.openstack.common import log as logging
from oslo.config import cfg
from oslo.serialization import jsonutils
from requests import auth
LOG = logging.getLogger(__name__)

# Register the four ODL connection options from the driver's config module
# under the [odl_driver] group (equivalent to four import_opt calls).
for _opt_name in ('odl_username', 'odl_password', 'odl_host', 'odl_port'):
    cfg.CONF.import_opt(
        _opt_name,
        'gbpservice.neutron.services.grouppolicy.drivers.odl.config',
        group='odl_driver'
    )
del _opt_name
class OdlManager(object):
    """Class to manage ODL translations and workflow.

    This class manages translation from Neutron objects to APIC
    managed objects and contains workflows to implement these
    translations.

    All communication with the OpenDaylight controller happens over its
    RESTCONF HTTP API using basic auth and the yang.data+json media type.
    """

    def __init__(self):
        LOG.info(_("Configured ODL username: %s"),
                 cfg.CONF.odl_driver.odl_username)
        # Security fix: the password used to be logged here in cleartext.
        # Log only whether one is configured.
        LOG.info(_("Configured ODL password: %s"),
                 '<set>' if cfg.CONF.odl_driver.odl_password else '<unset>')
        LOG.info(_("Configured ODL host: %s"),
                 cfg.CONF.odl_driver.odl_host)
        LOG.info(_("Configured ODL port: %s"),
                 cfg.CONF.odl_driver.odl_port)

        self._username = cfg.CONF.odl_driver.odl_username
        self._password = cfg.CONF.odl_driver.odl_password
        self._host = cfg.CONF.odl_driver.odl_host
        self._port = cfg.CONF.odl_driver.odl_port

        # RESTCONF on ODL speaks the yang.data+json media type.
        self._headers = {
            'Content-type': 'application/yang.data+json',
            'Accept': 'application/yang.data+json',
        }

        self._base_url = (
            "http://%(host)s:%(port)s/restconf" %
            {'host': self._host, 'port': self._port}
        )
        # RPC endpoints for endpoint (de)registration.
        self._reg_ep_url = (
            self._base_url +
            '/operations/endpoint:register-endpoint'
        )
        self._unreg_ep_url = (
            self._base_url +
            '/operations/endpoint:unregister-endpoint'
        )
        # Per-tenant policy tree; the URLs below address its sub-resources
        # and are completed later via %-interpolation.
        self._policy_url = (
            self._base_url +
            '/config/policy:tenants/policy:tenant/%(tenant_id)s'
        )
        self._action_url = (
            self._policy_url +
            '/subject-feature-instances/action-instance/%(action)s'
        )
        self._classifier_url = (
            self._policy_url +
            '/subject-feature-instances/classifier-instance/%(classifier)s'
        )
        self._l3ctx_url = (
            self._policy_url +
            '/l3-context/%(l3ctx)s'
        )
        self._l2bd_url = (
            self._policy_url +
            '/l2-bridge-domain/%(l2bd)s'
        )
        self._l2fd_url = (
            self._policy_url +
            '/l2-flood-domain/%(l2fd)s'
        )
        self._epg_url = (
            self._policy_url +
            '/policy:endpoint-group/%(epg)s'
        )
        self._subnet_url = (
            self._policy_url +
            '/subnet/%(subnet)s'
        )
        self._contract_url = (
            self._policy_url +
            '/policy:contract/%(contract)s'
        )

    def _convert2ascii(self, obj):
        """Recursively convert unicode strings in obj to ascii byte strings.

        NOTE(review): Python-2-only (iteritems/unicode); non-ascii
        characters are silently dropped by the 'ignore' error handler.
        """
        if isinstance(obj, dict):
            return {self._convert2ascii(key): self._convert2ascii(value) for
                    key, value in obj.iteritems()}
        elif isinstance(obj, list):
            return [self._convert2ascii(element) for element in obj]
        elif isinstance(obj, unicode):
            return obj.encode('ascii', 'ignore')
        else:
            return obj

    def _sendjson(self, method, url, headers, obj=None):
        """Send json to the ODL controller.

        Raises requests.HTTPError via raise_for_status() on 4xx/5xx.
        """
        medium = self._convert2ascii(obj) if obj else None
        url = self._convert2ascii(url)
        data = (
            jsonutils.dumps(medium, indent=4, sort_keys=True) if medium
            else None
        )
        LOG.debug("=========================================================")
        LOG.debug("Sending METHOD (%(method)s) URL (%(url)s)",
                  {'method': method, 'url': url})
        LOG.debug("(%(data)s)", {'data': data})
        LOG.debug("=========================================================")
        r = requests.request(
            method,
            url=url,
            headers=headers,
            data=data,
            auth=auth.HTTPBasicAuth(self._username,
                                    self._password)
        )
        r.raise_for_status()

    def _is_tenant_created(self, tenant_id):
        """Return True/False if the tenant does/doesn't exist in ODL.

        Any status other than 200/404 raises via raise_for_status();
        a non-error status other than 200 falls through and returns None
        (falsy), which callers treat as "not created".
        """
        url = self._convert2ascii(self._policy_url % {'tenant_id': tenant_id})
        r = requests.request(
            'get',
            url=url,
            headers=self._headers,
            auth=auth.HTTPBasicAuth(self._username,
                                    self._password)
        )
        if r.status_code == 200:
            return True
        elif r.status_code == 404:
            return False
        else:
            r.raise_for_status()

    def register_endpoints(self, endpoints):
        """Register each endpoint via the register-endpoint RPC."""
        for ep in endpoints:
            data = {"input": ep}
            self._sendjson('post', self._reg_ep_url, self._headers, data)

    def unregister_endpoints(self, endpoints):
        """Unregister each endpoint via the unregister-endpoint RPC."""
        for ep in endpoints:
            data = {"input": ep}
            self._sendjson('post', self._unreg_ep_url, self._headers, data)

    def create_update_tenant(self, tenant_id, tenant):
        """Create or replace the tenant's policy tree."""
        url = (self._policy_url % {'tenant_id': tenant_id})
        data = {"tenant": tenant}
        self._sendjson('put', url, self._headers, data)

    def create_action(self, tenant_id, action):
        """Create policy action"""
        self._touch_tenant(tenant_id)
        url = (self._action_url %
               {'tenant_id': tenant_id, 'action': action['name']})
        data = {"action-instance": action}
        self._sendjson('put', url, self._headers, data)

    def delete_action(self, tenant_id, action):
        """Delete policy action"""
        url = (self._action_url %
               {'tenant_id': tenant_id, 'action': action['name']})
        self._sendjson('delete', url, self._headers)

    def create_classifier(self, tenant_id, classifier):
        """Create policy classifier"""
        self._touch_tenant(tenant_id)
        url = (self._classifier_url %
               {'tenant_id': tenant_id, 'classifier': classifier['name']})
        data = {"classifier-instance": classifier}
        self._sendjson('put', url, self._headers, data)

    def delete_classifier(self, tenant_id, classifier):
        """Delete policy classifier"""
        url = (self._classifier_url %
               {'tenant_id': tenant_id, 'classifier': classifier['name']})
        self._sendjson('delete', url, self._headers)

    def create_update_l3_context(self, tenant_id, l3ctx):
        """Create or replace an L3 context."""
        self._touch_tenant(tenant_id)
        url = (self._l3ctx_url %
               {'tenant_id': tenant_id, 'l3ctx': l3ctx['id']})
        data = {"l3-context": l3ctx}
        self._sendjson('put', url, self._headers, data)

    def delete_l3_context(self, tenant_id, l3ctx):
        """Delete an L3 context."""
        url = (self._l3ctx_url %
               {'tenant_id': tenant_id, 'l3ctx': l3ctx['id']})
        self._sendjson('delete', url, self._headers)

    def create_update_l2_bridge_domain(self, tenant_id, l2bd):
        """Create or replace an L2 bridge domain."""
        self._touch_tenant(tenant_id)
        url = (self._l2bd_url %
               {'tenant_id': tenant_id, 'l2bd': l2bd['id']})
        data = {"l2-bridge-domain": l2bd}
        self._sendjson('put', url, self._headers, data)

    def delete_l2_bridge_domain(self, tenant_id, l2bd):
        """Delete an L2 bridge domain."""
        url = (self._l2bd_url %
               {'tenant_id': tenant_id, 'l2bd': l2bd['id']})
        self._sendjson('delete', url, self._headers)

    def create_update_l2_flood_domain(self, tenant_id, l2fd):
        """Create or replace an L2 flood domain."""
        self._touch_tenant(tenant_id)
        url = (self._l2fd_url %
               {'tenant_id': tenant_id, 'l2fd': l2fd['id']})
        data = {"l2-flood-domain": l2fd}
        self._sendjson('put', url, self._headers, data)

    def delete_l2_flood_domain(self, tenant_id, l2fd):
        """Delete an L2 flood domain."""
        url = (self._l2fd_url %
               {'tenant_id': tenant_id, 'l2fd': l2fd['id']})
        self._sendjson('delete', url, self._headers)

    def create_update_endpoint_group(self, tenant_id, epg):
        """Create or replace an endpoint group."""
        self._touch_tenant(tenant_id)
        url = (self._epg_url %
               {'tenant_id': tenant_id, 'epg': epg['id']})
        data = {"endpoint-group": epg}
        self._sendjson('put', url, self._headers, data)

    def delete_endpoint_group(self, tenant_id, epg):
        """Delete an endpoint group."""
        url = (self._epg_url %
               {'tenant_id': tenant_id, 'epg': epg['id']})
        self._sendjson('delete', url, self._headers)

    def create_update_subnet(self, tenant_id, subnet):
        """Create or replace a subnet."""
        self._touch_tenant(tenant_id)
        url = (self._subnet_url %
               {'tenant_id': tenant_id, 'subnet': subnet['id']})
        data = {"subnet": subnet}
        self._sendjson('put', url, self._headers, data)

    def delete_subnet(self, tenant_id, subnet):
        """Delete a subnet."""
        url = (self._subnet_url %
               {'tenant_id': tenant_id, 'subnet': subnet['id']})
        self._sendjson('delete', url, self._headers)

    def create_update_contract(self, tenant_id, contract):
        """Create or replace a contract (does not auto-create the tenant)."""
        url = (self._contract_url %
               {'tenant_id': tenant_id, 'contract': contract['id']})
        data = {"contract": contract}
        self._sendjson('put', url, self._headers, data)

    def _touch_tenant(self, tenant_id):
        """Create a minimal tenant in ODL if it does not exist yet."""
        tenant = {
            "id": tenant_id
        }
        if not self._is_tenant_created(tenant_id):
            self.create_update_tenant(tenant_id, tenant)
| |
import json
import os
import subprocess
from urllib2 import urlopen
import ccmlib.common as common
from dtest import warning
from distutils.version import LooseVersion
# Bundled Jolokia JVM agent jar used to expose JMX over HTTP.
JOLOKIA_JAR = os.path.join('lib', 'jolokia-jvm-1.2.3-agent.jar')
# Java classpath separator differs between Windows and POSIX.
CLASSPATH_SEP = ';' if common.is_win() else ':'
# Name of the JVM options file used by Cassandra 3.2+.
JVM_OPTIONS = "jvm.options"
def jolokia_classpath():
    """Build the classpath for launching the Jolokia agent.

    Prepends $JAVA_HOME/lib/tools.jar when JAVA_HOME is set; otherwise
    warns and returns the agent jar alone.
    """
    java_home = os.environ.get('JAVA_HOME')
    if java_home is None:
        warning("Environment variable $JAVA_HOME not present: jmx-based " +
                "tests may fail because of missing $JAVA_HOME/lib/tools.jar.")
        return JOLOKIA_JAR
    tools_jar = os.path.join(java_home, 'lib', 'tools.jar')
    return CLASSPATH_SEP.join((tools_jar, JOLOKIA_JAR))
def java_bin():
    """Return the java executable path, preferring $JAVA_HOME/bin/java."""
    java_home = os.environ.get('JAVA_HOME')
    if java_home is not None:
        return os.path.join(java_home, 'bin', 'java')
    return 'java'
def make_mbean(package, type, **kwargs):
    '''
    Builds the name for an mbean.

    `package` is appended to the org.apache.cassandra domain.

    `type` is used as the 'type' property.

    All other keyword arguments are used as properties in the mbean's name.

    Example usage:

    >>> make_mbean('db', 'IndexSummaries')
    'org.apache.cassandra.db:type=IndexSummaries'
    >>> make_mbean('metrics', type='ColumnFamily', name='MemtableColumnsCount', keyspace='ks', scope='table')
    'org.apache.cassandra.metrics:type=ColumnFamily,keyspace=ks,name=MemtableColumnsCount,scope=table'
    '''
    rv = 'org.apache.cassandra.%s:type=%s' % (package, type)
    if kwargs:
        # Compatibility fix: items() instead of the Python-2-only
        # iteritems(); behaviour is identical on Python 2 and 3.
        rv += ',' + ','.join('{k}={v}'.format(k=k, v=v)
                             for k, v in kwargs.items())
    return rv
def enable_jmx_ssl(node,
                   require_client_auth=False,
                   disable_user_auth=True,
                   keystore=None,
                   keystore_password=None,
                   truststore=None,
                   truststore_password=None):
    """
    Sets up a node (currently via the cassandra-env file) to use SSL for JMX connections

    :param node: ccm node whose env file (cassandra-env.sh, or the Windows
        PowerShell equivalent) is edited in place.
    :param require_client_auth: also require clients to present a certificate.
    :param disable_user_auth: switch off JMX user authentication (distinct
        from client-certificate validation).
    :param keystore: optional server keystore path.
    :param keystore_password: optional keystore password.
    :param truststore: optional truststore path.
    :param truststore_password: optional truststore password.

    Each replacement_list entry is a (regex-pattern, replacement) pair for
    common.replaces_in_file; the patterns match the commented/uncommented
    template lines shipped in the env file, so the backslash-escapes below
    are regex escapes, not shell syntax.
    """
    # mandatory replacements when enabling SSL
    # (Windows and POSIX env files use different line templates)
    replacement_list = [
        ('\$env:JVM_OPTS="\$env:JVM_OPTS -Dcassandra.jmx.local.port=$JMX_PORT")',
         '#$env:JVM_OPTS="$env:JVM_OPTS -Dcassandra.jmx.local.port=$JMX_PORT"'),
        ('#\$env:JVM_OPTS="\$env:JVM_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT"',
         '$env:JVM_OPTS="$env:JVM_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT"'),
        ('#\$env:JVM_OPTS="\$env:JVM_OPTS -Dcom.sun.management.jmxremote.ssl=true"',
         '$env:JVM_OPTS="$env:JVM_OPTS -Dcom.sun.management.jmxremote.ssl=true"')
    ] if common.is_win() else [
        ('LOCAL_JMX=yes', 'LOCAL_JMX=no'),
        ('#JVM_OPTS="\$JVM_OPTS -Dcom.sun.management.jmxremote.ssl=true"',
         'JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.ssl=true"'),
        ('JVM_OPTS="\$JVM_OPTS -Dcom.sun.management.jmxremote.rmi.port=\$JMX_PORT"',
         '#JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT"')
    ]

    # optional replacements, appended only for the options actually requested
    if require_client_auth:
        if common.is_win():
            replacement_list.append(('#\$env:JVM_OPTS="\$env:JVM_OPTS -Dcom.sun.management.jmxremote.ssl.need.client.auth=true"',
                                     '$env:JVM_OPTS="$env:JVM_OPTS -Dcom.sun.management.jmxremote.ssl.need.client.auth=true"'))
        else:
            replacement_list.append(('#JVM_OPTS="\$JVM_OPTS -Dcom.sun.management.jmxremote.ssl.need.client.auth=true"',
                                     'JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.ssl.need.client.auth=true"'))

    if keystore:
        if common.is_win():
            replacement_list.append(('#\$env:JVM_OPTS="\$env:JVM_OPTS -Djavax.net.ssl.keyStore=C:/keystore"',
                                     '$env:JVM_OPTS="$env:JVM_OPTS -Djavax.net.ssl.keyStore={path}"'.format(path=keystore)))
        else:
            replacement_list.append(('#JVM_OPTS="\$JVM_OPTS -Djavax.net.ssl.keyStore=/path/to/keystore"',
                                     'JVM_OPTS="$JVM_OPTS -Djavax.net.ssl.keyStore={path}"'.format(path=keystore)))
    if keystore_password:
        if common.is_win():
            replacement_list.append(('#\$env:JVM_OPTS="\$env:JVM_OPTS -Djavax.net.ssl.keyStorePassword=<keystore-password>"',
                                     '$env:JVM_OPTS="$env:JVM_OPTS -Djavax.net.ssl.keyStorePassword={password}"'.format(password=keystore_password)))
        else:
            replacement_list.append(('#JVM_OPTS="\$JVM_OPTS -Djavax.net.ssl.keyStorePassword=<keystore-password>"',
                                     'JVM_OPTS="$JVM_OPTS -Djavax.net.ssl.keyStorePassword={password}"'.format(password=keystore_password)))
    if truststore:
        if common.is_win():
            replacement_list.append(('#\$env:JVM_OPTS="\$env:JVM_OPTS -Djavax.net.ssl.trustStore=C:/truststore"',
                                     '$env:JVM_OPTS="$env:JVM_OPTS -Djavax.net.ssl.trustStore={path}"'.format(path=truststore)))
        else:
            replacement_list.append(('#JVM_OPTS="\$JVM_OPTS -Djavax.net.ssl.trustStore=/path/to/truststore"',
                                     'JVM_OPTS="$JVM_OPTS -Djavax.net.ssl.trustStore={path}"'.format(path=truststore)))
    if truststore_password:
        if common.is_win():
            replacement_list.append(('#\$env:JVM_OPTS="\$env:JVM_OPTS -Djavax.net.ssl.trustStorePassword=<truststore-password>"',
                                     '$env:JVM_OPTS="$env:JVM_OPTS -Djavax.net.ssl.trustStorePassword={password}"'.format(password=truststore_password)))
        else:
            replacement_list.append(('#JVM_OPTS="\$JVM_OPTS -Djavax.net.ssl.trustStorePassword=<truststore-password>"',
                                     'JVM_OPTS="$JVM_OPTS -Djavax.net.ssl.trustStorePassword={password}"'.format(password=truststore_password)))

    # switches off user authentication, distinct from validation of client certificates (i.e. require_client_auth)
    if disable_user_auth:
        if not common.is_win():
            replacement_list.append(('JVM_OPTS="\$JVM_OPTS -Dcom.sun.management.jmxremote.authenticate=true"',
                                     'JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.authenticate=false"'))

    common.replaces_in_file(node.envfilename(), replacement_list)
def apply_jmx_authentication(node):
    """
    Edit the node's env file to turn on JMX user authentication and switch
    it to Cassandra's internal JAAS-based (CassandraLogin) configuration,
    disabling the file-based jmxremote.password mechanism.

    Patterns are regexes handed to common.replaces_in_file, hence the
    backslash-escaped '$' characters.
    """
    replacement_list = [
        ('JVM_OPTS="\$JVM_OPTS -Dcom.sun.management.jmxremote.authenticate=false"',
         'JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.authenticate=true"'),
        ('JVM_OPTS="\$JVM_OPTS -Dcom.sun.management.jmxremote.password.file=/etc/cassandra/jmxremote.password"',
         '#JVM_OPTS="$JVM_OPTS -Dcom.sun.management.jmxremote.password.file=/etc/cassandra/jmxremote.password"'),
        ('#JVM_OPTS="\$JVM_OPTS -Dcassandra.jmx.remote.login.config=CassandraLogin"',
         'JVM_OPTS="$JVM_OPTS -Dcassandra.jmx.remote.login.config=CassandraLogin"'),
        ('#JVM_OPTS="\$JVM_OPTS -Djava.security.auth.login.config=\$CASSANDRA_HOME/conf/cassandra-jaas.config"',
         'JVM_OPTS="$JVM_OPTS -Djava.security.auth.login.config=$CASSANDRA_HOME/conf/cassandra-jaas.config"'),
        ('#JVM_OPTS="\$JVM_OPTS -Dcassandra.jmx.authorizer=org.apache.cassandra.auth.jmx.AuthorizationProxy"',
         'JVM_OPTS="$JVM_OPTS -Dcassandra.jmx.authorizer=org.apache.cassandra.auth.jmx.AuthorizationProxy"')
    ]
    common.replaces_in_file(node.envfilename(), replacement_list)
def remove_perf_disable_shared_mem(node):
    """
    The Jolokia agent is incompatible with the -XX:+PerfDisableSharedMem JVM
    option (see https://github.com/rhuss/jolokia/issues/198 for details). This
    edits cassandra-env.sh (or the Windows equivalent), or jvm.options file on 3.2+ to remove that option.
    """
    if LooseVersion(node.cluster.version()) >= LooseVersion('3.2'):
        # 3.2+ keeps JVM flags in a separate jvm.options file; comment
        # the whole line out. The pattern is a regex, hence the raw
        # string — previously '\-' / '\+' were unrecognized escape
        # sequences (SyntaxWarning on Python >= 3.12).
        conf_file = os.path.join(node.get_conf_dir(), JVM_OPTIONS)
        pattern = r'\-XX:\+PerfDisableSharedMem'
        replacement = '#-XX:+PerfDisableSharedMem'
    else:
        # Older versions carry the flag inside cassandra-env.sh; strip
        # just the option name from the line instead.
        conf_file = node.envfilename()
        pattern = 'PerfDisableSharedMem'
        replacement = ''
    common.replace_in_file(conf_file, pattern, replacement)
class JolokiaAgent(object):
    """
    This class provides a simple way to read, write, and execute
    JMX attributes and methods through a Jolokia agent.
    Example usage:
    node = cluster.nodelist()[0]
    mbean = make_mbean('db', 'IndexSummaries')
    with JolokiaAgent(node) as jmx:
    avg_interval = jmx.read_attribute(mbean, 'AverageIndexInterval')
    jmx.write_attribute(mbean, 'MemoryPoolCapacityInMB', 0)
    jmx.execute_method(mbean, 'redistributeSummaries')
    """
    # The ccm node whose JVM the Jolokia agent is attached to.
    node = None
    def __init__(self, node):
        self.node = node
    def start(self):
        """
        Starts the Jolokia agent. The process will fork from the parent
        and continue running until stop() is called.
        """
        # Attach the agent to the node's running JVM via Jolokia's
        # AgentLauncher, binding its HTTP endpoint to the node's
        # 'binary' (native transport) interface.
        args = (java_bin(),
                '-cp', jolokia_classpath(),
                'org.jolokia.jvmagent.client.AgentLauncher',
                '--host', self.node.network_interfaces['binary'][0],
                'start', str(self.node.pid))
        try:
            subprocess.check_output(args, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as exc:
            # Surface the full command and captured output before
            # re-raising so failures are diagnosable from the log.
            print "Failed to start jolokia agent (command was: %s): %s" % (' '.join(args), exc)
            print "Exit status was: %d" % (exc.returncode,)
            print "Output was: %s" % (exc.output,)
            raise
    def stop(self):
        """
        Stops the Jolokia agent.
        """
        # Detach the agent from the target JVM (mirror of start()).
        args = (java_bin(),
                '-cp', jolokia_classpath(),
                'org.jolokia.jvmagent.client.AgentLauncher',
                'stop', str(self.node.pid))
        try:
            subprocess.check_output(args, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as exc:
            print "Failed to stop jolokia agent (command was: %s): %s" % (' '.join(args), exc)
            print "Exit status was: %d" % (exc.returncode,)
            print "Output was: %s" % (exc.output,)
            raise
    def _query(self, body):
        # POST a JSON request to the agent (default Jolokia port 8778)
        # and return the decoded JSON response.
        # Raises on transport errors (non-200 HTTP) and on Jolokia-level
        # errors (non-200 'status' field in the JSON payload).
        request_data = json.dumps(body)
        url = 'http://%s:8778/jolokia/' % (self.node.network_interfaces['binary'][0],)
        response = urlopen(url, data=request_data, timeout=10.0)
        if response.code != 200:
            raise Exception("Failed to query Jolokia agent; HTTP response code: %d; response: %s" % (response.code, response.readlines()))
        raw_response = response.readline()
        response = json.loads(raw_response)
        if response['status'] != 200:
            # Jolokia reports server-side failures in-band; dump any
            # stacktrace it provides before raising.
            stacktrace = response.get('stacktrace')
            if stacktrace:
                print "Stacktrace from Jolokia error follows:"
                for line in stacktrace.splitlines():
                    print line
            raise Exception("Jolokia agent returned non-200 status: %s" % (response,))
        return response
    def read_attribute(self, mbean, attribute, path=None):
        """
        Reads a single JMX attribute.
        `mbean` should be the full name of an mbean. See the mbean() utility
        function for an easy way to create this.
        `attribute` should be the name of an attribute on that mbean.
        `path` is an optional string that can be used to specify sub-attributes
        for complex JMX attributes.
        """
        body = {'type': 'read',
                'mbean': mbean,
                'attribute': attribute}
        if path:
            body['path'] = path
        response = self._query(body)
        return response['value']
    def write_attribute(self, mbean, attribute, value, path=None):
        """
        Writes a values to a single JMX attribute.
        `mbean` should be the full name of an mbean. See the mbean() utility
        function for an easy way to create this.
        `attribute` should be the name of an attribute on that mbean.
        `value` should be the new value for the attribute.
        `path` is an optional string that can be used to specify sub-attributes
        for complex JMX attributes.
        """
        body = {'type': 'write',
                'mbean': mbean,
                'attribute': attribute,
                'value': value}
        if path:
            body['path'] = path
        self._query(body)
    def execute_method(self, mbean, operation, arguments=None):
        """
        Executes a method on a JMX mbean.
        `mbean` should be the full name of an mbean. See the mbean() utility
        function for an easy way to create this.
        `operation` should be the name of the method on the mbean.
        `arguments` is an optional list of arguments to pass to the method.
        """
        if arguments is None:
            arguments = []
        body = {'type': 'exec',
                'mbean': mbean,
                'operation': operation,
                'arguments': arguments}
        response = self._query(body)
        return response['value']
    def __enter__(self):
        """ For contextmanager-style usage. """
        self.start()
        return self
    def __exit__(self, exc_type, value, traceback):
        """ For contextmanager-style usage. """
        self.stop()
        # Returning True here would suppress an in-flight exception; only
        # do so when the 'with' body completed cleanly.
        return exc_type is None
| |
#
# Created as part of the StratusLab project (http://stratuslab.eu),
# co-funded by the European Commission under the Grant Agreement
# INFSO-RI-261552.
#
# Copyright (c) 2013, SixSq Sarl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import xml.etree.ElementTree as ET
import urllib2
import datetime
import time
from .one_query import get_all_users_from_one, get_all_vms_from_one, \
get_vm_details_from_one
from stratuslab.volume_manager.PersistentDisk import PersistentDisk
from stratuslab.ConfigHolder import ConfigHolder
from stratuslab import Util
class ManifestGetFromMarketplaceError(Exception):
    """Raised when an image manifest cannot be retrieved from the
    StratusLab Marketplace (see MPHelper.get_size)."""
    pass
def bytes_to_GB(_bytes):
    """Convert a byte count to gigabytes via three successive /1024 steps."""
    gigabytes = _bytes
    for _ in range(3):
        gigabytes = gigabytes / 1024
    return gigabytes
def bytes_to_giga_approximation(numberOfBytes):
    """Approximate a byte count in gigabytes: divide by 2**30, add one."""
    gigas = numberOfBytes / 1024 ** 3
    return gigas + 1
def url_get(url):
    """Fetch *url* and return the raw response body.

    Raises urllib2.URLError on network/protocol failure.
    """
    response = urllib2.urlopen(url)
    return response.read()
class Computer(object):
    """Build per-user VM usage-record XML files from OpenNebula (ONE)
    accounting data over the metering window [fromInSecs, toInSecs]."""
    # XML element names of the per-slice timestamps in ONE accounting data.
    # The queueing time of the VM instantiation request.
    VM_STARTTIME_ELEM = 'stime'
    VM_ENDTIME_ELEM = 'etime'
    # Entry/exit times of the Running state.
    VM_RUN_STARTTIME_ELEM = 'rstime'
    VM_RUN_ENDTIME_ELEM = 'retime'
    # End of the epilog (cleanup) phase.
    VM_EPILOG_ENDTIME_ELEM = 'eetime'
    # Accounts that are never metered.
    USER_IGNORE_LIST = ['oneadmin']
    def __init__(self, fromInSecs, toInSecs, outputDir, daily,
                 stime_running=True, etime_done=True, user_names=[]):
        # NOTE(review): user_names=[] is a mutable default argument; it is
        # only read here, but a tuple default would be safer.
        self.outputDir = outputDir
        self.daily = daily
        self.user_names = user_names
        # Disk-size caches keyed by Marketplace URL / pdisk URI to avoid
        # repeated remote lookups.
        self.marketplace_size_cache = {}
        self.pdisk_size_cache = {}
        self.fromInSecs = int(fromInSecs)
        self.toInSecs = int(toInSecs)
        """Start time corresponds to the VM entering Running state. Alternative
        is to use get_stime() - time when the request for the VM instantiation
        was queued (Pending state)."""
        # Bind the accessor methods once so the rest of the class can use
        # self.get_starttime / self.get_endtime uniformly.
        self.get_starttime = stime_running and self.get_rstime or self.get_stime
        """End time is when the VM stopped Running. Another option is to use
        get_donetime() which takes into account Epilog."""
        self.get_endtime = etime_done and self.get_donetime or self.get_retime
    # --- Raw timestamp accessors (epoch seconds from the 'slice' node) ---
    def get_stime(self, vm):
        return int(vm.find('slice/' + self.VM_STARTTIME_ELEM).text)
    def get_rstime(self, vm):
        return int(vm.find('slice/' + self.VM_RUN_STARTTIME_ELEM).text)
    def get_retime(self, vm):
        return int(vm.find('slice/' + self.VM_RUN_ENDTIME_ELEM).text)
    def get_eetime(self, vm):
        return int(vm.find('slice/' + self.VM_EPILOG_ENDTIME_ELEM).text)
    def get_etime(self, vm):
        return int(vm.find('slice/' + self.VM_ENDTIME_ELEM).text)
    @staticmethod
    def get_etime_vm_details(vm):
        # 'vm' here is the detailed record from 'onevm', not the
        # accounting record (uppercase element names).
        return int(vm.find('ETIME').text)
    def get_donetime(self, vm):
        # Prefer the later of run-end and epilog-end; fall back to the
        # accounting etime, then to the 'onevm' details as a last resort.
        retime = self.get_retime(vm)
        eetime = self.get_eetime(vm)
        etime = (eetime > retime) and eetime or retime
        if etime == 0:
            etime = self.get_etime(vm)
            # The VM might have failed to start. In this case 'ETIME' is set
            # on the VM details only taken from ONE DB by 'onevm'. This looks
            # like a bug in 'oneacctd' or 'oneacct'.
            if etime == 0:
                etime = self._query_etime_from_vm_details(vm)
        return etime
    def _query_etime_from_vm_details(self, vm):
        # Fetch the detailed VM record from ONE and read ETIME from it.
        vm_details = get_vm_details_from_one(self.get_id(vm))
        return self.get_etime_vm_details(vm_details)
    @staticmethod
    def get_id(vm):
        return vm.get('id')
    def vm_in_range(self, vm):
        """Filter out VMs that were stopped before or started after the time
        slice we are concerned with."""
        endtime = int(self.get_endtime(vm))
        # endtime == 0 assumes that the VM is still running or didn't run
        if endtime > 0 and endtime < self.fromInSecs:  # ended before metering window
            return False
        starttime = int(self.get_starttime(vm))
        if starttime > self.toInSecs:  # started after metering window
            return False
        if starttime == 0:  # VM didn't run
            # Count the VM only if its queueing time fell inside the window.
            stime = self.get_stime(vm)
            if stime > self.fromInSecs and stime < self.toInSecs:
                return True
            else:
                return False
        return True
    def user_in_range(self, user):
        # 'user' is a USER element from the ONE user pool.
        username = user.findtext('NAME')
        return self.username_in_range(username)
    def username_in_range(self, username):
        # Ignore service accounts listed in USER_IGNORE_LIST.
        if username in self.USER_IGNORE_LIST:
            print 'skipping', username
            return False
        return True
    def filter_users(self, root):
        # Return the users to meter as [{'id': ..., 'name': ...}, ...].
        # When self.user_names is set, only those users are selected
        # (with a warning for unknown names); otherwise all non-ignored
        # users from the pool are taken.
        def _append_user(_users, u):
            user = {}
            user['id'] = u.findtext('ID')
            user['name'] = u.findtext('NAME')
            _users.append(user)
        users = []
        if root is not None:
            if self.user_names:
                cloud_users = dict((u.findtext('NAME'), u) for u in root.findall('USER'))
                for name in self.user_names:
                    if self.username_in_range(name):
                        try:
                            u = cloud_users[name]
                        except KeyError:
                            print 'WARNING: user %s not found.' % name
                        else:
                            _append_user(users, cloud_users[name])
            else:
                for u in root.getiterator('USER'):
                    if self.user_in_range(u):
                        _append_user(users, u)
        return users
    def filter_and_update_vms(self, root):
        # Keep only VMs overlapping the metering window and rewrite their
        # time elements in place (see _update_time_on_vm).
        vms = []
        if root is not None:
            for vm in root.getiterator('vm'):
                if self.vm_in_range(vm):
                    self._update_time_on_vm(vm)
                    vms.append(vm)
        return vms
    def _update_time_on_vm(self, vm):
        # Replace the raw 'slice' timestamps with metering-window-clamped
        # starttime/endtime elements and a total time (hours).
        _slice = vm.find('slice')
        if _slice is None:
            # Malformed accounting record: flag the total time as unusable.
            print 'time for missing slice:', vm.findtext('time')
            timeElem = vm.find('time')
            timeElem.text = "XX"
        else:
            meter_stime, meter_etime = self.get_meter_start_end_times(vm)
            self.set_starttime(vm, meter_stime)
            self.set_endtime(vm, meter_etime)
            # Total time should be in hours
            delta = int((meter_etime - meter_stime) / 60 / 60)
            self.set_totaltime(vm, (delta > 0) and delta or 0)
            vm.remove(_slice)
    def get_meter_start_end_times(self, vm):
        # Metered interval for the VM, clamped to the metering window.
        stime = self.get_starttime(vm)
        etime = self.get_endtime(vm)
        if stime == etime:  # VM didn't run
            meter_stime = self.get_stime(vm)
            meter_etime = self.get_etime(vm)
        else:
            meter_stime = self.get_meter_stime(vm)
            meter_etime = self.get_meter_etime(vm)
        return meter_stime, meter_etime
    def get_meter_stime(self, vm):
        # Metered start: the VM's start time, but never before the window.
        stime = self.get_starttime(vm)
        if stime == 0:  # VM didn't run
            stime = self.get_stime(vm)
        if self.fromInSecs > stime:
            return self.fromInSecs
        else:
            return stime
    def get_meter_etime(self, vm):
        # Metered end: the VM's end time, but never after the window.
        etime = self.get_endtime(vm)
        if etime == 0:  # Machine is still running or didn't run
            return self.toInSecs
        if etime < self.toInSecs:
            return etime
        else:
            return self.toInSecs
    @staticmethod
    def set_totaltime(vm, _time):
        # Overwrite the accounting 'time' element with the metered hours.
        time_elem = vm.find('time')
        time_elem.text = str(_time)
    def set_starttime(self, vm, starttime):
        self._vm_set_time_in_sec(vm, starttime, 'starttime')
    def set_endtime(self, vm, endtime):
        self._vm_set_time_in_sec(vm, endtime, 'endtime')
    @staticmethod
    def _vm_set_time_in_sec(vm, _time, time_elem_name):
        # Append a new child element holding the timestamp rendered as a
        # human-readable UTC datetime string.
        time_elem = ET.Element(time_elem_name)
        time_elem.text = str(datetime.datetime.utcfromtimestamp(float(_time)))
        vm.append(time_elem)
    def get_sizes(self, vmDetail):
        # Sizes (GB) of all disks attached to the VM, from 'onevm' details.
        disks = self.get_disks(vmDetail)
        sizes = [self.get_disk_size(disk) for disk in disks]
        return sizes
    @staticmethod
    def insert_disks(vm, sizes):
        # Append <disk><size>N</size></disk> children to the usage record.
        for size in sizes:
            diskElement = ET.Element('disk')
            sizeElement = ET.Element('size')
            sizeElement.text = str(size)
            diskElement.append(sizeElement)
            vm.append(diskElement)
    def add_detail_info(self, vms):
        # Enrich each accounting record with disk sizes and the real VM
        # name taken from the detailed 'onevm' record.
        for vm in vms:
            vmDetail = get_vm_details_from_one(self.get_id(vm))
            if vmDetail is not None:
                sizes = self.get_sizes(vmDetail)
                self.insert_disks(vm, sizes)
                vm.find('name').text = vmDetail.find('NAME').text
        return vms
    @staticmethod
    def get_disks(vm):
        return vm.findall('TEMPLATE/DISK')
    def get_disk_size(self, disk):
        # Size in GB. SIZE (MB) is used when present; otherwise the size
        # is resolved from the disk SOURCE (Marketplace URL or pdisk URI).
        # NOTE(review): returns None for sources that match neither
        # 'http' nor 'pdisk' — confirm that cannot happen upstream.
        size = disk.find('SIZE')
        if size is not None:
            return float(size.text) / 1024
        else:
            source = self.get_disk_source(disk)
            if source.startswith('http'):
                return self.get_size_from_marketplace_or_pdisk_by_manifest_id(source)
            elif source.startswith('pdisk'):
                return self.get_size_from_pdisk(source)
    def get_size_from_pdisk(self, uri):
        """uri - pdisk:ip[:port]:uuid
        """
        # Cached per-URI to avoid repeated PDisk queries.
        if uri in self.pdisk_size_cache:
            return self.pdisk_size_cache[uri]
        endpoint, uuid = PDiskHelper.get_endpoint_and_disk_uuid_from_uri(uri)
        size_gb = PDiskHelper.get_size(endpoint, uuid)
        self.pdisk_size_cache[uri] = size_gb
        return size_gb
    def get_size_from_marketplace_or_pdisk_by_manifest_id(self, source):
        """Return size in GB as int. Size is set to 0GB if not found neither in
        Marketplace nor PDisk. Cache the size on success.
        """
        # Check the size in cache.
        if source in self.marketplace_size_cache:
            return self.marketplace_size_cache[source]
        size_gb = self._get_size_from_marketplace_or_pdisk_by_manifest_id(source)
        # Cache the size.
        self.marketplace_size_cache[source] = size_gb
        return size_gb
    @staticmethod
    def _get_size_from_marketplace_or_pdisk_by_manifest_id(url):
        # Try the Marketplace first, then PDisk by manifest id; give up
        # with 0 GB so a lookup failure never aborts the whole run.
        try:
            return MPHelper.get_size(url)
        except ManifestGetFromMarketplaceError as ex:
            print 'WARNING: Failed to get disk size %s from MP: %s' % (url, str(ex))
            print 'WARNING: Trying to get disk size from PDisk.'
            try:
                return PDiskHelper.get_size_by_marketplace_url(url)
            except Exception as ex:
                print 'WARNING: Failed to get size from PDisk by MP ID: %s. Setting to 0GB.' % str(ex)
                return 0
    @staticmethod
    def get_disk_source(disk):
        return disk.find('SOURCE').text.strip()
    @staticmethod
    def compute_totals(root):
        # Aggregate per-VM figures into totals on the usage-record root:
        # hours, CPU-hours, RAM(GB)-hours and disk(GB)-hours.
        totalTime = 0
        totalCpu = 0
        totalRam = 0
        totalDisk = 0
        # totalNetRx = 0
        # totalNetTx = 0
        for vm in root.findall('vm'):
            time = float(vm.find('time').text)  # in hours
            totalTime += time
            totalCpu += float(vm.find('cpu').text) * time
            totalRam += float(vm.find('mem').text) * time / 1024
            # totalNetRx += int(vm.find('net_rx').text)
            # totalNetTx += int(vm.find('net_tx').text)
            disk = reduce(lambda a, b: a + b, [float(disk.find('size').text) for disk in vm.findall('disk')], 0)
            totalDisk += disk * time
        root.set('total_time', str("%.0f" % totalTime))
        root.set('total_cpu', str("%.0f" % totalCpu))
        root.set('total_ram', str("%.0f" % totalRam))
        root.set('total_disk', str("%.0f" % totalDisk))
        # root.set('total_net_rx', str("%.0f" % (bytes_to_GB(totalNetRx))))
        # root.set('total_net_tx', str("%.0f" % (bytes_to_GB(totalNetTx))))
    def get_users(self):
        return self.filter_users(get_all_users_from_one())
    def _append_vms(self, root, allVms):
        # Attach filtered + enriched VM records to the usage-record root.
        if allVms is not None:
            filteredVms = self.filter_and_update_vms(allVms)
            withDiskInfoVms = self.add_detail_info(filteredVms)
            for vm in withDiskInfoVms:
                root.append(vm)
    def compute_user(self, user):
        # Build and write one usage-record XML file for a single user.
        _id = user['id']
        username = user['name']
        print 'processing', username, '...'
        allVms = get_all_vms_from_one(_id)
        root = ET.Element('usagerecord')
        self._append_vms(root, allVms)
        root.set('userid', _id)
        root.set('username', username)
        _from = datetime.datetime.utcfromtimestamp(self.fromInSecs)
        root.set('from', str(_from))
        to = datetime.datetime.utcfromtimestamp(self.toInSecs)
        root.set('to', str(to))
        self.compute_totals(root)
        # Daily records are named by date only; otherwise the file name
        # also carries the from/to times of the window.
        dateFormat = '%d%m%Y'
        hourFormat = '%H%M%S'
        filenameTemplate = "acctpy_User-Id%(id)s_%(date)s.xml"
        if(self.daily):
            formattedDate = to.strftime(dateFormat)
            filename = os.path.join(self.outputDir, filenameTemplate % \
                                    {'id': _id, 'date': formattedDate})
        else:
            formattedDate = _from.strftime(dateFormat) + '_' + \
                _from.strftime(hourFormat) + '-' + to.strftime(hourFormat)
            filename = os.path.join(self.outputDir, filenameTemplate % \
                                    {'id': _id, 'date': formattedDate})
        open(filename, 'w').write(ET.tostring(root))
    def compute(self):
        # Process every metered user; a failure for one user is logged
        # and aborts the remainder of the run.
        # NOTE(review): 'return' inside the except block stops processing
        # subsequent users — confirm this is intended (vs. 'continue').
        for user in self.get_users():
            try:
                self.compute_user(user)
            except Exception as ex:
                _time = time.strftime("%Y-%m-%dT%H:%M:%SZ",
                                      time.gmtime(time.time()))
                print _time, "Error processing user", user['name']
                print ex
                return
class PDiskHelper(object):
    """
    PDisk credentials will be recovered from configuration file by
    PersistentDisk. User under which the script is ran should be in the
    group that has the permission to access the configuration file.
    """
    @staticmethod
    def get_size(endpoint, uuid):
        # Return the disk size in GB as int for the given disk uuid.
        # NOTE(review): the 'endpoint' argument is unused — the PDisk
        # endpoint comes from the configuration file (_get_config_holder);
        # confirm this is intentional.
        pd = PDiskHelper._get_pdisk()
        size_gb = pd.getValue('size', uuid)
        # Strip thousands separators before converting to int.
        size_gb = size_gb.replace(',', '')
        return int(size_gb)
    @staticmethod
    def get_size_by_marketplace_url(markteplace_url):
        # The manifest id is the last path component of the Marketplace URL.
        manifest_id = markteplace_url.split('/')[-1]
        return PDiskHelper._get_size_by_manifest_id(manifest_id)
    @staticmethod
    def get_endpoint_and_disk_uuid_from_uri(uri):
        """uri - pdisk:ip[:port]:uuid
        """
        # Normalize any '/' to ':' then split; 4 parts means an explicit
        # port was given, 3 parts means host only.
        parts = uri.replace('/', ':').split(':')
        if len(parts) == 4:
            _, ip, port, uuid = parts
            endpoint = '%s:%s' % (ip, port)
        elif len(parts) == 3:
            _proto, ip, uuid = parts
            endpoint = ip
        else:
            raise Exception('Failed to get PDisk endpoint from disk URI: %s' % uri)
        return endpoint, uuid
    @staticmethod
    def _get_size_by_manifest_id(manifest_id):
        # Look the disk up by its 'tag' (Marketplace manifest id); use the
        # first match when several disks carry the same tag.
        pd = PDiskHelper._get_pdisk()
        filter = ('tag', manifest_id)
        uuids = pd.search(*filter)
        if not uuids:
            raise Exception('Failed to find %s in PDisk.' % str(filter))
        if len(uuids) > 1:
            print 'WARNING: got more than one PDisk uuid with %s.' % str(filter)
        size_gb = pd.getValue('size', uuids[0])
        return int(size_gb)
    @staticmethod
    def _get_pdisk():
        # Build a PersistentDisk client from the default configuration.
        config = PDiskHelper._get_config_holder()
        return PersistentDisk(config)
    @staticmethod
    def _get_config_holder():
        # Only the endpoint and service user are propagated to the client.
        config = PDiskHelper._get_config_as_dict()
        ch = ConfigHolder()
        ch.set('pdiskEndpoint', config['persistentDiskIp'])
        ch.set('persistentDiskCloudServiceUser', config['persistentDiskCloudServiceUser'])
        return ch
    @staticmethod
    def _get_config_as_dict():
        # Parse the default StratusLab configuration file into a dict.
        config_file = Util.defaultConfigFile
        try:
            return ConfigHolder.configFileToDictWithFormattedKeys(config_file)
        except Exception as ex:
            raise Exception('Failed to get persistent_disk_ip parameter '
                            'from %s: %s' % (config_file, str(ex)))
class MPHelper(object):
    """StratusLab Marketplace wrapper.
    """
    @staticmethod
    def get_size(url):
        """Return the approximate image size in GB as int (via
        bytes_to_giga_approximation), or raise
        ManifestGetFromMarketplaceError when the manifest cannot be fetched.
        """
        try:
            marketplaceDefinition = url_get(url + '?status=all&location=all')
        except urllib2.URLError as ex:
            raise ManifestGetFromMarketplaceError("Error retrieving %s: '%s'" %
                                                  (url, str(ex)))
        else:
            # The manifest is RDF/XML; the byte size lives at
            # RDF/Description/bytes in the slreq namespace.
            root = ET.fromstring(marketplaceDefinition)
            size_bytes = root.find('{0}RDF/{0}Description/{1}bytes'.\
                format("{http://www.w3.org/1999/02/22-rdf-syntax-ns#}",
                       "{http://mp.stratuslab.eu/slreq#}")).text
            return bytes_to_giga_approximation(int(size_bytes))
| |
"""Viessmann ViCare climate device."""
from contextlib import suppress
import logging
from PyViCare.PyViCare import PyViCareNotSupportedFeatureError, PyViCareRateLimitError
import requests
import voluptuous as vol
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_COMFORT,
PRESET_ECO,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, TEMP_CELSIUS
from homeassistant.helpers import entity_platform
from . import (
DOMAIN as VICARE_DOMAIN,
VICARE_API,
VICARE_HEATING_TYPE,
VICARE_NAME,
HeatingType,
)
_LOGGER = logging.getLogger(__name__)
# Entity service exposed by this platform (registered in async_setup_platform).
SERVICE_SET_VICARE_MODE = "set_vicare_mode"
SERVICE_SET_VICARE_MODE_ATTR_MODE = "vicare_mode"
# Operating modes as reported/accepted by the ViCare API.
VICARE_MODE_DHW = "dhw"
VICARE_MODE_HEATING = "heating"
VICARE_MODE_DHWANDHEATING = "dhwAndHeating"
VICARE_MODE_DHWANDHEATINGCOOLING = "dhwAndHeatingCooling"
VICARE_MODE_FORCEDREDUCED = "forcedReduced"
VICARE_MODE_FORCEDNORMAL = "forcedNormal"
VICARE_MODE_OFF = "standby"
# Heating programs as reported by the ViCare API.
VICARE_PROGRAM_ACTIVE = "active"
VICARE_PROGRAM_COMFORT = "comfort"
VICARE_PROGRAM_ECO = "eco"
VICARE_PROGRAM_EXTERNAL = "external"
VICARE_PROGRAM_HOLIDAY = "holiday"
VICARE_PROGRAM_NORMAL = "normal"
VICARE_PROGRAM_REDUCED = "reduced"
VICARE_PROGRAM_STANDBY = "standby"
# Hold modes.
VICARE_HOLD_MODE_AWAY = "away"
VICARE_HOLD_MODE_HOME = "home"
VICARE_HOLD_MODE_OFF = "off"
# Target-temperature limits exposed to Home Assistant (°C).
VICARE_TEMP_HEATING_MIN = 3
VICARE_TEMP_HEATING_MAX = 37
SUPPORT_FLAGS_HEATING = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
# Mapping between ViCare modes and Home Assistant HVAC modes. The reverse
# map is intentionally narrower: several ViCare modes collapse onto one
# HA mode, so only one representative is used per HA mode.
VICARE_TO_HA_HVAC_HEATING = {
    VICARE_MODE_DHW: HVAC_MODE_OFF,
    VICARE_MODE_HEATING: HVAC_MODE_HEAT,
    VICARE_MODE_DHWANDHEATING: HVAC_MODE_AUTO,
    VICARE_MODE_DHWANDHEATINGCOOLING: HVAC_MODE_AUTO,
    VICARE_MODE_FORCEDREDUCED: HVAC_MODE_OFF,
    VICARE_MODE_FORCEDNORMAL: HVAC_MODE_HEAT,
    VICARE_MODE_OFF: HVAC_MODE_OFF,
}
HA_TO_VICARE_HVAC_HEATING = {
    HVAC_MODE_HEAT: VICARE_MODE_FORCEDNORMAL,
    HVAC_MODE_OFF: VICARE_MODE_FORCEDREDUCED,
    HVAC_MODE_AUTO: VICARE_MODE_DHWANDHEATING,
}
# Mapping between ViCare programs and Home Assistant presets (both ways).
VICARE_TO_HA_PRESET_HEATING = {
    VICARE_PROGRAM_COMFORT: PRESET_COMFORT,
    VICARE_PROGRAM_ECO: PRESET_ECO,
}
HA_TO_VICARE_PRESET_HEATING = {
    PRESET_COMFORT: VICARE_PROGRAM_COMFORT,
    PRESET_ECO: VICARE_PROGRAM_ECO,
}
async def async_setup_platform(
    hass, hass_config, async_add_entities, discovery_info=None
):
    """Create the ViCare climate devices and register the mode service."""
    if discovery_info is None:
        return
    domain_data = hass.data[VICARE_DOMAIN]
    entity = ViCareClimate(
        f"{domain_data[VICARE_NAME]} Heating",
        domain_data[VICARE_API],
        domain_data[VICARE_HEATING_TYPE],
    )
    async_add_entities([entity])
    # Expose set_vicare_mode as an entity service; the mode is validated
    # against the known ViCare modes.
    service_schema = {
        vol.Required(SERVICE_SET_VICARE_MODE_ATTR_MODE): vol.In(
            VICARE_TO_HA_HVAC_HEATING
        )
    }
    platform = entity_platform.async_get_current_platform()
    platform.async_register_entity_service(
        SERVICE_SET_VICARE_MODE, service_schema, "set_vicare_mode"
    )
class ViCareClimate(ClimateEntity):
    """Representation of the ViCare heating climate device."""
    def __init__(self, name, api, heating_type):
        """Initialize the climate device."""
        self._name = name
        self._state = None
        self._api = api
        # Extra state attributes published via extra_state_attributes.
        self._attributes = {}
        self._target_temperature = None
        self._current_mode = None
        self._current_temperature = None
        self._current_program = None
        self._heating_type = heating_type
        # Truthy when the burner/compressor is active (drives hvac_action).
        self._current_action = None
    def update(self):
        """Let HA know there has been an update from the ViCare API."""
        # Each reading is optional on a given appliance, hence the
        # suppress(PyViCareNotSupportedFeatureError) around every call.
        try:
            _room_temperature = None
            with suppress(PyViCareNotSupportedFeatureError):
                _room_temperature = self._api.getRoomTemperature()
            _supply_temperature = None
            with suppress(PyViCareNotSupportedFeatureError):
                _supply_temperature = self._api.getSupplyTemperature()
            # Prefer room temperature; fall back to supply temperature.
            if _room_temperature is not None:
                self._current_temperature = _room_temperature
            elif _supply_temperature is not None:
                self._current_temperature = _supply_temperature
            else:
                self._current_temperature = None
            with suppress(PyViCareNotSupportedFeatureError):
                self._current_program = self._api.getActiveProgram()
            with suppress(PyViCareNotSupportedFeatureError):
                self._target_temperature = self._api.getCurrentDesiredTemperature()
            with suppress(PyViCareNotSupportedFeatureError):
                self._current_mode = self._api.getActiveMode()
            # Update the generic device attributes
            self._attributes = {}
            self._attributes["room_temperature"] = _room_temperature
            self._attributes["active_vicare_program"] = self._current_program
            self._attributes["active_vicare_mode"] = self._current_mode
            with suppress(PyViCareNotSupportedFeatureError):
                self._attributes[
                    "heating_curve_slope"
                ] = self._api.getHeatingCurveSlope()
            with suppress(PyViCareNotSupportedFeatureError):
                self._attributes[
                    "heating_curve_shift"
                ] = self._api.getHeatingCurveShift()
            # Update the specific device attributes
            if self._heating_type == HeatingType.gas:
                with suppress(PyViCareNotSupportedFeatureError):
                    self._current_action = self._api.getBurnerActive()
            elif self._heating_type == HeatingType.heatpump:
                with suppress(PyViCareNotSupportedFeatureError):
                    self._current_action = self._api.getCompressorActive()
        except requests.exceptions.ConnectionError:
            _LOGGER.error("Unable to retrieve data from ViCare server")
        except PyViCareRateLimitError as limit_exception:
            _LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception)
        except ValueError:
            _LOGGER.error("Unable to decode data from ViCare server")
    @property
    def supported_features(self):
        """Return the list of supported features."""
        return SUPPORT_FLAGS_HEATING
    @property
    def name(self):
        """Return the name of the climate device."""
        return self._name
    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        return TEMP_CELSIUS
    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._current_temperature
    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self._target_temperature
    @property
    def hvac_mode(self):
        """Return current hvac mode."""
        # None when the ViCare mode has no HA equivalent (or is unknown).
        return VICARE_TO_HA_HVAC_HEATING.get(self._current_mode)
    def set_hvac_mode(self, hvac_mode):
        """Set a new hvac mode on the ViCare API."""
        vicare_mode = HA_TO_VICARE_HVAC_HEATING.get(hvac_mode)
        if vicare_mode is None:
            raise ValueError(
                f"Cannot set invalid vicare mode: {hvac_mode} / {vicare_mode}"
            )
        _LOGGER.debug("Setting hvac mode to %s / %s", hvac_mode, vicare_mode)
        self._api.setMode(vicare_mode)
    @property
    def hvac_modes(self):
        """Return the list of available hvac modes."""
        return list(HA_TO_VICARE_HVAC_HEATING)
    @property
    def hvac_action(self):
        """Return the current hvac action."""
        if self._current_action:
            return CURRENT_HVAC_HEAT
        return CURRENT_HVAC_IDLE
    @property
    def min_temp(self):
        """Return the minimum temperature."""
        return VICARE_TEMP_HEATING_MIN
    @property
    def max_temp(self):
        """Return the maximum temperature."""
        return VICARE_TEMP_HEATING_MAX
    @property
    def precision(self):
        """Return the precision of the system."""
        return PRECISION_WHOLE
    def set_temperature(self, **kwargs):
        """Set new target temperatures."""
        # The new setpoint applies to the currently active program.
        if (temp := kwargs.get(ATTR_TEMPERATURE)) is not None:
            self._api.setProgramTemperature(self._current_program, temp)
            self._target_temperature = temp
    @property
    def preset_mode(self):
        """Return the current preset mode, e.g., home, away, temp."""
        return VICARE_TO_HA_PRESET_HEATING.get(self._current_program)
    @property
    def preset_modes(self):
        """Return the available preset mode."""
        return list(VICARE_TO_HA_PRESET_HEATING)
    def set_preset_mode(self, preset_mode):
        """Set new preset mode and deactivate any existing programs."""
        vicare_program = HA_TO_VICARE_PRESET_HEATING.get(preset_mode)
        if vicare_program is None:
            raise ValueError(
                f"Cannot set invalid vicare program: {preset_mode}/{vicare_program}"
            )
        _LOGGER.debug("Setting preset to %s / %s", preset_mode, vicare_program)
        self._api.deactivateProgram(self._current_program)
        self._api.activateProgram(vicare_program)
    @property
    def extra_state_attributes(self):
        """Show Device Attributes."""
        return self._attributes
    def set_vicare_mode(self, vicare_mode):
        """Service function to set vicare modes directly."""
        if vicare_mode not in VICARE_TO_HA_HVAC_HEATING:
            raise ValueError(f"Cannot set invalid vicare mode: {vicare_mode}")
        self._api.setMode(vicare_mode)
| |
#!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
MQTT client utility: Tries to hide Paho client details to ease MQTT usage.
Reconnects to the MQTT server automatically.
This module depends on the paho-mqtt package (ex-mosquitto), provided by the
Eclipse Foundation: see http://www.eclipse.org/paho
:author: Thomas Calmant
:copyright: Copyright 2020, Thomas Calmant
:license: Apache License 2.0
:version: 1.0.1
..
Copyright 2020 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Standard library
import logging
import os
import sys
import threading
# MQTT client
import paho.mqtt.client as paho
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (1, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
class MqttClient(object):
"""
Remote Service discovery provider based on MQTT
"""
def __init__(self, client_id=None):
"""
Sets up members
:param client_id: ID of the MQTT client
"""
# No ID
if not client_id:
# Randomize client ID
self._client_id = self.generate_id()
elif len(client_id) > 23:
# ID too large
_logger.warning(
"MQTT Client ID '%s' is too long (23 chars max): "
"generating a random one",
client_id,
)
# Keep the client ID as it might be accepted
self._client_id = client_id
else:
# Keep the ID as is
self._client_id = client_id
# Reconnection timer
self.__timer = threading.Timer(5, self.__reconnect)
# Publication events
self.__in_flight = {}
# MQTT client
self.__mqtt = paho.Client(self._client_id)
# Give access to Paho methods to configure TLS
self.tls_set = self.__mqtt.tls_set
# Paho callbacks
self.__mqtt.on_connect = self.__on_connect
self.__mqtt.on_disconnect = self.__on_disconnect
self.__mqtt.on_message = self.__on_message
self.__mqtt.on_publish = self.__on_publish
    @property
    def raw_client(self):
        """
        Returns the raw client object, depending on the underlying library
        (here: the paho.mqtt.client.Client instance)
        """
        return self.__mqtt
    @staticmethod
    def on_connect(client, result_code):
        """
        User callback: called when the client is connected.
        Default implementation does nothing; meant to be replaced by the
        user of this class.

        :param client: The Pelix MQTT client which connected
        :param result_code: The MQTT result code
        """
        pass
    @staticmethod
    def on_disconnect(client, result_code):
        """
        User callback: called when the client is disconnected.
        Default implementation does nothing; meant to be replaced by the
        user of this class.

        :param client: The Pelix MQTT client which disconnected
        :param result_code: The MQTT result code
        """
        pass
    @staticmethod
    def on_message(client, message):
        """
        User callback: called when the client has received a message.
        Default implementation does nothing; meant to be replaced by the
        user of this class.

        :param client: The Pelix MQTT client which received a message
        :param message: The MQTT message
        """
        pass
@classmethod
def generate_id(cls, prefix="pelix-"):
"""
Generates a random MQTT client ID
:param prefix: Client ID prefix (truncated to 8 chars)
:return: A client ID of 22 or 23 characters
"""
if not prefix:
# Normalize string
prefix = ""
else:
# Truncate long prefixes
prefix = prefix[:8]
# Prepare the missing part
nb_bytes = (23 - len(prefix)) // 2
random_bytes = os.urandom(nb_bytes)
if sys.version_info[0] >= 3:
random_ints = [char for char in random_bytes]
else:
random_ints = [ord(char) for char in random_bytes]
random_id = "".join("{0:02x}".format(value) for value in random_ints)
return "{0}{1}".format(prefix, random_id)
    @classmethod
    def topic_matches(cls, subscription_filter, topic):
        """
        Checks if the given topic matches the given subscription filter.
        Delegates to paho.mqtt.client.topic_matches_sub.

        :param subscription_filter: A MQTT subscription filter
        :param topic: A topic
        :return: True if the topic matches the filter
        """
        return paho.topic_matches_sub(subscription_filter, topic)
    @property
    def client_id(self):
        """
        The MQTT client ID (read-only, fixed at construction time)
        """
        return self._client_id
    def set_credentials(self, username, password):
        """
        Sets the user name and password to be authenticated on the server.
        Must be called before connect(); delegates to Paho's
        username_pw_set.

        :param username: Client username
        :param password: Client password
        """
        self.__mqtt.username_pw_set(username, password)
    def set_will(self, topic, payload, qos=0, retain=False):
        """
        Sets up the will message. Must be called before connect();
        delegates to Paho's will_set.

        :param topic: Topic of the will message
        :param payload: Content of the message
        :param qos: Quality of Service
        :param retain: The message will be retained
        :raise ValueError: Invalid topic
        :raise TypeError: Invalid payload
        """
        self.__mqtt.will_set(topic, payload, qos, retain=retain)
def connect(self, host="localhost", port=1883, keepalive=60):
    """
    Connects to the MQTT server. The client will automatically try to
    reconnect to this server when the connection is lost.

    :param host: MQTT server host
    :param port: MQTT server port
    :param keepalive: Maximum period in seconds between communications with
        the broker
    :raise ValueError: Invalid host or port
    """
    # Disconnect first (it also stops the reconnection timer), so a
    # previous session is fully torn down before the new one starts
    self.disconnect()

    # Prepare the connection
    self.__mqtt.connect(host, port, keepalive)

    # Start the MQTT loop (Paho's background network thread)
    self.__mqtt.loop_start()
def disconnect(self):
    """
    Disconnects from the MQTT server
    """
    # Stop the reconnection timer so it cannot fire mid-disconnect
    self.__stop_timer()

    # Unlock all publishers blocked in wait_publication()
    for event in self.__in_flight.values():
        event.set()

    # Disconnect from the server
    self.__mqtt.disconnect()

    # Stop the MQTT loop thread
    # Use a thread to avoid a dead lock in Paho
    thread = threading.Thread(target=self.__mqtt.loop_stop)
    thread.daemon = True
    thread.start()

    # Give it some time; the daemon thread will not block shutdown if
    # loop_stop hangs longer than this
    thread.join(4)
def publish(self, topic, payload, qos=0, retain=False, wait=False):
    """
    Sends a message through the MQTT connection.

    :param topic: Message topic
    :param payload: Message content
    :param qos: Quality of Service
    :param retain: Retain flag
    :param wait: If True, prepares an event to wait for the message to be
        published (see wait_publication)
    :return: The local message ID, None on error
    """
    result = self.__mqtt.publish(topic, payload, qos, retain)
    if result[0]:
        # BUG FIX: a non-zero return code means the message could not be
        # queued; the docstring promises None on error, but the message ID
        # was previously returned unconditionally.
        return None

    if wait:
        # Publish packet sent, wait for it to return
        # NOTE(review): the broker may acknowledge the message before this
        # event is registered, in which case __on_publish finds no entry
        # and wait_publication() would block -- a lock held around
        # publish + registration would close the race. TODO confirm.
        self.__in_flight[result[1]] = threading.Event()
        _logger.debug("Waiting for publication of %s", topic)

    return result[1]
def wait_publication(self, mid, timeout=None):
    """
    Wait for a publication to be validated.

    :param mid: Local message ID (result of publish)
    :param timeout: Wait timeout (in seconds); None waits forever
    :return: True if the message was published, False if timeout was raised
    :raise KeyError: Unknown waiting local message ID (e.g. publish was
        called without wait=True)
    """
    return self.__in_flight[mid].wait(timeout)
def subscribe(self, topic, qos=0):
    """
    Subscribes to a topic on the server.

    :param topic: Topic filter string(s)
    :param qos: Desired quality of service
    :raise ValueError: Invalid topic or QoS
    """
    self.__mqtt.subscribe(topic, qos)
def unsubscribe(self, topic):
    """
    Unsubscribes from a topic on the server.

    :param topic: Topic(s) to unsubscribe from
    :raise ValueError: Invalid topic parameter
    """
    self.__mqtt.unsubscribe(topic)
def __start_timer(self, delay):
    """
    Starts the reconnection timer.

    :param delay: Delay (in seconds) before calling the reconnection method
    """
    # Daemon timer: it will not prevent interpreter shutdown
    self.__timer = threading.Timer(delay, self.__reconnect)
    self.__timer.daemon = True
    self.__timer.start()
def __stop_timer(self):
    """
    Stops the reconnection timer, if any
    """
    # Detach the timer first, then cancel the detached reference
    timer, self.__timer = self.__timer, None
    if timer is not None:
        timer.cancel()
def __reconnect(self):
    """
    Tries to connect to the MQTT server.

    Always re-arms the reconnection timer; it is cancelled later by the
    on_connect callback when the connection succeeds.
    """
    # Cancel the timer, if any
    self.__stop_timer()

    try:
        # Try to reconnect the server
        result_code = self.__mqtt.reconnect()
        if result_code:
            # Something wrong happened
            message = "Error connecting the MQTT server: {0} ({1})".format(
                result_code, paho.error_string(result_code)
            )
            _logger.error(message)
            raise ValueError(message)
    except Exception as ex:
        # Something went wrong: log it (also catches the ValueError above)
        _logger.error("Exception connecting server: %s", ex)
    finally:
        # Prepare a reconnection timer. It will be cancelled by the
        # on_connect callback
        self.__start_timer(10)
def __on_connect(self, client, userdata, flags, result_code):
    # pylint: disable=W0613
    """
    Client connected to the server.

    :param client: Connected Paho client
    :param userdata: User data (unused)
    :param flags: Response flags sent by the broker
    :param result_code: Connection result code (0: success, others: error)
    """
    if result_code:
        # result_code != 0: something wrong happened
        _logger.error(
            "Error connecting the MQTT server: %s (%d)",
            paho.connack_string(result_code),
            result_code,
        )
    else:
        # Connection is OK: stop the reconnection timer
        self.__stop_timer()

    # Notify the caller, if any (called on failure too, with the code)
    if self.on_connect is not None:
        try:
            self.on_connect(self, result_code)
        except Exception as ex:
            # User callbacks must never break the MQTT loop
            _logger.exception("Error notifying MQTT listener: %s", ex)
def __on_disconnect(self, client, userdata, result_code):
    # pylint: disable=W0613
    """
    Client has been disconnected from the server.

    :param client: Client that received the message
    :param userdata: User data (unused)
    :param result_code: Disconnection reason (0: expected, 1: error)
    """
    if result_code:
        # rc != 0: unexpected disconnection
        # NOTE(review): connack_string() decodes *connection* ack codes;
        # error_string() looks like the intended decoder here -- confirm
        _logger.error(
            "Unexpected disconnection from the MQTT server: %s (%d)",
            paho.connack_string(result_code),
            result_code,
        )

        # Try to reconnect
        self.__stop_timer()
        self.__start_timer(2)

    # Notify the caller, if any
    if self.on_disconnect is not None:
        try:
            self.on_disconnect(self, result_code)
        except Exception as ex:
            # User callbacks must never break the MQTT loop
            _logger.exception("Error notifying MQTT listener: %s", ex)
def __on_message(self, client, userdata, msg):
    # pylint: disable=W0613
    """
    A message has been received from a server.

    :param client: Client that received the message
    :param userdata: User data (unused)
    :param msg: A MQTTMessage bean
    """
    # Forward to the user callback, if one is set
    callback = self.on_message
    if callback is None:
        return
    try:
        callback(self, msg)
    except Exception as ex:
        # A failing user callback must not break the MQTT loop
        _logger.exception("Error notifying MQTT listener: %s", ex)
def __on_publish(self, client, userdata, mid):
    # pylint: disable=W0613
    """
    A message has been published by a server.

    :param client: Client that received the message
    :param userdata: User data (unused)
    :param mid: Message ID
    """
    # Unlock any caller blocked in wait_publication() for this message;
    # unknown IDs (publish without wait=True) are silently ignored
    event = self.__in_flight.get(mid)
    if event is not None:
        event.set()
| |
import os.path
import numpy
#from regions import Segmentation, Region, Contact
from sys import stderr
def ReadRegionsFile ( regions_file_path, dmap, smod = None) :
    """Read a regions file (flat array of doubles) into a Segmentation.

    :param regions_file_path: path to the binary regions file
    :param dmap: density map the segmentation applies to
    :param smod: existing Segmentation to refill, or None to create one
    :return: the populated Segmentation, or None if the file is unreadable
    """

    print "Reading regions ---"

    try :
        e = numpy.fromfile ( regions_file_path, numpy.double )
    except :
        # NOTE(review): bare except also hides non-I/O errors; IOError
        # would be the targeted choice here
        print "could not read:", regions_file_path
        return None

    print " - read ", len(e)

    import regions
    reload (regions)

    if smod == None :
        regions_file = os.path.basename ( regions_file_path )
        smod = regions.Segmentation(regions_file, dmap)
    else :
        # Reuse the existing model: drop its current regions first
        print " - found", smod.name
        smod.remove_all_regions()

    smod.path = os.path.dirname ( regions_file_path ) + os.path.sep
    smod.adj_graph = None

    print " - " + smod.path + smod.name

    # Parse regions first, then contacts; 'at' is the read cursor into e
    regions, groups, at = ParseRegions ( e, smod )
    smod.regions = set(groups)
    smod.id_to_region = regions
    smod.rcons = ParseContacts(e, at, regions)
    return smod
def ParseRegions ( e, smod ) :
    """Parse the region records from the flat double array *e*.

    File layout per region: voxel count, (x, y, z) triple per voxel,
    parent count, parent region ids (innermost parent first).

    :param e: numpy array of doubles read from the regions file
    :param smod: Segmentation receiving the regions (its mask is filled)
    :return: (dict id -> Region including groups,
              list of top-level group regions, read cursor into e)
    """

    nregions = int ( e[0] )
    print " - reading %d regions..." % nregions

    at = 1           # read cursor into e
    regs = {}        # leaf regions read directly from the file
    all_regions = {} # Includes groups.

    import regions
    reload (regions)

    for i in range ( nregions ) :

        try : nvoxels = int ( e[at] )
        except :
            # Truncated file: keep whatever was parsed so far
            print " - reached end of file before reading all regions"
            break

        at += 1
        rvs = e [ at : at + (nvoxels*3) ]
        at += nvoxels*3
        print "Region %d - %d voxels" % (i, nvoxels)
        rpoints = numpy.reshape ( rvs, (nvoxels, 3) ).astype ( numpy.int32 )
        #print rpoints

        nparents = int ( e[at] )
        at += 1
        parents = e [ at : at + nparents ].astype ( numpy.int )
        at += nparents

        # Region ids are 1-based in the file
        rid = i+1
        reg = regions.Region ( smod, rid, rpoints[0] )
        smod.mask[rpoints[:,2],rpoints[:,1],rpoints[:,0]] = rid # set mask at points

        all_regions [ reg.rid ] = reg
        regs [ reg.rid ] = reg

        # Rebuild the grouping chain from innermost to outermost parent,
        # creating the parent (group) regions on first sight
        last_reg = reg
        reg.preg = None
        for pi in parents :
            if pi in all_regions:
                preg = all_regions[pi]
            else :
                preg = regions.Region ( smod, pi )
                preg.max_point = rpoints[0]
                all_regions[pi] = preg
            last_reg.preg = preg
            if preg.cregs.count ( last_reg ) == 0 :
                preg.cregs.append ( last_reg )
            last_reg = preg

    # Regions table only includes top level groups.
    groups = [ reg for reg in all_regions.values() if reg.preg is None ]

    return all_regions, groups, at
def ParseContacts ( e, at, regs ):
    """Parse the contact records that follow the regions in array *e*.

    File layout: contact count, then (rid1, rid2, N, D) per contact.

    :param e: numpy array of doubles read from the regions file
    :param at: read cursor (index into e) just past the region records
    :param regs: dict mapping region id -> Region
    :return: symmetric nested dict rcons[r1][r2] -> Contact
    """

    import regions
    reload (regions)

    try : ncon = int ( e[at] )
    except :
        # Older files may end right after the regions; treat as no contacts
        print " - reached end of file before reading contacts"
        ncon = 0
    at += 1

    rcons = {}
    if ncon > 0 :
        print " - reading %d contacts..." % ( ncon )
        am = e [ at : at + (ncon*4) ]
        cm = numpy.reshape ( am, (ncon, 4) )
        for i in range ( ncon ) :
            c = cm[i]
            rid1, rid2 = int(c[0]), int(c[1])
            #from regions import Contact
            o = regions.Contact(c[2])
            o.D = c[3]
            # Skip contacts referring to unknown region ids
            try : r1 = regs[rid1]
            except : print "File error: contact region id", rid1; continue
            try : r2 = regs[rid2]
            except : print "File error: contact region id", rid2; continue
            if r1 == r2 : print "File error: self contact id", rid1
            # Store the contact symmetrically for both regions
            if not r1 in rcons : rcons[r1] = {}
            if not r2 in rcons : rcons[r2] = {}
            rcons[r1][r2] = o
            rcons[r2][r1] = o

    print ""
    return rcons
def WriteRegionsFile ( smod, fname = None ) :
    """Write the segmentation's regions and contacts to a binary file.

    With fname=None, shows a save-file dialog and re-enters with the
    chosen path.

    NOTE(review): the array is written as float32 while ReadRegionsFile
    reads numpy.double -- confirm which format is canonical.

    :param smod: Segmentation to serialize
    :param fname: output path, or None to prompt the user
    """

    if fname is None:
        # Show save-file dialog.
        def save ( okay, dialog ):
            # Dialog callback: re-enter with the selected path
            if okay:
                paths = dialog.getPaths ( )
                if paths:
                    WriteRegionsFile ( smod, paths[0] )
        bname = smod.name [ 0 : smod.name.rfind ('_regions') ]
        prefix = smod.path + bname + "_regions_save_%d"
        uupath = unusedFile ( prefix )
        from OpenSave import SaveModeless
        d = SaveModeless ( title = 'Save Regions',
                           initialdir = smod.path,
                           initialfile = os.path.basename(uupath),
                           command = save )
        return

    # First pass: compute the total array size and count leaf regions
    tot_write_regions = [0]
    tot_e_size = 1
    for region in smod.regions :
        tot_e_size += RegionSize ( region, tot_write_regions )

    # Count each contact once (r1.rid < r2.rid filters the symmetric dupes)
    num_cons = 0
    rcons = smod.region_contacts()
    for r1, cr1 in rcons.iteritems () :
        for r2, o in cr1.iteritems () :
            if r1.rid < r2.rid :
                num_cons = num_cons + 1
    tot_e_size = tot_e_size + 1 + 4 * (num_cons)

    print "Writing %d regions, %d grouped" % ( tot_write_regions[0], len(smod.regions) )
    if fname : print " - to", fname

    e = numpy.zeros ( [tot_e_size], numpy.float32 )
    e[0] = float ( tot_write_regions[0] )
    e_at = 1

    # Renumber so leaves are written depth-first with consecutive ids
    rlist = renumberRegions(smod)
    for region in rlist :
        e_at = AddRegion ( smod, region, e, e_at )

    print " - writing %d contacts" % ( num_cons )
    e[e_at] = float ( num_cons )
    e_at = e_at + 1
    #consa = []
    for r1, cr1 in rcons.iteritems () :
        for r2, o in cr1.iteritems () :
            if r1.rid < r2.rid :
                #consa = consa + [r1.rid, r2.rid, o.N, o.D]
                e[e_at+0] = float ( r1.rid )
                e[e_at+1] = float ( r2.rid )
                e[e_at+2] = o.N
                e[e_at+3] = o.D
                e_at = e_at + 4
    #consa = [len(consa)/4] + consa
    #e = numpy.concatenate ( [ e, numpy.array ( consa, numpy.float32 ) ] )

    e.tofile ( fname )
    print "Wrote %s" % os.path.basename(fname)
def AddRegion ( smod, region, e, e_at ) :
    """Append one region (recursing into grouped children) to array *e*.

    Only leaf regions (no child regions) are written; groups are flattened
    by recursion and reconstructed from the parent-id chains on read.

    :param smod: Segmentation being written
    :param region: Region (leaf or group) to serialize
    :param e: output numpy array
    :param e_at: write cursor into e
    :return: the updated write cursor
    """
    if len(region.cregs) == 0 :
        # Leaf record: voxel count, then x, y, z per voxel
        e[e_at] = float ( region.point_count() )
        e_at = e_at + 1
        for rp in region.points() :
            e[e_at] = rp[0]; e_at = e_at + 1
            e[e_at] = rp[1]; e_at = e_at + 1
            e[e_at] = rp[2]; e_at = e_at + 1
        # Parent-id chain, innermost parent first
        rp = region
        parents = []
        while rp.preg != None :
            rp = rp.preg
            parents.append ( rp.rid )
        e[e_at] = float ( len(parents) )
        e_at = e_at + 1
        for pi in parents :
            e[e_at] = float ( pi )
            e_at = e_at + 1
    else :
        for creg in region.cregs :
            e_at = AddRegion ( smod, creg, e, e_at )
    return e_at
def RegionSize ( region, tot_write_regions ) :
    """Return the number of array entries AddRegion will write for *region*.

    Also counts leaf regions into tot_write_regions[0] (a single-element
    list used as a mutable accumulator).

    :param region: Region (leaf or group) to measure
    :param tot_write_regions: one-element list accumulating the leaf count
    :return: the number of float entries needed
    """
    if len(region.cregs) == 0 :
        rp = region
        nparents = 0
        while rp.preg != None :
            rp = rp.preg
            nparents = nparents + 1
        tot_write_regions[0] += 1
        # voxel count + 3 per voxel + parent count + parent ids
        return 1 + region.point_count()*3 + 1 + nparents
    else :
        size = 0
        for creg in region.cregs :
            size = size + RegionSize ( creg, tot_write_regions )
        return size
#
# Number regions having no children in depth first order since that is the
# order they will be written to the file. Renumber nodes with children
# using higher numbers to keep all region numbers distinct.
#
def renumberRegions(smod):
    """Renumber all regions: leaves get depth-first ids 1..N, then parent
    (group) regions get the following ids, keeping every id distinct.

    :param smod: Segmentation whose regions are renumbered in place
    :return: the list of top-level regions, sorted by their former id
    """
    newrid = {}
    parents = []
    rlist = list(smod.regions)
    # key-based sort works on both Python 2 and 3 (cmp-sort is Py2-only)
    rlist.sort(key=lambda r: r.rid)
    for r in rlist:
        renumberRegion(r, newrid, parents)
    # Parent groups are numbered after all the leaves
    for r in parents:
        next_id = len(newrid) + 1
        newrid[r] = next_id
    for r, rid in newrid.items():
        r.rid = rid
    # BUG FIX: was r.id, but Region objects use the 'rid' attribute
    # everywhere else in this module
    smod.id_to_region = dict([(r.rid, r) for r in smod.id_to_region.values()])
    return rlist
def renumberRegion(r, rid, parents):
    """Assign sequential ids to leaf regions depth-first; collect regions
    that have children into *parents* for later numbering.

    :param r: Region to process
    :param rid: dict Region -> new id, filled for leaves only
    :param parents: list accumulating regions that have children
    """
    if r.cregs:
        parents.append(r)
        for child in r.cregs:
            renumberRegion(child, rid, parents)
    else:
        # Leaves are numbered in the order they are first reached
        rid[r] = len(rid) + 1
def unusedFile ( path_format ):
    """Return the first path of the form path_format % i (i = 1, 2, ...)
    that does not yet exist on disk.

    :param path_format: printf-style format with one integer slot
    :return: the first non-existing candidate path
    """
    counter = 1
    while True:
        candidate = path_format % (counter,)
        if not os.path.exists ( candidate ):
            return candidate
        counter += 1
| |
import os
import copy
from random import choice
from string import letters
from kivy.animation import Animation
from kivy.app import App
from kivy.base import EventLoop
from kivy.config import Config
from kivy.clock import Clock
from kivy.graphics import Color, BorderImage
from kivy.properties import StringProperty, NumericProperty, ObjectProperty
from kivy.uix.screenmanager import ScreenManager, NoTransition
from kivy.uix.widget import Widget
from constants.colors import *
from constants.misc import *
from letters import LetterGrid, Letter
from level import Level
from score import Score
from screens import (MenuScreen, GameScreen, GameOverScreen, HighscoresScreen,
SettingsScreen)
from storage.meowjson import SettingsJson
from storage.meowdb import MeowDatabase
from meow_letters import PROJECT_PATH
class Game(Widget):
    """ This is the Game widget from the GameScreen.
    All application workflow is defined here.
    """
    # Pixel size of one tile and the padding between tiles; both are
    # recomputed from the widget size in reposition()
    tile_size = NumericProperty(10)
    tile_padding = NumericProperty(10)

    def __init__(self, **kwargs):
        """Game class initializer. Initializes the temporary grid.
        """
        super(Game, self).__init__()
        # self.grid holds the LetterCell widgets; self.letter_grid holds
        # the game logic (letters, chain, validity)
        self.grid = [[None for i in range(GRID_SIZE)] for j in range(GRID_SIZE)]
        self.letter_grid = LetterGrid(GRID_SIZE)
        self.score = Score()
        self.level = Level()
        self.io = MeowDatabase()

    def rebuild_background(self):
        """Rebuilds the canvas background and the elements
        """
        self.canvas.before.clear()
        with self.canvas.before:
            # Board background
            Color(*BLUE)
            BorderImage(pos=self.pos,
                        size=self.size,
                        source=os.path.join(PROJECT_PATH,
                                            'assets/img/mask.png'))
            # One lighter tile per grid cell
            Color(*LIGHTER_BLUE)
            for ix, iy in self.letter_grid.iterate_pos():
                BorderImage(pos=self.index_to_pos(ix, iy),
                            size=(self.tile_size, self.tile_size),
                            source=os.path.join(PROJECT_PATH,
                                                'assets/img/mask.png'))

    def reposition(self, *args):
        """Recomputes tile size/padding from the widget size and moves
        every letter widget accordingly."""
        self.rebuild_background()
        # calculate the size of a letter
        l = min(self.width, self.height)
        padding = (l / float(GRID_SIZE)) / float(GRID_SIZE * 2)
        tile_size = (l - (padding * (GRID_SIZE + 1))) / float(GRID_SIZE)
        self.tile_size = tile_size
        self.tile_padding = padding
        for ix, iy, letter in self.iterate():
            letter.size = tile_size, tile_size
            letter.pos = self.index_to_pos(ix, iy)

    def iterate(self):
        """Helper iterator. Iterates through all cells.
        """
        for ix, iy in self.letter_grid.iterate_pos():
            child = self.grid[ix][iy]
            if child:
                yield ix, iy, child

    def iterate_empty(self):
        """Helper iterator. Iterates through empty cells.
        """
        for ix, iy in self.letter_grid.iterate_pos():
            child = self.grid[ix][iy]
            if not child:
                yield ix, iy

    def index_to_pos(self, x, y):
        """Translates mathematical index in the grid to the exact
        pixel position.

        :param x: index on the x axis.
        :param y: index on the y axis.
        """
        padding = self.tile_padding
        tile_size = self.tile_size
        return [
            (self.x + padding) + x * (tile_size + padding),
            (self.y + padding) + y * (tile_size + padding)]

    def pos_to_index(self, coordinates):
        """Translates the pixel coordinates into mathematical indexes.

        :param coordinates: a tuple with (x, y) pixel coordinates.
        :return: (x, y) indexes, or (None, None) if outside the board.
        """
        grid_length = (
            self.tile_size + self.tile_padding) * GRID_SIZE + self.tile_padding
        if coordinates[0] < 0 \
                or coordinates[1] < 0 \
                or coordinates[0] > grid_length \
                or coordinates[1] > grid_length:
            return (None, None)
        unit = self.tile_size + self.tile_padding
        x = int((coordinates[0] - self.tile_padding) / unit)
        y = int((coordinates[1] - self.tile_padding) / unit)
        return (x, y)

    def spawn_rand_letter(self, *args):
        """Spawns a random letter on the board.
        """
        empty = list(self.iterate_empty())
        if not empty:
            # Board is full
            return None
        ix, iy = choice(empty)
        # string.letters (Python 2): any ASCII letter, uppercased
        self.spawn_letter_at(ix, iy, choice(letters).upper())

    def spawn_letter_at(self, x, y, value):
        """Spawns a letter to a predefined position.

        :param x: index on X axis
        :param y: index on Y axis
        :param value: the letter
        """
        # Only (re)create the widget when the cell is empty or shows a
        # different letter than the logical grid
        if self.grid[x][y] is None \
                or self.grid[x][y].letter != self.letter_grid[x][y].letter:
            letter = LetterCell(
                size=(self.tile_size, self.tile_size),
                pos=self.index_to_pos(x, y),
                letter=str(value))
            # NOTE(review): self.grid[x][y] may be None here; Kivy's
            # remove_widget tolerates that -- confirm
            self.remove_widget(self.grid[x][y])
            self.grid[x][y] = letter
            self.add_widget(letter)

    def on_touch_down(self, touch):
        """Catches the touch event on the grid.
        """
        relative_coordinates = self.to_widget(touch.pos[0], touch.pos[1], True)
        x, y = self.pos_to_index(relative_coordinates)
        if x is not None and y is not None:
            self.toggle(x, y)
        super(Game, self).on_touch_down(touch)
        return True

    def toggle(self, x, y):
        """Selects/unselects the letter at (x, y) in the current chain and
        applies the timer penalty/reset accordingly."""
        game_screen = self.parent.parent.parent
        decrement = Clock.create_trigger(game_screen.ids.timer.decrement)
        letter = self.letter_grid[x][y]
        if letter is not None:
            if letter.is_selected():
                self.letter_grid.chain.remove(letter)
            else:
                self.letter_grid.chain.add(letter)
                # Invalid chain: time penalty and the chain is dropped
                if not self.letter_grid.chain.is_valid():
                    decrement()
                    self.letter_grid.chain.clear()
                # Completed chain: the timer is refilled
                if self.letter_grid.is_complete_chain():
                    game_screen.ids.timer.reset()
        self.update_grid()

    def update_grid(self):
        """Refreshes the selection highlight of every letter widget."""
        for x, y, letter in self.letter_grid.iterate():
            if letter.is_selected():
                self.grid[x][y].select()
            else:
                self.grid[x][y].unselect()

    def end(self):
        """Shows a Game over screen inspired from 2048
        """
        game_screen = self.parent.parent.parent
        game_screen.end = True
        self.save_highscore()
        # Re-add the overlay so it is drawn on top of the letters
        end = self.ids.end.__self__
        self.remove_widget(end)
        self.add_widget(end)
        text = 'Game\nover!'
        self.ids.end_label.text = text
        Animation(opacity=1., d=.5).start(end)

    def restart(self):
        """Restarts the game. Puts three random letters on the board.
        """
        self.score.reset()
        self.level.reset()
        # Remove all letter widgets and reset the widget grid
        for ix, iy, child in self.iterate():
            self.remove_widget(child)
        self.grid = [[None for i in range(GRID_SIZE)] for j in range(GRID_SIZE)]
        self.reposition()
        self.letter_grid = LetterGrid(GRID_SIZE)
        self.letter_grid.setup(3)
        Clock.schedule_once(self.redraw)
        self.ids.end.opacity = 0
        if self.parent:
            game_screen = self.parent.parent.parent
            if game_screen.end == True:
                # Coming from a finished game: restart the timer loop
                game_screen.ids.timer.restart()
                Clock.unschedule(game_screen.tick)
                Clock.schedule_interval(game_screen.tick,
                                        game_screen.ids.timer.interval)
                game_screen.end = False

    def resume(self, score, level, grid):
        """Restores a previously saved game state.

        :param score: saved score (int-convertible)
        :param level: saved level (int-convertible)
        :param grid: saved 2D grid of raw letter values (or None per cell)
        """
        self.score.points = int(score)
        self.level.set_level(int(level))
        for ix, iy, child in self.iterate():
            self.remove_widget(child)
        self.grid = [[None for i in range(GRID_SIZE)] for j in range(GRID_SIZE)]
        self.reposition()
        # Rebuild Letter objects from the raw saved values
        letter_grid = copy.deepcopy(grid)
        for i, row in enumerate(grid):
            letter_grid[i] = [Letter(l) if l is not None else None for l in row]
        self.letter_grid = LetterGrid(GRID_SIZE)
        self.letter_grid.grid = letter_grid
        Clock.schedule_once(self.redraw)
        self.ids.end.opacity = 0

    def redraw(self, *args):
        """Synchronizes the LetterCell widgets with the logical grid."""
        for x, y in self.letter_grid.iterate_pos():
            if self.letter_grid[x][y] is None:
                if self.grid[x][y] is not None:
                    self.remove_widget(self.grid[x][y])
                    self.grid[x][y] = None
            else:
                self.spawn_letter_at(x, y, self.letter_grid[x][y].letter)

    def cycle_end(self):
        """Ends a successful chain: scores it, updates the level and
        advances the letter grid."""
        self.score.update(self.letter_grid.chain.length)
        self.level.set_level(self.score.points)
        self.letter_grid.cycle_end(self.level.level)
        self.redraw()
        self.update_grid()

    def save_highscore(self):
        """Persists the current score under the configured username."""
        settings = SettingsJson(
            os.path.join(PROJECT_PATH, "data/settings.json"))
        self.io.insert_highscore(settings.get_username(), self.score.points)
class Timer(Widget):
    """Horizontal countdown bar: it shrinks on every tick and the round is
    over once its width is exhausted."""

    def __init__(self, **kwargs):
        """Timer initializer.

        NOTE(review): kwargs are accepted but not forwarded to Widget --
        confirm this is intentional.
        """
        super(Timer, self).__init__()
        self.redraw()
        self.interval = 0.05  # seconds between ticks
        self.finished = False

    def redraw(self):
        """Redraws the timer bar at its current size."""
        self.opacity = 1
        self.canvas.before.clear()
        with self.canvas.before:
            Color(*PINK)
            BorderImage(pos=self.pos, size=self.size,
                        source=os.path.join(PROJECT_PATH,
                                            'assets/img/mask.png'))

    def tick(self, *args):
        """Shrinks the bar by one tick; flags the round as finished once
        the width drops below zero."""
        if self.size[0] < 0:
            self.finished = True
        width = self.parent.size[0]
        # One ROUND_SECONDS worth of ticks empties the full parent width
        self.size[0] -= width / (ROUND_SECONDS / self.interval)
        self.redraw()

    def restart(self):
        """Refills the bar for a new game."""
        self.finished = False
        self.size[0] = self.parent.size[0]

    def decrement(self, seconds=1):
        """Applies a penalty: removes the given number of seconds' worth
        of width from the bar.

        :param seconds: penalty size, in seconds
        """
        width = self.parent.size[0]
        # BUG FIX: 'seconds' was previously ignored, so every call removed
        # exactly one second's width regardless of the argument. Default
        # callers (seconds=1) behave exactly as before.
        self.size[0] -= seconds * width / ROUND_SECONDS

    def reset(self):
        """Refills the bar and marks the current round as finished."""
        self.finished = True
        self.size[0] = self.parent.size[0]
class LetterCell(Widget):
    """ This class represents single letter from the grid.
    (WOW! The grid. So much TRON. Very Cycle. Such ISO.)
    """
    # Displayed character, pop-in animation scale and background color
    letter = StringProperty('A')
    scale = NumericProperty(.1)
    bg_color = ObjectProperty(LIGHT_BROWN)

    def __init__(self, **kwargs):
        """Letter class initializer. Animating letters like 2048.
        """
        super(LetterCell, self).__init__(**kwargs)
        # Pop-in: grow from scale .1 to 1, then drop the animation canvas
        anim = Animation(scale=1., d=.15, t='out_quad')
        anim.bind(on_complete=self.clean_canvas)
        anim.start(self)

    def clean_canvas(self, *args):
        """Clears the before/after canvases once the pop-in ends."""
        self.canvas.before.clear()
        self.canvas.after.clear()

    def select(self):
        """Highlights the cell as part of the current chain."""
        self.bg_color = WHITE

    def unselect(self):
        """Restores the default cell background."""
        self.bg_color = LIGHT_BROWN
class MeowLettersApp(App):
    """Application entry point: builds the screen manager and hooks the
    hardware keyboard."""

    def on_start(self):
        # Listen for hardware keys (e.g. the Android back button)
        EventLoop.window.bind(on_keyboard=self.hook_keyboard)

    def build(self):
        """Creates the ScreenManager holding every application screen."""
        self.manager = ScreenManager(transition=NoTransition())
        self.manager.add_widget(MenuScreen(name='menu'))
        self.manager.add_widget(GameScreen(name='game'))
        self.manager.add_widget(GameOverScreen(name='gameover'))
        self.manager.add_widget(HighscoresScreen(name='highscores'))
        self.manager.add_widget(SettingsScreen(name='settings'))
        return self.manager

    def hook_keyboard(self, window, key, *args):
        """Handles the back key by returning to the menu screen."""
        if key == BACK_KEY:
            self.manager.current = 'menu'
            return True
if __name__ == '__main__':
    # Fixed portrait window (mobile-like aspect ratio)
    Config.set('graphics', 'width', '320')
    Config.set('graphics', 'height', '480')
    MeowLettersApp().run()
| |
# -*- coding: utf-8 -*-
"""
Tests of neo.io.blackrockio
"""
# needed for python 3 compatibility
from __future__ import absolute_import
try:
import unittest2 as unittest
except ImportError:
import unittest
from numpy.testing import assert_equal
import numpy as np
import quantities as pq
from neo.io.blackrockio import BlackrockIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.test.iotest.tools import get_test_file_full_path
# check scipy: the Matlab-comparison test needs scipy.io.loadmat (>= 0.8);
# record the import error so the test can be skipped with a clear reason
try:
    from distutils import version
    import scipy.io
    import scipy.version
except ImportError as err:
    HAVE_SCIPY = False
    SCIPY_ERR = err
else:
    # NOTE(review): distutils is deprecated in modern Python -- consider
    # packaging.version if Python 2 support is ever dropped
    if version.LooseVersion(scipy.version.version) < '0.8':
        HAVE_SCIPY = False
        SCIPY_ERR = ImportError("your scipy version is too old to support " +
                                "MatlabIO, you need at least 0.8. " +
                                "You have %s" % scipy.version.version)
    else:
        HAVE_SCIPY = True
        SCIPY_ERR = None
class CommonTests(BaseTestIO, unittest.TestCase):
    """I/O tests of BlackrockIO against the FileSpec 2.3 sample files."""
    # FIX: a second, redundant "ioclass = BlackrockIO" assignment that
    # followed files_to_download has been removed (same value, dead code).
    ioclass = BlackrockIO
    files_to_test = ['FileSpec2.3001']
    files_to_download = [
        'FileSpec2.3001.nev',
        'FileSpec2.3001.ns5',
        'FileSpec2.3001.ccf',
        'FileSpec2.3001.mat']

    def test_inputs_V23(self):
        """
        Test various inputs to BlackrockIO.read_block with version 2.3 file
        to check for parsing errors.
        """
        try:
            b = BlackrockIO(
                get_test_file_full_path(
                    ioclass=BlackrockIO,
                    filename='FileSpec2.3001',
                    directory=self.local_test_dir, clean=False),
                verbose=False)
        except Exception:
            # FIX: bare "except:" would also swallow KeyboardInterrupt
            self.fail()

        # Load data to maximum extent, one None is not given as list
        block = b.read_block(
            n_starts=[None], n_stops=None, channels=range(1, 9),
            nsx_to_load=5, units='all', load_events=True,
            load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])
        numspa = len(block.segments[0].spiketrains[0])

        # Load data using a negative time and a time exceeding the end of the
        # recording
        too_large_tstop = block.segments[0].analogsignals[0].t_stop + 1 * pq.s
        block = b.read_block(
            n_starts=[-100 * pq.ms], n_stops=[too_large_tstop],
            channels=range(1, 9), nsx_to_load=[5], units='all',
            load_events=False, load_waveforms=False)
        lenb = len(block.segments[0].analogsignals[0])
        numspb = len(block.segments[0].spiketrains[0])

        # Same length of analog signal?
        # Both should have read the complete data set!
        self.assertEqual(lena, lenb)

        # Same length of spike train?
        # Both should have read the complete data set!
        self.assertEqual(numspa, numspb)

        # n_starts and n_stops not given as list
        # verifies identical length of returned signals given equal durations
        # as input
        ns5_unit = block.segments[0].analogsignals[0].sampling_period
        block = b.read_block(
            n_starts=100 * ns5_unit, n_stops=200 * ns5_unit,
            channels=range(1, 9), nsx_to_load=5, units='all',
            load_events=False, load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])

        block = b.read_block(
            n_starts=301 * ns5_unit, n_stops=401 * ns5_unit,
            channels=range(1, 9), nsx_to_load=5, units='all',
            load_events=False, load_waveforms=False)
        lenb = len(block.segments[0].analogsignals[0])

        # Same length?
        self.assertEqual(lena, lenb)
        # Length should be 100 samples exactly
        self.assertEqual(lena, 100)

        # Load partial data types and check if this selection is made
        block = b.read_block(
            n_starts=None, n_stops=None, channels=range(1, 9),
            nsx_to_load=5, units='none', load_events=False,
            load_waveforms=True)
        self.assertEqual(len(block.segments), 1)
        self.assertEqual(len(block.segments[0].analogsignals), 8)
        self.assertEqual(len(block.channel_indexes), 8)
        self.assertEqual(len(block.channel_indexes[0].units), 0)
        self.assertEqual(len(block.segments[0].events), 0)
        self.assertEqual(len(block.segments[0].spiketrains), 0)

        # NOTE: channel 6 does not contain any unit
        block = b.read_block(
            n_starts=[None, 3000 * pq.ms], n_stops=[1000 * pq.ms, None],
            channels=range(1, 9), nsx_to_load='none',
            units={1: 0, 5: 0, 6: 0}, load_events=True,
            load_waveforms=True)
        self.assertEqual(len(block.segments), 2)
        self.assertEqual(len(block.segments[0].analogsignals), 0)
        self.assertEqual(len(block.channel_indexes), 8)
        self.assertEqual(len(block.channel_indexes[0].units), 1)
        self.assertEqual(len(block.segments[0].events), 0)
        self.assertEqual(len(block.segments[0].spiketrains), 2)

    @unittest.skipUnless(HAVE_SCIPY, "requires scipy")
    def test_compare_blackrockio_with_matlabloader(self):
        """
        This test compares the output of ReachGraspIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a
        .mat file created by the script create_data_matlab_blackrock.m.
        The function tests LFPs, spike times, and digital events on channels
        80-83 and spike waveforms on channel 82, unit 1.
        For details on the file contents, refer to FileSpec2.3.txt
        """
        # Load data from Matlab generated files
        ml = scipy.io.loadmat(
            get_test_file_full_path(
                ioclass=BlackrockIO,
                filename='FileSpec2.3001.mat',
                directory=self.local_test_dir, clean=False))
        lfp_ml = ml['lfp']  # (channel x time) LFP matrix
        ts_ml = ml['ts']  # spike time stamps
        elec_ml = ml['el']  # spike electrodes
        unit_ml = ml['un']  # spike unit IDs
        wf_ml = ml['wf']  # waveform unit 1 channel 1
        mts_ml = ml['mts']  # marker time stamps
        mid_ml = ml['mid']  # marker IDs

        # Load data in channels 1-3 from original data files using the Neo
        # BlackrockIO
        session = BlackrockIO(
            get_test_file_full_path(
                ioclass=BlackrockIO,
                filename='FileSpec2.3001',
                directory=self.local_test_dir, clean=False),
            verbose=False)
        block = session.read_block(
            channels=range(1, 9), units='all', nsx_to_load='all',
            scaling='raw', load_waveforms=True, load_events=True)

        # Check if analog data on channels 1-8 are equal
        self.assertGreater(len(block.channel_indexes), 0)
        for chidx in block.channel_indexes:
            # Should only have one AnalogSignal per ChannelIndex
            self.assertEqual(len(chidx.analogsignals), 1)

            idx = chidx.analogsignals[0].annotations['channel_id']
            if idx in range(1, 9):
                # We ignore the last sample of the Analogsignal returned by the
                # Python implementation, since due to an error in the
                # corresponding matlab loader the last sample was ignored and
                # not saved to the test file
                assert_equal(np.squeeze(
                    chidx.analogsignals[0].base[:-1]), lfp_ml[idx - 1, :])

        # Check if spikes in channels 1,3,5,7 are equal
        self.assertEqual(len(block.segments), 1)
        for st_i in block.segments[0].spiketrains:
            channelid = st_i.annotations['channel_id']
            if channelid in range(1, 7, 2):
                unitid = st_i.annotations['unit_id']
                matlab_spikes = ts_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                assert_equal(st_i.base, matlab_spikes)

                # Check waveforms of channel 1, unit 0
                if channelid == 1 and unitid == 0:
                    assert_equal(np.squeeze(st_i.waveforms), wf_ml)

        # Check if digital input port events are equal
        self.assertGreater(len(block.segments[0].events), 0)
        for ea_i in block.segments[0].events:
            if ea_i.name == 'digital_input_port':
                # Get all digital event IDs in this recording
                marker_ids = set(ea_i.labels)
                for marker_id in marker_ids:
                    python_digievents = ea_i.times.base[
                        ea_i.labels == marker_id]
                    matlab_digievents = mts_ml[
                        np.nonzero(mid_ml == int(marker_id))]
                    assert_equal(python_digievents, matlab_digievents)

        # Note: analog input events are not yet supported
if __name__ == '__main__':
    # Run the Blackrock I/O test suite when executed directly
    unittest.main()
| |
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 4 12:22:44 2017
@author: a.sancho.asensio
"""
import argparse
import base64
import json
from keras.models import model_from_json
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array
import re, sys
import os
import glob
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import cv2
import math
import pandas as pd
from sklearn.model_selection import train_test_split
from scipy.ndimage import convolve
from keras.utils import np_utils
from keras.optimizers import SGD, Adam, RMSprop
from keras.models import Model, Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten, Lambda
from keras.layers.convolutional import Convolution2D
from keras.layers.pooling import AveragePooling2D, MaxPooling2D
from keras.layers.pooling import GlobalAveragePooling2D
from keras.layers import Input, merge, ZeroPadding2D
from keras.layers.normalization import BatchNormalization
from keras.layers.advanced_activations import LeakyReLU, ELU
import keras.backend as K
import tensorflow as tf
# HACK: legacy compatibility shim -- presumably needed for this (old) Keras
# version with the TensorFlow build in use, where control_flow_ops moved;
# TODO confirm it is still required.
tf.python.control_flow_ops = tf

if os.name == 'nt':  # We're on the Windows machine.
    print(" > Loading paths for the Windows machine")
    PATH = "C:/Users/a.sancho.asensio/Documents/PaperWork/nanodegree/git/simulator-windows-64/"
else:  # Linux/MAC machine.
    print(" > Loading paths for the Linux machine")
    PATH = "/home/andreu/nanodegree/simulator-linux/"

# Derived data locations under the simulator directory
train_path = PATH + "processedTrainData/"
augmented_path = PATH + "augmentedTrainData/"
test_path = PATH + "processedTestData/"
validation_path = PATH + "validation/"
def grep(s, pattern):
    """
    Imitates grep: returns the lines of *s* containing *pattern*,
    joined by newlines.

    :param s: is the input string.
    :param pattern: is the pattern (interpolated into a regex).
    :return: the grep answer.
    """
    regex = re.compile(r'^.*%s.*?$' % pattern, flags=re.M)
    matching_lines = regex.findall(s)
    return '\n'.join(matching_lines)
def loadData(path_to_follow):
    """
    It loads the images, assuming that these are in the /IMG/ subfolder.
    Also, the output is in the CSV file "steering.csv".

    :param path_to_follow: is the full path where the images are placed.
    :return: a list with (1) a numpy array with the images in RGB color,
        (2) a numpy array with the steering angle, (3) a numpy array
        with the class label, and (4) the data logs.
    """
    # Use the first CSV found in the directory as the driving log.
    # NOTE(review): assumes it has 'path', 'label' and 'steering' columns
    data_path = os.path.join(path_to_follow, "*.csv")
    files = glob.glob(data_path)
    data_log = pd.read_csv(files[0])

    # Check special case of relative paths...
    if len(grep(data_log['path'][0], "^\s*IMG.+")) > 10:
        data_log['path'] = path_to_follow + data_log['path']

    dataset = []
    for f in data_log['path']:
        img = mpimg.imread(f)
        img = img.astype('uint8')
        dataset.append(img)
        del img
    dataset = np.array(dataset)
    labels = np.array(data_log['label'], dtype="uint8")
    steering = np.array(data_log['steering'], dtype="float32")
    return (dataset, steering, labels, data_log)
# Load the data set.
print(" > Loading the train set in dir", augmented_path)
train_data, train_steering, _, _ = loadData(augmented_path)
# Hold out 10% of the augmented data as a test split (fixed seed)
X_train, X_test, y_train, y_test = train_test_split(train_data,
                                                    train_steering,
                                                    test_size=0.1,
                                                    random_state=1337)
# Release the full copies as soon as the split exists (large arrays)
del train_data, train_steering
print(" > Loading the validation set.")
validation_data, validation_steering, validation_labels, _ = loadData(validation_path)
# Construct the model.
def simpLeNet(input_shape, learning_rate=0.001):
    """
    It builds the simpleNet model.
    :param input_shape: shape of the input tensor; indices 1..3 are used as
        (rows, cols, channels), so index 0 is assumed to be the batch axis.
    :param learning_rate: is the learning rate.
    :return: the compiled Keras model (MSE loss, Adam optimizer).
    """
    model = Sequential()
    # Start the normalization layers: map raw pixels [0, 255] to [-1, 1].
    model.add(Lambda(lambda x: x/255.0 - 0.5, input_shape=(input_shape[1],
                                                           input_shape[2],
                                                           input_shape[3])))
    model.add(Lambda(lambda x: x * 2.0))
    # Start the mini-network for pre-processing color channels (1x1 convs
    # learn a color-space transform down to 3 channels).
    model.add(Convolution2D(10, 1, 1, subsample=(1, 1), border_mode="same",
                            init="he_normal", dim_ordering="tf"))
    model.add(LeakyReLU(alpha=0.48))
    model.add(Convolution2D(3, 1, 1, subsample=(1, 1), border_mode="same",
                            init="he_normal", dim_ordering="tf"))
    model.add(LeakyReLU(alpha=0.48))
    model.add(MaxPooling2D((2, 3), border_mode='same'))
    # Start the image processing layers (strided 5x5 then 3x3 conv stacks).
    model.add(Convolution2D(32, 5, 5, subsample=(2, 2), border_mode="same",
                            init="he_normal", dim_ordering="tf"))
    model.add(ELU())
    model.add(Convolution2D(32, 5, 5, subsample=(2, 2), border_mode="same",
                            init="he_normal", dim_ordering="tf"))
    model.add(ELU())
    model.add(MaxPooling2D((2, 2), border_mode='same'))
    model.add(Dropout(0.25))
    model.add(Convolution2D(64, 3, 3, subsample=(1, 1), border_mode="same",
                            init="he_normal", dim_ordering="tf"))
    model.add(ELU())
    model.add(Convolution2D(64, 3, 3, subsample=(1, 1), border_mode="same",
                            init="he_normal", dim_ordering="tf"))
    model.add(ELU())
    model.add(MaxPooling2D((2, 4), border_mode='same'))
    model.add(Dropout(0.5))
    # Start the regression net: 1x1 convs + global average pooling head.
    model.add(Convolution2D(256, 1, 1, subsample=(1, 1), border_mode="same",
                            init="he_normal", dim_ordering="tf"))
    model.add(ELU())
    model.add(Dropout(0.4))
    model.add(Convolution2D(128, 1, 1, subsample=(1, 1), border_mode="same",
                            init="he_normal", dim_ordering="tf"))
    model.add(ELU())
    model.add(GlobalAveragePooling2D(dim_ordering='tf'))
    model.add(Dense(1, init="he_normal"))  # single regression output: steering angle
    optimizer = Adam(lr=learning_rate)
    model.compile(optimizer=optimizer, loss='mse')
    return(model)
def fitModel(train_set, train_labels, file_name, test_set, test_labels,
             validation_data, validation_steering,
             batch_size=128, n_epoch=5, learning_rate=0.1, seed=17):
    """
    It fits a model and optionally saves its weights and architecture.
    :param train_set: is the training data tensor.
    :param train_labels: is the steering angle array.
    :param file_name: is the output name (with path) for the model, or
        None to skip saving.
    :param test_set: is the test set data tensor.
    :param test_labels: is the test output.
    :param validation_data: is the validation set data tensor.
    :param validation_steering: is the validation output.
    :param batch_size: is the size of the mini-batch.
    :param n_epoch: is the number of epochs to train the model.
    :param learning_rate: is the learning rate.
    :param seed: is the random seed.
    :return: the trained model.
    """
    # Seed both numpy and TF so runs are reproducible.
    np.random.seed(seed)
    tf.set_random_seed(seed)  # Tensorflow specific.
    model = simpLeNet(train_set.shape, learning_rate)
    print(model.summary())
    model.fit(train_set, train_labels,
              batch_size=batch_size,
              nb_epoch=n_epoch,
              validation_data=(test_set, test_labels),
              shuffle=True)
    # Spot-check three known validation samples (left/right/center steering),
    # folding the three previously copy-pasted blocks into one loop.
    print(" > Checking at epoch =", str(n_epoch))
    for idx, side in enumerate(("Left", "Right", "Center")):
        img = validation_data[idx][None, :, :, :]  # add the batch dimension
        print(" > %s: " % side, model.predict(img, batch_size=1),
              " (it should be ", str(validation_steering[idx]), ")")
    # If file_name is provided, store the model.
    if file_name is not None:  # was `!= None`; identity test is the correct idiom
        model.save_weights(file_name + ".h5", overwrite=True)
        # `with` guarantees the JSON file is closed (was left unclosed before).
        with open(file_name + ".json", 'w') as json_file:
            json_file.write(model.to_json())
    return model
# Train the model.
# Long run (200 epochs, big batches, small LR); weights/architecture are
# written next to the data as model.h5 / model.json.
model = fitModel(train_set=X_train,
                 train_labels=y_train,
                 file_name=PATH + "model",
                 test_set=X_test,
                 test_labels=y_test,
                 validation_data=validation_data,
                 validation_steering=validation_steering,
                 batch_size=500,
                 n_epoch=200,
                 learning_rate=0.0001,
                 seed=1337)
| |
# Copyright (C) 2010-2011 gevent contributors. See LICENSE for details.
# Get the standard Python httplib as __httplib__, ensuring we get a version
# that hasn't already been modified by monkey patching of any of its members.
# HTTPSConnection must use the standard HTTPConnection because libevent-http
# does not currently support https.
import imp
__httplib__ = imp.load_module('__httplib__', *imp.find_module('httplib'))
from gevent import core
from gevent.hub import Waiter
# Names implemented by this module vs. re-exported verbatim from httplib.
__implements__ = [
    'HTTPConnection',
    'HTTPResponse',
]
__imports__ = [
    'HTTPSConnection',
    'HTTPException',
    'InvalidURL',
]
__all__ = __implements__ + __imports__
InvalidURL = __httplib__.InvalidURL
HTTP_PORT = __httplib__.HTTP_PORT
HTTPException = __httplib__.HTTPException
HTTPSConnection = __httplib__.HTTPSConnection
# Reverse map: HTTP method name (e.g. 'GET') -> libevent method id constant.
EV_METHOD_TYPES = dict((name, id) for (id, name) in core.HTTP_method2name.items())
class RequestFailed(HTTPException):
    """Raised when libevent-http finishes a request without a response code."""
    pass
class HTTPMessage(object):
    """Minimal stand-in for rfc822.Message (base of httplib.HTTPMessage),
    built from a list of ``(name, value)`` header pairs."""

    def __init__(self, headers):
        self._headers = headers
        # Dict view: only the *last* value survives for duplicated names.
        self.dict = dict(headers)

    def getheaders(self, name):
        """Return every value stored under *name* (name is lowercased)."""
        wanted = name.lower()
        return [value for key, value in self._headers if key == wanted]

    # emulation of rfc822.Message (base class of httplib.HTTPMessage)
    @property
    def headers(self):
        """Raw 'Name: value' header lines, in original order."""
        return [': '.join(pair) for pair in self._headers]

    # Access as a dictionary (only finds *last* header of each type):
    def __len__(self):
        """Number of distinct header names."""
        return len(self.dict)

    def __getitem__(self, name):
        """Dictionary-style access with a case-insensitive key."""
        return self.dict[name.lower()]

    def get(self, name, default=None):
        """Like ``dict.get`` with a lowercased key."""
        return self.dict.get(name.lower(), default)

    def has_key(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict

    def __contains__(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict

    def __iter__(self):
        return iter(self.dict)

    def keys(self):
        """All header field names."""
        return self.dict.keys()

    def values(self):
        """All header field values."""
        return self.dict.values()

    def items(self):
        return self.dict.items()

    def __str__(self):
        return ''.join(self.headers)
class HTTPResponse(object):
    # httplib.HTTPResponse work-alike wrapping a *finished* libevent-http
    # client request object (Python 2 module: note the print statements).

    def __init__(self, request, debuglevel=0):
        self._request = request
        self.debuglevel = debuglevel
        # e.g. HTTP/1.1 -> 11, matching httplib.HTTPResponse.version.
        self.version = request.major * 10 + request.minor
        assert self.version, request
        self.status = request.response_code
        assert self.status, request
        self.reason = request.response_code_line
        self.headers = request.get_input_headers()
        self.msg = HTTPMessage(self.headers)
        if self.debuglevel > 0:
            for (k, v) in self.getheaders():
                print 'header:', k, v

    def read(self, amt=-1):
        # Body is already buffered by libevent; this just drains the buffer.
        return self._request.input_buffer.read(amt)

    def getheader(self, name, default=None):
        return self.msg.get(name, default)

    def getheaders(self):
        return self.msg.items()

    def close(self):
        # Drop the request reference; there is no socket to close here.
        self._request = None
class HTTPConnection(object):
    # httplib.HTTPConnection work-alike backed by libevent-http
    # (core.http_connection). No https support (see module header comment).

    response_class = HTTPResponse
    default_port = HTTP_PORT
    debuglevel = 0

    def __init__(self, host, port=None, timeout=None):
        self.timeout = timeout
        self._set_hostport(host, port)
        self.conn = None      # lazily-created libevent connection
        self.resp = None      # cached HTTPResponse for the current request
        self._waiter = None   # gevent Waiter bridging the evhttp callback

    def _set_hostport(self, host, port):
        # Split an optional ":port" suffix off `host` (ipv6-bracket aware).
        if port is None:
            i = host.rfind(':')
            j = host.rfind(']')  # ipv6 addresses have [...]
            if i > j:
                try:
                    port = int(host[i+1:])
                except ValueError:
                    raise InvalidURL("nonnumeric port: '%s'" % host[i+1:])
                host = host[:i]
            else:
                port = self.default_port
        if host and host[0] == '[' and host[-1] == ']':
            host = host[1:-1]
        self.host = host
        self.port = port

    def set_debuglevel(self, level):
        self.debuglevel = level

    def request(self, method, uri, body=None, headers=None):
        # One-shot helper: headers + body + blocking response fetch.
        headers = headers or {}
        self.resp = None
        self.putrequest(method, uri)
        for (k, v) in headers.iteritems():
            self.putheader(k, v)
        self.endheaders()
        if hasattr(body, 'read'):
            # File-like body: stream it in 4KB chunks.
            while True:
                d = body.read(4096)
                if not d: break
                self.send(d)
        elif body:
            self.send(body)
        self.getresponse()

    def getresponse(self):
        if self.resp is None:
            self.conn.make_request(self.req, self.method, self.uri)
            assert self._waiter is None, self._waiter
            self._waiter = Waiter()
            try:
                # Block this greenlet until _callback delivers the response.
                self.resp = self._waiter.get()
            finally:
                self._waiter = None
        return self.resp

    def _callback(self, request):
        # Invoked by libevent when the request completes (or fails).
        waiter = self._waiter
        self._waiter = None
        if waiter is not None:
            if request.response_code:
                waiter.switch(self.response_class(request, debuglevel=self.debuglevel))
            else:
                # this seems to be evhttp bug
                waiter.throw(RequestFailed)

    def connect(self):
        if self.conn: return
        if self.debuglevel > 0:
            print 'connect: (%s, %u)' % (self.host, self.port)
        self.conn = core.http_connection.new(self.host, self.port)
        if self.timeout is not None:
            # NOTE(review): min(1, timeout) caps every timeout at 1 second;
            # max(1, ...) looks like the intent — confirm before changing.
            self.conn.set_timeout(int(min(1, self.timeout)))

    def close(self):
        self.resp = None
        self.conn = None

    def putrequest(self, request, selector, skip_host=None, skip_accept_encoding=None):
        # `request` is the HTTP method name, `selector` the path/URI
        # (httplib-compatible signature).
        self.connect()
        self.req = core.http_request_client(self._callback)
        if not skip_host:
            if self.port == HTTP_PORT:
                self.putheader('Host', self.host)
            else:
                self.putheader('Host', '%s:%u' % (self.host, self.port))
        if not skip_accept_encoding:
            self.putheader('Accept-Encoding', 'identity')
        self.method = EV_METHOD_TYPES[request]
        self.uri = selector or '/'

    def putheader(self, header, *args):
        # Multiple values are folded into one continuation-line header.
        self.req.add_output_header(header, '\r\n\t'.join(args))

    def endheaders(self):
        # Headers are buffered on the request object; nothing to flush.
        pass

    def send(self, data):
        if self.debuglevel > 0:
            print 'send:', repr(data)
        self.req.output_buffer.write(data)
| |
#!/usr/bin/env python
from datetime import datetime
import json
from fabric.api import local, require, settings, task
from fabric.state import env
from termcolor import colored
import app_config
# Other fabfiles
import ap
import assets
import daemons
import data
import instagram
import issues
import liveblog
import render
import stack
import text
import theme
import utils
if app_config.DEPLOY_TO_SERVERS:
import servers
if app_config.DEPLOY_CRONTAB:
import cron_jobs
# Bootstrap can only be run once, then it's disabled
if app_config.PROJECT_SLUG == '$NEW_PROJECT_SLUG':
import bootstrap
"""
Base configuration
"""
env.user = app_config.SERVER_USER
env.forward_agent = True
env.hosts = []
env.settings = None
"""
Environments
Changing environment requires a full-stack test.
An environment points to both a server and an S3
bucket.
"""
@task
def production():
    """
    Run as though on production.
    """
    target = 'production'
    env.settings = target
    app_config.configure_targets(target)
    env.hosts = app_config.SERVERS
@task
def staging():
    """
    Run as though on staging.
    """
    target = 'staging'
    env.settings = target
    app_config.configure_targets(target)
    env.hosts = app_config.SERVERS
"""
Branches
Changing branches requires deploying that branch to a host.
"""
@task
def stable():
    """
    Work on stable branch.
    """
    env.branch = 'stable'

@task
def master():
    """
    Work on development branch.
    """
    env.branch = 'master'

@task
def branch(branch_name):
    """
    Work on any specified branch.
    """
    # Arbitrary branch name passed on the command line: fab branch:foo ...
    env.branch = branch_name
@task
def tests():
    """
    Run Python unit tests.
    """
    # warn_only: createdb fails harmlessly when the test DB already exists.
    with settings(warn_only=True):
        local('createdb elections14test')
        local('nosetests')
"""
Deployment
Changes to deployment requires a full-stack test. Deployment
has two primary functions: Pushing flat files to S3 and deploying
code to a remote server if required.
"""
@task
def update():
    """
    Update all application data not in repository (copy, assets, etc).
    """
    text.update()
    assets.sync()
    #data.update()
@task
def deploy_server(remote='origin'):
    """
    Deploy server code and configuration.
    """
    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        # Guard rail: deploying a non-stable branch to production needs
        # explicit confirmation.
        if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
            utils.confirm(
                colored("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch, "red")
            )

        servers.checkout_latest(remote)

        # fabcast runs the named task remotely on each server.
        servers.fabcast('text.update')
        servers.fabcast('assets.sync')

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()
@task
def deploy_client(remote='origin'):
    """
    Render and deploy app to S3.
    """
    require('settings', provided_by=[production, staging])

    update()
    render.render_all()

    # Pre-gzip everything, then sync the gzipped tree to S3.
    utils._gzip('www', '.gzip')
    utils._deploy_to_s3()
@task
def deploy_liveblog():
    """
    Deploy latest liveblog slides to S3.
    """
    html_dir = '.liveblog_slides_html'
    gzip_dir = '.liveblog_slides_gzip'
    local('rm -rf %s %s' % (html_dir, gzip_dir))
    render.render_liveblog()
    utils._gzip(html_dir, gzip_dir)
    utils._deploy_to_s3(gzip_dir, copy_assets=False)
@task
def deploy_results():
    """
    Deploy latest results slides to S3.
    """
    html_dir = '.results_slides_html'
    gzip_dir = '.results_slides_gzip'
    local('rm -rf %s %s' % (html_dir, gzip_dir))
    render.render_results()
    utils._gzip(html_dir, gzip_dir)
    utils._deploy_to_s3(gzip_dir, copy_assets=False)
@task
def deploy_states():
    """
    Deploy latest state slides to S3.
    """
    html_dir = '.state_slides_html'
    gzip_dir = '.state_slides_gzip'
    local('rm -rf %s %s' % (html_dir, gzip_dir))
    render.render_states()
    utils._gzip(html_dir, gzip_dir)
    utils._deploy_to_s3(gzip_dir, copy_assets=False)
@task
def deploy_big_boards():
    """
    Deploy big boards to S3.
    """
    html_dir = '.big_boards_html'
    gzip_dir = '.big_boards_gzip'
    local('rm -rf %s %s' % (html_dir, gzip_dir))
    render.render_big_boards()
    utils._gzip(html_dir, gzip_dir)
    utils._deploy_to_s3(gzip_dir, copy_assets=False)
@task
def deploy_bop():
    """
    Deploy latest BOP.
    """
    html_dir = '.bop_html'
    gzip_dir = '.bop_gzip'
    local('rm -rf %s %s' % (html_dir, gzip_dir))
    render.render_bop()
    utils._gzip(html_dir, gzip_dir)
    utils._deploy_to_s3(gzip_dir, copy_assets=False)
@task
def deploy():
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])
    # Server code first, then the rendered client assets.
    deploy_server()
    deploy_client()
@task
def reset_browsers():
    """
    Deploy a timestamp so the client will reset their page. For bugfixes.
    """
    require('settings', provided_by=[production, staging])

    payload = {}
    now = datetime.now().strftime('%s')
    payload['timestamp'] = now
    payload['homepage'] = False

    with open('www/live-data/timestamp.json', 'w') as f:
        # Bug fix: previously dumped the bare `now` string instead of the
        # payload dict, so clients never received the 'homepage' flag
        # (reset_browsers_to_homepage below writes the full payload).
        json.dump(payload, f)

    utils.deploy_json('www/live-data/timestamp.json', 'live-data/timestamp.json')
@task
def reset_browsers_to_homepage():
    """
    Deploy a timestamp that also sends clients back to the homepage.
    """
    require('settings', provided_by=[production, staging])

    payload = {}
    now = datetime.now().strftime('%s')
    payload['timestamp'] = now
    payload['homepage'] = True  # unlike reset_browsers(), redirect to homepage

    with open('www/live-data/timestamp.json', 'w') as f:
        json.dump(payload, f)

    utils.deploy_json('www/live-data/timestamp.json', 'live-data/timestamp.json')
"""
Destruction
Changes to destruction require setup/deploy to a test host in order to test.
Destruction should remove all files related to the project from both a remote
host and S3.
"""
@task
def shiva_the_destroyer():
    """
    Deletes the app from s3
    """
    require('settings', provided_by=[production, staging])

    utils.confirm(
        # Fixed a stray "')" that was embedded in the user-facing prompt.
        colored("You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?" % app_config.DEPLOYMENT_TARGET, "red")
    )

    with settings(warn_only=True):
        sync = 'aws s3 rm %s --recursive --region "%s"'

        for bucket in app_config.S3_BUCKETS:
            local(sync % ('s3://%s/' % bucket['bucket_name'], bucket['region']))

        if app_config.DEPLOY_TO_SERVERS:
            servers.delete_project()

            if app_config.DEPLOY_CRONTAB:
                servers.uninstall_crontab()

            if app_config.DEPLOY_SERVICES:
                servers.nuke_confs()
| |
from __future__ import unicode_literals
from six.moves.builtins import str
from six.moves.builtins import object
from six import with_metaclass
# -*- coding: utf-8 -*-
import pytz
from django.contrib.contenttypes import fields
from django.db import models
from django.db.models.base import ModelBase
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
import datetime
from schedule.utils import EventListManager, get_model_bases
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from schedule.utils import object_content_type
class CalendarManager(models.Manager):
    """
    >>> user1 = User(username='tony')
    >>> user1.save()
    """
    def get_calendar_for_object(self, obj, distinction=None):
        """
        This function gets a calendar for an object. It should only return one
        calendar. If the object has more than one calendar related to it (or
        more than one related to it under a distinction if a distinction is
        defined) an AssertionError will be raised. If none are returned it will
        raise a DoesNotExistError.

        >>> user = User.objects.get(username='tony')
        >>> try:
        ...     Calendar.objects.get_calendar_for_object(user)
        ... except Calendar.DoesNotExist:
        ...     print("failed")
        ...
        failed

        Now if we add a calendar it should return the calendar

        >>> calendar = Calendar(name='My Cal')
        >>> calendar.save()
        >>> calendar.create_relation(user)
        >>> Calendar.objects.get_calendar_for_object(user)
        <Calendar: My Cal>

        Now if we add one more calendar it should raise an AssertionError
        because there is more than one related to it.

        If you would like to get more than one calendar for an object you should
        use get_calendars_for_object (see below).

        >>> calendar = Calendar(name='My 2nd Cal')
        >>> calendar.save()
        >>> calendar.create_relation(user)
        >>> try:
        ...     Calendar.objects.get_calendar_for_object(user)
        ... except AssertionError:
        ...     print("failed")
        ...
        failed
        """
        calendar_list = self.get_calendars_for_object(obj, distinction)
        if len(calendar_list) == 0:
            raise Calendar.DoesNotExist("Calendar does not exist.")
        elif len(calendar_list) > 1:
            raise AssertionError("More than one calendars were found.")
        else:
            return calendar_list[0]

    def get_or_create_calendar_for_object(self, obj, distinction=None, name=None):
        """
        >>> user = User(username="jeremy")
        >>> user.save()
        >>> calendar = Calendar.objects.get_or_create_calendar_for_object(user, name = "Jeremy's Calendar")
        >>> calendar.name
        "Jeremy's Calendar"
        """
        try:
            return self.get_calendar_for_object(obj, distinction)
        except Calendar.DoesNotExist:
            # No calendar yet: create one named after `name` (or the object),
            # relate it to `obj`, and return it.
            if name is None:
                calendar = Calendar(name=str(obj))
            else:
                calendar = Calendar(name=name)
            calendar.slug = slugify(calendar.name)
            calendar.save()
            calendar.create_relation(obj, distinction)
            return calendar

    def get_calendars_for_object(self, obj, distinction=None):
        """
        This function allows you to get calendars for a specific object

        If distinction is set it will filter out any relation that doesnt have
        that distinction.
        """
        ct = object_content_type(obj)
        if distinction:
            dist_q = Q(calendarrelation__distinction=distinction)
        else:
            dist_q = Q()
        # Join through CalendarRelation on (content_type, object_id).
        return self.filter(dist_q, Q(calendarrelation__object_id=obj.id, calendarrelation__content_type=ct))
@python_2_unicode_compatible
class Calendar(with_metaclass(ModelBase, *get_model_bases())):
    '''
    This is for grouping events so that batch relations can be made to all
    events.  An example would be a project calendar.

    name: the name of the calendar
    events: all the events contained within the calendar.

    >>> calendar = Calendar(name = 'Test Calendar')
    >>> calendar.save()
    >>> data = {
    ...         'title': 'Recent Event',
    ...         'start': datetime.datetime(2008, 1, 5, 0, 0),
    ...         'end': datetime.datetime(2008, 1, 10, 0, 0)
    ...        }
    >>> event = Event(**data)
    >>> event.save()
    >>> calendar.events.add(event)
    >>> data = {
    ...         'title': 'Upcoming Event',
    ...         'start': datetime.datetime(2008, 1, 1, 0, 0),
    ...         'end': datetime.datetime(2008, 1, 4, 0, 0)
    ...        }
    >>> event = Event(**data)
    >>> event.save()
    >>> calendar.events.add(event)
    >>> data = {
    ...         'title': 'Current Event',
    ...         'start': datetime.datetime(2008, 1, 3),
    ...         'end': datetime.datetime(2008, 1, 6)
    ...        }
    >>> event = Event(**data)
    >>> event.save()
    >>> calendar.events.add(event)
    '''
    name = models.CharField(_("name"), max_length=200)
    slug = models.SlugField(_("slug"), max_length=200)
    objects = CalendarManager()

    class Meta(object):
        verbose_name = _('calendar')
        verbose_name_plural = _('calendar')
        app_label = 'schedule'

    def __str__(self):
        return self.name

    @property
    def events(self):
        # Reverse FK accessor from Event.calendar.
        return self.event_set

    def create_relation(self, obj, distinction=None, inheritable=True):
        """
        Creates a CalendarRelation between self and obj.

        if Inheritable is set to true this relation will cascade to all events
        related to this calendar.
        """
        CalendarRelation.objects.create_relation(self, obj, distinction, inheritable)

    def get_recent(self, amount=5, in_datetime=datetime.datetime.now, tzinfo=pytz.utc):
        """
        This shortcut function allows you to get events that have started
        recently.

        amount is the amount of events you want in the queryset. The default is
        5.

        in_datetime is the datetime you want to check against.  It defaults to
        datetime.datetime.now

        NOTE(review): the body actually compares against timezone.now() and
        ignores both in_datetime and tzinfo — confirm before relying on them.
        """
        return self.events.order_by('-start').filter(start__lt=timezone.now())[:amount]

    def occurrences_after(self, date=None):
        return EventListManager(self.events.all()).occurrences_after(date)

    def get_absolute_url(self):
        return reverse('calendar_home', kwargs={'calendar_slug': self.slug})

    def add_event_url(self):
        return reverse('calendar_create_event', args=[self.slug])
class CalendarRelationManager(models.Manager):
    def create_relation(self, calendar, content_object, distinction=None, inheritable=True):
        """
        Creates a relation between calendar and content_object.
        See CalendarRelation for help on distinction and inheritable
        """
        # Generic FK bookkeeping: store the content type + pk of the object.
        ct = object_content_type(content_object)
        object_id = content_object.id
        cr = CalendarRelation(
            content_type=ct,
            object_id=object_id,
            calendar=calendar,
            distinction=distinction,
            content_object=content_object
        )
        cr.save()
        return cr
@python_2_unicode_compatible
class CalendarRelation(with_metaclass(ModelBase, *get_model_bases())):
    '''
    This is for relating data to a Calendar, and possible all of the events for
    that calendar, there is also a distinction, so that the same type or kind of
    data can be related in different ways.  A good example would be, if you have
    calendars that are only visible by certain users, you could create a
    relation between calendars and users, with the distinction of 'visibility',
    or 'ownership'.  If inheritable is set to true, all the events for this
    calendar will inherit this relation.

    calendar: a foreign key relation to a Calendar object.
    content_type: a foreign key relation to ContentType of the generic object
    object_id: the id of the generic object
    content_object: the generic foreign key to the generic object
    distinction: a string representing a distinction of the relation, User could
    have a 'veiwer' relation and an 'owner' relation for example.
    inheritable: a boolean that decides if events of the calendar should also
    inherit this relation

    DISCLAIMER: while this model is a nice out of the box feature to have, it
    may not scale well.  If you use this, keep that in mind.
    '''
    calendar = models.ForeignKey(Calendar, verbose_name=_("calendar"))
    # Generic foreign key triplet pointing at the related object.
    content_type = models.ForeignKey(ContentType)
    object_id = models.IntegerField()
    content_object = fields.GenericForeignKey('content_type', 'object_id')
    distinction = models.CharField(_("distinction"), max_length=20, null=True)
    inheritable = models.BooleanField(_("inheritable"), default=True)

    objects = CalendarRelationManager()

    class Meta(object):
        verbose_name = _('calendar relation')
        verbose_name_plural = _('calendar relations')
        app_label = 'schedule'

    def __str__(self):
        return '%s - %s' % (self.calendar, self.content_object)
| |
# -*- coding: utf-8 -*-
"""
IDL Ref:
https://thrift.apache.org/docs/idl
"""
from __future__ import absolute_import
from ply import yacc
from .lexer import tokens, lexer # noqa
from .model import (
BASE_TYPE_MAPS,
Field,
Function,
IdentifierValue,
ListType,
MapType,
Service,
SetType,
Thrift
)
from .exc import ThriftGrammerError
def p_error(p):
    """Raise a grammar error with token and line information.

    ply invokes ``p_error(None)`` on unexpected end-of-input, so guard
    before touching ``p.value`` / ``p.lineno`` (previously this raised
    an AttributeError instead of a parse error).
    """
    if p is None:
        raise ThriftGrammerError('Grammer error at end of input')
    raise ThriftGrammerError('Grammer error %r at line %d' %
                             (p.value, p.lineno))
# NOTE: the docstrings below are ply yacc grammar productions — they are
# functional, not documentation. Do not edit them as prose.

def p_start(p):
    '''start : header definition'''

def p_header(p):
    '''header : header_unit header
              |'''

def p_header_unit(p):
    '''header_unit : include
                   | namespace'''

def p_include(p):
    '''include : INCLUDE LITERAL'''
    # Record the included file name on the module-level Thrift AST.
    thrift.includes.append(p[2])

def p_namespace(p):
    '''namespace : NAMESPACE namespace_scope IDENTIFIER'''
    # Maps namespace identifier -> scope ('*' or a language name).
    thrift.namespaces[p[3]] = p[2]

def p_namespace_scope(p):
    '''namespace_scope : '*'
                       | IDENTIFIER'''
    p[0] = p[1]

def p_sep(p):
    # Thrift allows ',' or ';' interchangeably as element separators.
    '''sep : ','
           | ';'
    '''
def p_definition(p):
    '''definition : definition definition_unit
                  |'''

def p_definition_unit(p):
    '''definition_unit : const
                       | typedef
                       | enum
                       | struct
                       | union
                       | exception
                       | service
    '''

def p_const(p):
    '''const : CONST field_type IDENTIFIER '=' const_value '''
    # Cast the literal to the declared field type before storing.
    thrift.consts[p[3]] = p[2].cast(p[5])
def p_const_value(p):
    '''const_value : INTCONSTANT
                   | DUBCONSTANT
                   | LITERAL
                   | BOOLCONSTANT
                   | identifier_value
                   | const_list
                   | const_map'''
    p[0] = p[1]

def p_identifier_value(p):
    '''identifier_value : IDENTIFIER'''
    # Wrap so consumers can tell an identifier reference from a literal.
    p[0] = IdentifierValue(p[1])

def p_const_list(p):
    '''const_list : '[' const_value_seq ']' '''
    p[0] = p[2]

def p_const_value_seq(p):
    '''const_value_seq : const_value sep const_value_seq
                       | const_value const_value_seq
                       |'''
    _parse_seq(p)

def p_const_map(p):
    '''const_map : '{' const_map_seq '}' '''
    # const_map_seq is a list of [key, value] pairs.
    p[0] = dict(p[2])

def p_const_map_items(p):
    '''const_map_seq : const_map_item sep const_map_seq
                     | const_map_item const_map_seq
                     |
    '''
    _parse_seq(p)

def p_const_map_item(p):
    '''const_map_item : const_value ':' const_value'''
    p[0] = [p[1], p[3]]

def p_typedef(p):
    '''typedef : TYPEDEF definition_type IDENTIFIER'''
    thrift.typedefs[p[3]] = p[2]
def p_enum(p):
    '''enum : ENUM IDENTIFIER '{' enum_seq '}' '''
    if not p[4]:
        thrift.enums[p[2]] = {}
    else:
        # Thrift auto-numbering: members without an explicit '= N' get
        # previous value + 1; the first unnumbered member gets 0 (seed -1).
        init_val = p[4][0][1]
        vals = [-1 if init_val is None else init_val]
        for item in p[4]:
            if item[1] is None:
                val = vals[-1] + 1
                item[1] = val
                vals.append(val)
            vals.append(item[1])
        dct = dict(p[4])
        thrift.enums[p[2]] = dct
def p_enum_seq(p):
    '''enum_seq : enum_item sep enum_seq
                | enum_item enum_seq
                |
    '''
    _parse_seq(p)

def p_enum_item(p):
    # Yields a mutable [name, value] pair; value is None when unnumbered
    # (filled in later by p_enum's auto-numbering pass).
    '''enum_item : IDENTIFIER '=' INTCONSTANT
                 | IDENTIFIER'''
    if len(p) == 4:
        p[0] = [p[1], p[3]]
    elif len(p) == 2:
        p[0] = [p[1], None]
def p_struct(p):
    '''struct : STRUCT IDENTIFIER '{' field_seq '}' '''
    thrift.structs[p[2]] = p[4]

def p_union(p):
    '''union : UNION IDENTIFIER '{' field_seq '}' '''
    thrift.unions[p[2]] = p[4]

def p_exception(p):
    '''exception : EXCEPTION IDENTIFIER '{' field_seq '}' '''
    thrift.exceptions[p[2]] = p[4]

def p_service(p):
    '''service : SERVICE IDENTIFIER '{' function_seq '}'
               | SERVICE IDENTIFIER EXTENDS IDENTIFIER '{' function_seq '}'
    '''
    apis = {}
    # With EXTENDS, the parent service name is the 4th symbol (len(p) == 8).
    extends = p[4] if len(p) == 8 else None
    # The function list is always the second-to-last symbol in either form.
    functions = p[len(p) - 2]
    for function in functions:
        apis[function.name] = function
    thrift.services[p[2]] = Service(extends=extends, apis=apis)
def p_function(p):
    '''function : ONEWAY function_type IDENTIFIER '(' field_seq ')' throws
                | ONEWAY function_type IDENTIFIER '(' field_seq ')'
                | function_type IDENTIFIER '(' field_seq ')' throws
                | function_type IDENTIFIER '(' field_seq ')'
    '''
    # Disambiguate the four productions by symbol count and the ONEWAY token:
    # len 8: oneway + throws; len 7: either oneway-no-throws or throws-no-oneway.
    if len(p) == 8:
        p[0] = Function(p[2], p[3], fields=p[5], throws=p[7], oneway=True)
    elif len(p) == 7 and p[1] == 'oneway':
        p[0] = Function(p[2], p[3], fields=p[5], throws=None, oneway=True)
    elif len(p) == 7 and p[1] != 'oneway':
        p[0] = Function(p[1], p[2], fields=p[4], throws=p[6], oneway=False)
    elif len(p) == 6:
        p[0] = Function(p[1], p[2], fields=p[4], throws=None, oneway=False)
def p_function_seq(p):
    '''function_seq : function sep function_seq
                    | function function_seq
                    |
    '''
    _parse_seq(p)

def p_throws(p):
    '''throws : THROWS '(' field_seq ')' '''
    # A throws clause is just a field list of exception types.
    p[0] = p[3]

def p_function_type(p):
    '''function_type : field_type
                     | VOID'''
    p[0] = p[1]
def p_field_seq(p):
    '''field_seq : field sep field_seq
                 | field field_seq
                 |
    '''
    _parse_seq(p)

def p_field(p):
    '''field : field_id field_req field_type IDENTIFIER
             | field_id field_req field_type IDENTIFIER '=' const_value'''
    # Optional default value present only in the 7-symbol production.
    v = p[6] if len(p) == 7 else None
    p[0] = Field(p[1], p[3], p[4], value=v, requirement=p[2])

def p_field_id(p):
    '''field_id : INTCONSTANT ':' '''
    p[0] = p[1]

def p_field_req(p):
    '''field_req : REQUIRED
                 | OPTIONAL
                 |'''
    # Left unset (None) when the requirement keyword is omitted.
    if len(p) == 2:
        p[0] = p[1]

def p_field_type(p):
    '''field_type : IDENTIFIER
                  | base_type
                  | container_type'''
    p[0] = p[1]
def p_base_type(p):
    '''base_type : BOOL
                 | BYTE
                 | I16
                 | I32
                 | I64
                 | DOUBLE
                 | STRING
                 | BINARY'''
    # Instantiate the model type class mapped to this keyword.
    p[0] = BASE_TYPE_MAPS[p[1]]()

def p_container_type(p):
    '''container_type : map_type
                      | list_type
                      | set_type'''
    p[0] = p[1]

def p_map_type(p):
    '''map_type : MAP '<' field_type ',' field_type '>' '''
    p[0] = MapType(['map', p[3], p[5]])

def p_list_type(p):
    '''list_type : LIST '<' field_type '>' '''
    p[0] = ListType(['list', p[3]])

def p_set_type(p):
    '''set_type : SET '<' field_type '>' '''
    p[0] = SetType(['set', p[3]])

def p_definition_type(p):
    '''definition_type : base_type
                       | container_type'''
    p[0] = p[1]
# Build the LALR parser once at import time; no table files are written.
parser = yacc.yacc(debug=False, write_tables=0)
# Module-level Thrift AST accumulator; rebound by parse() on every call.
thrift = None
def parse(data):
    """Parse Thrift IDL text into a fresh Thrift model instance.

    NOTE(review): the grammar actions write into the module-global
    ``thrift``, so this function is not re-entrant or thread-safe.
    """
    global thrift
    thrift = Thrift()
    lexer.lineno = 1  # reset line tracking between successive parses
    parser.parse(data)
    return thrift
def _parse_seq(p):
if len(p) == 4:
p[0] = [p[1]] + p[3]
elif len(p) == 3:
p[0] = [p[1]] + p[2]
elif len(p) == 1:
p[0] = []
| |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Helper CGI for Apiserver in the development app server.
This is a fake apiserver proxy that does simple transforms on requests that
come in to /_ah/api and then re-dispatches them to /_ah/spi. It does not do
any authentication, quota checking, DoS checking, etc.
In addition, the proxy loads api configs from
/_ah/spi/BackendService.getApiConfigs prior to making the first call to the
backend at /_ah/spi and afterwards if app.yaml is changed.
"""
from __future__ import with_statement
import cgi
import cStringIO
try:
import json
except ImportError:
import simplejson as json
import logging
import mimetools
import re
# Requests matching this pattern are intercepted by the API proxy.
API_SERVING_PATTERN = '/_ah/api/.*'
# Backend SPI endpoint template: (port, remainder-of-path).
SPI_ROOT_FORMAT = 'http://127.0.0.1:%s/_ah/spi/%s'
# REST path prefix: '!'-prefixed variables are reserved (API name/version).
_API_REST_PATH_FORMAT = '{!name}/{!version}/%s'
_PATH_VARIABLE_PATTERN = r'[a-zA-Z_][a-zA-Z_\d]*'
_RESERVED_PATH_VARIABLE_PATTERN = r'!' + _PATH_VARIABLE_PATTERN
# Legal characters for a path-variable value (no URL structure chars).
_PATH_VALUE_PATTERN = r'[^:/?#\[\]{}]*'
class ApiRequest(object):
  """Simple data object representing an API request.

  Takes an app_server CGI request and environment in the constructor.
  Parses the request into convenient pieces and stores them as members.
  """
  API_PREFIX = '/_ah/api/'

  def __init__(self, base_env_dict, dev_appserver, request=None):
    """Constructor.

    Args:
      base_env_dict: Dictionary of CGI environment parameters.
      dev_appserver: used to call standard SplitURL method.
      request: AppServerRequest. Can be None.
    """
    self.cgi_env = base_env_dict
    self.headers = {}
    self.http_method = base_env_dict['REQUEST_METHOD']
    self.port = base_env_dict['SERVER_PORT']
    if request:
      self.path, self.query = dev_appserver.SplitURL(request.relative_url)
      self.body = request.infile.read()
      # Headers arrive as raw "Name: value" lines; normalize into a dict.
      for header in request.headers.headers:
        header_name, header_value = header.split(':', 1)
        self.headers[header_name.strip()] = header_value.strip()
    else:
      # No request object: synthesize an empty request at the API root.
      self.body = ''
      self.path = self.API_PREFIX
      self.query = ''
    assert self.path.startswith(self.API_PREFIX)
    # Strip the /_ah/api/ prefix so self.path is relative to the API root.
    self.path = self.path[len(self.API_PREFIX):]
    self.parameters = cgi.parse_qs(self.query)
    self.body_obj = json.loads(self.body) if self.body else {}
    self.request_id = None

  def IsRpc(self):
    # RPC-style calls are addressed to .../rpc rather than a REST path.
    return self.path.startswith('rpc')
class ApiConfigManager(object):
  """Manages loading api configs and method lookup."""

  def __init__(self):
    # Lookup tables populated by ParseApiConfigResponse:
    # RPC methods keyed by name/version, REST methods as an ordered list.
    self.__rpc_method_dict = {}
    self.__rest_methods = []
@staticmethod
def HasSpiEndpoint(config):
"""Checks if an SPI is registered with this App.
Args:
config: Parsed app.yaml as an appinfo proto.
Returns:
True if any handler is registered for (/_ah/spi/.*).
"""
return any(h.url.startswith('/_ah/spi/') for h in config.handlers)
def ParseApiConfigResponse(self, body):
"""Parses a json api config and registers methods for dispatch.
Side effects:
Parses method name, etc for all methods and updates the indexing
datastructures with the information.
Args:
body: body of getApiConfigs response
"""
try:
response_obj = json.loads(body)
except ValueError, unused_err:
logging.error('Can not parse BackendService.getApiConfigs response: %s',
body)
else:
for api_config_json in response_obj.get('items', []):
try:
config = json.loads(api_config_json)
except ValueError, unused_err:
logging.error('Can not parse API config: %s',
api_config_json)
else:
version = config.get('version', '')
if config.has_key('methods'):
for method_name, method in config.get('methods', {}).iteritems():
method['api'] = config
self.SaveRpcMethod(method_name, version, method)
self.SaveRestMethod(method_name, version, method)
@staticmethod
def CompilePathPattern(pattern):
"""Generates a compiled regex pattern for a path pattern.
e.g. '/{!name}/{!version}/notes/{id}'
returns re.compile(r'/([^:/?#\[\]{}]*)'
r'/([^:/?#\[\]{}]*)'
r'/notes/(?P<id>[^:/?#\[\]{}]*)')
Note in this example that !name and !version are reserved variable names
used to match the API name and version that should not be migrated into the
method argument namespace. As such they are not named in the regex, so
groupdict() excludes them.
Args:
pattern: parameterized path pattern to be checked
Returns:
compiled regex to match this path pattern
"""
def ReplaceReservedVariable(match):
"""Replaces a {!variable} with a regex to match it not by name.
Args:
match: The matching regex group as sent by re.sub()
Returns:
Regex to match the variable by name, if the full pattern was matched.
"""
if match.lastindex > 1:
return '%s(%s)' % (match.group(1), _PATH_VALUE_PATTERN)
return match.group(0)
def ReplaceVariable(match):
"""Replaces a {variable} with a regex to match it by name.
Args:
match: The matching regex group as sent by re.sub()
Returns:
Regex to match the variable by name, if the full pattern was matched.
"""
if match.lastindex > 1:
return '%s(?P<%s>%s)' % (match.group(1), match.group(2),
_PATH_VALUE_PATTERN)
return match.group(0)
pattern = re.sub('(/|^){(%s)}(?=/|$)' % _RESERVED_PATH_VARIABLE_PATTERN,
ReplaceReservedVariable, pattern, 2)
pattern = re.sub('(/|^){(%s)}(?=/|$)' % _PATH_VARIABLE_PATTERN,
ReplaceVariable, pattern)
return re.compile(pattern + '$')
def SaveRpcMethod(self, method_name, version, method):
"""Store JsonRpc api methods in a map for lookup at call time.
(rpcMethodName, apiVersion) => method.
Args:
method_name: Name of the API method
version: Version of the API
method: method descriptor (as in the api config file).
"""
self.__rpc_method_dict[(method_name, version)] = method
def LookupRpcMethod(self, method_name, version):
"""Lookup the JsonRPC method at call time.
The method is looked up in self.__rpc_method_dict, the dictionary that
it is saved in for SaveRpcMethod().
Args:
method_name: String name of the method
version: String version of the API
Returns:
Method descriptor as specified in the API configuration.
"""
method = self.__rpc_method_dict.get((method_name, version))
return method
def SaveRestMethod(self, method_name, version, method):
"""Store Rest api methods in a list for lookup at call time.
The list is self.__rest_methods, a list of tuples:
[(<compiled_path>, <path_pattern>, <method_dict>), ...]
where:
<compiled_path> is a compiled regex to match against the incoming URL
<path_pattern> is a string representing the original path pattern,
checked on insertion to prevent duplicates. -and-
<method_dict> is a dict (httpMethod, apiVersion) => (method_name, method)
This structure is a bit complex, it supports use in two contexts:
Creation time:
- SaveRestMethod is called repeatedly, each method will have a path,
which we want to be compiled for fast lookup at call time
- We want to prevent duplicate incoming path patterns, so store the
un-compiled path, not counting on a compiled regex being a stable
comparison as it is not documented as being stable for this use.
- Need to store the method that will be mapped at calltime.
- Different methods may have the same path but different http method.
and/or API versions.
Call time:
- Quickly scan through the list attempting .match(path) on each
compiled regex to find the path that matches.
- When a path is matched, look up the API version and method from the
request and get the method name and method config for the matching
API method and method name.
Args:
method_name: Name of the API method
version: Version of the API
method: method descriptor (as in the api config file).
"""
path_pattern = _API_REST_PATH_FORMAT % method.get('path', '')
http_method = method.get('httpMethod', '').lower()
for _, path, methods in self.__rest_methods:
if path == path_pattern:
methods[(http_method, version)] = method_name, method
break
else:
self.__rest_methods.append(
(self.CompilePathPattern(path_pattern),
path_pattern,
{(http_method, version): (method_name, method)}))
def LookupRestMethod(self, path, http_method):
"""Look up the rest method at call time.
The method is looked up in self.__rest_methods, the list it is saved
in for SaveRestMethod.
Args:
path: Path from the URL of the request.
http_method: HTTP method of the request.
Returns:
Tuple of (<method name>, <method>, <params>)
Where:
<method name> is the string name of the method that was matched.
<method> is the descriptor as specified in the API configuration. -and-
<params> is a dict of path parameters matched in the rest request.
"""
for compiled_path_pattern, unused_path, methods in self.__rest_methods:
match = compiled_path_pattern.match(path)
if match:
params = match.groupdict()
version = match.group(2)
method_key = (http_method.lower(), version)
method_name, method = methods.get(method_key, (None, None))
if method is not None:
break
else:
method_name = None
method = None
params = None
return method_name, method, params
def CreateApiserverDispatcher(config_manager=None):
  """Function to create Apiserver dispatcher.

  Args:
    config_manager: Allow setting of ApiConfigManager for testing.

  Returns:
    New dispatcher capable of handling requests to the built-in apiserver
    handlers.
  """

  # Imported lazily so the dispatcher class can subclass URLDispatcher
  # without requiring dev_appserver at module-import time.
  from google.appengine.tools import dev_appserver

  class ApiserverDispatcher(dev_appserver.URLDispatcher):
    """Dispatcher that handles requests to the built-in apiserver handlers."""

    class RequestState(object):
      """Enum tracking request state across the two internal redirects."""
      INIT = 0
      GET_API_CONFIGS = 1
      SPI_CALL = 2
      END = 3

    def __init__(self, config_manager=None, *args, **kwargs):
      self._request_stage = self.RequestState.INIT
      if config_manager is None:
        config_manager = ApiConfigManager()
      self.config_manager = config_manager
      dev_appserver.URLDispatcher.__init__(self, *args, **kwargs)

    def Dispatch(self,
                 request,
                 outfile,
                 base_env_dict=None):
      """Handles dispatch to apiserver handlers.

      base_env_dict should contain at least:
        REQUEST_METHOD, REMOTE_ADDR, SERVER_SOFTWARE, SERVER_NAME,
        SERVER_PROTOCOL, SERVER_PORT

      Args:
        request: AppServerRequest.
        outfile: The response file.
        base_env_dict: Dictionary of CGI environment parameters if available.
          Defaults to None.

      Returns:
        AppServerRequest internal redirect for normal calls or
        None for error conditions (e.g. method not found -> 404)
      """
      if self._request_stage != self.RequestState.INIT:
        return self.FailRequest('Dispatch in unexpected state', outfile)
      if not base_env_dict:
        return self.FailRequest('CGI Environment Not Available', outfile)
      # Save the original request; EndRedirect consults it later to
      # reformat the final response (e.g. restoring a JsonRPC envelope).
      self.request = ApiRequest(base_env_dict, dev_appserver, request)
      self._request_stage = self.RequestState.GET_API_CONFIGS
      return self.GetApiConfigs(base_env_dict, dev_appserver)

    def EndRedirect(self, dispatched_output, outfile):
      """Handle the end of getApiConfigs and SPI complete notification.

      This EndRedirect is called twice.

      The first time is upon completion of the BackendService.getApiConfigs()
      call.  After this call, the set of all available methods and their
      parameters / paths / config is contained in dispatched_output.  This is
      parsed and used to dispatch the request to the SPI backend itself.

      In order to cause a second dispatch and EndRedirect, this EndRedirect
      will return an AppServerRequest filled out with the SPI backend
      request.

      The second time it is called is upon completion of the call to the SPI
      backend.  After this call, the initial request (saved in Dispatch,
      prior to getApiConfigs) is used to reformat the response as needed.
      This currently only results in changes for JsonRPC requests, where the
      response body is moved into {'result': response_body_goes_here} and
      the request id is copied back into the response.

      Args:
        dispatched_output: resulting output from the SPI
        outfile: final output file for this handler

      Returns:
        An AppServerRequest for redirect or None for an immediate response.
      """
      if self._request_stage == self.RequestState.GET_API_CONFIGS:
        if self.HandleGetApiConfigsResponse(dispatched_output, outfile):
          return self.CallSpi(outfile)
      elif self._request_stage == self.RequestState.SPI_CALL:
        return self.HandleSpiResponse(dispatched_output, outfile)
      else:
        return self.FailRequest('EndRedirect in unexpected state', outfile)

    def GetApiConfigs(self, cgi_env, dev_appserver):
      """Makes a call to BackendService.getApiConfigs and parses result.

      Args:
        cgi_env: CGI environment dictionary as passed in by the framework
        dev_appserver: dev_appserver instance used to generate
          AppServerRequest.

      Returns:
        AppServerRequest to be returned as an internal redirect to
        getApiConfigs
      """
      request = ApiRequest(cgi_env, dev_appserver)
      request.path = 'BackendService.getApiConfigs'
      request.body = '{}'
      return BuildCGIRequest(cgi_env, request, dev_appserver)

    @staticmethod
    def VerifyResponse(response, status_code, content_type=None):
      """Verifies that a response has the expected status and content type.

      Args:
        response: Response to be checked.
        status_code: HTTP status code to be compared with response status.
        content_type: acceptable Content-Type: header value, None allows
          any.

      Returns:
        True if both status_code and content_type match, else False.
      """
      if response.status_code != status_code:
        return False
      if content_type is None:
        return True
      for header in response.headers:
        if header.lower() == 'content-type':
          return response.headers[header].lower() == content_type
      else:
        # for/else: only reached when no Content-Type header was present.
        return False

    @staticmethod
    def ParseCgiResponse(response):
      """Parses a CGI response, returning a headers dict and body.

      Args:
        response: a CGI response

      Returns:
        tuple of ({header: header_value, ...}, body)
      """
      header_dict = {}
      for header in response.headers.headers:
        header_name, header_value = header.split(':', 1)
        header_dict[header_name.strip()] = header_value.strip()
      if response.body:
        body = response.body.read()
      else:
        body = ''
      return header_dict, body

    def HandleGetApiConfigsResponse(self, dispatched_output, outfile):
      """Parses the result of getApiConfigs, returning True on success.

      Args:
        dispatched_output: Output from the getApiConfigs call handler.
        outfile: CGI output handle, used for error conditions.

      Returns:
        True on success, False on failure
      """
      response = dev_appserver.RewriteResponse(dispatched_output)
      if self.VerifyResponse(response, 200, 'application/json'):
        self.config_manager.ParseApiConfigResponse(response.body.read())
        return True
      else:
        self.FailRequest('BackendService.getApiConfigs Error', outfile)
        return False

    def CallSpi(self, outfile):
      """Generate SPI call (from earlier-saved request).

      Side effects:
        self.request is modified from Rest/JsonRPC format to apiserving
        format.

      Args:
        outfile: File to write out CGI-style response in case of error.

      Returns:
        AppServerRequest for redirect or None to send immediate CGI
        response.
      """
      if self.request.IsRpc():
        method = self.LookupRpcMethod()
        params = None
      else:
        method, params = self.LookupRestMethod()
      if method:
        self.TransformRequest(params)
        self._request_stage = self.RequestState.SPI_CALL
        return BuildCGIRequest(self.request.cgi_env, self.request,
                               dev_appserver)
      else:
        # No matching method: answer 404 directly, no redirect.
        self._request_stage = self.RequestState.END
        return SendCGIResponse('404', {'Content-Type': 'text/plain'},
                               'Not Found', outfile)

    def HandleSpiResponse(self, dispatched_output, outfile):
      """Handle SPI response, transforming output as needed.

      Args:
        dispatched_output: Response returned by SPI backend.
        outfile: File-like object to write transformed result.

      Returns:
        None
      """
      response = dev_appserver.AppServerResponse(
          response_file=dispatched_output)
      headers, body = self.ParseCgiResponse(response)
      if self.request.IsRpc():
        body = self.TransformJsonrpcResponse(body)
      self._request_stage = self.RequestState.END
      return SendCGIResponse(response.status_code, headers, body, outfile)

    def FailRequest(self, message, outfile):
      """Write an immediate failure response to outfile, no redirect.

      Args:
        message: Error message to be displayed to user (plain text).
        outfile: File-like object to write CGI response to.

      Returns:
        None
      """
      self._request_stage = self.RequestState.END
      return SendCGIResponse('500', {'Content-Type': 'text/plain'},
                             message, outfile)

    def LookupRestMethod(self):
      """Looks up and returns rest method for the currently-pending request.

      This method uses self.request as the currently-pending request.

      Returns:
        tuple of (method, parameters)
      """
      method_name, method, params = self.config_manager.LookupRestMethod(
          self.request.path, self.request.http_method)
      self.request.method_name = method_name
      return method, params

    def LookupRpcMethod(self):
      """Looks up and returns RPC method for the currently-pending request.

      This method uses self.request as the currently-pending request.

      Returns:
        RPC method that was found for the current request.
      """
      if not self.request.body_obj:
        return None
      method_name = self.request.body_obj.get('method', '')
      version = self.request.body_obj.get('apiVersion', '')
      self.request.method_name = method_name
      return self.config_manager.LookupRpcMethod(method_name, version)

    def TransformRequest(self, params):
      """Transforms self.request to apiserving request.

      This method uses self.request to determine the currently-pending
      request.  This method accepts a rest-style or RPC-style request.

      Side effects:
        Updates self.request to apiserving format.  (e.g. updating path to
        be the method name, and moving request parameters to the body.)

      Args:
        params: Path parameters dictionary for rest request
      """
      if self.request.IsRpc():
        self.TransformJsonrpcRequest()
      else:
        self.TransformRestRequest(params)
      self.request.path = self.request.method_name

    def TransformRestRequest(self, params):
      """Translates a Rest request into an apiserving request.

      Side effects:
        Updates self.request to apiserving format.  (e.g. moving URL path
        parameters into the request body.)

      Args:
        params: URL path parameter dict extracted by config_manager lookup.
      """
      body_obj = json.loads(self.request.body or '{}')
      if params:
        body_obj.update(params)
      self.request.body = json.dumps(body_obj)

    def TransformJsonrpcRequest(self):
      """Translates a JsonRpc request into an apiserving request.

      Side effects:
        Updates self.request to apiserving format.  (e.g. saving the
        JsonRPC request id and unwrapping 'params' into the body.)
      """
      body_obj = json.loads(self.request.body) if self.request.body else {}
      self.request.request_id = body_obj.get('id')
      body_obj = body_obj.get('params', {})
      self.request.body = json.dumps(body_obj)

    def TransformJsonrpcResponse(self, response_body):
      """Translates a apiserving response to a JsonRpc response.

      Side effects:
        Updates self.request to JsonRpc format.  (e.g. restoring request id
        and moving body object into {'result': body_obj})

      Args:
        response_body: Backend response to transform back to JsonRPC

      Returns:
        Updated, JsonRPC-formatted request body
      """
      body_obj = {'result': json.loads(response_body)}
      if self.request.request_id is not None:
        body_obj['id'] = self.request.request_id
      return json.dumps(body_obj)

  return ApiserverDispatcher(config_manager)
def BuildCGIRequest(base_env_dict, request, dev_appserver):
  """Build a CGI request to Call a method on an SPI backend.

  Args:
    base_env_dict: CGI environment dict
    request: ApiRequest to be converted to a CGI request
    dev_appserver: Handle to dev_appserver to generate CGI request.

  Returns:
    dev_appserver.AppServerRequest internal redirect object
  """
  # SPI backends always receive a POST with a JSON body.
  base_env_dict['REQUEST_METHOD'] = 'POST'
  if request.headers is None:
    request.headers = {}
  request.headers['Content-Type'] = 'application/json'
  spi_url = SPI_ROOT_FORMAT % (request.port, request.path)
  # Serialize headers (with Content-Length synced to the body) and body
  # into separate rewound buffers for AppServerRequest.
  header_buffer = cStringIO.StringIO()
  WriteHeaders(request.headers, header_buffer, len(request.body))
  header_buffer.seek(0)
  body_buffer = cStringIO.StringIO()
  body_buffer.write(request.body)
  body_buffer.seek(0)
  return dev_appserver.AppServerRequest(
      spi_url, None, mimetools.Message(header_buffer), body_buffer)
def WriteHeaders(headers, outfile, content_len=None):
  """Write headers to the output file, updating content length if needed.

  Args:
    headers: Header dict to be written
    outfile: File-like object to send headers to
    content_len: Optional updated content length to update content-length
      with
  """
  wrote_content_length = False
  # .items() iterates identically on Py2 and Py3; .iteritems() is Py2-only.
  for header, value in headers.items():
    if header.lower() == 'content-length' and content_len is not None:
      value = content_len
      wrote_content_length = True
    outfile.write('%s: %s\r\n' % (header, value))
  # Explicit None check: the previous truthiness test silently dropped the
  # header for a zero-length body, while the in-loop branch (correctly)
  # rewrote an existing Content-Length to 0.
  if not wrote_content_length and content_len is not None:
    outfile.write('Content-Length: %s\r\n' % content_len)
def SendCGIResponse(status, headers, content, outfile):
  """Dump reformatted response to CGI outfile.

  Writes the status line, the headers (with Content-Length synced to the
  body length), a blank separator line and the body, then rewinds outfile.

  Args:
    status: HTTP status code to send
    headers: Headers dictionary {header_name: header_value, ...}
    content: Body content to write
    outfile: File-like object where response will be written.

  Returns:
    None
  """
  write = outfile.write
  write('Status: %s\r\n' % status)
  WriteHeaders(headers, outfile, len(content))
  write('\r\n')
  write(content)
  outfile.seek(0)
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from keystone.backends.sqlalchemy import get_session, models, aliased
from keystone.backends import api
from keystone.models import Tenant
# pylint: disable=E1103,W0221
class TenantAPI(api.BaseTenantAPI):
    """SQLAlchemy-backed tenant API.

    Identifier note: the public Keystone model ``id`` maps to the
    ``tenants.uid`` column; the integer primary key (``tenants.id``) is an
    internal detail of this backend, converted via id_to_uid/uid_to_id.
    """

    def __init__(self, *args, **kw):
        super(TenantAPI, self).__init__(*args, **kw)

    # pylint: disable=W0221
    @staticmethod
    def transpose(values):
        """ Handles transposing field names from Keystone model to
        sqlalchemy mode

        Differences:
            desc <-> description
            id <-> uid (coming soon)
        """
        if 'id' in values:
            values['uid'] = values['id']
            del values['id']
        if 'description' in values:
            values['desc'] = values['description']
            del values['description']
        if 'enabled' in values:
            # Normalize the various truthy encodings to a real bool.
            if values['enabled'] in [1, 'true', 'True', True]:
                values['enabled'] = True
            else:
                values['enabled'] = False

    @staticmethod
    def to_model(ref):
        """ Returns Keystone model object based on SQLAlchemy model.

        Returns None when ref is falsy (e.g. no matching row).
        """
        if ref:
            return Tenant(id=ref.uid, name=ref.name, description=ref.desc,
                          enabled=bool(ref.enabled))

    @staticmethod
    def to_model_list(refs):
        """Converts a list of SQLAlchemy rows to Keystone Tenant models."""
        return [TenantAPI.to_model(ref) for ref in refs]

    def create(self, values):
        """Creates a tenant row; generates a uid when none is supplied."""
        data = values.copy()
        TenantAPI.transpose(data)
        tenant_ref = models.Tenant()
        tenant_ref.update(data)
        if tenant_ref.uid is None:
            tenant_ref.uid = uuid.uuid4().hex
        tenant_ref.save()
        return TenantAPI.to_model(tenant_ref)

    def get(self, id, session=None):
        """Returns a tenant by ID.

        .warning::

            Internally, the provided ID is matched against the
            ``tenants.UID``, not the PK (``tenants.id``) column.

            For PK lookups from within the sqlalchemy backend,
            use ``_get_by_id()`` instead.
        """
        if id is None:
            return None
        session = session or get_session()
        result = session.query(models.Tenant).filter_by(uid=id).first()
        return TenantAPI.to_model(result)

    @staticmethod
    def _get_by_id(id, session=None):
        """Returns a tenant by ID (PK).

        .warning::

            The provided ID is matched against the PK (``tenants.ID``).

            This is **only** for use within the sqlalchemy backend.
        """
        if id is None:
            return None
        session = session or get_session()
        return session.query(models.Tenant).filter_by(id=id).first()

    @staticmethod
    def id_to_uid(id, session=None):
        """Maps an internal PK to the public uid; None when not found."""
        if id is None:
            return None
        session = session or get_session()
        tenant = session.query(models.Tenant).filter_by(id=id).first()
        return tenant.uid if tenant else None

    @staticmethod
    def uid_to_id(uid, session=None):
        """Maps a public uid to the internal PK; None when not found."""
        if uid is None:
            return None
        session = session or get_session()
        tenant = session.query(models.Tenant).filter_by(uid=uid).first()
        return tenant.id if tenant else None

    def get_by_name(self, name, session=None):
        """Returns the tenant with the given name, or None."""
        session = session or get_session()
        result = session.query(models.Tenant).filter_by(name=name).first()
        return TenantAPI.to_model(result)

    def get_all(self, session=None):
        """Returns all tenants as Keystone models."""
        if not session:
            session = get_session()
        results = session.query(models.Tenant).all()
        return TenantAPI.to_model_list(results)

    def list_for_user_get_page(self, user_id, marker, limit, session=None):
        """Returns one page of tenants visible to a user.

        Visible tenants are those the user has a role on, plus the user's
        own default tenant (when set).
        """
        if not session:
            session = get_session()
        user = api.USER.get(user_id)
        # Translate the public user id to the backend PK when the user
        # backend distinguishes the two.
        if hasattr(api.USER, 'uid_to_id'):
            backend_user_id = api.USER.uid_to_id(user_id)
        else:
            backend_user_id = user_id
        ura = aliased(models.UserRoleAssociation)
        tenant = aliased(models.Tenant)
        q1 = session.query(tenant).join((ura, ura.tenant_id == tenant.id)).\
            filter(ura.user_id == backend_user_id)
        if 'tenant_id' in user:
            if hasattr(api.TENANT, 'uid_to_id'):
                backend_tenant_id = api.TENANT.uid_to_id(user.tenant_id)
            else:
                backend_tenant_id = user.tenant_id
            q2 = session.query(tenant).filter(tenant.id == backend_tenant_id)
            q3 = q1.union(q2)
        else:
            q3 = q1
        if marker:
            results = q3.filter("tenant.id>:marker").params(
                marker='%s' % marker).order_by(
                tenant.id.desc()).limit(int(limit)).all()
        else:
            results = q3.order_by(tenant.id.desc()).limit(int(limit)).all()
        return TenantAPI.to_model_list(results)

    # pylint: disable=R0912
    def list_for_user_get_page_markers(self, user_id, marker, limit,
                                       session=None):
        """Returns (prev, next) paging markers for list_for_user_get_page."""
        if not session:
            session = get_session()
        user = api.USER.get(user_id)
        if hasattr(api.USER, 'uid_to_id'):
            backend_user_id = api.USER.uid_to_id(user_id)
        else:
            backend_user_id = user_id
        ura = aliased(models.UserRoleAssociation)
        tenant = aliased(models.Tenant)
        q1 = session.query(tenant).join((ura, ura.tenant_id == tenant.id)).\
            filter(ura.user_id == backend_user_id)
        if 'tenant_id' in user:
            if hasattr(api.TENANT, 'uid_to_id'):
                backend_tenant_id = api.TENANT.uid_to_id(user.tenant_id)
            else:
                backend_tenant_id = user.tenant_id
            q2 = session.query(tenant).filter(tenant.id == backend_tenant_id)
            q3 = q1.union(q2)
        else:
            q3 = q1
        first = q3.order_by(tenant.id).first()
        last = q3.order_by(tenant.id.desc()).first()
        if first is None:
            return (None, None)
        if marker is None:
            marker = first.id
        next_page = q3.filter(tenant.id > marker).order_by(
            tenant.id).limit(int(limit)).all()
        prev_page = q3.filter(tenant.id > marker).order_by(
            tenant.id.desc()).limit(int(limit)).all()
        if len(next_page) == 0:
            next_page = last
        else:
            # Take the last row of the page.
            for t in next_page:
                next_page = t
        if len(prev_page) == 0:
            prev_page = first
        else:
            for t in prev_page:
                prev_page = t
        if prev_page.id == marker:
            prev_page = None
        else:
            prev_page = prev_page.id
        if next_page.id == last.id:
            next_page = None
        else:
            next_page = next_page.id
        return (prev_page, next_page)

    def get_page(self, marker, limit, session=None):
        """Returns one page of tenants ordered by descending PK."""
        if not session:
            session = get_session()
        if marker:
            tenants = session.query(models.Tenant).\
                filter("id>:marker").params(
                marker='%s' % marker).order_by(
                models.Tenant.id.desc()).limit(int(limit)).all()
        else:
            tenants = session.query(models.Tenant).order_by(
                models.Tenant.id.desc()).limit(int(limit)).all()
        return self.to_model_list(tenants)

    # pylint: disable=R0912
    def get_page_markers(self, marker, limit, session=None):
        """Returns (prev, next) paging markers for get_page."""
        if not session:
            session = get_session()
        first = session.query(models.Tenant).order_by(
            models.Tenant.id).first()
        last = session.query(models.Tenant).order_by(
            models.Tenant.id.desc()).first()
        if first is None:
            return (None, None)
        if marker is None:
            marker = first.id
        next_page = session.query(models.Tenant).\
            filter("id > :marker").\
            params(marker='%s' % marker).\
            order_by(models.Tenant.id).\
            limit(int(limit)).\
            all()
        prev_page = session.query(models.Tenant).\
            filter("id < :marker").\
            params(marker='%s' % marker).\
            order_by(models.Tenant.id.desc()).\
            limit(int(limit)).\
            all()
        if len(next_page) == 0:
            next_page = last
        else:
            # Take the last row of the page.
            for t in next_page:
                next_page = t
        if len(prev_page) == 0:
            prev_page = first
        else:
            for t in prev_page:
                prev_page = t
        if prev_page.id == marker:
            prev_page = None
        else:
            prev_page = prev_page.id
        if next_page.id == last.id:
            next_page = None
        else:
            next_page = next_page.id
        return (prev_page, next_page)

    def is_empty(self, id, session=None):
        """Returns True when no user references the tenant."""
        if not session:
            session = get_session()
        if hasattr(api.TENANT, 'uid_to_id'):
            id = self.uid_to_id(id)
        a_user = session.query(models.UserRoleAssociation).filter_by(
            tenant_id=id).first()
        if a_user is not None:
            return False
        a_user = session.query(models.User).filter_by(tenant_id=id).first()
        if a_user is not None:
            return False
        return True

    def update(self, id, values, session=None):
        """Updates a tenant by public id and returns the updated model."""
        if not session:
            session = get_session()
        if hasattr(api.TENANT, 'uid_to_id'):
            pkid = self.uid_to_id(id)
        else:
            pkid = id
        data = values.copy()
        TenantAPI.transpose(data)
        with session.begin():
            tenant_ref = self._get_by_id(pkid, session)
            tenant_ref.update(data)
            tenant_ref.save(session=session)
        return self.get(id, session)

    def delete(self, id, session=None):
        """Deletes a tenant; refuses when users are still associated."""
        if not session:
            session = get_session()
        if not self.is_empty(id):
            # BUG FIX: 'fault' was referenced here without any import, so
            # this guard raised NameError instead of the intended fault.
            # Import locally to leave module-level imports untouched.
            # NOTE(review): module path follows the legacy keystone layout
            # (keystone/logic/types/fault.py) -- confirm in this tree.
            from keystone.logic.types import fault
            raise fault.ForbiddenFault("You may not delete a tenant that "
                                       "contains users")
        if hasattr(api.TENANT, 'uid_to_id'):
            id = self.uid_to_id(id)
        with session.begin():
            tenant_ref = self._get_by_id(id, session)
            session.delete(tenant_ref)

    def get_all_endpoints(self, tenant_id, session=None):
        """Returns global endpoint templates plus the tenant's own."""
        if not session:
            session = get_session()
        if hasattr(api.TENANT, 'uid_to_id'):
            tenant_id = self.uid_to_id(tenant_id)
        endpoint_templates = aliased(models.EndpointTemplates)
        q = session.query(endpoint_templates).\
            filter(endpoint_templates.is_global == True)
        if tenant_id:
            ep = aliased(models.Endpoints)
            q1 = session.query(endpoint_templates).join((ep,
                ep.endpoint_template_id == endpoint_templates.id)).\
                filter(ep.tenant_id == tenant_id)
            q = q.union(q1)
        return q.all()

    def get_role_assignments(self, tenant_id, session=None):
        """Returns role associations for a tenant with ids mapped back to
        public uids."""
        if not session:
            session = get_session()
        if hasattr(api.TENANT, 'uid_to_id'):
            tenant_id = TenantAPI.uid_to_id(tenant_id)
        results = session.query(models.UserRoleAssociation).\
            filter_by(tenant_id=tenant_id)
        for result in results:
            if hasattr(api.USER, 'uid_to_id'):
                result.user_id = api.USER.id_to_uid(result.user_id)
            if hasattr(api.TENANT, 'uid_to_id'):
                result.tenant_id = api.TENANT.id_to_uid(result.tenant_id)
        return results
def get():
    """Factory returning the sqlalchemy-backed tenant API implementation."""
    return TenantAPI()
| |
"""
Component that will help set the microsoft face for verify processing.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/image_processing.microsoft_face_identify/
"""
import asyncio
import logging
import voluptuous as vol
from homeassistant.core import split_entity_id, callback
from homeassistant.const import STATE_UNKNOWN
from homeassistant.exceptions import HomeAssistantError
from homeassistant.components.microsoft_face import DATA_MICROSOFT_FACE
from homeassistant.components.image_processing import (
PLATFORM_SCHEMA, ImageProcessingEntity, CONF_CONFIDENCE, CONF_SOURCE,
CONF_ENTITY_ID, CONF_NAME, ATTR_ENTITY_ID, ATTR_CONFIDENCE)
import homeassistant.helpers.config_validation as cv
from homeassistant.util.async import run_callback_threadsafe
# The shared Microsoft Face component must be set up first.
DEPENDENCIES = ['microsoft_face']

_LOGGER = logging.getLogger(__name__)

# Event fired on the bus for each face that passes the confidence filter.
EVENT_DETECT_FACE = 'image_processing.detect_face'

ATTR_NAME = 'name'
ATTR_TOTAL_FACES = 'total_faces'
ATTR_AGE = 'age'
ATTR_GENDER = 'gender'
ATTR_MOTION = 'motion'
ATTR_GLASSES = 'glasses'
ATTR_FACES = 'faces'

CONF_GROUP = 'group'

# Platform config: a Microsoft Face person-group slug is required.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_GROUP): cv.slugify,
})
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up the microsoft face identify platform."""
    api = hass.data[DATA_MICROSOFT_FACE]
    face_group = config[CONF_GROUP]
    confidence = config[CONF_CONFIDENCE]

    # One identify entity per configured source camera.
    entities = [
        MicrosoftFaceIdentifyEntity(
            camera[CONF_ENTITY_ID], api, face_group, confidence,
            camera.get(CONF_NAME))
        for camera in config[CONF_SOURCE]
    ]
    async_add_devices(entities)
class ImageProcessingFaceEntity(ImageProcessingEntity):
    """Base entity class for face image processing."""

    def __init__(self):
        """Initialize base face identify/verify entity."""
        self.faces = []       # faces from the most recent scan
        self.total_faces = 0  # number of faces seen in that scan

    @property
    def state(self):
        """Return the state of the entity."""
        if not self.confidence:
            # No confidence threshold configured: expose the face count.
            return self.total_faces

        best_confidence = 0
        best_state = STATE_UNKNOWN
        for face in self.faces:
            if ATTR_CONFIDENCE not in face:
                continue
            candidate = face[ATTR_CONFIDENCE]
            if candidate <= best_confidence:
                continue
            best_confidence = candidate
            # Prefer the identified name, fall back to the motion label.
            for attr in (ATTR_NAME, ATTR_MOTION):
                if attr in face:
                    best_state = face[attr]
                    break
        return best_state

    @property
    def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        return 'face'

    @property
    def state_attributes(self):
        """Return device specific state attributes."""
        return {
            ATTR_FACES: self.faces,
            ATTR_TOTAL_FACES: self.total_faces,
        }

    def process_faces(self, faces, total):
        """Send event with detected faces and store data."""
        run_callback_threadsafe(
            self.hass.loop, self.async_process_faces, faces, total).result()

    @callback
    def async_process_faces(self, faces, total):
        """Send event with detected faces and store data.

        Each face is a dict in the following format:
          [
            {
              ATTR_CONFIDENCE: 80,
              ATTR_NAME: 'Name',
              ATTR_AGE: 12.0,
              ATTR_GENDER: 'man',
              ATTR_MOTION: 'smile',
              ATTR_GLASSES: 'sunglasses'
            },
          ]

        This method must be run in the event loop.
        """
        min_confidence = self.confidence
        for face in faces:
            # Skip faces below the configured confidence threshold.
            if min_confidence and ATTR_CONFIDENCE in face:
                if face[ATTR_CONFIDENCE] < min_confidence:
                    continue

            face.update({ATTR_ENTITY_ID: self.entity_id})
            self.hass.async_add_job(
                self.hass.bus.async_fire, EVENT_DETECT_FACE, face
            )

        # Update entity store.
        self.faces = faces
        self.total_faces = total
class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity):
    """Microsoft face api entity for identify."""

    def __init__(self, camera_entity, api, face_group, confidence, name=None):
        """Initialize the Microsoft Face identify entity."""
        super().__init__()

        self._api = api
        self._camera = camera_entity
        self._confidence = confidence
        self._face_group = face_group
        if name:
            self._name = name
        else:
            self._name = "MicrosoftFace {0}".format(
                split_entity_id(camera_entity)[1])

    @property
    def confidence(self):
        """Return minimum confidence for send events."""
        return self._confidence

    @property
    def camera_entity(self):
        """Return camera entity id from process pictures."""
        return self._camera

    @property
    def name(self):
        """Return the name of the entity."""
        return self._name

    @asyncio.coroutine
    def async_process_image(self, image):
        """Process image.

        This method is a coroutine.
        """
        identify_result = None
        try:
            # First detect faces in the image, then identify them against
            # the configured person group.
            face_data = yield from self._api.call_api(
                'post', 'detect', image, binary=True)

            if not face_data:
                return

            face_ids = [entry['faceId'] for entry in face_data]
            identify_result = yield from self._api.call_api(
                'post', 'identify',
                {'faceIds': face_ids, 'personGroupId': self._face_group})

        except HomeAssistantError as err:
            _LOGGER.error("Can't process image on microsoft face: %s", err)
            return

        # Map candidate person ids back to names from the local store.
        known_faces = []
        total = 0
        for face in identify_result:
            total += 1
            candidates = face['candidates']
            if not candidates:
                continue

            best = candidates[0]
            person_name = ''
            for s_name, s_id in self._api.store[self._face_group].items():
                if best['personId'] == s_id:
                    person_name = s_name
                    break

            known_faces.append({
                ATTR_NAME: person_name,
                ATTR_CONFIDENCE: best['confidence'] * 100,
            })

        self.async_process_faces(known_faces, total)
| |
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import itertools
import json
import logging
import os
from dataclasses import dataclass
from typing import List, Set, Tuple, cast
from pants.backend.python.python_artifact import PythonArtifact
from pants.backend.python.rules.pex import (
Pex,
PexInterpreterConstraints,
PexRequest,
PexRequirements,
)
from pants.backend.python.rules.setuptools import Setuptools
from pants.backend.python.rules.util import (
PackageDatum,
distutils_repr,
find_packages,
is_python2,
source_root_or_raise,
)
from pants.backend.python.subsystems.subprocess_environment import SubprocessEncodingEnvironment
from pants.backend.python.target_types import (
PythonEntryPoint,
PythonInterpreterCompatibility,
PythonProvidesField,
PythonRequirementsField,
PythonSources,
)
from pants.base.specs import AddressSpecs, AscendantAddresses, SingleAddress
from pants.core.target_types import ResourcesSources
from pants.core.util_rules.determine_source_files import AllSourceFilesRequest, SourceFiles
from pants.core.util_rules.distdir import DistDir
from pants.core.util_rules.strip_source_roots import (
SourceRootStrippedSources,
StripSourcesFieldRequest,
)
from pants.engine.addresses import Address, Addresses
from pants.engine.collection import Collection, DeduplicatedCollection
from pants.engine.console import Console
from pants.engine.fs import (
AddPrefix,
Digest,
DirectoryToMaterialize,
FileContent,
FilesContent,
InputFilesContent,
MergeDigests,
PathGlobs,
RemovePrefix,
Snapshot,
SnapshotSubset,
Workspace,
)
from pants.engine.goal import Goal, GoalSubsystem
from pants.engine.process import Process, ProcessResult
from pants.engine.rules import SubsystemRule, goal_rule, named_rule, rule
from pants.engine.selectors import Get, MultiGet
from pants.engine.target import (
Dependencies,
Sources,
Target,
Targets,
TargetsWithOrigins,
TransitiveTargets,
)
from pants.engine.unions import UnionMembership
from pants.option.custom_types import shell_str
from pants.python.python_setup import PythonSetup
from pants.source.source_root import SourceRootConfig
from pants.util.ordered_set import FrozenOrderedSet
logger = logging.getLogger(__name__)
# Errors raised by the setup-py goal and its helper rules.
class InvalidSetupPyArgs(Exception):
    """Indicates invalid arguments to setup.py."""


class TargetNotExported(Exception):
    """Indicates a target that was expected to be exported is not."""


class NoOwnerError(Exception):
    """Indicates an exportable target has no owning exported target."""


class AmbiguousOwnerError(Exception):
    """Indicates an exportable target has more than one owning exported target."""


class UnsupportedPythonVersion(Exception):
    """Indicates that the Python version is unsupported for running setup.py commands."""


class InvalidEntryPoint(Exception):
    """Indicates that a specified binary entry point was invalid."""
@dataclass(frozen=True)
class ExportedTarget:
    """A target that explicitly exports a setup.py artifact, using a `provides=` stanza.

    The code provided by this artifact can be from this target or from any targets it owns.
    """

    # The underlying target carrying the `provides=` field.
    target: Target

    @property
    def provides(self) -> PythonArtifact:
        """Return the PythonArtifact declared by this target's `provides=` field."""
        return cast(PythonArtifact, self.target[PythonProvidesField].value)
@dataclass(frozen=True)
class DependencyOwner:
    """An ExportedTarget in its role as an owner of other targets.

    We need this type to prevent rule ambiguities when computing the list of targets owned by an
    ExportedTarget (which involves going from ExportedTarget -> dep -> owner (which is itself an
    ExportedTarget) and checking if that owner is the original ExportedTarget).
    """

    exported_target: ExportedTarget
@dataclass(frozen=True)
class OwnedDependency:
    """A target that is owned by some ExportedTarget.

    Code in this target is published in the owner's artifact.

    The owner of a target T is T's closest filesystem ancestor among the exported targets
    that directly or indirectly depend on it (including T itself).
    """

    target: Target


class OwnedDependencies(Collection[OwnedDependency]):
    """The set of targets owned by a single ExportedTarget."""
    pass


class ExportedTargetRequirements(DeduplicatedCollection[str]):
    """The requirements of an ExportedTarget.

    Includes:
    - The "normal" 3rdparty requirements of the ExportedTarget and all targets it owns.
    - The published versions of any other ExportedTargets it depends on.
    """

    # Sorted for deterministic setup.py output.
    sort_input = True
@dataclass(frozen=True)
class AncestorInitPyFiles:
    """__init__.py files in enclosing packages of the exported code."""

    digests: Tuple[Digest, ...]  # The files stripped of their source roots.


@dataclass(frozen=True)
class SetupPySourcesRequest:
    """A request to compute the sources for a set of targets."""

    targets: Targets
    py2: bool  # Whether to use py2 or py3 package semantics.


@dataclass(frozen=True)
class SetupPySources:
    """The sources required by a setup.py command.

    Includes some information derived from analyzing the source, namely the packages, namespace
    packages and resource files in the source.
    """

    digest: Digest
    packages: Tuple[str, ...]
    namespace_packages: Tuple[str, ...]
    package_data: Tuple[PackageDatum, ...]


@dataclass(frozen=True)
class SetupPyChrootRequest:
    """A request to create a chroot containing a setup.py and the sources it operates on."""

    exported_target: ExportedTarget
    py2: bool  # Whether to use py2 or py3 package semantics.


@dataclass(frozen=True)
class SetupPyChroot:
    """A chroot containing a generated setup.py and the sources it operates on."""

    digest: Digest
    # The keywords are embedded in the setup.py file in the digest, so these aren't
    # strictly needed here, but they are convenient for testing.
    setup_keywords_json: str


@dataclass(frozen=True)
class RunSetupPyRequest:
    """A request to run a setup.py command."""

    exported_target: ExportedTarget
    chroot: SetupPyChroot
    args: Tuple[str, ...]


@dataclass(frozen=True)
class RunSetupPyResult:
    """The result of running a setup.py command."""

    output: Digest  # The state of the chroot after running setup.py.


@dataclass(frozen=True)
class SetuptoolsSetup:
    """The setuptools tool."""

    # A pex containing setuptools; used to execute the generated setup.py.
    requirements_pex: Pex
class SetupPyOptions(GoalSubsystem):
    """Run setup.py commands."""

    name = "setup-py2"

    @classmethod
    def register_options(cls, register):
        """Register the `--args` and `--transitive` options for this goal."""
        super().register_options(register)
        register(
            "--args",
            type=list,
            member_type=shell_str,
            help="Arguments to pass directly to setup.py, e.g. "
            '`--setup-py2-args="bdist_wheel --python-tag py36.py37"`. If unspecified, we just '
            "dump the setup.py chroot.",
        )
        register(
            "--transitive",
            type=bool,
            default=False,
            help="If specified, will run the setup.py command recursively on all exported targets that "
            "the specified targets depend on, in dependency order. This is useful, e.g., when "
            "the command publishes dists, to ensure that any dependencies of a dist are published "
            "before it.",
        )
class SetupPy(Goal):
    """The `setup-py2` goal: run setup.py commands on exported targets."""

    subsystem_cls = SetupPyOptions
def validate_args(args: Tuple[str, ...]):
    """Reject setup.py arguments that this goal cannot support.

    Raises InvalidSetupPyArgs when the args override the dist dir or invoke
    the `upload`/`register` commands.
    """
    # We rely on the dist dir being the default, so we know where to find the created dists.
    if {"--dist-dir", "-d"}.intersection(args):
        raise InvalidSetupPyArgs(
            "Cannot set --dist-dir/-d in setup.py args. To change where dists "
            "are written, use the global --pants-distdir option."
        )
    # We don't allow publishing via setup.py, as we don't want the setup.py running rule,
    # which is not a @goal_rule, to side-effect (plus, we'd need to ensure that publishing
    # happens in dependency order). Note that `upload` and `register` were removed in
    # setuptools 42.0.0, in favor of Twine, but we still check for them in case the user modified
    # the default version used by our Setuptools subsystem.
    # TODO: A `publish` rule, that can invoke Twine to do the actual uploading.
    # See https://github.com/pantsbuild/pants/issues/8935.
    if {"upload", "register"}.intersection(args):
        raise InvalidSetupPyArgs("Cannot use the `upload` or `register` setup.py commands")
@goal_rule
async def run_setup_pys(
    targets_with_origins: TargetsWithOrigins,
    options: SetupPyOptions,
    console: Console,
    python_setup: PythonSetup,
    distdir: DistDir,
    workspace: Workspace,
    union_membership: UnionMembership,
) -> SetupPy:
    """Run setup.py commands on all exported targets addressed."""
    args = tuple(options.values.args)
    validate_args(args)
    # Get all exported targets, ignoring any non-exported targets that happened to be
    # globbed over, but erroring on any explicitly-requested non-exported targets.
    exported_targets: List[ExportedTarget] = []
    explicit_nonexported_targets: List[Target] = []
    for target_with_origin in targets_with_origins:
        tgt = target_with_origin.target
        if _is_exported(tgt):
            exported_targets.append(ExportedTarget(tgt))
        elif isinstance(target_with_origin.origin, SingleAddress):
            explicit_nonexported_targets.append(tgt)
    if explicit_nonexported_targets:
        raise TargetNotExported(
            "Cannot run setup.py on these targets, because they have no `provides=` clause: "
            f'{", ".join(so.address.reference() for so in explicit_nonexported_targets)}'
        )
    if options.values.transitive:
        # Expand out to all owners of the entire dep closure.
        transitive_targets = await Get[TransitiveTargets](
            Addresses(et.target.address for et in exported_targets)
        )
        owners = await MultiGet(
            Get[ExportedTarget](OwnedDependency(tgt))
            for tgt in transitive_targets.closure
            if is_ownable_target(tgt, union_membership)
        )
        # FrozenOrderedSet dedupes while preserving a deterministic order.
        exported_targets = list(FrozenOrderedSet(owners))
    # Decide whether to use py2 or py3 package semantics, from the interpreter constraints.
    py2 = is_python2(
        (
            target_with_origin.target.get(PythonInterpreterCompatibility).value
            for target_with_origin in targets_with_origins
        ),
        python_setup,
    )
    chroots = await MultiGet(
        Get[SetupPyChroot](SetupPyChrootRequest(exported_target, py2))
        for exported_target in exported_targets
    )
    # If args were provided, run setup.py with them; otherwise just dump chroots.
    if args:
        setup_py_results = await MultiGet(
            Get[RunSetupPyResult](RunSetupPyRequest(exported_target, chroot, tuple(args)))
            for exported_target, chroot in zip(exported_targets, chroots)
        )
        for exported_target, setup_py_result in zip(exported_targets, setup_py_results):
            addr = exported_target.target.address.reference()
            console.print_stderr(f"Writing dist for {addr} under {distdir.relpath}/.")
            workspace.materialize_directory(
                DirectoryToMaterialize(setup_py_result.output, path_prefix=str(distdir.relpath))
            )
    else:
        # Just dump the chroot.
        for exported_target, chroot in zip(exported_targets, chroots):
            addr = exported_target.target.address.reference()
            provides = exported_target.provides
            setup_py_dir = distdir.relpath / f"{provides.name}-{provides.version}"
            console.print_stderr(f"Writing setup.py chroot for {addr} to {setup_py_dir}")
            workspace.materialize_directory(
                DirectoryToMaterialize(chroot.digest, path_prefix=str(setup_py_dir))
            )
    return SetupPy(0)
# We write .py sources into the chroot under this dir.
CHROOT_SOURCE_ROOT = "src"
SETUP_BOILERPLATE = """
# DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS
# Target: {target_address_spec}
from setuptools import setup
setup(**{setup_kwargs_str})
"""
@rule
async def run_setup_py(
    req: RunSetupPyRequest,
    setuptools_setup: SetuptoolsSetup,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target."""
    # Merge the chroot with the setuptools pex so the process sees both.
    input_digest = await Get[Digest](
        MergeDigests((req.chroot.digest, setuptools_setup.requirements_pex.digest))
    )
    # The setuptools dist dir, created by it under the chroot (not to be confused with
    # pants's own dist dir, at the buildroot).
    dist_dir = "dist/"
    process = setuptools_setup.requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./setuptools.pex",
        pex_args=("setup.py", *req.args),
        input_digest=input_digest,
        # setuptools commands that create dists write them to the distdir.
        # TODO: Could there be other useful files to capture?
        output_directories=(dist_dir,),
        description=f"Run setuptools for {req.exported_target.target.address.reference()}",
    )
    result = await Get[ProcessResult](Process, process)
    # Strip the dist/ prefix so the returned digest holds just the built dists.
    output_digest = await Get[Digest](RemovePrefix(result.output_digest, dist_dir))
    return RunSetupPyResult(output_digest)
@rule
async def generate_chroot(request: SetupPyChrootRequest) -> SetupPyChroot:
    """Create a chroot containing a generated setup.py and the sources it operates on."""
    exported_target = request.exported_target
    # Everything the exported target owns contributes sources and requirements.
    owned_deps = await Get[OwnedDependencies](DependencyOwner(exported_target))
    targets = Targets(od.target for od in owned_deps)
    sources = await Get[SetupPySources](SetupPySourcesRequest(targets, py2=request.py2))
    requirements = await Get[ExportedTargetRequirements](DependencyOwner(exported_target))
    # Nest the sources under the src/ prefix.
    src_digest = await Get[Digest](AddPrefix(sources.digest, CHROOT_SOURCE_ROOT))
    target = exported_target.target
    provides = exported_target.provides
    # Generate the kwargs to the setup() call.
    setup_kwargs = provides.setup_py_keywords.copy()
    setup_kwargs.update(
        {
            "package_dir": {"": CHROOT_SOURCE_ROOT},
            "packages": sources.packages,
            "namespace_packages": sources.namespace_packages,
            "package_data": dict(sources.package_data),
            "install_requires": tuple(requirements),
        }
    )
    key_to_binary_spec = provides.binaries
    keys = list(key_to_binary_spec.keys())
    binaries = await Get[Targets](
        Addresses(
            Address.parse(key_to_binary_spec[key], relative_to=target.address.spec_path)
            for key in keys
        )
    )
    # Turn each `binaries=` entry into a console_scripts entry point.
    for key, binary in zip(keys, binaries):
        binary_entry_point = binary.get(PythonEntryPoint).value
        if not binary_entry_point:
            raise InvalidEntryPoint(
                f"The binary {key} exported by {target.address.reference()} is not a valid entry "
                f"point."
            )
        entry_points = setup_kwargs["entry_points"] = setup_kwargs.get("entry_points", {})
        console_scripts = entry_points["console_scripts"] = entry_points.get("console_scripts", [])
        console_scripts.append(f"{key}={binary_entry_point}")
    # Generate the setup script.
    setup_py_content = SETUP_BOILERPLATE.format(
        target_address_spec=target.address.reference(),
        setup_kwargs_str=distutils_repr(setup_kwargs),
    ).encode()
    extra_files_digest = await Get[Digest](
        InputFilesContent(
            [
                FileContent("setup.py", setup_py_content),
                FileContent(
                    "MANIFEST.in", "include *.py".encode()
                ), # Make sure setup.py is included.
            ]
        )
    )
    chroot_digest = await Get[Digest](MergeDigests((src_digest, extra_files_digest)))
    return SetupPyChroot(chroot_digest, json.dumps(setup_kwargs, sort_keys=True))
@rule
async def get_sources(
    request: SetupPySourcesRequest, source_root_config: SourceRootConfig
) -> SetupPySources:
    """Compute the sources, packages and package data for a setup.py chroot."""
    targets = request.targets
    # Strip source roots from every target's sources (generating code if needed).
    stripped_srcs_list = await MultiGet(
        Get[SourceRootStrippedSources](
            StripSourcesFieldRequest(
                target.get(Sources),
                for_sources_types=(PythonSources, ResourcesSources),
                enable_codegen=True,
            )
        )
        for target in targets
    )
    # Create a chroot with all the sources, and any ancestor __init__.py files that might be needed
    # for imports to work. Note that if a repo has multiple exported targets under a single ancestor
    # package, then that package must be a namespace package, which in Python 3 means it must not
    # have an __init__.py. We don't validate this here, because it would require inspecting *all*
    # targets, whether or not they are in the target set for this run - basically the entire repo.
    # So it's the repo owners' responsibility to ensure __init__.py hygiene.
    stripped_srcs_digests = [
        stripped_sources.snapshot.digest for stripped_sources in stripped_srcs_list
    ]
    ancestor_init_pys = await Get[AncestorInitPyFiles](Targets, targets)
    sources_digest = await Get[Digest](
        MergeDigests((*stripped_srcs_digests, *ancestor_init_pys.digests))
    )
    # The __init__.py contents are needed to detect namespace packages.
    init_pys_snapshot = await Get[Snapshot](
        SnapshotSubset(sources_digest, PathGlobs(["**/__init__.py"]))
    )
    init_py_contents = await Get[FilesContent](Digest, init_pys_snapshot.digest)
    packages, namespace_packages, package_data = find_packages(
        source_roots=source_root_config.get_source_roots(),
        tgts_and_stripped_srcs=list(zip(targets, stripped_srcs_list)),
        init_py_contents=init_py_contents,
        py2=request.py2,
    )
    return SetupPySources(
        digest=sources_digest,
        packages=packages,
        namespace_packages=namespace_packages,
        package_data=package_data,
    )
@rule
async def get_ancestor_init_py(
    targets: Targets, source_root_config: SourceRootConfig
) -> AncestorInitPyFiles:
    """Find any ancestor __init__.py files for the given targets.

    Includes sibling __init__.py files. Returns the files stripped of their source roots.
    """
    source_roots = source_root_config.get_source_roots()
    sources = await Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in targets),
            for_sources_types=(PythonSources,),
            enable_codegen=True,
        )
    )
    # Find the ancestors of all dirs containing .py files, including those dirs themselves.
    source_dir_ancestors: Set[Tuple[str, str]] = set()  # Items are (src_root, path incl. src_root).
    for fp in sources.snapshot.files:
        source_dir_ancestor = os.path.dirname(fp)
        source_root = source_root_or_raise(source_roots, fp)
        # Do not allow the repository root to leak (i.e., '.' should not be a package in setup.py).
        while source_dir_ancestor != source_root:
            source_dir_ancestors.add((source_root, source_dir_ancestor))
            source_dir_ancestor = os.path.dirname(source_dir_ancestor)
    source_dir_ancestors_list = list(source_dir_ancestors)  # To force a consistent order.
    # Note that we must MultiGet single globs instead of a single Get for all the globs, because
    # we match each result to its originating glob (see use of zip below).
    ancestor_init_py_snapshots = await MultiGet[Snapshot](
        Get[Snapshot](PathGlobs, PathGlobs([os.path.join(source_dir_ancestor[1], "__init__.py")]))
        for source_dir_ancestor in source_dir_ancestors_list
    )
    # Strip each snapshot's source root so the files land at their package paths.
    source_root_stripped_ancestor_init_pys = await MultiGet[Digest](
        Get[Digest](RemovePrefix(snapshot.digest, source_dir_ancestor[0]))
        for snapshot, source_dir_ancestor in zip(
            ancestor_init_py_snapshots, source_dir_ancestors_list
        )
    )
    return AncestorInitPyFiles(source_root_stripped_ancestor_init_pys)
def _is_exported(target: Target) -> bool:
    """Whether the target declares a `provides=` artifact."""
    if not target.has_field(PythonProvidesField):
        return False
    return target[PythonProvidesField].value is not None
@named_rule(desc="Compute distribution's 3rd party requirements")
async def get_requirements(
    dep_owner: DependencyOwner, union_membership: UnionMembership
) -> ExportedTargetRequirements:
    """Compute the install_requires for an exported target's distribution."""
    transitive_targets = await Get[TransitiveTargets](
        Addresses([dep_owner.exported_target.target.address])
    )
    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get[ExportedTarget](OwnedDependency(tgt)) for tgt in ownable_tgts)
    # Partition the closure by whether this exported target owns each member.
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)
    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    #
    # TODO: Note that this logic doesn't account for indirection via dep aggregator targets, of type
    # `target`. But we don't have those in v2 (yet) anyway. Plus, as we move towards buildgen and/or
    # stricter build graph hygiene, it makes sense to require that targets directly declare their
    # true dependencies. Plus, in the specific realm of setup-py, since we must exclude indirect
    # deps across exported target boundaries, it's not a big stretch to just insist that
    # requirements must be direct deps.
    direct_deps_addrs = sorted(
        set(itertools.chain.from_iterable(tgt.get(Dependencies).value or () for tgt in owned_by_us))
    )
    direct_deps_tgts = await Get[Targets](Addresses(direct_deps_addrs))
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in direct_deps_tgts
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)
    # Add the requirements on any exported targets on which we depend.
    exported_targets_we_depend_on = await MultiGet(
        Get[ExportedTarget](OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(et.provides.requirement for et in set(exported_targets_we_depend_on))
    return ExportedTargetRequirements(req_strs)
@named_rule(desc="Find all code to be published in the distribution")
async def get_owned_dependencies(
    dependency_owner: DependencyOwner, union_membership: UnionMembership
) -> OwnedDependencies:
    """Find the dependencies of dependency_owner that are owned by it.

    Includes dependency_owner itself.
    """
    transitive_targets = await Get[TransitiveTargets](
        Addresses([dependency_owner.exported_target.target.address])
    )
    ownable_targets = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    # Keep only closure members whose computed owner is this exported target.
    owners = await MultiGet(Get[ExportedTarget](OwnedDependency(tgt)) for tgt in ownable_targets)
    owned_dependencies = [
        tgt
        for owner, tgt in zip(owners, ownable_targets)
        if owner == dependency_owner.exported_target
    ]
    return OwnedDependencies(OwnedDependency(t) for t in owned_dependencies)
@named_rule(desc="Get exporting owner for target")
async def get_exporting_owner(owned_dependency: OwnedDependency) -> ExportedTarget:
    """Find the exported target that owns the given target (and therefore exports it).

    The owner of T (i.e., the exported target in whose artifact T's code is published) is:
     1. An exported target that depends on T (or is T itself).
     2. Is T's closest filesystem ancestor among those satisfying 1.

    If there are multiple such exported targets at the same degree of ancestry, the ownership
    is ambiguous and an error is raised. If there is no exported target that depends on T
    and is its ancestor, then there is no owner and an error is raised.
    """
    target = owned_dependency.target
    ancestor_addrs = AscendantAddresses(target.address.spec_path)
    ancestor_tgts = await Get[Targets](AddressSpecs((ancestor_addrs,)))
    # Note that addresses sort by (spec_path, target_name), and all these targets are
    # ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
    # address will effectively sort by closeness of ancestry to the given target.
    exported_ancestor_tgts = sorted(
        [t for t in ancestor_tgts if _is_exported(t)], key=lambda t: t.address, reverse=True,
    )
    exported_ancestor_iter = iter(exported_ancestor_tgts)
    for exported_ancestor in exported_ancestor_iter:
        transitive_targets = await Get[TransitiveTargets](Addresses([exported_ancestor.address]))
        if target in transitive_targets.closure:
            owner = exported_ancestor
            # Find any exported siblings of owner that also depend on target. They have the
            # same spec_path as it, so they must immediately follow it in ancestor_iter.
            sibling_owners = []
            sibling = next(exported_ancestor_iter, None)
            while sibling and sibling.address.spec_path == owner.address.spec_path:
                transitive_targets = await Get[TransitiveTargets](Addresses([sibling.address]))
                if target in transitive_targets.closure:
                    sibling_owners.append(sibling)
                sibling = next(exported_ancestor_iter, None)
            if sibling_owners:
                raise AmbiguousOwnerError(
                    f"Exporting owners for {target.address.reference()} are "
                    f"ambiguous. Found {exported_ancestor.address.reference()} and "
                    f"{len(sibling_owners)} others: "
                    f'{", ".join(so.address.reference() for so in sibling_owners)}'
                )
            return ExportedTarget(owner)
    raise NoOwnerError(f"No exported target owner found for {target.address.reference()}")
@named_rule(desc="Set up setuptools")
async def setup_setuptools(setuptools: Setuptools) -> SetuptoolsSetup:
    """Resolve the setuptools requirements into a pex for running setup.py."""
    # Note that this pex has no entrypoint. We use it to run our generated setup.py, which
    # in turn imports from and invokes setuptools.
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="setuptools.pex",
            requirements=PexRequirements(setuptools.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                setuptools.default_interpreter_constraints
            ),
        )
    )
    return SetuptoolsSetup(requirements_pex=requirements_pex,)
def is_ownable_target(tgt: Target, union_membership: UnionMembership) -> bool:
    """Whether `tgt` can be owned by (i.e., published in) an exported target."""
    if tgt.has_field(PythonSources) or tgt.has_field(ResourcesSources):
        return True
    # Also ownable: targets whose sources can be code-generated into Python.
    return tgt.get(Sources).can_generate(PythonSources, union_membership)
def rules():
    """Return the rules and subsystems to register for the setup-py2 goal."""
    return [
        run_setup_pys,
        run_setup_py,
        generate_chroot,
        get_sources,
        get_requirements,
        get_ancestor_init_py,
        get_owned_dependencies,
        get_exporting_owner,
        setup_setuptools,
        SubsystemRule(Setuptools),
    ]
| |
# Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_utils import excutils
from cinder import context
from cinder import exception
from cinder.i18n import _
from cinder import utils
from cinder.volume.drivers.huawei import constants
from cinder.volume import qos_specs
LOG = logging.getLogger(__name__)
class SmartQos(object):
    """Manage Huawei array SmartQoS policies for LUNs."""

    def __init__(self, client):
        self.client = client

    @staticmethod
    def get_qos_by_volume_type(volume_type):
        """Extract and validate Huawei QoS settings from a volume type.

        Returns {} when no back-end QoS applies; raises InvalidInput when
        the specs are malformed (non-positive limits, missing/illegal
        IOType, or conflicting upper/lower limits).
        """
        # We prefer the qos_specs association
        # and override any existing extra-specs settings
        # if present.
        if not volume_type:
            return {}
        qos_specs_id = volume_type.get('qos_specs_id')
        if not qos_specs_id:
            return {}
        qos = {}
        io_type_flag = None
        ctxt = context.get_admin_context()
        consumer = qos_specs.get_qos_specs(ctxt, qos_specs_id)['consumer']
        if consumer == 'front-end':
            # Front-end QoS is enforced by the consumer, not by the array.
            return {}
        kvs = qos_specs.get_qos_specs(ctxt, qos_specs_id)['specs']
        # Bug fix: log message previously read 'sepcs'.
        LOG.info('The QoS specs is: %s.', kvs)
        for k, v in kvs.items():
            if k not in constants.HUAWEI_VALID_KEYS:
                continue
            if k != 'IOType' and int(v) <= 0:
                msg = _('QoS config is wrong. %s must > 0.') % k
                LOG.error(msg)
                raise exception.InvalidInput(reason=msg)
            if k == 'IOType':
                if v not in ['0', '1', '2']:
                    msg = _('Illegal value specified for IOTYPE: 0, 1, or 2.')
                    LOG.error(msg)
                    raise exception.InvalidInput(reason=msg)
                io_type_flag = 1
                qos[k.upper()] = v
            else:
                qos[k.upper()] = v
        if not io_type_flag:
            msg = (_('QoS policy must specify for IOTYPE: 0, 1, or 2, '
                     'QoS policy: %(qos_policy)s ') % {'qos_policy': qos})
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        # QoS policy must specify for IOTYPE and another qos_specs.
        if len(qos) < 2:
            msg = (_('QoS policy must specify for IOTYPE and another '
                     'qos_specs, QoS policy: %(qos_policy)s.')
                   % {'qos_policy': qos})
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        for upper_limit in constants.UPPER_LIMIT_KEYS:
            for lower_limit in constants.LOWER_LIMIT_KEYS:
                if upper_limit in qos and lower_limit in qos:
                    msg = (_('QoS policy upper_limit and lower_limit '
                             'conflict, QoS policy: %(qos_policy)s.')
                           % {'qos_policy': qos})
                    LOG.error(msg)
                    raise exception.InvalidInput(reason=msg)
        return qos

    def _is_high_priority(self, qos):
        """Check QoS priority."""
        # Idiom fix: str.startswith with a tuple replaces `find(...) == 0`.
        return any(key.startswith(('MIN', 'LATENCY')) for key in qos)

    @utils.synchronized('huawei_qos', external=True)
    def add(self, qos, lun_id):
        """Attach a QoS policy to a LUN, reusing an existing policy if possible."""
        policy_id = None
        try:
            # Check QoS priority.
            if self._is_high_priority(qos):
                self.client.change_lun_priority(lun_id)
            # Create QoS policy and activate it.
            version = self.client.find_array_version()
            if version >= constants.ARRAY_VERSION:
                (qos_id, lun_list) = self.client.find_available_qos(qos)
                if qos_id:
                    self.client.add_lun_to_qos(qos_id, lun_id, lun_list)
                else:
                    policy_id = self.client.create_qos_policy(qos, lun_id)
                    self.client.activate_deactivate_qos(policy_id, True)
            else:
                policy_id = self.client.create_qos_policy(qos, lun_id)
                self.client.activate_deactivate_qos(policy_id, True)
        except exception.VolumeBackendAPIException:
            with excutils.save_and_reraise_exception():
                # Roll back a policy we created but failed to activate.
                if policy_id is not None:
                    self.client.delete_qos_policy(policy_id)

    @utils.synchronized('huawei_qos', external=True)
    def remove(self, qos_id, lun_id):
        """Detach a LUN from a QoS policy, deleting the policy if now unused."""
        qos_info = self.client.get_qos_info(qos_id)
        lun_list = self.client.get_lun_list_in_qos(qos_id, qos_info)
        if len(lun_list) <= 1:
            qos_status = qos_info['RUNNINGSTATUS']
            # 2: Active status.
            if qos_status != constants.STATUS_QOS_INACTIVE:
                self.client.activate_deactivate_qos(qos_id, False)
            self.client.delete_qos_policy(qos_id)
        else:
            self.client.remove_lun_from_qos(lun_id, lun_list, qos_id)
class SmartPartition(object):
    """Attach LUNs to a named SmartPartition on the Huawei array."""

    def __init__(self, client):
        self.client = client

    def add(self, opts, lun_id):
        """Add `lun_id` to the partition named in `opts`, when enabled."""
        if opts['smartpartition'] != 'true':
            return
        name = opts['partitionname']
        if not name:
            raise exception.InvalidInput(
                reason=_('Partition name is None, please set '
                         'smartpartition:partitionname in key.'))
        partition_id = self.client.get_partition_id_by_name(name)
        if not partition_id:
            raise exception.InvalidInput(
                reason=(_('Can not find partition id by name %(name)s.')
                        % {'name': name}))
        self.client.add_lun_to_partition(lun_id, partition_id)
class SmartCache(object):
    """Attach LUNs to a named SmartCache partition on the Huawei array."""

    def __init__(self, client):
        self.client = client

    def add(self, opts, lun_id):
        """Add `lun_id` to the cache named in `opts`, when enabled."""
        if opts['smartcache'] != 'true':
            return
        cache_name = opts['cachename']
        if not cache_name:
            raise exception.InvalidInput(
                reason=_('Cache name is None, please set '
                         'smartcache:cachename in key.'))
        cache_id = self.client.get_cache_id_by_name(cache_name)
        if not cache_id:
            raise exception.InvalidInput(
                reason=(_('Can not find cache id by cache name %(name)s.')
                        % {'name': cache_name}))
        self.client.add_lun_to_cache(lun_id, cache_id)
class SmartX(object):
    """Normalize and validate smart-feature extra-spec options."""

    def get_smartx_specs_opts(self, opts):
        """Run every smartx normalizer over `opts` and return the result."""
        # Check that smarttier is 0/1/2/3
        for normalize in (self.get_smarttier_opts,
                          self.get_smartthin_opts,
                          self.get_smartcache_opts,
                          self.get_smartpartition_opts):
            opts = normalize(opts)
        return opts

    def get_smarttier_opts(self, opts):
        """Default the tiering policy, validating explicit values."""
        if opts['smarttier'] != 'true':
            opts['policy'] = '0'
            return opts
        if not opts['policy']:
            opts['policy'] = '1'
        elif opts['policy'] not in ['0', '1', '2', '3']:
            raise exception.InvalidInput(
                reason=(_('Illegal value specified for smarttier: '
                          'set to either 0, 1, 2, or 3.')))
        return opts

    def get_smartthin_opts(self, opts):
        """Derive LUNType from the thin/thick provisioning flags."""
        thin = opts['thin_provisioning_support'] == 'true'
        thick = opts['thick_provisioning_support'] == 'true'
        if thin and thick:
            raise exception.InvalidInput(
                reason=(_('Illegal value specified for thin: '
                          'Can not set thin and thick at the same time.')))
        if thin:
            opts['LUNType'] = constants.THIN_LUNTYPE
        elif thick:
            opts['LUNType'] = constants.THICK_LUNTYPE
        return opts

    def get_smartcache_opts(self, opts):
        """Require a cache name when smartcache is enabled."""
        if opts['smartcache'] == 'true':
            if not opts['cachename']:
                raise exception.InvalidInput(
                    reason=_('Cache name is None, please set '
                             'smartcache:cachename in key.'))
        else:
            opts['cachename'] = None
        return opts

    def get_smartpartition_opts(self, opts):
        """Require a partition name when smartpartition is enabled."""
        if opts['smartpartition'] == 'true':
            if not opts['partitionname']:
                raise exception.InvalidInput(
                    reason=_('Partition name is None, please set '
                             'smartpartition:partitionname in key.'))
        else:
            opts['partitionname'] = None
        return opts
| |
"""archvyrt ubuntu provisioner module"""
# stdlib
import logging
import os
# archvyrt
import archvyrt.tools as tools
from .base import LinuxProvisioner
LOG = logging.getLogger(__name__)
class UbuntuProvisioner(LinuxProvisioner):
"""
Ubuntu Provisioner
"""
def _install(self):
    """
    Ubuntu base installation

    Bootstraps a bionic base system into the target directory and then
    refreshes the package index inside the chroot.
    """
    LOG.info('Do Ubuntu installation')
    # Non-interactive apt avoids debconf prompts during provisioning.
    apt_env = {'DEBIAN_FRONTEND': "noninteractive"}
    self.run(
        tools.DEBOOTSTRAP,
        'bionic',
        self.target,
        'http://ch.archive.ubuntu.com/ubuntu/'
    )
    self.runchroot(
        'apt-get',
        'update',
        add_env=apt_env
    )
def _network_config(self):
"""
Domain network configuration
"""
LOG.info('Setup guest networking')
apt_env = {'DEBIAN_FRONTEND': "noninteractive"}
self.runchroot(
'apt-get',
'-qy',
'install',
'ifupdown',
add_env=apt_env
)
self.runchroot(
'apt-get',
'-qy',
'purge',
'networkd-dispatcher',
'netplan.io',
'nplan',
add_env=apt_env
)
self.runchroot(
'systemctl',
'disable',
'systemd-resolved',
)
self.runchroot(
'rm',
'/etc/resolv.conf',
)
# get provisioned interfaces
interfaces = self.domain.xml.find('devices').findall('interface')
dns_servers = []
addresses = []
udev_lines = []
for interface, network in zip(interfaces, self.domain.networks):
# update interface xml with provisioned interface
# this also includes pci slots and mac-addresses
network.xml = interface
if network.ipv4_address:
addresses.append(network.ipv4_address.ip)
if network.ipv6_address:
addresses.append(network.ipv6_address.ip)
if network.mac:
udev_lines.append(
'SUBSYSTEM=="net", ACTION=="add", '
'ATTR{address}=="%s", NAME="%s"' % (network.mac,
network.name)
)
self.writetargetfile(
'/etc/network/interfaces',
['auto lo',
'iface lo inet loopback',
'',
'source /etc/network/interfaces.d/*']
)
self.writetargetfile(
'/etc/network/interfaces.d/%s' % network.name,
network.interfaces
)
if network.dns:
for server in network.dns:
dns_servers.append('nameserver %s' % str(server))
self.writetargetfile(
'/etc/udev/rules.d/10-network.rules',
udev_lines
)
self.writetargetfile('/etc/hostname', [self.domain.hostname, ])
host_entries = [
'127.0.0.1 localhost.localdomain localhost',
'::1 localhost.localdomain localhost'
]
if addresses:
for address in addresses:
host_entries.append(
'%s %s %s' % (
address,
self.domain.fqdn,
self.domain.hostname
)
)
self.writetargetfile('/etc/hosts', host_entries)
if dns_servers:
self.writetargetfile(
'/etc/resolv.conf',
dns_servers
)
def _locale_config(self):
"""
Domain locale/language settings
"""
pass
def _boot_config(self):
"""
Domain bootloader, initrd configuration
"""
LOG.info('Setup boot configuration')
apt_env = {'DEBIAN_FRONTEND': "noninteractive"}
self.runchroot(
'apt-get',
'-qy',
'install',
'grub-pc',
'linux-image-virtual',
add_env=apt_env
)
self.runchroot(
'grub-install',
'--target=i386-pc',
'/dev/nbd0'
)
# Enable serial console
self.runchroot(
'systemctl',
'enable',
'getty@ttyS0.service'
)
# Remove quiet and splash option
self.runchroot(
'sed',
'-i',
# pylint: disable=anomalous-backslash-in-string
's/^\(GRUB_CMDLINE_LINUX_DEFAULT=\).*/\\1""/',
'/etc/default/grub'
)
self.runchroot(
'update-grub',
)
# With nbd devices, grub-mkconfig does not use the UUID/LABEL
# So change it in the resulting file
self.run(
tools.SED,
'-i',
'-e',
# pylint: disable=anomalous-backslash-in-string
's/vmlinuz-\(.*\) root=[^ ]*/vmlinuz-\\1 root=UUID=%s/' %
self._uuid['ext4']['/'],
'%s/boot/grub/grub.cfg' % self.target
)
def _access_config(self):
"""
Domain access configuration such as sudo/ssh and local users
"""
LOG.info('Setup ssh/local user access')
apt_env = {'DEBIAN_FRONTEND': "noninteractive"}
self.runchroot(
'apt-get',
'-qy',
'install',
'ssh',
add_env=apt_env
)
if self.domain.password:
self.runchroot(
'usermod',
'-p',
self.domain.password,
'root'
)
if self.domain.sshkeys:
authorized_keys = []
for key, value in self.domain.sshkeys.items():
authorized_keys.append(
"%s %s %s" % (value['type'], value['key'], key)
)
os.mkdir('%s/root/.ssh' % self.target)
self.writetargetfile(
'/root/.ssh/authorized_keys',
authorized_keys
)
| |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Job registries."""
from core.domain import activity_jobs_one_off
from core.domain import collection_jobs_one_off
from core.domain import email_jobs_one_off
from core.domain import exp_jobs_one_off
from core.domain import feedback_jobs_continuous
from core.domain import prod_validation_jobs_one_off
from core.domain import question_jobs_one_off
from core.domain import recommendations_jobs_one_off
from core.domain import skill_jobs_one_off
from core.domain import stats_jobs_continuous
from core.domain import stats_jobs_one_off
from core.domain import story_jobs_one_off
from core.domain import topic_jobs_one_off
from core.domain import user_jobs_continuous
from core.domain import user_jobs_one_off
# List of all manager classes for one-off batch jobs for which to show controls
# on the admin dashboard.
ONE_OFF_JOB_MANAGERS = [
    activity_jobs_one_off.IndexAllActivitiesJobManager,
    collection_jobs_one_off.CollectionMigrationOneOffJob,
    email_jobs_one_off.EmailHashRegenerationOneOffJob,
    # Exploration jobs.
    exp_jobs_one_off.ExpSummariesContributorsOneOffJob,
    exp_jobs_one_off.ExpSummariesCreationOneOffJob,
    exp_jobs_one_off.ExplorationContributorsSummaryOneOffJob,
    exp_jobs_one_off.ExplorationFirstPublishedOneOffJob,
    exp_jobs_one_off.ExplorationMigrationJobManager,
    exp_jobs_one_off.ExplorationValidityJobManager,
    exp_jobs_one_off.HintsAuditOneOffJob,
    exp_jobs_one_off.ItemSelectionInteractionOneOffJob,
    exp_jobs_one_off.ViewableExplorationsAuditJob,
    exp_jobs_one_off.ExplorationContentValidationJobForCKEditor,
    exp_jobs_one_off.InteractionCustomizationArgsValidationJob,
    exp_jobs_one_off.TranslatorToVoiceArtistOneOffJob,
    exp_jobs_one_off.DeleteStateIdMappingModelsOneOffJob,
    question_jobs_one_off.QuestionMigrationOneOffJob,
    recommendations_jobs_one_off.ExplorationRecommendationsOneOffJob,
    skill_jobs_one_off.SkillMigrationOneOffJob,
    # Statistics jobs.
    stats_jobs_one_off.PlaythroughAudit,
    stats_jobs_one_off.RecomputeStatisticsOneOffJob,
    stats_jobs_one_off.RecomputeStatisticsValidationCopyOneOffJob,
    stats_jobs_one_off.RegenerateMissingStatsModelsOneOffJob,
    stats_jobs_one_off.StatisticsAuditV1,
    stats_jobs_one_off.StatisticsAuditV2,
    stats_jobs_one_off.StatisticsAudit,
    story_jobs_one_off.StoryMigrationOneOffJob,
    topic_jobs_one_off.TopicMigrationOneOffJob,
    # User jobs.
    user_jobs_one_off.CleanupActivityIdsFromUserSubscriptionsModelOneOffJob,
    user_jobs_one_off.DashboardSubscriptionsOneOffJob,
    user_jobs_one_off.LongUserBiosOneOffJob,
    user_jobs_one_off.UserContributionsOneOffJob,
    user_jobs_one_off.UserFirstContributionMsecOneOffJob,
    user_jobs_one_off.UserLastExplorationActivityOneOffJob,
    user_jobs_one_off.UserProfilePictureOneOffJob,
    user_jobs_one_off.UsernameLengthDistributionOneOffJob,
]
# List of all manager classes for prod validation one-off batch jobs for which
# to show controls on the admin dashboard.
AUDIT_JOB_MANAGERS = [
    prod_validation_jobs_one_off.ActivityReferencesModelAuditOneOffJob,
    prod_validation_jobs_one_off.RoleQueryAuditModelAuditOneOffJob,
    prod_validation_jobs_one_off.ClassifierTrainingJobModelAuditOneOffJob,
    (
        prod_validation_jobs_one_off
        .TrainingJobExplorationMappingModelAuditOneOffJob),
    prod_validation_jobs_one_off.CollectionModelAuditOneOffJob,
    prod_validation_jobs_one_off.CollectionSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.CollectionSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.CollectionRightsModelAuditOneOffJob,
    (
        prod_validation_jobs_one_off
        .CollectionRightsSnapshotMetadataModelAuditOneOffJob),
    (
        prod_validation_jobs_one_off
        .CollectionRightsSnapshotContentModelAuditOneOffJob),
    prod_validation_jobs_one_off.CollectionCommitLogEntryModelAuditOneOffJob,
    prod_validation_jobs_one_off.CollectionSummaryModelAuditOneOffJob,
    prod_validation_jobs_one_off.ConfigPropertyModelAuditOneOffJob,
    (
        prod_validation_jobs_one_off
        .ConfigPropertySnapshotMetadataModelAuditOneOffJob),
    (
        prod_validation_jobs_one_off
        .ConfigPropertySnapshotContentModelAuditOneOffJob),
    prod_validation_jobs_one_off.SentEmailModelAuditOneOffJob,
    prod_validation_jobs_one_off.BulkEmailModelAuditOneOffJob,
    (
        prod_validation_jobs_one_off
        .GeneralFeedbackEmailReplyToIdModelAuditOneOffJob),
    prod_validation_jobs_one_off.ExplorationModelAuditOneOffJob,
    prod_validation_jobs_one_off.ExplorationSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.ExplorationSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.ExplorationRightsModelAuditOneOffJob,
    (
        prod_validation_jobs_one_off
        .ExplorationRightsSnapshotMetadataModelAuditOneOffJob),
    (
        prod_validation_jobs_one_off
        .ExplorationRightsSnapshotContentModelAuditOneOffJob),
    prod_validation_jobs_one_off.ExplorationCommitLogEntryModelAuditOneOffJob,
    prod_validation_jobs_one_off.ExpSummaryModelAuditOneOffJob,
    prod_validation_jobs_one_off.GeneralFeedbackThreadModelAuditOneOffJob,
    prod_validation_jobs_one_off.GeneralFeedbackMessageModelAuditOneOffJob,
    prod_validation_jobs_one_off.GeneralFeedbackThreadUserModelAuditOneOffJob,
    prod_validation_jobs_one_off.FeedbackAnalyticsModelAuditOneOffJob,
    prod_validation_jobs_one_off.UnsentFeedbackEmailModelAuditOneOffJob,
    prod_validation_jobs_one_off.ExplorationRecommendationsModelAuditOneOffJob,
    prod_validation_jobs_one_off.FileMetadataModelAuditOneOffJob,
    (
        prod_validation_jobs_one_off
        .FileMetadataSnapshotMetadataModelAuditOneOffJob),
    prod_validation_jobs_one_off.FileMetadataSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.FileModelAuditOneOffJob,
    prod_validation_jobs_one_off.FileSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.FileSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.JobModelAuditOneOffJob,
    prod_validation_jobs_one_off.ContinuousComputationModelAuditOneOffJob,
    prod_validation_jobs_one_off.QuestionModelAuditOneOffJob,
    prod_validation_jobs_one_off.QuestionSkillLinkModelAuditOneOffJob,
    prod_validation_jobs_one_off.QuestionSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.QuestionSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.QuestionRightsModelAuditOneOffJob,
    (
        prod_validation_jobs_one_off
        .QuestionRightsSnapshotMetadataModelAuditOneOffJob),
    (
        prod_validation_jobs_one_off
        .QuestionRightsSnapshotContentModelAuditOneOffJob),
    prod_validation_jobs_one_off.QuestionCommitLogEntryModelAuditOneOffJob,
    prod_validation_jobs_one_off.QuestionSummaryModelAuditOneOffJob,
    # NOTE: ExplorationRecommendationsModelAuditOneOffJob was previously
    # listed a second time here; the duplicate entry has been removed.
    prod_validation_jobs_one_off.TopicSimilaritiesModelAuditOneOffJob,
    prod_validation_jobs_one_off.SkillModelAuditOneOffJob,
    prod_validation_jobs_one_off.SkillSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.SkillSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.SkillRightsModelAuditOneOffJob,
    prod_validation_jobs_one_off.SkillRightsSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.SkillRightsSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.SkillCommitLogEntryModelAuditOneOffJob,
    prod_validation_jobs_one_off.SkillSummaryModelAuditOneOffJob,
    prod_validation_jobs_one_off.StoryModelAuditOneOffJob,
    prod_validation_jobs_one_off.StorySnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.StorySnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.StoryRightsModelAuditOneOffJob,
    prod_validation_jobs_one_off.StoryRightsSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.StoryRightsSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.StoryCommitLogEntryModelAuditOneOffJob,
    prod_validation_jobs_one_off.StorySummaryModelAuditOneOffJob,
    prod_validation_jobs_one_off.GeneralSuggestionModelAuditOneOffJob,
    prod_validation_jobs_one_off.ReviewerRotationTrackingModelAuditOneOffJob,
    prod_validation_jobs_one_off.TopicModelAuditOneOffJob,
    prod_validation_jobs_one_off.TopicSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.TopicSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.TopicRightsModelAuditOneOffJob,
    prod_validation_jobs_one_off.TopicRightsSnapshotMetadataModelAuditOneOffJob,
    prod_validation_jobs_one_off.TopicRightsSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.TopicCommitLogEntryModelAuditOneOffJob,
    prod_validation_jobs_one_off.TopicSummaryModelAuditOneOffJob,
    prod_validation_jobs_one_off.SubtopicPageModelAuditOneOffJob,
    (
        prod_validation_jobs_one_off
        .SubtopicPageSnapshotMetadataModelAuditOneOffJob),
    prod_validation_jobs_one_off.SubtopicPageSnapshotContentModelAuditOneOffJob,
    prod_validation_jobs_one_off.SubtopicPageCommitLogEntryModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserSettingsModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserNormalizedNameAuditOneOffJob,
    prod_validation_jobs_one_off.CompletedActivitiesModelAuditOneOffJob,
    prod_validation_jobs_one_off.IncompleteActivitiesModelAuditOneOffJob,
    prod_validation_jobs_one_off.ExpUserLastPlaythroughModelAuditOneOffJob,
    prod_validation_jobs_one_off.LearnerPlaylistModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserContributionsModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserEmailPreferencesModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserSubscriptionsModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserSubscribersModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserRecentChangesBatchModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserStatsModelAuditOneOffJob,
    prod_validation_jobs_one_off.ExplorationUserDataModelAuditOneOffJob,
    prod_validation_jobs_one_off.CollectionProgressModelAuditOneOffJob,
    prod_validation_jobs_one_off.StoryProgressModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserQueryModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserBulkEmailsModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserSkillMasteryModelAuditOneOffJob,
    prod_validation_jobs_one_off.UserContributionScoringModelAuditOneOffJob
]
# List of all ContinuousComputation managers to show controls for on the
# admin dashboard.
# NOTE TO DEVELOPERS: When a new ContinuousComputation manager is defined,
# it should be registered here.
ALL_CONTINUOUS_COMPUTATION_MANAGERS = [
    # ContinuousComputationEventDispatcher below iterates this list to route
    # incoming events, so every aggregator must be registered here.
    feedback_jobs_continuous.FeedbackAnalyticsAggregator,
    stats_jobs_continuous.InteractionAnswerSummariesAggregator,
    user_jobs_continuous.DashboardRecentUpdatesAggregator,
    user_jobs_continuous.UserStatsAggregator,
]
class ContinuousComputationEventDispatcher(object):
    """Dispatches events to the relevant ContinuousComputation classes."""

    @classmethod
    def dispatch_event(cls, event_type, *args, **kwargs):
        """Route an incoming event to every registered
        ContinuousComputation manager that listens to its type.
        """
        for manager in ALL_CONTINUOUS_COMPUTATION_MANAGERS:
            if event_type not in manager.get_event_types_listened_to():
                continue
            manager.on_incoming_event(event_type, *args, **kwargs)
| |
from __future__ import unicode_literals
import json
import mimetypes
import os
import re
import sys
from copy import copy
from importlib import import_module
from io import BytesIO
from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import ISO_8859_1, UTF_8, WSGIRequest
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.template import TemplateDoesNotExist
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils import six
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.functional import SimpleLazyObject, curry
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.six.moves.urllib.parse import urljoin, urlparse, urlsplit
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')
# Fixed MIME boundary used for every multipart form post made by the client.
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
# Extracts the charset parameter from a Content-Type header value.
CONTENT_TYPE_RE = re.compile(r'.*; charset=([\w\d-]+);?')
class RedirectCycleError(Exception):
    """Raised when the test client is asked to follow a redirect loop."""

    def __init__(self, message, last_response):
        super(RedirectCycleError, self).__init__(message)
        # Keep the final response and its full redirect chain around so the
        # test can inspect how the loop came about.
        self.last_response = last_response
        self.redirect_chain = last_response.redirect_chain
class FakePayload(object):
    """
    A wrapper around BytesIO that restricts what can be read since data from
    the network can't be seeked and cannot be read outside of its content
    length. This makes sure that views can't do anything under the test client
    that wouldn't work in Real Life.
    """
    def __init__(self, content=None):
        # Internal buffer plus a byte count tracking the unread remainder.
        self.__content = BytesIO()
        self.__len = 0
        self.read_started = False
        if content is not None:
            self.write(content)

    def __len__(self):
        # Number of bytes still available to read.
        return self.__len

    def read(self, num_bytes=None):
        """Read up to ``num_bytes`` (all remaining bytes when None)."""
        if not self.read_started:
            # First read: rewind past anything written during setup.
            self.__content.seek(0)
            self.read_started = True
        if num_bytes is None:
            num_bytes = self.__len or 0
        assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
        content = self.__content.read(num_bytes)
        self.__len -= num_bytes
        return content

    def write(self, content):
        """Append ``content`` to the payload; disallowed once reading began."""
        if self.read_started:
            # Fix: previous message read "after he's been read".
            raise ValueError("Unable to write a payload after it has been read")
        content = force_bytes(content)
        self.__content.write(content)
        self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
    # Wrap a streaming response's content iterator so that close() runs only
    # after the content has been fully consumed (or iteration aborts),
    # mirroring a real WSGI server calling close() on completion.
    try:
        for item in iterable:
            yield item
    finally:
        # Temporarily detach close_old_connections so the request_finished
        # signal emitted by close() does not close connections mid-test;
        # reattach it immediately afterwards.
        request_finished.disconnect(close_old_connections)
        close()  # will fire request_finished
        request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
    """
    Simulate the behavior of most Web servers by removing the content of
    responses for HEAD requests, 1xx, 204, and 304 responses. Ensures
    compliance with RFC 7230, section 3.3.3.
    """
    status = response.status_code
    bodyless_status = status in (204, 304) or 100 <= status < 200
    if bodyless_status:
        if not response.streaming:
            response.content = b''
        else:
            response.streaming_content = []
        # These statuses never carry a body, so advertise zero length.
        response['Content-Length'] = '0'
    if request.method == 'HEAD':
        # HEAD keeps the headers (including any Content-Length) but no body.
        if not response.streaming:
            response.content = b''
        else:
            response.streaming_content = []
    return response
class ClientHandler(BaseHandler):
    """
    A HTTP Handler that can be used for testing purposes. Uses the WSGI
    interface to compose requests, but returns the raw HttpResponse object with
    the originating WSGIRequest attached to its ``wsgi_request`` attribute.
    """
    def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
        # When False, CsrfViewMiddleware is bypassed via the flag set below
        # in __call__ (the test Client passes False by default).
        self.enforce_csrf_checks = enforce_csrf_checks
        super(ClientHandler, self).__init__(*args, **kwargs)
    def __call__(self, environ):
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._middleware_chain is None:
            self.load_middleware()
        # Send request_started without letting close_old_connections run,
        # so in-progress test DB connections stay open; reconnect after.
        request_started.disconnect(close_old_connections)
        request_started.send(sender=self.__class__, environ=environ)
        request_started.connect(close_old_connections)
        request = WSGIRequest(environ)
        # sneaky little hack so that we can easily get round
        # CsrfViewMiddleware. This makes life easier, and is probably
        # required for backwards compatibility with external tests against
        # admin views.
        request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
        # Request goes through middleware.
        response = self.get_response(request)
        # Simulate behaviors of most Web servers.
        conditional_content_removal(request, response)
        # Attach the originating request to the response so that it could be
        # later retrieved.
        response.wsgi_request = request
        # We're emulating a WSGI server; we must call the close method
        # on completion.
        if response.streaming:
            response.streaming_content = closing_iterator_wrapper(
                response.streaming_content, response.close)
        else:
            request_finished.disconnect(close_old_connections)
            response.close()  # will fire request_finished
            request_finished.connect(close_old_connections)
        return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Record every rendered template and its context into *store*.

    The context is copied so that it is an accurate representation at the
    time of rendering.
    """
    templates = store.setdefault('templates', [])
    templates.append(template)
    try:
        contexts = store['context']
    except KeyError:
        contexts = store['context'] = ContextList()
    contexts.append(copy(context))
def encode_multipart(boundary, data):
    """
    Encode multipart POST data from a dictionary of form values.

    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    def is_file(thing):
        # Not by any means perfect, but good enough for our purposes.
        return hasattr(thing, "read") and callable(thing.read)

    def field_lines(name, payload):
        # One complete non-file form field, still as text.
        return [
            '--%s' % boundary,
            'Content-Disposition: form-data; name="%s"' % name,
            '',
            payload,
        ]

    lines = []
    # Each bit of the multipart form data could be either a form value or a
    # file, or a *list* of form values and/or files. Remember that HTTP field
    # names can be duplicated!
    for (key, value) in data.items():
        if is_file(value):
            lines.extend(encode_file(boundary, key, value))
        elif not isinstance(value, six.string_types) and is_iterable(value):
            for item in value:
                if is_file(item):
                    lines.extend(encode_file(boundary, key, item))
                else:
                    lines.extend(
                        to_bytes(line) for line in field_lines(key, item))
        else:
            lines.extend(to_bytes(line) for line in field_lines(key, value))
    lines.append(to_bytes('--%s--' % boundary))
    lines.append(b'')
    return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
    """Return the multipart lines (as bytes) encoding *file* under *key*."""
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    # file.name might not be a string. For example, it's an int for
    # tempfile.TemporaryFile().
    if hasattr(file, 'name') and isinstance(file.name, six.string_types):
        filename = os.path.basename(file.name)
    else:
        filename = ''

    # An explicit content_type attribute wins; otherwise guess from the
    # filename; otherwise fall back to a generic binary type.
    if hasattr(file, 'content_type'):
        content_type = file.content_type
    elif filename:
        content_type = mimetypes.guess_type(filename)[0]
    else:
        content_type = None
    if content_type is None:
        content_type = 'application/octet-stream'
    if not filename:
        filename = key
    return [
        to_bytes('--%s' % boundary),
        to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
                 % (key, filename)),
        to_bytes('Content-Type: %s' % content_type),
        b'',
        to_bytes(file.read())
    ]
class RequestFactory(object):
    """
    Class that lets you create mock Request objects for use in testing.
    Usage:
    rf = RequestFactory()
    get_request = rf.get('/hello/')
    post_request = rf.post('/submit/', {'foo': 'bar'})
    Once you have a request object you can pass it to any view function,
    just as if that view had been hooked up using a URLconf.

    NOTE: the str(...) wrappers throughout produce native strings on both
    Python 2 and 3, as the WSGI spec requires for environ values.
    """
    def __init__(self, **defaults):
        # ``defaults`` are extra environ entries applied to every request;
        # cookies persist across requests made with this factory.
        self.defaults = defaults
        self.cookies = SimpleCookie()
        self.errors = BytesIO()
    def _base_environ(self, **request):
        """
        The base environment for a request.
        """
        # This is a minimal valid WSGI environ dictionary, plus:
        # - HTTP_COOKIE: for cookie support,
        # - REMOTE_ADDR: often useful, see #8551.
        # See http://www.python.org/dev/peps/pep-3333/#environ-variables
        environ = {
            'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
            'PATH_INFO': str('/'),
            'REMOTE_ADDR': str('127.0.0.1'),
            'REQUEST_METHOD': str('GET'),
            'SCRIPT_NAME': str(''),
            'SERVER_NAME': str('testserver'),
            'SERVER_PORT': str('80'),
            'SERVER_PROTOCOL': str('HTTP/1.1'),
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': str('http'),
            'wsgi.input': FakePayload(b''),
            'wsgi.errors': self.errors,
            'wsgi.multiprocess': True,
            'wsgi.multithread': False,
            'wsgi.run_once': False,
        }
        # Factory-wide defaults first, then per-request overrides.
        environ.update(self.defaults)
        environ.update(request)
        return environ
    def request(self, **request):
        "Construct a generic request object."
        return WSGIRequest(self._base_environ(**request))
    def _encode_data(self, data, content_type):
        # Identity comparison: only the module-level MULTIPART_CONTENT
        # constant selects multipart encoding.
        if content_type is MULTIPART_CONTENT:
            return encode_multipart(BOUNDARY, data)
        else:
            # Encode the content so that the byte representation is correct.
            match = CONTENT_TYPE_RE.match(content_type)
            if match:
                charset = match.group(1)
            else:
                charset = settings.DEFAULT_CHARSET
            return force_bytes(data, encoding=charset)
    def _get_path(self, parsed):
        path = force_str(parsed[2])
        # If there are parameters, add them
        if parsed[3]:
            path += str(";") + force_str(parsed[3])
        path = uri_to_iri(path).encode(UTF_8)
        # Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
        # decoded with ISO-8859-1. We replicate this behavior here.
        # Refs comment in `get_bytes_from_wsgi()`.
        return path.decode(ISO_8859_1) if six.PY3 else path
    def get(self, path, data=None, secure=False, **extra):
        "Construct a GET request."
        data = {} if data is None else data
        r = {
            'QUERY_STRING': urlencode(data, doseq=True),
        }
        r.update(extra)
        return self.generic('GET', path, secure=secure, **r)
    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             secure=False, **extra):
        "Construct a POST request."
        data = {} if data is None else data
        post_data = self._encode_data(data, content_type)
        return self.generic('POST', path, post_data, content_type,
                            secure=secure, **extra)
    def head(self, path, data=None, secure=False, **extra):
        "Construct a HEAD request."
        data = {} if data is None else data
        r = {
            'QUERY_STRING': urlencode(data, doseq=True),
        }
        r.update(extra)
        return self.generic('HEAD', path, secure=secure, **r)
    def trace(self, path, secure=False, **extra):
        "Construct a TRACE request."
        return self.generic('TRACE', path, secure=secure, **extra)
    def options(self, path, data='', content_type='application/octet-stream',
                secure=False, **extra):
        "Construct an OPTIONS request."
        return self.generic('OPTIONS', path, data, content_type,
                            secure=secure, **extra)
    def put(self, path, data='', content_type='application/octet-stream',
            secure=False, **extra):
        "Construct a PUT request."
        return self.generic('PUT', path, data, content_type,
                            secure=secure, **extra)
    def patch(self, path, data='', content_type='application/octet-stream',
              secure=False, **extra):
        "Construct a PATCH request."
        return self.generic('PATCH', path, data, content_type,
                            secure=secure, **extra)
    def delete(self, path, data='', content_type='application/octet-stream',
               secure=False, **extra):
        "Construct a DELETE request."
        return self.generic('DELETE', path, data, content_type,
                            secure=secure, **extra)
    def generic(self, method, path, data='',
                content_type='application/octet-stream', secure=False,
                **extra):
        """Constructs an arbitrary HTTP request."""
        parsed = urlparse(force_str(path))
        data = force_bytes(data, settings.DEFAULT_CHARSET)
        r = {
            'PATH_INFO': self._get_path(parsed),
            'REQUEST_METHOD': str(method),
            'SERVER_PORT': str('443') if secure else str('80'),
            'wsgi.url_scheme': str('https') if secure else str('http'),
        }
        if data:
            # Only requests with a body get CONTENT_LENGTH/CONTENT_TYPE.
            r.update({
                'CONTENT_LENGTH': len(data),
                'CONTENT_TYPE': str(content_type),
                'wsgi.input': FakePayload(data),
            })
        r.update(extra)
        # If QUERY_STRING is absent or empty, we want to extract it from the URL.
        if not r.get('QUERY_STRING'):
            query_string = force_bytes(parsed[4])
            # WSGI requires latin-1 encoded strings. See get_path_info().
            if six.PY3:
                query_string = query_string.decode('iso-8859-1')
            r['QUERY_STRING'] = query_string
        return self.request(**r)
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
super(Client, self).__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.exc_info = None
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
@property
def session(self):
"""
Obtains the current session variables.
"""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
    def request(self, **request):
        """
        The master request method. Composes the environment dictionary
        and passes to the handler, returning the result of the handler.
        Assumes defaults for the query environment, which can be overridden
        using the arguments to the request.
        """
        environ = self._base_environ(**request)
        # Curry a data dictionary into an instance of the template renderer
        # callback function.
        data = {}
        on_template_render = curry(store_rendered_templates, data)
        signal_uid = "template-render-%s" % id(request)
        signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
        # Capture exceptions created by the handler.
        exception_uid = "request-exception-%s" % id(request)
        got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
        try:
            try:
                response = self.handler(environ)
            except TemplateDoesNotExist as e:
                # If the view raises an exception, Django will attempt to show
                # the 500.html template. If that template is not available,
                # we should ignore the error in favor of re-raising the
                # underlying exception that caused the 500 error. Any other
                # template found to be missing during view error handling
                # should be reported as-is.
                if e.args != ('500.html',):
                    raise
            # Look for a signalled exception, clear the current context
            # exception data, then re-raise the signalled exception.
            # Also make sure that the signalled exception is cleared from
            # the local cache!
            if self.exc_info:
                exc_info = self.exc_info
                self.exc_info = None
                six.reraise(*exc_info)
            # Save the client and request that stimulated the response.
            response.client = self
            response.request = request
            # Add any rendered template detail to the response.
            response.templates = data.get("templates", [])
            response.context = data.get("context")
            # Lazily parse the body as JSON on first access.
            response.json = curry(self._parse_json, response)
            # Attach the ResolverMatch instance to the response
            response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
            # Flatten a single context. Not really necessary anymore thanks to
            # the __getattr__ flattening in ContextList, but has some edge-case
            # backwards-compatibility implications.
            if response.context and len(response.context) == 1:
                response.context = response.context[0]
            # Update persistent cookie data.
            if response.cookies:
                self.cookies.update(response.cookies)
            return response
        finally:
            # Always detach the per-request signal receivers, even on error.
            signals.template_rendered.disconnect(dispatch_uid=signal_uid)
            got_request_exception.disconnect(dispatch_uid=exception_uid)
def get(self, path, data=None, follow=False, secure=False, **extra):
"""
Requests a response from the server using GET.
"""
response = super(Client, self).get(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""
Requests a response from the server using POST.
"""
response = super(Client, self).post(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""
Request a response from the server using HEAD.
"""
response = super(Client, self).head(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
response = super(Client, self).options(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
        follow=False, secure=False, **extra):
    """Send a resource to the server using PUT.

    Redirects are resolved when ``follow`` is True.
    """
    response = super(Client, self).put(
        path, data=data, content_type=content_type, secure=secure, **extra)
    if not follow:
        return response
    return self._handle_redirects(response, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
          follow=False, secure=False, **extra):
    """Send a resource to the server using PATCH.

    Redirects are resolved when ``follow`` is True.
    """
    response = super(Client, self).patch(
        path, data=data, content_type=content_type, secure=secure, **extra)
    if not follow:
        return response
    return self._handle_redirects(response, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
           follow=False, secure=False, **extra):
    """Send a DELETE request to the server.

    Redirects are resolved when ``follow`` is True.
    """
    response = super(Client, self).delete(
        path, data=data, content_type=content_type, secure=secure, **extra)
    if not follow:
        return response
    return self._handle_redirects(response, **extra)
def trace(self, path, data='', follow=False, secure=False, **extra):
    """Send a TRACE request to the server.

    Redirects are resolved when ``follow`` is True.
    """
    response = super(Client, self).trace(
        path, data=data, secure=secure, **extra)
    if not follow:
        return response
    return self._handle_redirects(response, **extra)
def login(self, **credentials):
    """Log the test client in as the user matching ``credentials``.

    Returns True when authentication succeeds and the session has been
    set up; False when the credentials are rejected.
    """
    from django.contrib.auth import authenticate
    user = authenticate(**credentials)
    if not user:
        return False
    self._login(user)
    return True
def force_login(self, user, backend=None):
    """Log ``user`` in without any credential check.

    When ``backend`` is omitted, the first configured authentication
    backend is assumed.
    """
    chosen_backend = backend
    if chosen_backend is None:
        chosen_backend = settings.AUTHENTICATION_BACKENDS[0]
    user.backend = chosen_backend
    self._login(user, chosen_backend)
def _login(self, user, backend=None):
    """Create a session for ``user`` and store the session key in the
    client's cookie jar so subsequent requests are authenticated."""
    from django.contrib.auth import login
    engine = import_module(settings.SESSION_ENGINE)
    # Create a fake request to store login details.
    request = HttpRequest()
    if self.session:
        # Reuse the client's existing session if one is active.
        request.session = self.session
    else:
        request.session = engine.SessionStore()
    login(request, user, backend)
    # Save the session values.
    request.session.save()
    # Set the cookie to represent the session.
    session_cookie = settings.SESSION_COOKIE_NAME
    self.cookies[session_cookie] = request.session.session_key
    # Mirror the attributes a real session cookie would carry.
    cookie_data = {
        'max-age': None,
        'path': '/',
        'domain': settings.SESSION_COOKIE_DOMAIN,
        'secure': settings.SESSION_COOKIE_SECURE or None,
        'expires': None,
    }
    self.cookies[session_cookie].update(cookie_data)
def logout(self):
    """
    Removes the authenticated user's cookies and session object.
    Causes the authenticated user to be logged out.
    """
    from django.contrib.auth import get_user, logout
    request = HttpRequest()
    engine = import_module(settings.SESSION_ENGINE)
    if self.session:
        request.session = self.session
        # Resolve the current user so logout signals receive it.
        request.user = get_user(request)
    else:
        # No active session: log out against a fresh, empty session.
        request.session = engine.SessionStore()
    logout(request)
    # Drop all client-side state, including the session cookie.
    self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
    """Return the response body decoded from JSON.

    The decoded value is cached on the response as ``_json`` so repeated
    access parses the content only once. Raises ValueError when the
    response does not declare a JSON content type.
    """
    if hasattr(response, '_json'):
        return response._json
    if 'application/json' not in response.get('Content-Type'):
        raise ValueError(
            'Content-Type header is "{0}", not "application/json"'
            .format(response.get('Content-Type'))
        )
    response._json = json.loads(response.content.decode(), **extra)
    return response._json
def _handle_redirects(self, response, **extra):
    """Follows any redirects by requesting responses from the server using GET.

    Each hop is recorded in ``response.redirect_chain`` as a
    ``(url, status_code)`` tuple; RedirectCycleError is raised on a loop
    or after more than 20 hops.
    """
    response.redirect_chain = []
    while response.status_code in (301, 302, 303, 307):
        response_url = response.url
        redirect_chain = response.redirect_chain
        redirect_chain.append((response_url, response.status_code))
        url = urlsplit(response_url)
        # Carry the redirect target's scheme/host/port into the next
        # request's environ so absolute-URL redirects are honoured.
        if url.scheme:
            extra['wsgi.url_scheme'] = url.scheme
        if url.hostname:
            extra['SERVER_NAME'] = url.hostname
        if url.port:
            extra['SERVER_PORT'] = str(url.port)
        # Prepend the request path to handle relative path redirects
        path = url.path
        if not path.startswith('/'):
            path = urljoin(response.request['PATH_INFO'], path)
        response = self.get(path, QueryDict(url.query), follow=False, **extra)
        response.redirect_chain = redirect_chain
        if redirect_chain[-1] in redirect_chain[:-1]:
            # Check that we're not redirecting to somewhere we've already
            # been to, to prevent loops.
            raise RedirectCycleError("Redirect loop detected.", last_response=response)
        if len(redirect_chain) > 20:
            # Such a lengthy chain likely also means a loop, but one with
            # a growing path, changing view, or changing query argument;
            # 20 is the value of "network.http.redirection-limit" from Firefox.
            raise RedirectCycleError("Too many redirects.", last_response=response)
    return response
| |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module tests some of the methods related to YAML serialization.
Requires `pyyaml <https://pyyaml.org/>`_ to be installed.
"""
from io import StringIO
import pytest
import numpy as np
from astropy.coordinates import SkyCoord, EarthLocation, Angle, Longitude, Latitude
from astropy import units as u
from astropy.time import Time
from astropy.table import QTable, SerializedColumn
from astropy.tests.helper import catch_warnings
# Optional dependency: astropy's YAML helpers require pyyaml; when the
# import fails every test in this module is skipped via ``pytestmark``.
try:
    from astropy.io.misc.yaml import load, load_all, dump
    HAS_YAML = True
except ImportError:
    HAS_YAML = False

# Module-wide skip marker evaluated lazily by pytest.
pytestmark = pytest.mark.skipif('not HAS_YAML')
@pytest.mark.parametrize('c', [True, np.uint8(8), np.int16(4),
                               np.int32(1), np.int64(3), np.int64(2**63 - 1),
                               2.0, np.float64(),
                               3+4j, np.complex_(3 + 4j),
                               np.complex64(3 + 4j),
                               np.complex128(1. - 2**-52 + 1j * (1. - 2**-52))])
def test_numpy_types(c):
    """Scalar values survive a dump/load round trip unchanged."""
    assert load(dump(c)) == c
@pytest.mark.parametrize('c', [u.m, u.m / u.s, u.hPa, u.dimensionless_unscaled])
def test_unit(c):
    """Units round-trip: composites compare equal, named units resolve
    back to the very same singleton object."""
    roundtripped = load(dump(c))
    if isinstance(c, u.CompositeUnit):
        assert roundtripped == c
    else:
        assert roundtripped is c
@pytest.mark.parametrize('c', [u.Unit('bakers_dozen', 13*u.one),
                               u.def_unit('magic')])
def test_custom_unit(c):
    """Custom units load as UnrecognizedUnit (with one warning) unless
    enabled, in which case the original object comes back silently."""
    serialized = dump(c)

    # Not enabled: expect exactly one parse warning and a stand-in unit.
    with catch_warnings() as recorded:
        loaded = load(serialized)
        assert len(recorded) == 1
        assert f"'{c!s}' did not parse" in str(recorded[0].message)
    assert isinstance(loaded, u.UnrecognizedUnit)
    assert str(loaded) == str(c)

    # Enabled: no warning, and the identical unit object is returned.
    with u.add_enabled_units(c):
        with catch_warnings() as recorded_enabled:
            reloaded = load(serialized)
            assert len(recorded_enabled) == 0
        assert reloaded is c
@pytest.mark.parametrize('c', [Angle('1 2 3', unit='deg'),
                               Longitude('1 2 3', unit='deg'),
                               Latitude('1 2 3', unit='deg'),
                               [[1], [3]] * u.m,
                               np.array([[1, 2], [3, 4]], order='F'),
                               np.array([[1, 2], [3, 4]], order='C'),
                               np.array([1, 2, 3, 4])[::2]])
def test_ndarray_subclasses(c):
    """Array subclasses keep values, shape, exact type, contiguity
    flags and (when present) their unit across a round trip."""
    restored = load(dump(c))

    assert np.all(c == restored)
    assert c.shape == restored.shape
    assert type(c) is type(restored)

    c_flag, f_flag = 'C_CONTIGUOUS', 'F_CONTIGUOUS'
    if c.flags[c_flag] or c.flags[f_flag]:
        assert c.flags[c_flag] == restored.flags[c_flag]
        assert c.flags[f_flag] == restored.flags[f_flag]
    else:
        # Original was not contiguous but round-trip version
        # should be c-contig.
        assert restored.flags[c_flag]

    if hasattr(c, 'unit'):
        assert c.unit == restored.unit
def compare_coord(c, cy):
    """Assert that coordinate ``cy`` is an equivalent copy of ``c``.

    Checks shape, frame name, every frame attribute, and every
    representation component (e.g. ra/dec) for equality.
    """
    assert c.shape == cy.shape
    assert c.frame.name == cy.frame.name
    assert list(c.get_frame_attr_names()) == list(cy.get_frame_attr_names())
    for attr in c.get_frame_attr_names():
        assert getattr(c, attr) == getattr(cy, attr)
    assert (list(c.representation_component_names) ==
            list(cy.representation_component_names))
    for name in c.representation_component_names:
        # Bug fix: compare the component currently iterated over
        # (``name``), not the leftover ``attr`` from the previous loop,
        # which silently re-checked the last frame attribute instead of
        # the coordinate components.
        assert np.all(getattr(c, name) == getattr(cy, name))
@pytest.mark.parametrize('frame', ['fk4', 'altaz'])
def test_skycoord(frame):
    """A 2x2 SkyCoord with obstime and location survives a round trip."""
    coord = SkyCoord([[1, 2], [3, 4]], [[5, 6], [7, 8]],
                     unit='deg', frame=frame,
                     obstime=Time('2016-01-02'),
                     location=EarthLocation(1000, 2000, 3000, unit=u.km))
    roundtripped = load(dump(coord))
    compare_coord(coord, roundtripped)
def _get_time():
    """Build a 2x1 Time fixture with several non-default attributes set,
    so round-trip tests exercise more than the bare jd values."""
    time_obj = Time([[1], [2]], format='cxcsec',
                    location=EarthLocation(1000, 2000, 3000, unit=u.km))
    time_obj.format = 'iso'
    time_obj.precision = 5
    time_obj.delta_ut1_utc = np.array([[3.0], [4.0]])
    time_obj.delta_tdb_tt = np.array([[5.0], [6.0]])
    time_obj.out_subfmt = 'date_hm'
    return time_obj
def compare_time(t, ty):
    """Assert that Time ``ty`` matches ``t`` in value, type and every
    serialization-relevant attribute."""
    checked_attrs = ('shape', 'jd1', 'jd2', 'format', 'scale', 'precision',
                     'in_subfmt', 'out_subfmt', 'location',
                     'delta_ut1_utc', 'delta_tdb_tt')
    assert type(t) is type(ty)
    assert np.all(t == ty)
    for attr_name in checked_attrs:
        assert np.all(getattr(t, attr_name) == getattr(ty, attr_name))
def test_time():
    """A Time object with custom attributes survives a round trip."""
    original = _get_time()
    compare_time(original, load(dump(original)))
def test_timedelta():
    """A TimeDelta derived from the Time fixture survives a round trip."""
    base = _get_time()
    delta = base - base + 0.1234556 * u.s
    restored = load(dump(delta))
    assert type(delta) is type(restored)
    for attr_name in ('shape', 'jd1', 'jd2', 'format', 'scale'):
        assert np.all(getattr(delta, attr_name) == getattr(restored, attr_name))
def test_serialized_column():
    """A SerializedColumn mapping round-trips to an equal object."""
    column = SerializedColumn({'name': 'hello', 'other': 1, 'other2': 2.0})
    assert load(dump(column)) == column
def test_load_all():
    """Several heterogeneous objects round-trip through one
    multi-document YAML stream read back with ``load_all``."""
    time_obj = _get_time()
    unit = u.m / u.s
    coord = SkyCoord([[1, 2], [3, 4]], [[5, 6], [7, 8]],
                     unit='deg', frame='fk4',
                     obstime=Time('2016-01-02'),
                     location=EarthLocation(1000, 2000, 3000, unit=u.km))
    # Make a multi-document stream: one '---' separator per object.
    stream = ''.join('---\n' + dump(obj) for obj in (time_obj, unit, coord))
    time_copy, unit_copy, coord_copy = list(load_all(stream))
    compare_time(time_obj, time_copy)
    compare_coord(coord, coord_copy)
    assert unit_copy == unit
@pytest.mark.skipif('not HAS_YAML')
def test_ecsv_astropy_objects_in_meta():
    """
    Test that astropy core objects in ``meta`` are serialized.
    """
    table = QTable([[1, 2] * u.m, [4, 5]], names=['a', 'b'])
    time_meta = _get_time()
    coord_meta = SkyCoord([[1, 2], [3, 4]], [[5, 6], [7, 8]],
                          unit='deg', frame='fk4',
                          obstime=Time('2016-01-02'),
                          location=EarthLocation(1000, 2000, 3000, unit=u.km))
    unit_meta = u.m / u.s
    table.meta = {'tm': time_meta, 'c': coord_meta, 'unit': unit_meta}

    # Write to an in-memory ECSV file and read it back.
    buffer = StringIO()
    table.write(buffer, format='ascii.ecsv')
    restored = QTable.read(buffer.getvalue(), format='ascii.ecsv')

    compare_time(time_meta, restored.meta['tm'])
    compare_coord(coord_meta, restored.meta['c'])
    assert restored.meta['unit'] == unit_meta
| |
"""Test the Dyson fan component."""
import unittest
from unittest import mock
from homeassistant.components.dyson import DYSON_DEVICES
from homeassistant.components.fan import dyson
from tests.common import get_test_home_assistant
from libpurecoollink.const import FanSpeed, FanMode, NightMode, Oscillation
from libpurecoollink.dyson_pure_state import DysonPureCoolState
from libpurecoollink.dyson_pure_cool_link import DysonPureCoolLink
class MockDysonState(DysonPureCoolState):
    """Mock Dyson state."""

    def __init__(self):
        """Create new Mock Dyson State."""
        # Deliberately does NOT call super().__init__(); instances only
        # need to pass isinstance checks in on_message tests — presumably
        # the real base __init__ requires a state payload (not visible
        # here — confirm against libpurecoollink).
        pass
def _get_device_with_no_state():
    """Build a mock fan device whose state has not been received yet."""
    mocked_device = mock.Mock()
    mocked_device.name = "Device_name"
    mocked_device.state = None
    return mocked_device
def _get_device_off():
    """Build a mock fan device reporting the fan switched off
    (night mode on, last speed 4)."""
    mocked_device = mock.Mock()
    mocked_device.name = "Device_name"
    mocked_device.state = mock.Mock()
    mocked_device.state.fan_mode = "OFF"
    mocked_device.state.night_mode = "ON"
    mocked_device.state.speed = "0004"
    return mocked_device
def _get_device_auto():
    """Build a mock fan device running in automatic mode."""
    mocked_device = mock.Mock()
    mocked_device.name = "Device_name"
    mocked_device.state = mock.Mock()
    mocked_device.state.fan_mode = "AUTO"
    mocked_device.state.night_mode = "ON"
    mocked_device.state.speed = "AUTO"
    return mocked_device
def _get_device_on():
    """Build a mock fan device that is on, oscillating, at speed 1.

    Uses ``spec=DysonPureCoolLink`` so the mock passes isinstance
    checks for the real device class.
    """
    mocked_device = mock.Mock(spec=DysonPureCoolLink)
    mocked_device.name = "Device_name"
    mocked_device.state = mock.Mock()
    mocked_device.state.fan_mode = "FAN"
    mocked_device.state.fan_state = "FAN"
    mocked_device.state.oscillation = "ON"
    mocked_device.state.night_mode = "OFF"
    mocked_device.state.speed = "0001"
    return mocked_device
class DysonTest(unittest.TestCase):
    """Dyson Sensor component test class."""

    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop everything that was started."""
        self.hass.stop()

    def test_setup_component_with_no_devices(self):
        """Test setup component with no devices."""
        self.hass.data[dyson.DYSON_DEVICES] = []
        add_devices = mock.MagicMock()
        dyson.setup_platform(self.hass, None, add_devices)
        # With nothing discovered, the platform registers an empty list.
        add_devices.assert_called_with([])

    def test_setup_component(self):
        """Test setup component with devices."""
        def _add_device(devices):
            # Only one of the two devices should be added — presumably
            # the platform filters on the DysonPureCoolLink spec of
            # _get_device_on(); confirm against the dyson platform code.
            assert len(devices) == 1
            assert devices[0].name == "Device_name"
        device_fan = _get_device_on()
        device_non_fan = _get_device_off()
        self.hass.data[dyson.DYSON_DEVICES] = [device_fan, device_non_fan]
        dyson.setup_platform(self.hass, None, _add_device)

    def test_dyson_set_speed(self):
        """Test set fan speed."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.should_poll)
        # Numeric speed strings map to FanSpeed enum members.
        component.set_speed("1")
        set_config = device.set_configuration
        set_config.assert_called_with(fan_mode=FanMode.FAN,
                                      fan_speed=FanSpeed.FAN_SPEED_1)
        # "AUTO" switches the fan mode instead of setting a speed.
        component.set_speed("AUTO")
        set_config = device.set_configuration
        set_config.assert_called_with(fan_mode=FanMode.AUTO)

    def test_dyson_turn_on(self):
        """Test turn on fan."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.should_poll)
        component.turn_on()
        set_config = device.set_configuration
        set_config.assert_called_with(fan_mode=FanMode.FAN)

    def test_dyson_turn_night_mode(self):
        """Test turn on fan with night mode."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.should_poll)
        component.night_mode(True)
        set_config = device.set_configuration
        set_config.assert_called_with(night_mode=NightMode.NIGHT_MODE_ON)
        component.night_mode(False)
        set_config = device.set_configuration
        set_config.assert_called_with(night_mode=NightMode.NIGHT_MODE_OFF)

    def test_is_night_mode(self):
        """Test night mode."""
        # _get_device_on reports night_mode "OFF"; _get_device_off "ON".
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.is_night_mode)
        device = _get_device_off()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertTrue(component.is_night_mode)

    def test_dyson_turn_auto_mode(self):
        """Test turn on/off fan with auto mode."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.should_poll)
        component.auto_mode(True)
        set_config = device.set_configuration
        set_config.assert_called_with(fan_mode=FanMode.AUTO)
        component.auto_mode(False)
        set_config = device.set_configuration
        set_config.assert_called_with(fan_mode=FanMode.FAN)

    def test_is_auto_mode(self):
        """Test auto mode."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.is_auto_mode)
        device = _get_device_auto()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertTrue(component.is_auto_mode)

    def test_dyson_turn_on_speed(self):
        """Test turn on fan with specified speed."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.should_poll)
        component.turn_on("1")
        set_config = device.set_configuration
        set_config.assert_called_with(fan_mode=FanMode.FAN,
                                      fan_speed=FanSpeed.FAN_SPEED_1)
        component.turn_on("AUTO")
        set_config = device.set_configuration
        set_config.assert_called_with(fan_mode=FanMode.AUTO)

    def test_dyson_turn_off(self):
        """Test turn off fan."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.should_poll)
        component.turn_off()
        set_config = device.set_configuration
        set_config.assert_called_with(fan_mode=FanMode.OFF)

    def test_dyson_oscillate_off(self):
        """Test turn off oscillation."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        component.oscillate(False)
        set_config = device.set_configuration
        set_config.assert_called_with(oscillation=Oscillation.OSCILLATION_OFF)

    def test_dyson_oscillate_on(self):
        """Test turn on oscillation."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        component.oscillate(True)
        set_config = device.set_configuration
        set_config.assert_called_with(oscillation=Oscillation.OSCILLATION_ON)

    def test_dyson_oscillate_value_on(self):
        """Test get oscillation value on."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertTrue(component.oscillating)

    def test_dyson_oscillate_value_off(self):
        """Test get oscillation value off."""
        device = _get_device_off()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.oscillating)

    def test_dyson_on(self):
        """Test device is on."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertTrue(component.is_on)

    def test_dyson_off(self):
        """Test device is off."""
        device = _get_device_off()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.is_on)
        # A device with no state yet must also report "off".
        device = _get_device_with_no_state()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertFalse(component.is_on)

    def test_dyson_get_speed(self):
        """Test get device speed."""
        # Numeric state strings ("0001", "0004") map to ints; "AUTO"
        # is passed through; no state yields None.
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertEqual(component.speed, 1)
        device = _get_device_off()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertEqual(component.speed, 4)
        device = _get_device_with_no_state()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertIsNone(component.speed)
        device = _get_device_auto()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        self.assertEqual(component.speed, "AUTO")

    def test_dyson_get_direction(self):
        """Test get device direction."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        # Dyson fans expose no direction control.
        self.assertIsNone(component.current_direction)

    def test_dyson_get_speed_list(self):
        """Test get speeds list."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        # 10 numeric speeds plus "AUTO".
        self.assertEqual(len(component.speed_list), 11)

    def test_dyson_supported_features(self):
        """Test supported features."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        # 3 == SUPPORT_OSCILLATE | SUPPORT_SET_SPEED — TODO confirm
        # against the fan component's feature constants.
        self.assertEqual(component.supported_features, 3)

    def test_on_message(self):
        """Test when message is received."""
        device = _get_device_on()
        component = dyson.DysonPureCoolLinkDevice(self.hass, device)
        component.entity_id = "entity_id"
        component.schedule_update_ha_state = mock.Mock()
        component.on_message(MockDysonState())
        # Any state message must trigger a HA state refresh.
        component.schedule_update_ha_state.assert_called_with()

    def test_service_set_night_mode(self):
        """Test set night mode service."""
        dyson_device = mock.MagicMock()
        self.hass.data[DYSON_DEVICES] = []
        dyson_device.entity_id = 'fan.living_room'
        self.hass.data[dyson.DYSON_FAN_DEVICES] = [dyson_device]
        dyson.setup_platform(self.hass, None, mock.MagicMock())

        # Call with a non-matching entity_id: device must not be touched.
        self.hass.services.call(dyson.DOMAIN, dyson.SERVICE_SET_NIGHT_MODE,
                                {"entity_id": "fan.bed_room",
                                 "night_mode": True}, True)
        assert not dyson_device.night_mode.called

        # Matching entity_id: night mode forwarded to the device.
        self.hass.services.call(dyson.DOMAIN, dyson.SERVICE_SET_NIGHT_MODE,
                                {"entity_id": "fan.living_room",
                                 "night_mode": True}, True)
        dyson_device.night_mode.assert_called_with(True)
| |
import os
import pytest
from golem.cli import commands, messages
from golem.core import file_manager
from golem.gui.user_management import Users
class TestRunCommand:
    """End-to-end tests for ``golem run`` (commands.run_command)."""

    @pytest.mark.slow
    def test_golem_run_project_param_is_missing(self, project_session, capsys):
        # No project argument: usage message is printed instead of running.
        project_session.activate()
        commands.run_command()
        captured = capsys.readouterr()
        assert messages.RUN_USAGE_MSG in captured.out

    @pytest.mark.slow
    def test_golem_run_project_does_not_exist(self, project_session):
        project_session.activate()
        with pytest.raises(SystemExit) as excinfo:
            commands.run_command(project='incorrect')
        assert str(excinfo.value) == 'golem run: error: the project incorrect does not exist'

    @pytest.mark.slow
    def test_golem_run_missing_test_query(self, project_session, capsys):
        # Project given but no test/suite query: usage message again.
        _, project = project_session.activate()
        commands.run_command(project=project)
        captured = capsys.readouterr()
        assert messages.RUN_USAGE_MSG in captured.out

    @pytest.mark.slow
    def test_golem_run_suite(self, project_session, test_utils, caplog):
        _, project = project_session.activate()
        test_name = test_utils.create_random_test(project)
        suite_name = test_utils.random_string()
        test_utils.create_suite(project, suite_name, tests=[test_name])
        commands.run_command(project=project, test_query=suite_name)
        records = caplog.records
        assert records[0].message == 'Test execution started: {}'.format(test_name)
        # records[1] is the in-between step log; index 2 holds the result.
        assert records[2].message == 'Test Result: SUCCESS'
        # the execution report is created for suite
        path = os.path.join(project_session.path, 'reports', suite_name)
        assert os.path.isdir(path)
        timestamp = os.listdir(path)[0]
        report = os.path.join(path, timestamp, 'report.json')
        assert os.path.isfile(report)

    @pytest.mark.slow
    def test_golem_run_suite_in_folder(self, project_session, test_utils, caplog, capsys):
        # Suite addressed with a dotted (folder) path.
        testdir, project = project_session.activate()
        test_name = test_utils.create_random_test(project)
        suite_name = '{}.{}'.format(test_utils.random_string(), test_utils.random_string())
        test_utils.create_suite(project, suite_name, tests=[test_name])
        commands.run_command(project=project, test_query=suite_name)
        records = caplog.records
        assert records[0].message == 'Test execution started: {}'.format(test_name)
        assert records[2].message == 'Test Result: SUCCESS'
        out, err = capsys.readouterr()
        assert 'Tests found: 1' in out

    @pytest.mark.slow
    def test_golem_run_suite_py(self, project_session, test_utils, caplog):
        # The suite may be referenced with its '.py' extension.
        _, project = project_session.activate()
        test_name = test_utils.create_random_test(project)
        suite_name = test_utils.random_string()
        test_utils.create_suite(project, suite_name, tests=[test_name])
        with_extension = suite_name + '.py'
        commands.run_command(project=project, test_query=with_extension)
        records = caplog.records
        assert records[0].message == 'Test execution started: {}'.format(test_name)
        assert records[2].message == 'Test Result: SUCCESS'

    @pytest.mark.slow
    def test_golem_run_suite_py_in_folder(self, project_session, test_utils, caplog):
        # Suite referenced as an OS path ('folder/suite.py').
        _, project = project_session.activate()
        test_name = test_utils.create_random_test(project)
        random_dir = test_utils.random_string()
        suite_name = '{}.{}'.format(random_dir, 'suite_one')
        suite_query = os.path.join(random_dir, 'suite_one.py')
        test_utils.create_suite(project, suite_name, tests=[test_name])
        commands.run_command(project=project, test_query=suite_query)
        records = caplog.records
        assert records[0].message == 'Test execution started: {}'.format(test_name)
        assert records[2].message == 'Test Result: SUCCESS'

    @pytest.mark.slow
    def test_golem_run_test(self, project_session, test_utils, caplog):
        _, project = project_session.activate()
        test_name = test_utils.create_random_test(project)
        commands.run_command(project=project, test_query=test_name)
        records = caplog.records
        assert records[0].message == 'Test execution started: {}'.format(test_name)
        assert records[2].message == 'Test Result: SUCCESS'
        # the execution report is created for suite
        path = os.path.join(project_session.path, 'reports', 'single_tests', test_name)
        assert os.path.isdir(path)
        # only one timestamp
        assert len(os.listdir(path)) == 1

    @pytest.mark.slow
    def test_golem_run_test_py(self, project_session, test_utils, caplog):
        _, project = project_session.activate()
        test_name = test_utils.create_random_test(project)
        test_query = '{}.py'.format(test_name)
        commands.run_command(project=project, test_query=test_query)
        records = caplog.records
        assert records[0].message == 'Test execution started: {}'.format(test_name)
        assert records[2].message == 'Test Result: SUCCESS'
        # the execution report is created for suite
        path = os.path.join(project_session.path, 'reports', 'single_tests', test_name)
        assert os.path.isdir(path)
        # only one timestamp
        assert len(os.listdir(path)) == 1

    @pytest.mark.slow
    def test_golem_run_test_in_folder(self, project_session, test_utils, caplog):
        _, project = project_session.activate()
        test_name = '{}.test_one'.format(test_utils.random_string())
        test_utils.create_test(project, test_name)
        commands.run_command(project=project, test_query=test_name)
        records = caplog.records
        assert records[0].message == 'Test execution started: {}'.format(test_name)
        # Result lands at index 4 here (extra log records for this flow —
        # presumably per-step logging; confirm against golem's runner).
        assert records[4].message == 'Test Result: SUCCESS'

    @pytest.mark.slow
    def test_golem_run_test_py_in_folder(self, project_function, test_utils, caplog):
        _, project = project_function.activate()
        test_utils.create_test(project, 'folder.test_one')
        commands.run_command(project=project, test_query='folder/test_one.py')
        records = caplog.records
        assert records[0].message == 'Test execution started: folder.test_one'
        assert records[4].message == 'Test Result: SUCCESS'

    @pytest.mark.slow
    @pytest.mark.skipif("os.name != 'nt'")
    def test_golem_run_test_py_in_folder_windows_path(self, project_function, test_utils,
                                                      caplog):
        # Windows-only: backslash-separated query must also resolve.
        _, project = project_function.activate()
        test_utils.create_test(project, 'folder.test_one')
        commands.run_command(project=project, test_query='folder\\test_one.py')
        records = caplog.records
        assert records[0].message == 'Test execution started: folder.test_one'
        assert records[4].message == 'Test Result: SUCCESS'

    @pytest.mark.slow
    def test_golem_run_directory(self, project_function, test_utils):
        # Running a directory executes every test under it, recursively,
        # and nothing outside it.
        _, project = project_function.activate()
        test_utils.create_test(project, 'test_one')
        test_utils.create_test(project, 'foo.test_two')
        test_utils.create_test(project, 'foo.test_three')
        test_utils.create_test(project, 'foo.bar.test_four')
        commands.run_command(project=project, test_query='foo')
        reportsdir = os.path.join(project_function.path, 'reports', 'foo')
        assert os.path.isdir(reportsdir)
        assert len(os.listdir(reportsdir)) == 1
        timestamp = os.listdir(reportsdir)[0]
        timestampdir = os.path.join(reportsdir, timestamp)
        tests = os.listdir(timestampdir)
        # 3 test report dirs + presumably an execution-level artifact.
        assert len(tests) == 4
        assert 'foo.test_two' in tests
        assert 'foo.test_three' in tests
        assert 'foo.bar.test_four' in tests
        assert 'test_one' not in tests

    @pytest.mark.slow
    def test_golem_run_directory_all_tests(self, project_function, test_utils):
        # '.' runs the whole test tree; report goes under 'all'.
        _, project = project_function.activate()
        test_utils.create_test(project, 'test_one')
        test_utils.create_test(project, 'foo.test_two')
        test_utils.create_test(project, 'foo.bar.test_three')
        commands.run_command(project=project, test_query='.')
        reportsdir = os.path.join(project_function.path, 'reports', 'all')
        assert os.path.isdir(reportsdir)
        assert len(os.listdir(reportsdir)) == 1
        timestamp = os.listdir(reportsdir)[0]
        timestampdir = os.path.join(reportsdir, timestamp)
        tests = os.listdir(timestampdir)
        assert len(tests) == 4
        assert 'test_one' in tests
        assert 'foo.test_two' in tests
        assert 'foo.bar.test_three' in tests

    @pytest.mark.slow
    def test_golem_run_directory_no_tests_present(self, project_function, capsys):
        _, project = project_function.activate()
        # run all tests, there are no tests in project
        commands.run_command(project=project, test_query='.')
        msg = 'No tests were found in {}'.format(os.path.join('tests', ''))
        out, err = capsys.readouterr()
        assert msg in out
        # run tests in an empty directory
        path = os.path.join(project_function.path, 'tests', 'foo')
        file_manager.create_directory(path=path, add_init=True)
        commands.run_command(project=project, test_query='foo')
        msg = 'No tests were found in {}'.format(os.path.join('tests', 'foo'))
        out, err = capsys.readouterr()
        assert msg in out
class TestCreateDirectoryCommand:
    """Tests for ``golem createdirectory``."""

    def test_createdirectory_command(self, dir_function):
        """The command creates a new test directory under the cwd."""
        os.chdir(dir_function.path)
        dir_name = 'testdirectory_002'
        commands.createdirectory_command(dir_name)
        created_path = os.path.join(dir_function.path, dir_name)
        assert os.path.isdir(created_path)
class TestCreateSuperUserCommand:
    """Tests for ``golem createsuperuser``."""

    def test_create_superuser_command(self, testdir_class, test_utils, capsys):
        """A superuser is created and its email stored."""
        testdir_class.activate()
        new_username = test_utils.random_string(5)
        new_email = test_utils.random_email()
        new_password = test_utils.random_string(5)
        commands.createsuperuser_command(new_username, new_email, new_password,
                                         no_input=True)
        captured = capsys.readouterr()
        expected = 'Superuser {} was created successfully.'.format(new_username)
        assert expected in captured.out
        assert Users.user_exists(new_username)
        assert Users.get_user_by_username(new_username).email == new_email

    def test_create_superuser_command_invalid_email(self, testdir_class, test_utils, capsys):
        """An invalid email aborts with exit code 1 and an error message."""
        testdir_class.activate()
        new_username = test_utils.random_string(5)
        bad_email = 'test@'
        new_password = test_utils.random_string(5)
        with pytest.raises(SystemExit) as wrapped_execution:
            commands.createsuperuser_command(new_username, bad_email, new_password,
                                             no_input=True)
        assert wrapped_execution.value.code == 1
        captured = capsys.readouterr()
        assert 'Error: {} is not a valid email address'.format(bad_email) in captured.out

    def test_create_superuser_command_no_email(self, testdir_class, test_utils):
        """Email is optional; the stored user then has no email."""
        testdir_class.activate()
        new_username = test_utils.random_string(5)
        new_password = test_utils.random_string(5)
        commands.createsuperuser_command(new_username, None, new_password, no_input=True)
        assert Users.get_user_by_username(new_username).email is None
class TestExitStatuses:
    """Verify that a failing test makes ``golem run`` exit with status 1,
    with both single- and multi-process execution."""

    # Inline golem test module whose test() step always raises, so any
    # run including it must fail.
    content = """
description = 'A test which deliberately fails'

def setup(data):
    pass

def test(data):
    step('test step')
    raise Exception('Intentional exception to trigger exit status == 1')

def teardown(data):
    pass
"""

    @pytest.mark.slow
    def test_exit_code_one_on_test_failure_when_using_single_processing_capabilities(
            self, project_function, test_utils):
        _, project = project_function.activate()
        test_utils.create_test(project, 'test_one', content=self.content)
        test_utils.create_test(project, 'test_two')
        with pytest.raises(SystemExit) as wrapped_execution:
            commands.run_command(project=project, test_query='.', processes=1)
        assert wrapped_execution.value.code == 1

    @pytest.mark.slow
    def test_exit_code_one_on_test_failure_when_using_multi_processing_capabilities(
            self, project_function, test_utils):
        _, project = project_function.activate()
        test_utils.create_test(project, 'test_one', content=self.content)
        test_utils.create_test(project, 'test_two')
        with pytest.raises(SystemExit) as wrapped_execution:
            commands.run_command(project=project, test_query='.', processes=2)
        assert wrapped_execution.value.code == 1
| |
"""Unit tests for reviewbot.tools.rbsecretscanner."""
from __future__ import unicode_literals
import six
from reviewbot.tools.rbsecretscanner import SecretScannerTool
from reviewbot.tools.testing import (BaseToolTestCase,
ToolTestCaseMetaclass,
integration_test)
from reviewbot.utils.process import execute
@six.add_metaclass(ToolTestCaseMetaclass)
class SecretScannerToolTests(BaseToolTestCase):
"""Unit tests for SecretScannerTool."""
tool_class = SecretScannerTool
def test_get_can_handle_file(self):
    """Testing SecretScannerTool.get_can_handle_file"""
    # The scanner applies to every file, regardless of extension or
    # the absence of one.
    self.assertTrue(self.run_get_can_handle_file(filename='test.txt'))
    self.assertTrue(self.run_get_can_handle_file(filename='test'))
    self.assertTrue(self.run_get_can_handle_file(filename='test.py'))
# Each token test below follows the same pattern: one positive sample
# that must match, then the same token with an extra leading or trailing
# character, which must NOT match (word-boundary check). The helper
# ``_run_token_test`` is defined further down in this class.

@integration_test()
def test_execute_with_asana_access_token(self):
    """Testing SecretScannerTool.execute with Asana Access Token"""
    self._run_token_test(
        '1/1234567890123:abcdefghijklmnopqrsxyz1234567890')
    self._run_token_test(
        'Z1/1234567890123:abcdefghijklmnopqrsxyz1234567890',
        match=False)
    self._run_token_test(
        '1/1234567890123:abcdefghijklmnopqrsxyz1234567890Z',
        match=False)

@integration_test()
def test_execute_with_aws_access_key_a3t(self):
    """Testing SecretScannerTool.execute with AWS Access Key (A3T...)
    """
    self._run_token_test('A3TA1234567890ABCDEF')
    self._run_token_test('ZA3TA1234567890ABCDEF', match=False)
    self._run_token_test('A3TA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_abia(self):
    """Testing SecretScannerTool.execute with AWS Access Key (ABIA...)
    """
    self._run_token_test('ABIA1234567890ABCDEF')
    self._run_token_test('ZABIA1234567890ABCDEF', match=False)
    self._run_token_test('ABIA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_acca(self):
    """Testing SecretScannerTool.execute with AWS Access Key (ACCA...)
    """
    self._run_token_test('ACCA1234567890ABCDEF')
    self._run_token_test('ZACCA1234567890ABCDEF', match=False)
    self._run_token_test('ACCA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_agpa(self):
    """Testing SecretScannerTool.execute with AWS Access Key (AGPA...)
    """
    self._run_token_test('AGPA1234567890ABCDEF')
    self._run_token_test('ZAGPA1234567890ABCDEF', match=False)
    self._run_token_test('AGPA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_aida(self):
    """Testing SecretScannerTool.execute with AWS Access Key (AIDA...)
    """
    self._run_token_test('AIDA1234567890ABCDEF')
    self._run_token_test('ZAIDA1234567890ABCDEF', match=False)
    self._run_token_test('AIDA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_aipa(self):
    """Testing SecretScannerTool.execute with AWS Access Key (AIPA...)
    """
    self._run_token_test('AIPA1234567890ABCDEF')
    self._run_token_test('ZAIPA1234567890ABCDEF', match=False)
    self._run_token_test('AIPA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_akia(self):
    """Testing SecretScannerTool.execute with AWS Access Key (AKIA...)
    """
    self._run_token_test('AKIA1234567890ABCDEF')
    self._run_token_test('ZAKIA1234567890ABCDEF', match=False)
    self._run_token_test('AKIA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_anpa(self):
    """Testing SecretScannerTool.execute with AWS Access Key (ANPA...)
    """
    self._run_token_test('ANPA1234567890ABCDEF')
    self._run_token_test('ZANPA1234567890ABCDEF', match=False)
    self._run_token_test('ANPA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_anva(self):
    """Testing SecretScannerTool.execute with AWS Access Key (ANVA...)
    """
    self._run_token_test('ANVA1234567890ABCDEF')
    self._run_token_test('ZANVA1234567890ABCDEF', match=False)
    self._run_token_test('ANVA1234567890ABCDEFZ', match=False)

@integration_test()
def test_execute_with_aws_access_key_apka(self):
    """Testing SecretScannerTool.execute with AWS Access Key (APKA...)
    """
    self._run_token_test('APKA1234567890ABCDEF')
    self._run_token_test('ZAPKA1234567890ABCDEF', match=False)
    self._run_token_test('APKA1234567890ABCDEFZ', match=False)
@integration_test()
def test_execute_with_aws_access_key_aroa(self):
"""Testing SecretScannerTool.execute with AWS Access Key (AROA...)
"""
self._run_token_test('AROA1234567890ABCDEF')
self._run_token_test('ZAROA1234567890ABCDEF', match=False)
self._run_token_test('AROA1234567890ABCDEFZ', match=False)
@integration_test()
def test_execute_with_aws_access_key_asca(self):
"""Testing SecretScannerTool.execute with AWS Access Key (ASCA...)
"""
self._run_token_test('ASCA1234567890ABCDEF')
self._run_token_test('ZASCA1234567890ABCDEF', match=False)
self._run_token_test('ASCA1234567890ABCDEFZ', match=False)
@integration_test()
def test_execute_with_aws_access_key_asia(self):
"""Testing SecretScannerTool.execute with AWS Access Key (ASIA...)
"""
self._run_token_test('ASIA1234567890ABCDEF')
self._run_token_test('ZASIA1234567890ABCDEF', match=False)
self._run_token_test('ASIA1234567890ABCDEFZ', match=False)
@integration_test()
def test_execute_with_aws_mws_key(self):
"""Testing SecretScannerTool.execute with AWS MWS Key"""
self._run_token_test(
'amzn.mws.1234abcd-12ab-34de-56fa-123456abcdef')
@integration_test()
def test_execute_with_aws_secret_key(self):
"""Testing SecretScannerTool.execute with AWS Secret Key"""
self._run_token_test(
'AWS_SECRET_KEY="1234567890+ABCDEFGHIJ+1234567890+TUVWXYZ')
@integration_test()
def test_execute_with_certificate(self):
"""Testing SecretScannerTool.execute with certificate"""
self._run_token_test('-----END CERTIFICATE-----')
@integration_test()
def test_execute_with_discord_bot_token(self):
"""Testing SecretScannerTool.execute with Discord Bot Token"""
self._run_token_test(
'ABCDEFGHabcdefgh12345678.ABcd12.abcdefgh_ABCDEFGH-123456789')
@integration_test()
def test_execute_with_discord_webhook_url(self):
"""Testing SecretScannerTool.execute with Discord WebHook URL"""
self._run_token_test(
'https://discord.com/api/webhooks/1234567890/abcdefghijklmnopq'
'rstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ-123456789')
self._run_token_test(
'http://discord.com/api/webhooks/1234567890/abcdefghijklmnopq'
'rstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ-123456789')
@integration_test()
def test_execute_with_dropbox_short_lived_access_token(self):
"""Testing SecretScannerTool.execute with Dropbox Short-Lived
Access Token
"""
self._run_token_test(
'sl.Auabcdefghijklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPQRSTUVWXYZ_'
'1234567890-abcdefghijklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPQRSTU'
'VWXZ_123456789_ABCDE')
self._run_token_test(
'Zsl.Auabcdefghijklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPQRSTUVWXYZ_'
'1234567890-abcdefghijklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPQRSTU'
'VWXZ_123456789_ABCDE',
match=False)
@integration_test()
def test_execute_with_facebook_access_token(self):
"""Testing SecretScannerTool.execute with Facebook Access Token"""
self._run_token_test('EAACEdEose0cBA1234567890ABCDwxyz')
self._run_token_test('ZEAACEdEose0cBA1234567890ABCDwxyz',
match=False)
@integration_test()
def test_execute_with_github_legacy_oauth_token(self):
"""Testing SecretScannerTool.execute with legacy GitHub OAuth
Token
"""
# Lower bounds of length.
self._run_token_test(
'GITHUB_TOKEN=1234567890ABCDEFGabcdefg12345XYZxyz')
# Upper bounds of length.
self._run_token_test(
'GITHUB_TOKEN=1234567890ABCDEFGabcdefg12345XYZxyz12345')
@integration_test()
def test_execute_with_github_oauth_token_gho(self):
"""Testing SecretScannerTool.execute with GitHub token (gho...)"""
# Lower bounds of length.
self._run_token_test(
'gho_1234567890ABCDEFGabcdefg1234508vKGb')
# Upper bounds of length.
self._run_token_test(
'gho_1234567890abcdef1234567890abcdef1234567890abcdef1234567890a'
'bcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890'
'abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456789'
'0abcdef1234567890abcdef1234567890abcdef1234567890abcdef123451X8'
'8A8')
# Real-world (invalidated) token.
self._run_token_test(
'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')
# Don't match these.
self._run_token_test(
'Zgho_1234567890ABCDEFGabcdefg1234508vKGb',
match=False)
self._run_token_test(
'gho_1234567890ABCDEFGabcdefg1234508vKGbZ',
match=False)
@integration_test()
def test_execute_with_github_oauth_token_ghp(self):
"""Testing SecretScannerTool.execute with GitHub token (ghp...)"""
# Lower bounds of length.
self._run_token_test(
'ghp_1234567890ABCDEFGabcdefg1234508vKGb')
# Upper bounds of length.
self._run_token_test(
'ghp_1234567890abcdef1234567890abcdef1234567890abcdef1234567890a'
'bcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890'
'abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456789'
'0abcdef1234567890abcdef1234567890abcdef1234567890abcdef123451X8'
'8A8')
# Real-world (invalidated) token.
self._run_token_test(
'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')
# Don't match these.
self._run_token_test(
'Zghp_1234567890ABCDEFGabcdefg1234508vKGb',
match=False)
self._run_token_test(
'ghp_1234567890ABCDEFGabcdefg1234508vKGbZ',
match=False)
@integration_test()
def test_execute_with_github_oauth_token_ghr(self):
"""Testing SecretScannerTool.execute with GitHub token (ghr...)"""
# Lower bounds of length.
self._run_token_test(
'ghr_1234567890ABCDEFGabcdefg1234508vKGb')
# Upper bounds of length.
self._run_token_test(
'ghr_1234567890abcdef1234567890abcdef1234567890abcdef1234567890a'
'bcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890'
'abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456789'
'0abcdef1234567890abcdef1234567890abcdef1234567890abcdef123451X8'
'8A8')
# Real-world (invalidated) token.
self._run_token_test(
'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')
# Don't match these.
self._run_token_test(
'Zghr_1234567890ABCDEFGabcdefg1234508vKGb',
match=False)
self._run_token_test(
'ghr_1234567890ABCDEFGabcdefg1234508vKGbZ',
match=False)
@integration_test()
def test_execute_with_github_oauth_token_ghs(self):
"""Testing SecretScannerTool.execute with GitHub token (ghs...)"""
# Lower bounds of length.
self._run_token_test(
'ghs_1234567890ABCDEFGabcdefg1234508vKGb')
# Upper bounds of length.
self._run_token_test(
'ghs_1234567890abcdef1234567890abcdef1234567890abcdef1234567890a'
'bcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890'
'abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456789'
'0abcdef1234567890abcdef1234567890abcdef1234567890abcdef123451X8'
'8A8')
# Real-world (invalidated) token.
self._run_token_test(
'ghs_Anawk3Qg2P7at173OpuF29DF2SMEDv0ZBObL')
# Don't match these.
self._run_token_test(
'Zghs_1234567890ABCDEFGabcdefg1234508vKGb',
match=False)
self._run_token_test(
'ghs_1234567890ABCDEFGabcdefg1234508vKGbZ',
match=False)
@integration_test()
def test_execute_with_github_oauth_token_ghu(self):
"""Testing SecretScannerTool.execute with GitHub token (ghu...)"""
# Lower bounds of length.
self._run_token_test(
'ghu_1234567890ABCDEFGabcdefg1234508vKGb')
# Upper bounds of length.
self._run_token_test(
'ghu_1234567890abcdef1234567890abcdef1234567890abcdef1234567890a'
'bcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890'
'abcdef1234567890abcdef1234567890abcdef1234567890abcdef123456789'
'0abcdef1234567890abcdef1234567890abcdef1234567890abcdef123451X8'
'8A8')
# Real-world (invalidated) token.
self._run_token_test(
'ghu_EvjxC2hvFZtSn6bsKAMTWkKsTpdJ0f2fYhqu')
# Don't match these.
self._run_token_test(
'Zghu_1234567890ABCDEFGabcdefg1234508vKGb',
match=False)
self._run_token_test(
'ghu_1234567890ABCDEFGabcdefg1234508vKGbZ',
match=False)
@integration_test()
def test_execute_with_google_gcp_api_key(self):
"""Testing SecretScannerTool.execute with Google GCP API Key"""
self._run_token_test('ABCDEFGabcdefg123456789ZYXWzywxSTUVstuv')
self._run_token_test('ZABCDEFGabcdefg123456789ZYXWzywxSTUVstuv',
match=False)
self._run_token_test('ABCDEFGabcdefg123456789ZYXWzywxSTUVstuvZ',
match=False)
@integration_test()
def test_execute_with_google_gcp_client_id(self):
"""Testing SecretScannerTool.execute with Google GCP Client ID"""
self._run_token_test(
'1234567890123-abcdefghijklmnopqrstuvwxyz123456.apps.'
'googleusercontent.com')
@integration_test()
def test_execute_with_google_gcp_service_account_config(self):
"""Testing SecretScannerTool.execute with Google GCP Service
Account configuration
"""
self._run_token_test('"type": "service_account"')
self._run_token_test("'type': 'service_account'")
@integration_test()
def test_execute_with_google_gcp_service_account_id(self):
"""Testing SecretScannerTool.execute with Google GCP Service
Account e-mail ID
"""
self._run_token_test('my-service@appspot.gserviceaccount.com')
self._run_token_test('my-service@developer.gserviceaccount.com')
@integration_test()
def test_execute_with_heroku_api_key(self):
"""Testing SecretScannerTool.execute with Heroku API Key"""
self._run_token_test(
'HEROKU_API_KEY=1234abcd-12ab-34cd-56ef-123456abcdef')
@integration_test()
def test_execute_with_json_web_token(self):
"""Testing SecretScannerTool.execute with JSON Web Token"""
self._run_token_test(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJrZXkiOiJ2YWx1ZSJ9.'
'5pps1XMxciBCpOhezqTk9XuGny-4_HZ9aEKp3AqgekA')
self._run_token_test(
'W10K.eyJrZXkiOiJ2YWx1ZSJ9.8nUniKZ63ZQLtj401tAGgVLo0Fm1LAOe'
'M5vPSBY3-os',
match=False)
self._run_token_test(
'eyJ0eXAiOiAib3RoZXIifQo=.eyJrZXkiOiJ2YWx1ZSJ9.'
'8nUniKZ63ZQLtj401tAGgVLo0Fm1LAOeM5vPSBY3-os',
match=False)
self._run_token_test(
'foo.bar.baz',
match=False)
@integration_test()
def test_execute_with_mailchimp_api_key(self):
"""Testing SecretScannerTool.execute with Mailchimp API key"""
self._run_token_test('abcdef1234567890abcdef1234567890-us1')
self._run_token_test('abcdef1234567890abcdef1234567890-us12')
self._run_token_test('Zabcdef1234567890abcdef1234567890-us12',
match=False)
self._run_token_test('abcdef1234567890abcdef1234567890-us12Z',
match=False)
@integration_test()
def test_execute_with_mailgun_api_key(self):
"""Testing SecretScannerTool.execute with Mailgun API key"""
self._run_token_test('key-abcdefghijklmnopqrstuvwxyz123456')
self._run_token_test('Zkey-abcdefghijklmnopqrstuvwxyz123456',
match=False)
self._run_token_test('key-abcdefghijklmnopqrstuvwxyz123456Z',
match=False)
@integration_test()
def test_execute_with_npm_access_token(self):
"""Testing SecretScannerTool.execute with NPM Access Token"""
self._run_token_test('abcd1234-ab12-cd34-ef56-abcdef123456')
self._run_token_test('Zabcd1234-ab12-cd34-ef56-abcdef123456',
match=False)
self._run_token_test('abcd1234-ab12-cd34-ef56-abcdef123456Z',
match=False)
@integration_test()
def test_execute_with_pgp_private_key(self):
"""Testing SecretScannerTool.execute with PGP Private Key"""
self._run_token_test('----BEGIN PGP PRIVATE KEY BLOCK----')
@integration_test()
def test_execute_with_pypi_api_token(self):
"""Testing SecretScannerTool.execute with PyAPI API Token"""
self._run_token_test(
'pypi-abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ_'
'0123456789_abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRST'
'UVWXYZ_0123456789_abcdefghijklmnopqrstuvwxyz')
self._run_token_test(
'pypi:abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ_'
'0123456789_abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRST'
'UVWXYZ_0123456789_abcdefghijklmnopqrstuvwxyz')
@integration_test()
def test_execute_with_rsa_private_key(self):
"""Testing SecretScannerTool.execute with RSA Private Key"""
self._run_token_test('----BEGIN RSA PRIVATE KEY----')
@integration_test()
def test_execute_with_ssh_dsa_private_key(self):
"""Testing SecretScannerTool.execute with SSH (DSA) Private Key"""
self._run_token_test('----BEGIN DSA PRIVATE KEY----')
@integration_test()
def test_execute_with_ssh_ec_private_key(self):
"""Testing SecretScannerTool.execute with SSH (EC) Private Key"""
self._run_token_test('----BEGIN EC PRIVATE KEY----')
@integration_test()
def test_execute_with_openssh_private_key(self):
"""Testing SecretScannerTool.execute with OPENSSH Private Key
"""
self._run_token_test('----BEGIN OPENSSH PRIVATE KEY----')
@integration_test()
def test_execute_with_slack_token_xoxa(self):
"""Testing SecretScannerTool.execute with Slack Token (xoxa-2-...)
"""
self._run_token_test(
'xoxa-2-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345')
self._run_token_test(
'Zxoxa-2-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345',
match=False)
@integration_test()
def test_execute_with_slack_token_xoxb(self):
"""Testing SecretScannerTool.execute with Slack Token (xoxb...)
"""
self._run_token_test(
'xoxb-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345')
self._run_token_test(
'Zxoxb-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345',
match=False)
@integration_test()
def test_execute_with_slack_token_xoxo(self):
"""Testing SecretScannerTool.execute with Slack Token (xoxo...)
"""
self._run_token_test(
'xoxo-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345')
self._run_token_test(
'Zxoxo-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345',
match=False)
@integration_test()
def test_execute_with_slack_token_xoxp(self):
"""Testing SecretScannerTool.execute with Slack Token (xoxp...)
"""
self._run_token_test(
'xoxp-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345')
self._run_token_test(
'Zxoxp-12345678901-12345678901-1234567890123-'
'abcdefghijklmnopqrstuvwxyz123456',
match=False)
@integration_test()
def test_execute_with_slack_token_xoxr(self):
"""Testing SecretScannerTool.execute with Slack Token (xoxr...)
"""
self._run_token_test(
'xoxr-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345')
self._run_token_test(
'Zxoxr-123456789012-123456789012-123456789012-'
'abcdefghijklmnopqrstuvwxyz012345',
match=False)
@integration_test()
def test_execute_with_slack_webhook_url(self):
"""Testing SecretScannerTool.execute with Slack WebHook URL"""
self._run_token_test(
'https://hooks.slack.com/services/TABCDEFGH/BACDEFGH/abcdEFGHijkl')
self._run_token_test(
'https://hooks.slack.com/workflows/TABCDEFGH/BACDEFGH/abcdEFGHijk')
self._run_token_test(
'http://hooks.slack.com/workflows/TABCDEFGH/BACDEFGH/abcdEFGHijk')
@integration_test()
def test_execute_with_stripe_live_api_key(self):
"""Testing SecretScannerTool.execute with Stripe API key
(sk_live_...)
"""
self._run_token_test('sk_live_abcdEFGH1234ZYXWzyxw6789')
self._run_token_test('Zsk_live_abcdEFGH1234ZYXWzyxw6789',
match=False)
@integration_test()
def test_execute_with_stripe_test_api_key(self):
"""Testing SecretScannerTool.execute with Stripe API key
(sk_test_...)
"""
self._run_token_test('sk_test_abcdEFGH1234ZYXWzyxw6789')
self._run_token_test('Zsk_test_abcdEFGH1234ZYXWzyxw6789',
match=False)
@integration_test()
def test_execute_with_twilio_account_sid(self):
"""Testing SecretScannerTool.execute with Twilio Account SID"""
self._run_token_test('ACabcdef1234567890abcdef1234567890')
self._run_token_test('ZACabcdef1234567890abcdef1234567890',
match=False)
self._run_token_test('ACabcdef1234567890abcdef1234567890Z',
match=False)
@integration_test()
def test_execute_with_twilio_api_key(self):
"""Testing SecretScannerTool.execute with Twilio API Key"""
self._run_token_test('SKabcdef1234567890abcdef1234567890')
self._run_token_test('ZSKabcdef1234567890abcdef1234567890',
match=False)
self._run_token_test('SKabcdef1234567890abcdef1234567890Z',
match=False)
@integration_test()
def test_execute_with_twitter_oauth(self):
"""Testing SecretScannerTool.execute with Twitter OAuth Token"""
# Lower bounds of length.
self._run_token_test(
'TWITTER_OAUTH_TOKEN=1234567890ABCDEFGabcdefg12345XYZxyz')
# Upper bounds of length.
self._run_token_test(
'TWITTER_OAUTH_TOKEN=1234567890ABCDEFGabcdefg12345XYZxyz12345ABcd')
@integration_test()
def test_execute_with_success(self):
"""Testing SecretScannerTool.execute with successful result"""
self._run_token_test('', match=False)
def _run_token_test(self, token, match=True):
"""Run an execution test with a given token.
Args:
token (unicode):
The token to test for.
match (bool, optional):
Whether this should expect a token match.
Raises:
AssertionError:
The resulting state didn't match expectations.
"""
review, review_file = self.run_tool_execute(
filename='test.py',
file_contents=(
b'def func():\n'
b' call_api(%s)\n'
% token.encode('utf-8')
))
if match:
self.assertEqual(review.comments, [
{
'filediff_id': review_file.id,
'first_line': 2,
'num_lines': 1,
'text': (
'This line appears to contain a hard-coded '
'credential, which is a potential security risk. '
'Please verify this, and revoke the credential if '
'needed.\n'
'\n'
'Column: 14'
),
'issue_opened': True,
'rich_text': False,
},
])
else:
self.assertEqual(review.comments, [])
self.assertSpyNotCalled(execute)
| |
"""Media Player component to integrate TVs exposing the Joint Space API."""
from __future__ import annotations
from typing import Any
from haphilipsjs import ConnectionFailure
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.media_player import (
DEVICE_CLASS_TV,
PLATFORM_SCHEMA,
BrowseMedia,
MediaPlayerEntity,
)
from homeassistant.components.media_player.const import (
MEDIA_CLASS_APP,
MEDIA_CLASS_CHANNEL,
MEDIA_CLASS_DIRECTORY,
MEDIA_TYPE_APP,
MEDIA_TYPE_APPS,
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_CHANNELS,
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.const import (
CONF_API_VERSION,
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import LOGGER as _LOGGER, PhilipsTVDataUpdateCoordinator
from .const import CONF_SYSTEM, DOMAIN
# Feature flags always offered by the JointSpace API. SUPPORT_TURN_ON is
# added conditionally in PhilipsTVMediaPlayer.supported_features.
SUPPORT_PHILIPS_JS = (
    SUPPORT_TURN_OFF
    | SUPPORT_VOLUME_STEP
    | SUPPORT_VOLUME_SET
    | SUPPORT_VOLUME_MUTE
    | SUPPORT_SELECT_SOURCE
    | SUPPORT_NEXT_TRACK
    | SUPPORT_PREVIOUS_TRACK
    | SUPPORT_PLAY_MEDIA
    | SUPPORT_BROWSE_MEDIA
    | SUPPORT_PLAY
    | SUPPORT_PAUSE
    | SUPPORT_STOP
)

# Legacy YAML option naming a script to turn the TV on (deprecated below).
CONF_ON_ACTION = "turn_on_action"

# JointSpace API version assumed when none is configured.
DEFAULT_API_VERSION = 1
# YAML platform configuration is deprecated for this integration: every
# option is flagged via cv.deprecated(), CONF_NAME/CONF_ON_ACTION are
# dropped with vol.Remove(), and the remaining data is forwarded to the
# config-entry import flow.
PLATFORM_SCHEMA = vol.All(
    cv.deprecated(CONF_HOST),
    cv.deprecated(CONF_NAME),
    cv.deprecated(CONF_API_VERSION),
    cv.deprecated(CONF_ON_ACTION),
    PLATFORM_SCHEMA.extend(
        {
            vol.Required(CONF_HOST): cv.string,
            vol.Remove(CONF_NAME): cv.string,
            vol.Optional(CONF_API_VERSION, default=DEFAULT_API_VERSION): vol.Coerce(
                int
            ),
            vol.Remove(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
        }
    ),
)
def _inverted(data):
return {v: k for k, v in data.items()}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Philips TV platform.

    YAML configuration is deprecated, so instead of adding entities
    directly, the config is handed off to the config-entry import flow.
    """
    import_flow = hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_IMPORT},
        data=config,
    )
    hass.async_create_task(import_flow)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: config_entries.ConfigEntry,
    async_add_entities,
):
    """Set up the configuration entry."""
    # The coordinator was stored by the integration's __init__ setup.
    coordinator = hass.data[DOMAIN][config_entry.entry_id]
    player = PhilipsTVMediaPlayer(
        coordinator,
        config_entry.data[CONF_SYSTEM],
        config_entry.unique_id,
    )
    async_add_entities([player])
class PhilipsTVMediaPlayer(CoordinatorEntity, MediaPlayerEntity):
"""Representation of a Philips TV exposing the JointSpace API."""
_attr_device_class = DEVICE_CLASS_TV
def __init__(
self,
coordinator: PhilipsTVDataUpdateCoordinator,
system: dict[str, Any],
unique_id: str,
) -> None:
"""Initialize the Philips TV."""
self._tv = coordinator.api
self._coordinator = coordinator
self._sources = {}
self._channels = {}
self._supports = SUPPORT_PHILIPS_JS
self._system = system
self._unique_id = unique_id
self._state = STATE_OFF
self._media_content_type: str | None = None
self._media_content_id: str | None = None
self._media_title: str | None = None
self._media_channel: str | None = None
super().__init__(coordinator)
self._update_from_coordinator()
async def _async_update_soon(self):
"""Reschedule update task."""
self.async_write_ha_state()
await self.coordinator.async_request_refresh()
@property
def name(self):
"""Return the device name."""
return self._system["name"]
@property
def supported_features(self):
"""Flag media player features that are supported."""
supports = self._supports
if self._coordinator.turn_on or (
self._tv.on and self._tv.powerstate is not None
):
supports |= SUPPORT_TURN_ON
return supports
@property
def state(self):
"""Get the device state. An exception means OFF state."""
if self._tv.on and (self._tv.powerstate == "On" or self._tv.powerstate is None):
return STATE_ON
return STATE_OFF
@property
def source(self):
"""Return the current input source."""
return self._sources.get(self._tv.source_id)
@property
def source_list(self):
"""List of available input sources."""
return list(self._sources.values())
async def async_select_source(self, source):
"""Set the input source."""
source_id = _inverted(self._sources).get(source)
if source_id:
await self._tv.setSource(source_id)
await self._async_update_soon()
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._tv.volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._tv.muted
async def async_turn_on(self):
"""Turn on the device."""
if self._tv.on and self._tv.powerstate:
await self._tv.setPowerState("On")
self._state = STATE_ON
else:
await self._coordinator.turn_on.async_run(self.hass, self._context)
await self._async_update_soon()
async def async_turn_off(self):
"""Turn off the device."""
await self._tv.sendKey("Standby")
self._state = STATE_OFF
await self._async_update_soon()
async def async_volume_up(self):
"""Send volume up command."""
await self._tv.sendKey("VolumeUp")
await self._async_update_soon()
async def async_volume_down(self):
"""Send volume down command."""
await self._tv.sendKey("VolumeDown")
await self._async_update_soon()
async def async_mute_volume(self, mute):
"""Send mute command."""
if self._tv.muted != mute:
await self._tv.sendKey("Mute")
await self._async_update_soon()
else:
_LOGGER.debug("Ignoring request when already in expected state")
async def async_set_volume_level(self, volume):
"""Set volume level, range 0..1."""
await self._tv.setVolume(volume, self._tv.muted)
await self._async_update_soon()
async def async_media_previous_track(self):
"""Send rewind command."""
await self._tv.sendKey("Previous")
await self._async_update_soon()
async def async_media_next_track(self):
"""Send fast forward command."""
await self._tv.sendKey("Next")
await self._async_update_soon()
async def async_media_play_pause(self):
"""Send pause command to media player."""
if self._tv.quirk_playpause_spacebar:
await self._tv.sendUnicode(" ")
else:
await self._tv.sendKey("PlayPause")
await self._async_update_soon()
async def async_media_play(self):
"""Send pause command to media player."""
await self._tv.sendKey("Play")
await self._async_update_soon()
async def async_media_pause(self):
"""Send play command to media player."""
await self._tv.sendKey("Pause")
await self._async_update_soon()
async def async_media_stop(self):
"""Send play command to media player."""
await self._tv.sendKey("Stop")
await self._async_update_soon()
@property
def media_channel(self):
"""Get current channel if it's a channel."""
return self._media_channel
@property
def media_title(self):
"""Title of current playing media."""
return self._media_title
@property
def media_content_type(self):
"""Return content type of playing media."""
return self._media_content_type
@property
def media_content_id(self):
"""Content type of current playing media."""
return self._media_content_id
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._media_content_id and self._media_content_type in (
MEDIA_TYPE_APP,
MEDIA_TYPE_CHANNEL,
):
return self.get_browse_image_url(
self._media_content_type, self._media_content_id, media_image_id=None
)
return None
@property
def app_id(self):
"""ID of the current running app."""
return self._tv.application_id
@property
def app_name(self):
"""Name of the current running app."""
app = self._tv.applications.get(self._tv.application_id)
if app:
return app.get("label")
@property
def unique_id(self):
"""Return unique identifier if known."""
return self._unique_id
@property
def device_info(self):
"""Return a device description for device registry."""
return {
"name": self._system["name"],
"identifiers": {
(DOMAIN, self._unique_id),
},
"model": self._system.get("model"),
"manufacturer": "Philips",
"sw_version": self._system.get("softwareversion"),
}
async def async_play_media(self, media_type, media_id, **kwargs):
"""Play a piece of media."""
_LOGGER.debug("Call play media type <%s>, Id <%s>", media_type, media_id)
if media_type == MEDIA_TYPE_CHANNEL:
list_id, _, channel_id = media_id.partition("/")
if channel_id:
await self._tv.setChannel(channel_id, list_id)
await self._async_update_soon()
else:
_LOGGER.error("Unable to find channel <%s>", media_id)
elif media_type == MEDIA_TYPE_APP:
app = self._tv.applications.get(media_id)
if app:
await self._tv.setApplication(app["intent"])
await self._async_update_soon()
else:
_LOGGER.error("Unable to find application <%s>", media_id)
else:
_LOGGER.error("Unsupported media type <%s>", media_type)
async def async_browse_media_channels(self, expanded):
"""Return channel media objects."""
if expanded:
children = [
BrowseMedia(
title=channel.get("name", f"Channel: {channel_id}"),
media_class=MEDIA_CLASS_CHANNEL,
media_content_id=f"alltv/{channel_id}",
media_content_type=MEDIA_TYPE_CHANNEL,
can_play=True,
can_expand=False,
)
for channel_id, channel in self._tv.channels.items()
]
else:
children = None
return BrowseMedia(
title="Channels",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="channels",
media_content_type=MEDIA_TYPE_CHANNELS,
children_media_class=MEDIA_CLASS_CHANNEL,
can_play=False,
can_expand=True,
children=children,
)
async def async_browse_media_favorites(self, list_id, expanded):
"""Return channel media objects."""
if expanded:
favorites = await self._tv.getFavoriteList(list_id)
if favorites:
def get_name(channel):
channel_data = self._tv.channels.get(str(channel["ccid"]))
if channel_data:
return channel_data["name"]
return f"Channel: {channel['ccid']}"
children = [
BrowseMedia(
title=get_name(channel),
media_class=MEDIA_CLASS_CHANNEL,
media_content_id=f"{list_id}/{channel['ccid']}",
media_content_type=MEDIA_TYPE_CHANNEL,
can_play=True,
can_expand=False,
)
for channel in favorites
]
else:
children = None
else:
children = None
favorite = self._tv.favorite_lists[list_id]
return BrowseMedia(
title=favorite.get("name", f"Favorites {list_id}"),
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id=f"favorites/{list_id}",
media_content_type=MEDIA_TYPE_CHANNELS,
children_media_class=MEDIA_CLASS_CHANNEL,
can_play=False,
can_expand=True,
children=children,
)
async def async_browse_media_applications(self, expanded):
"""Return application media objects."""
if expanded:
children = [
BrowseMedia(
title=application["label"],
media_class=MEDIA_CLASS_APP,
media_content_id=application_id,
media_content_type=MEDIA_TYPE_APP,
can_play=True,
can_expand=False,
thumbnail=self.get_browse_image_url(
MEDIA_TYPE_APP, application_id, media_image_id=None
),
)
for application_id, application in self._tv.applications.items()
]
else:
children = None
return BrowseMedia(
title="Applications",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="applications",
media_content_type=MEDIA_TYPE_APPS,
children_media_class=MEDIA_CLASS_APP,
can_play=False,
can_expand=True,
children=children,
)
async def async_browse_media_favorite_lists(self, expanded):
"""Return favorite media objects."""
if self._tv.favorite_lists and expanded:
children = [
await self.async_browse_media_favorites(list_id, False)
for list_id in self._tv.favorite_lists
]
else:
children = None
return BrowseMedia(
title="Favorites",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="favorite_lists",
media_content_type=MEDIA_TYPE_CHANNELS,
children_media_class=MEDIA_CLASS_CHANNEL,
can_play=False,
can_expand=True,
children=children,
)
async def async_browse_media_root(self):
"""Return root media objects."""
return BrowseMedia(
title="Library",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="",
media_content_type="",
can_play=False,
can_expand=True,
children=[
await self.async_browse_media_channels(False),
await self.async_browse_media_applications(False),
await self.async_browse_media_favorite_lists(False),
],
)
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
if not self._tv.on:
raise BrowseError("Can't browse when tv is turned off")
if media_content_id in (None, ""):
return await self.async_browse_media_root()
path = media_content_id.partition("/")
if path[0] == "channels":
return await self.async_browse_media_channels(True)
if path[0] == "applications":
return await self.async_browse_media_applications(True)
if path[0] == "favorite_lists":
return await self.async_browse_media_favorite_lists(True)
if path[0] == "favorites":
return await self.async_browse_media_favorites(path[2], True)
raise BrowseError(f"Media not found: {media_content_type} / {media_content_id}")
async def async_get_browse_image(
self, media_content_type, media_content_id, media_image_id=None
):
"""Serve album art. Returns (content, content_type)."""
try:
if media_content_type == MEDIA_TYPE_APP and media_content_id:
return await self._tv.getApplicationIcon(media_content_id)
if media_content_type == MEDIA_TYPE_CHANNEL and media_content_id:
return await self._tv.getChannelLogo(media_content_id)
except ConnectionFailure:
_LOGGER.warning("Failed to fetch image")
return None, None
async def async_get_media_image(self):
"""Serve album art. Returns (content, content_type)."""
return await self.async_get_browse_image(
self.media_content_type, self.media_content_id, None
)
@callback
def _update_from_coordinator(self):
    """Refresh the cached media-player attributes from coordinator data."""
    # Standby flavours count as "off"; short-circuit keeps powerstate
    # from being read while the TV reports itself off.
    self._state = (
        STATE_ON
        if self._tv.on and self._tv.powerstate not in ("Standby", "StandbyKeep")
        else STATE_OFF
    )

    # Map source id -> display name, with a synthetic fallback name.
    source_names = {}
    for srcid, source in (self._tv.sources or {}).items():
        source_names[srcid] = source.get("name") or f"Source {srcid}"
    self._sources = source_names

    if self._tv.channel_active:
        channel_id = self._tv.channel_id
        self._media_content_type = MEDIA_TYPE_CHANNEL
        self._media_content_id = f"all/{channel_id}"
        self._media_title = self._tv.channels.get(channel_id, {}).get("name")
        # For channels, the channel name doubles as the media title.
        self._media_channel = self._media_title
    elif self._tv.application_id:
        app_id = self._tv.application_id
        self._media_content_type = MEDIA_TYPE_APP
        self._media_content_id = app_id
        self._media_title = self._tv.applications.get(app_id, {}).get("label")
        self._media_channel = None
    else:
        # Neither a channel nor an app: fall back to the active source name.
        self._media_content_type = None
        self._media_content_id = None
        self._media_title = self._sources.get(self._tv.source_id)
        self._media_channel = None
@callback
def _handle_coordinator_update(self) -> None:
    """Handle updated data from the coordinator."""
    # Order matters: refresh the cached attributes first so the
    # superclass hook publishes the new values, not stale ones.
    self._update_from_coordinator()
    super()._handle_coordinator_update()
| |
import datetime
from nose.tools import eq_, ok_
import mock
from django.conf import settings
from django.contrib.auth.models import Group
from django.utils import timezone
from funfactory.urlresolvers import reverse
from airmozilla.main.models import (
Event,
EventTweet,
Location,
Approval
)
from .base import ManageTestCase
from airmozilla.base.tests.test_utils import Response
class TestEventTweets(ManageTestCase):
    """Tests for creating, listing and managing EventTweet objects
    through the manage views (new tweet form, per-event and all-events
    listings, force-send, error display, cancel, timezone handling)."""

    event_base_data = {
        'status': Event.STATUS_SCHEDULED,
        'description': '...',
        'privacy': 'public',
        'location': '1',
        'channels': '1',
        'tags': 'xxx',
        'template': '1',
        'start_time': '2012-3-4 12:00',
        'estimated_duration': '3600',
        'timezone': 'US/Pacific'
    }
    # real PNG used as the event's placeholder image upload
    placeholder = 'airmozilla/manage/tests/firefox.png'

    @mock.patch('requests.get')
    def test_prepare_new_tweet(self, rget):
        """The new-tweet form is pre-filled with the event's short
        description (HTML stripped) and a bitly-shortened event URL,
        and enforces the 140 character limit on submission."""
        def mocked_read(url, params):
            assert url == settings.BITLY_URL
            return Response({
                u'status_code': 200,
                u'data': {
                    u'url': u'http://mzl.la/1adh2wT',
                    u'hash': u'1adh2wT',
                    u'global_hash': u'1adh2wU',
                    u'long_url': u'https://air.mozilla.org/it-buildout/',
                    u'new_hash': 0
                },
                u'status_txt': u'OK'
            })

        rget.side_effect = mocked_read
        event = Event.objects.get(title='Test event')
        # the event must have a real placeholder image;
        # open in binary mode — text mode would corrupt the PNG upload
        with open(self.placeholder, 'rb') as fp:
            response = self.client.post(
                reverse('manage:event_edit', args=(event.pk,)),
                dict(self.event_base_data,
                     title=event.title,
                     short_description="Check out <b>This!</b>",
                     description="Something longer",
                     placeholder_img=fp)
            )
        assert response.status_code == 302, response.status_code
        # on the edit page, there should be a link
        response = self.client.get(
            reverse('manage:event_edit', args=(event.pk,))
        )
        assert response.status_code == 200
        url = reverse('manage:new_event_tweet', args=(event.pk,))
        ok_(url in response.content)
        response = self.client.get(url)
        eq_(response.status_code, 200)
        textarea = (
            response.content
            .split('<textarea')[1]
            .split('>')[1]
            .split('</textarea')[0]
        )
        ok_(textarea.strip().startswith('Check out This!'))
        event = Event.objects.get(pk=event.pk)
        event_url = 'http://testserver'
        event_url += reverse('main:event', args=(event.slug,))
        # the shortened link should appear instead of the full event URL
        ok_('http://mzl.la/1adh2wT' in textarea)
        ok_(event_url not in textarea)
        # Sometimes, due to...
        # https://bugzilla.mozilla.org/show_bug.cgi?id=1167211
        # the session is cleared out here in this test, so we
        # really make sure we're signed in
        assert self.client.login(username='fake', password='fake')
        assert self.client.session.items()
        # load the form
        response = self.client.get(url)
        eq_(response.status_code, 200)
        # try to submit it with longer than 140 characters
        response = self.client.post(url, {
            'text': 'x' * 141,
            'include_placeholder': True,
        })
        eq_(response.status_code, 200)
        assert not EventTweet.objects.all().count()
        ok_('it has 141' in response.content)
        # try again
        response = self.client.post(url, {
            'text': 'Bla bla #tag',
            'include_placeholder': True,
        })
        eq_(response.status_code, 302)
        ok_(EventTweet.objects.all().count())
        now = timezone.now()
        event_tweet, = EventTweet.objects.all()
        # default send_date is "now" (compared to minute precision)
        _fmt = '%Y%m%d%H%M'
        eq_(
            event_tweet.send_date.strftime(_fmt),
            now.strftime(_fmt)
        )
        ok_(not event_tweet.sent_date)
        ok_(not event_tweet.error)
        ok_(not event_tweet.tweet_id)

    def test_event_tweets_empty(self):
        """The per-event tweet listing renders with no tweets."""
        event = Event.objects.get(title='Test event')
        url = reverse('manage:event_tweets', args=(event.pk,))
        response = self.client.get(url)
        eq_(response.status_code, 200)

    def _check_tweet_states(self, get_url):
        """Shared assertions for a tweet listing page.

        Makes the test event unapproved, creates a tweet, and checks how
        its pending, sent and failed states are rendered on the page
        returned by ``get_url(event)``.
        """
        event = Event.objects.get(title='Test event')
        assert event in Event.objects.approved()
        group = Group.objects.create(name='testapprover')
        Approval.objects.create(
            event=event,
            group=group,
        )
        assert event not in Event.objects.approved()
        url = get_url(event)
        response = self.client.get(url)
        eq_(response.status_code, 200)
        tweet = EventTweet.objects.create(
            event=event,
            text='Bla bla',
            send_date=timezone.now(),
        )
        # pending tweet on an unapproved event: no send date shown yet
        response = self.client.get(url)
        eq_(response.status_code, 200)
        ok_('Bla bla' in response.content)
        ok_('Needs to be approved first' in response.content)
        from airmozilla.main.helpers import js_date
        ok_(
            js_date(tweet.send_date.replace(microsecond=0))
            not in response.content
        )
        # also check that 'Bla bla' is shown on the Edit Event page
        edit_url = reverse('manage:event_edit', args=(event.pk,))
        response = self.client.get(edit_url)
        eq_(response.status_code, 200)
        ok_('Bla bla' in response.content)
        # sent tweet: link to the tweet and the sent date appear
        tweet.tweet_id = '1234567890'
        tweet.sent_date = (
            timezone.now()
            - datetime.timedelta(days=1)
        )
        tweet.save()
        response = self.client.get(url)
        eq_(response.status_code, 200)
        ok_('Bla bla' in response.content)
        ok_(
            'https://twitter.com/%s/status/1234567890'
            % settings.TWITTER_USERNAME
            in response.content
        )
        ok_(
            js_date(tweet.sent_date.replace(microsecond=0))
            in response.content
        )
        # failed tweet: no tweet link, failure notice shown
        tweet.tweet_id = None
        tweet.error = "Some error"
        tweet.save()
        response = self.client.get(url)
        eq_(response.status_code, 200)
        ok_('Bla bla' in response.content)
        ok_(
            'https://twitter.com/%s/status/1234567890'
            % settings.TWITTER_USERNAME
            not in response.content
        )
        ok_(
            js_date(tweet.sent_date.replace(microsecond=0))
            in response.content
        )
        ok_('Failed to send' in response.content)

    def test_event_tweets_states(self):
        """Tweet states on the per-event listing page."""
        self._check_tweet_states(
            lambda event: reverse('manage:event_tweets', args=(event.pk,))
        )

    def test_all_event_tweets_states(self):
        """Tweet states on the all-events listing page."""
        self._check_tweet_states(
            lambda event: reverse('manage:all_event_tweets')
        )

    @mock.patch('airmozilla.manage.views.events.send_tweet')
    def test_force_send_now(self, mocked_send_tweet):
        """Posting 'send' triggers an immediate send via send_tweet."""
        event = Event.objects.get(title='Test event')
        tweet = EventTweet.objects.create(
            event=event,
            text='Bla bla',
            send_date=timezone.now(),
        )

        def mock_send_tweet(event_tweet):
            event_tweet.tweet_id = '1234567890'
            event_tweet.save()

        mocked_send_tweet.side_effect = mock_send_tweet
        url = reverse('manage:event_tweets', args=(event.pk,))
        response = self.client.post(url, {
            'send': tweet.pk,
        })
        eq_(response.status_code, 302)
        tweet = EventTweet.objects.get(pk=tweet.pk)
        eq_(tweet.tweet_id, '1234567890')

    def test_view_tweet_error(self):
        """Posting 'error' returns the tweet's error as plain text."""
        event = Event.objects.get(title='Test event')
        tweet = EventTweet.objects.create(
            event=event,
            text='Bla bla',
            send_date=timezone.now(),
            error='Crap!'
        )
        url = reverse('manage:event_tweets', args=(event.pk,))
        response = self.client.post(url, {
            'error': tweet.pk,
        })
        eq_(response.status_code, 200)
        eq_(response['content-type'], 'text/plain')
        ok_('Crap!' in response.content)

    def test_cancel_event_tweet(self):
        """Posting 'cancel' deletes the pending tweet."""
        event = Event.objects.get(title='Test event')
        tweet = EventTweet.objects.create(
            event=event,
            text='Bla bla',
            send_date=timezone.now(),
        )
        url = reverse('manage:event_tweets', args=(event.pk,))
        response = self.client.post(url, {
            'cancel': tweet.pk,
        })
        eq_(response.status_code, 302)
        ok_(not EventTweet.objects.all().count())

    def test_create_event_tweet_with_location_timezone(self):
        """A send_date entered in the event location's timezone is
        converted to UTC when the tweet is saved."""
        location = Location.objects.create(
            name='Paris',
            timezone='Europe/Paris'
        )
        event = Event.objects.get(title='Test event')
        event.location = location
        event.save()
        # the event must have a real placeholder image;
        # open in binary mode — text mode would corrupt the PNG upload
        with open(self.placeholder, 'rb') as fp:
            response = self.client.post(
                reverse('manage:event_edit', args=(event.pk,)),
                dict(self.event_base_data,
                     title=event.title,
                     short_description="Check out <b>This!</b>",
                     description="Something longer",
                     placeholder_img=fp)
            )
        assert response.status_code == 302, response.status_code
        url = reverse('manage:new_event_tweet', args=(event.pk,))
        now = datetime.datetime.utcnow()
        response = self.client.post(url, {
            'text': 'Bla bla #tag',
            'include_placeholder': True,
            'send_date': now.strftime('%Y-%m-%d 12:00'),
        })
        eq_(response.status_code, 302)
        event_tweet, = EventTweet.objects.all()
        # we specified it as noon in Paris, but the save time
        # will be UTC
        ok_(event_tweet.send_date.hour != 12)
        assert event_tweet.send_date.strftime('%Z') == 'UTC'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.