repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
mderomph-coolblue/dd-agent | utils/ntp.py | 12 | 1318 | # stdlib
import os
import random
# project
from config import check_yaml, get_confd_path
# Module-level cache of the user's NTP check settings, populated by
# set_user_ntp_settings(); an empty dict means "use the defaults below".
user_ntp_settings = {}

DEFAULT_VERSION = 3
DEFAULT_TIMEOUT = 1  # in seconds
DEFAULT_PORT = "ntp"  # symbolic service name; the resolver maps it to port 123
def set_user_ntp_settings(instance=None):
    """Populate the module-level NTP settings cache.

    When no instance dict is supplied, try to load the first instance from
    the ntp.yaml check configuration; any failure falls back to an empty
    dict so the getter functions use their built-in defaults.
    """
    global user_ntp_settings
    if instance is None:
        instance = {}
        try:
            conf_file = os.path.join(get_confd_path(), 'ntp.yaml')
            instance = check_yaml(conf_file)['instances'][0]
        except Exception:
            # No readable config -> keep the empty dict (defaults apply).
            pass
    user_ntp_settings = instance
def get_ntp_host(subpool=None):
    """Return the NTP hostname to query.

    A user-configured ``host`` always wins; otherwise pick a server from
    the vendor pool -- the explicitly requested ``subpool`` if given,
    else a random one.
    """
    if user_ntp_settings.get('host') is not None:
        return user_ntp_settings['host']
    # BUGFIX: the previous `subpool or random.randint(0, 3)` discarded a
    # caller-supplied subpool of 0 (falsy), even though
    # 0.datadog.pool.ntp.org is a valid pool member. Test for None instead.
    if subpool is None:
        subpool = random.randint(0, 3)
    return "{0}.datadog.pool.ntp.org".format(subpool)
def get_ntp_port():
    """Return the configured NTP port, or DEFAULT_PORT when unset/falsy."""
    configured = user_ntp_settings.get('port')
    return configured if configured else DEFAULT_PORT
def get_ntp_version():
    """Return the NTP protocol version to use, coerced to int."""
    configured = user_ntp_settings.get("version")
    if not configured:
        configured = DEFAULT_VERSION
    return int(configured)
def get_ntp_timeout():
    """Return the NTP query timeout in seconds, coerced to float."""
    configured = user_ntp_settings.get('timeout')
    if not configured:
        configured = DEFAULT_TIMEOUT
    return float(configured)
def get_ntp_args():
    """Assemble the keyword arguments for issuing an NTP query."""
    args = {}
    args['host'] = get_ntp_host()
    args['port'] = get_ntp_port()
    args['version'] = get_ntp_version()
    args['timeout'] = get_ntp_timeout()
    return args
| bsd-3-clause |
blueburningcoder/nupic | tests/unit/nupic/research/spatial_pooler_compute_test.py | 35 | 4309 | #! /usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import time
import numpy
import unittest2 as unittest
from nupic.support.unittesthelpers.algorithm_test_helpers \
import getNumpyRandomGenerator, convertSP, CreateSP
from nupic.bindings.math import (count_gte,
GetNTAReal,
SM_01_32_32 as SparseBinaryMatrix,
SM32 as SparseMatrix)
from nupic.research.spatial_pooler import SpatialPooler
# dtype string shared by every input/output vector handed to the SP.
uintType = "uint32"


class SpatialPoolerComputeTest(unittest.TestCase):
    """
    End to end tests of the compute function for the SpatialPooler class with no
    mocking anywhere.
    """

    def basicComputeLoop(self, imp, params, inputSize, columnDimensions,
                         seed = None):
        """
        Feed in some vectors and retrieve outputs. Ensure the right number of
        columns win, that we always get binary outputs, and that nothing crashes.

        :param imp: SP implementation selector ('py' or 'cpp'), passed to CreateSP
        :param params: constructor parameter dict for the SpatialPooler
        :param inputSize: length of each input vector
        :param columnDimensions: length of the SP output SDR
        :param seed: optional numpy RNG seed for reproducible input vectors
        """
        sp = CreateSP(imp,params)
        # Create a set of input vectors as well as various numpy vectors we will
        # need to retrieve data from the SP
        numRecords = 100
        randomState = getNumpyRandomGenerator(seed)
        # ~20% of input bits active in each random vector.
        inputMatrix = (
            randomState.rand(numRecords,inputSize) > 0.8).astype(uintType)
        y = numpy.zeros(columnDimensions, dtype = uintType)
        dutyCycles = numpy.zeros(columnDimensions, dtype = uintType)
        # With learning on we should get the requested number of winners
        for v in inputMatrix:
            y.fill(0)
            sp.compute(v, True, y)
            self.assertEqual(sp.getNumActiveColumnsPerInhArea(),y.sum())
            self.assertEqual(0,y.min())
            self.assertEqual(1,y.max())
        # With learning off and some prior training we should get the requested
        # number of winners
        for v in inputMatrix:
            y.fill(0)
            sp.compute(v, False, y)
            self.assertEqual(sp.getNumActiveColumnsPerInhArea(),y.sum())
            self.assertEqual(0,y.min())
            self.assertEqual(1,y.max())

    def testBasicCompute1(self):
        """
        Run basicComputeLoop with mostly default parameters
        """
        # Size of each input vector
        inputSize = 30
        # Size of each output SDR vector
        columnDimensions = 50
        params = {
            "inputDimensions": [inputSize],
            "columnDimensions": [columnDimensions],
            "potentialRadius": inputSize,
            'globalInhibition': True,
            # Fresh seed each run so repeated runs exercise different wiring;
            # the seed is printed below so failures can be reproduced.
            "seed": int((time.time()%10000)*10),
        }
        print "testBasicCompute1, SP seed set to:",params['seed']
        # Exercise both the pure-Python and the C++ implementations.
        self.basicComputeLoop('py', params, inputSize, columnDimensions)
        self.basicComputeLoop('cpp', params, inputSize, columnDimensions)

    def testBasicCompute2(self):
        """
        Run basicComputeLoop with learning turned off.
        """
        # NOTE(review): basicComputeLoop still passes learn=True in its first
        # pass; "learning off" here means the permanence increments/decrements
        # below are zero, so no permanence change occurs -- confirm intent.
        # Size of each input vector
        inputSize = 100
        # Size of each output SDR vector
        columnDimensions = 100
        params = {
            "inputDimensions": [inputSize],
            "columnDimensions": [columnDimensions],
            "potentialRadius": inputSize,
            'globalInhibition': True,
            # Zero increments/decrements disable permanence learning.
            "synPermActiveInc": 0.0,
            "synPermInactiveDec": 0.0,
            "seed": int((time.time()%10000)*10),
        }
        print "testBasicCompute2, SP seed set to:",params['seed']
        self.basicComputeLoop('py', params, inputSize, columnDimensions)
        self.basicComputeLoop('cpp', params, inputSize, columnDimensions)


if __name__ == "__main__":
    unittest.main()
| agpl-3.0 |
HydrelioxGitHub/home-assistant | homeassistant/components/freebox/__init__.py | 2 | 2607 | """Support for Freebox devices (Freebox v6 and Freebox mini 4K)."""
import logging
import socket
import voluptuous as vol
from homeassistant.components.discovery import SERVICE_FREEBOX
from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP
from homeassistant.helpers import config_validation as cv, discovery
from homeassistant.helpers.discovery import async_load_platform
REQUIREMENTS = ['aiofreepybox==0.0.6']

_LOGGER = logging.getLogger(__name__)

DOMAIN = "freebox"
# Key under which the Freepybox client is stored in hass.data.
DATA_FREEBOX = DOMAIN
# App token file kept inside the Home Assistant configuration directory.
FREEBOX_CONFIG_FILE = 'freebox.conf'

# Manual YAML configuration requires both host and port; when the section is
# absent, discovery supplies them instead (see async_setup).
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PORT): cv.port,
    })
}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass, config):
    """Set up the Freebox component."""
    conf = config.get(DOMAIN)

    async def discovery_dispatch(service, discovery_info):
        """Set up a discovered Freebox (only when not configured manually)."""
        if conf is not None:
            return
        props = discovery_info.get('properties', {})
        host = props.get('api_domain')
        port = props.get('https_port')
        _LOGGER.info("Discovered Freebox server: %s:%s", host, port)
        await async_setup_freebox(hass, config, host, port)

    discovery.async_listen(hass, SERVICE_FREEBOX, discovery_dispatch)

    if conf is not None:
        await async_setup_freebox(
            hass, config, conf.get(CONF_HOST), conf.get(CONF_PORT))

    return True
async def async_setup_freebox(hass, config, host, port):
    """Start up the Freebox component platforms."""
    # Imported lazily so the aiofreepybox requirement is only loaded when
    # setup actually runs.
    from aiofreepybox import Freepybox
    from aiofreepybox.exceptions import HttpRequestError

    # Application descriptor shown on the Freebox during pairing/authorization.
    app_desc = {
        'app_id': 'hass',
        'app_name': 'Home Assistant',
        'app_version': '0.65',
        'device_name': socket.gethostname()
    }

    token_file = hass.config.path(FREEBOX_CONFIG_FILE)
    api_version = 'v1'

    fbx = Freepybox(
        app_desc=app_desc,
        token_file=token_file,
        api_version=api_version)

    try:
        await fbx.open(host, port)
    except HttpRequestError:
        # Best effort: log and bail out without raising so HA setup continues.
        _LOGGER.exception('Failed to connect to Freebox')
    else:
        # Connected: expose the client and load the dependent platforms.
        hass.data[DATA_FREEBOX] = fbx

        hass.async_create_task(async_load_platform(
            hass, 'sensor', DOMAIN, {}, config))
        hass.async_create_task(async_load_platform(
            hass, 'device_tracker', DOMAIN, {}, config))

        async def close_fbx(event):
            """Close Freebox connection on HA Stop."""
            await fbx.close()

        # Ensure the session is closed cleanly when Home Assistant stops.
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_fbx)
| apache-2.0 |
gsanchez1117/test2 | lib/werkzeug/contrib/testtools.py | 365 | 2453 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.testtools
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module implements extended wrappers for simplified testing.
`TestResponse`
A response wrapper which adds various cached attributes for
simplified assertions on various content types.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from werkzeug.utils import cached_property, import_string
from werkzeug.wrappers import Response
from warnings import warn
# Emitted at import time: this entire contrib module is scheduled for removal.
warn(DeprecationWarning('werkzeug.contrib.testtools is deprecated and '
                        'will be removed with Werkzeug 1.0'))
class ContentAccessors(object):
    """
    A mixin class for response objects that provides a couple of useful
    accessors for unittesting.

    Each accessor inspects ``self.mimetype`` and parses ``self.data``
    (the response body bytes) into a convenient object, cached per response.
    """

    def xml(self):
        """Get an etree if possible."""
        if 'xml' not in self.mimetype:
            raise AttributeError(
                'Not a XML response (Content-Type: %s)'
                % self.mimetype)
        for module in ['xml.etree.ElementTree', 'ElementTree',
                       'elementtree.ElementTree']:
            etree = import_string(module, silent=True)
            if etree is not None:
                # BUGFIX: was `self.body`; werkzeug Response exposes the
                # payload as `.data` (as the lxml/json accessors below use),
                # so `self.body` raised AttributeError at runtime.
                return etree.XML(self.data)
        raise RuntimeError('You must have ElementTree installed '
                           'to use TestResponse.xml')
    xml = cached_property(xml)

    def lxml(self):
        """Get an lxml etree if possible."""
        if ('html' not in self.mimetype and 'xml' not in self.mimetype):
            raise AttributeError('Not an HTML/XML response')
        from lxml import etree
        try:
            from lxml.html import fromstring
        except ImportError:
            fromstring = etree.HTML
        if self.mimetype == 'text/html':
            return fromstring(self.data)
        return etree.XML(self.data)
    lxml = cached_property(lxml)

    def json(self):
        """Get the result of simplejson.loads if possible."""
        if 'json' not in self.mimetype:
            raise AttributeError('Not a JSON response')
        try:
            from simplejson import loads
        except ImportError:
            from json import loads
        return loads(self.data)
    json = cached_property(json)
class TestResponse(Response, ContentAccessors):
    """Pass this to `werkzeug.test.Client` for easier unittesting."""
    # No body needed: all behaviour comes from Response + ContentAccessors.
| apache-2.0 |
WholeGrainGoats/servo | tests/wpt/css-tests/tools/py/testing/path/test_local.py | 160 | 29652 | # -*- coding: utf-8 -*-
from __future__ import with_statement
import py
import pytest
import os, sys
from py.path import local
import common
# Platform-conditional pytest markers shared by the tests below.
failsonjython = py.test.mark.xfail("sys.platform.startswith('java')")
failsonjywin32 = py.test.mark.xfail("sys.platform.startswith('java') "
                                    "and getattr(os, '_name', None) == 'nt'")
win32only = py.test.mark.skipif(
    "not (sys.platform == 'win32' or getattr(os, '_name', None) == 'nt')")
skiponwin32 = py.test.mark.skipif(
    "sys.platform == 'win32' or getattr(os, '_name', None) == 'nt'")
def pytest_funcarg__path1(request):
    """Session-scoped funcarg: a tmpdir populated with the common test fs."""
    def setup():
        fs_root = request.getfuncargvalue("tmpdir")
        common.setuptestfs(fs_root)
        return fs_root

    def teardown(fs_root):
        # post check: the sample file must still exist at session end
        assert fs_root.join("samplefile").check()

    return request.cached_setup(setup, teardown, scope="session")
class TestLocalPath(common.CommonFSTests):
    """py.path.local-specific behaviour on top of the shared CommonFSTests."""

    def test_join_normpath(self, tmpdir):
        # joining "." or "../<basename>" must normalize back to tmpdir itself
        assert tmpdir.join(".") == tmpdir
        p = tmpdir.join("../%s" % tmpdir.basename)
        assert p == tmpdir
        p = tmpdir.join("..//%s/" % tmpdir.basename)
        assert p == tmpdir

    @skiponwin32
    def test_dirpath_abs_no_abs(self, tmpdir):
        p = tmpdir.join('foo')
        assert p.dirpath('/bar') == tmpdir.join('bar')
        assert tmpdir.dirpath('/bar', abs=True) == py.path.local('/bar')

    def test_gethash(self, tmpdir):
        # _tryimport handles both the legacy md5/sha modules and hashlib
        md5 = py.builtin._tryimport('md5', 'hashlib').md5
        lib = py.builtin._tryimport('sha', 'hashlib')
        sha = getattr(lib, 'sha1', getattr(lib, 'sha', None))
        fn = tmpdir.join("testhashfile")
        data = 'hello'.encode('ascii')
        fn.write(data, mode="wb")
        assert fn.computehash("md5") == md5(data).hexdigest()
        assert fn.computehash("sha1") == sha(data).hexdigest()
        py.test.raises(ValueError, fn.computehash, "asdasd")

    def test_remove_removes_readonly_file(self, tmpdir):
        readonly_file = tmpdir.join('readonly').ensure()
        readonly_file.chmod(0)
        readonly_file.remove()
        assert not readonly_file.check(exists=1)

    def test_remove_removes_readonly_dir(self, tmpdir):
        readonly_dir = tmpdir.join('readonlydir').ensure(dir=1)
        readonly_dir.chmod(int("500", 8))
        readonly_dir.remove()
        assert not readonly_dir.check(exists=1)

    def test_remove_removes_dir_and_readonly_file(self, tmpdir):
        readonly_dir = tmpdir.join('readonlydir').ensure(dir=1)
        readonly_file = readonly_dir.join('readonlyfile').ensure()
        readonly_file.chmod(0)
        readonly_dir.remove()
        assert not readonly_dir.check(exists=1)

    def test_remove_routes_ignore_errors(self, tmpdir, monkeypatch):
        # capture the kwargs remove() forwards to shutil.rmtree
        l = []
        monkeypatch.setattr(py.std.shutil, 'rmtree',
                            lambda *args, **kwargs: l.append(kwargs))
        tmpdir.remove()
        assert not l[0]['ignore_errors']
        for val in (True, False):
            l[:] = []
            tmpdir.remove(ignore_errors=val)
            assert l[0]['ignore_errors'] == val

    def test_initialize_curdir(self):
        # local() with no argument points at the current working directory
        assert str(local()) == py.std.os.getcwd()

    @skiponwin32
    def test_chdir_gone(self, path1):
        p = path1.ensure("dir_to_be_removed", dir=1)
        p.chdir()
        p.remove()
        # cwd no longer exists -> constructing local() must raise ENOENT
        pytest.raises(py.error.ENOENT, py.path.local)
        assert path1.chdir() is None
        assert os.getcwd() == str(path1)

    def test_as_cwd(self, path1):
        dir = path1.ensure("subdir", dir=1)
        old = py.path.local()
        with dir.as_cwd() as x:
            assert x == old
            assert py.path.local() == dir
        assert os.getcwd() == str(old)

    def test_as_cwd_exception(self, path1):
        # the cwd must be restored even when the with-body raises
        old = py.path.local()
        dir = path1.ensure("subdir", dir=1)
        with pytest.raises(ValueError):
            with dir.as_cwd():
                raise ValueError()
        assert old == py.path.local()

    def test_initialize_reldir(self, path1):
        with path1.as_cwd():
            p = local('samplefile')
            assert p.check()

    @pytest.mark.xfail("sys.version_info < (2,6) and sys.platform == 'win32'")
    def test_tilde_expansion(self, monkeypatch, tmpdir):
        monkeypatch.setenv("HOME", str(tmpdir))
        p = py.path.local("~", expanduser=True)
        assert p == os.path.expanduser("~")

    def test_eq_with_strings(self, path1):
        # equality is symmetric between local paths and plain strings
        path1 = path1.join('sampledir')
        path2 = str(path1)
        assert path1 == path2
        assert path2 == path1
        path3 = path1.join('samplefile')
        assert path3 != path2
        assert path2 != path3

    def test_eq_with_none(self, path1):
        assert path1 != None

    def test_gt_with_strings(self, path1):
        # ordering compares against strings and sorts lexicographically
        path2 = path1.join('sampledir')
        path3 = str(path1.join("ttt"))
        assert path3 > path2
        assert path2 < path3
        assert path2 < "ttt"
        assert "ttt" > path2
        path4 = path1.join("aaa")
        l = [path2, path4,path3]
        assert sorted(l) == [path4, path2, path3]

    def test_open_and_ensure(self, path1):
        # ensure=1 creates the missing parent directories on open
        p = path1.join("sub1", "sub2", "file")
        with p.open("w", ensure=1) as f:
            f.write("hello")
        assert p.read() == "hello"

    def test_write_and_ensure(self, path1):
        p = path1.join("sub1", "sub2", "file")
        p.write("hello", ensure=1)
        assert p.read() == "hello"

    @py.test.mark.multi(bin=(False, True))
    def test_dump(self, tmpdir, bin):
        path = tmpdir.join("dumpfile%s" % int(bin))
        try:
            d = {'answer' : 42}
            path.dump(d, bin=bin)
            f = path.open('rb+')
            dnew = py.std.pickle.load(f)
            assert d == dnew
        finally:
            f.close()

    @failsonjywin32
    def test_setmtime(self):
        import tempfile
        import time
        try:
            fd, name = tempfile.mkstemp()
            py.std.os.close(fd)
        except AttributeError:
            # Jython has no mkstemp; fall back to mktemp + manual creation
            name = tempfile.mktemp()
            open(name, 'w').close()
        try:
            mtime = int(time.time())-100
            path = local(name)
            assert path.mtime() != mtime
            path.setmtime(mtime)
            assert path.mtime() == mtime
            # setmtime() without argument resets to "now"
            path.setmtime()
            assert path.mtime() != mtime
        finally:
            py.std.os.remove(name)

    def test_normpath(self, path1):
        new1 = path1.join("/otherdir")
        new2 = path1.join("otherdir")
        assert str(new1) == str(new2)

    def test_mkdtemp_creation(self):
        d = local.mkdtemp()
        try:
            assert d.check(dir=1)
        finally:
            d.remove(rec=1)

    def test_tmproot(self):
        d = local.mkdtemp()
        tmproot = local.get_temproot()
        try:
            assert d.check(dir=1)
            assert d.dirpath() == tmproot
        finally:
            d.remove(rec=1)

    def test_chdir(self, tmpdir):
        old = local()
        try:
            res = tmpdir.chdir()
            # chdir() returns the previous working directory
            assert str(res) == str(old)
            assert py.std.os.getcwd() == str(tmpdir)
        finally:
            old.chdir()

    def test_ensure_filepath_withdir(self, tmpdir):
        newfile = tmpdir.join('test1','test')
        newfile.ensure()
        assert newfile.check(file=1)
        newfile.write("42")
        # a second ensure() must not truncate the existing file
        newfile.ensure()
        s = newfile.read()
        assert s == "42"

    def test_ensure_filepath_withoutdir(self, tmpdir):
        newfile = tmpdir.join('test1file')
        t = newfile.ensure()
        assert t == newfile
        assert newfile.check(file=1)

    def test_ensure_dirpath(self, tmpdir):
        newfile = tmpdir.join('test1','testfile')
        t = newfile.ensure(dir=1)
        assert t == newfile
        assert newfile.check(dir=1)

    def test_init_from_path(self, tmpdir):
        l = local()
        l2 = local(l)
        assert l2 == l
        # constructing a local from an svnwc path keeps only the fs path
        wc = py.path.svnwc('.')
        l3 = local(wc)
        assert l3 is not wc
        assert l3.strpath == wc.strpath
        assert not hasattr(l3, 'commit')

    @py.test.mark.xfail(run=False, reason="unreliable est for long filenames")
    def test_long_filenames(self, tmpdir):
        if sys.platform == "win32":
            py.test.skip("win32: work around needed for path length limit")
        # see http://codespeak.net/pipermail/py-dev/2008q2/000922.html
        # testing paths > 260 chars (which is Windows' limitation, but
        # depending on how the paths are used), but > 4096 (which is the
        # Linux' limitation) - the behaviour of paths with names > 4096 chars
        # is undetermined
        newfilename = '/test' * 60
        l = tmpdir.join(newfilename)
        l.ensure(file=True)
        l.write('foo')
        l2 = tmpdir.join(newfilename)
        assert l2.read() == 'foo'

    def test_visit_depth_first(self, tmpdir):
        p1 = tmpdir.ensure("a","1")
        p2 = tmpdir.ensure("b","2")
        p3 = tmpdir.ensure("breadth")
        l = list(tmpdir.visit(lambda x: x.check(file=1)))
        assert len(l) == 3
        # check that breadth comes last
        assert l[2] == p3

    def test_visit_rec_fnmatch(self, tmpdir):
        p1 = tmpdir.ensure("a","123")
        p2 = tmpdir.ensure(".b","345")
        # rec="[!.]*" prunes hidden directories from recursion
        l = list(tmpdir.visit("???", rec="[!.]*"))
        assert len(l) == 1
        # check that breadth comes last
        assert l[0] == p1

    def test_fnmatch_file_abspath(self, tmpdir):
        b = tmpdir.join("a", "b")
        assert b.fnmatch(os.sep.join("ab"))
        pattern = os.sep.join([str(tmpdir), "*", "b"])
        assert b.fnmatch(pattern)

    def test_sysfind(self):
        # "cmd" exists on windows, "test" on POSIX
        name = sys.platform == "win32" and "cmd" or "test"
        x = py.path.local.sysfind(name)
        assert x.check(file=1)
        assert py.path.local.sysfind('jaksdkasldqwe') is None
        assert py.path.local.sysfind(name, paths=[]) is None
        x2 = py.path.local.sysfind(name, paths=[x.dirpath()])
        assert x2 == x
class TestExecutionOnWindows:
    """sysfind behaviour that only applies on win32."""
    pytestmark = win32only

    def test_sysfind_bat_exe_before(self, tmpdir, monkeypatch):
        monkeypatch.setenv("PATH", str(tmpdir), prepend=os.pathsep)
        tmpdir.ensure("hello")
        h = tmpdir.ensure("hello.bat")
        # the executable extension (.bat) must win over the bare file
        x = py.path.local.sysfind("hello")
        assert x == h
class TestExecution:
    """sysfind/sysexec and numbered-dir behaviour on POSIX platforms."""
    pytestmark = skiponwin32

    def test_sysfind_no_permisson_ignored(self, monkeypatch, tmpdir):
        # an unreadable PATH entry must be skipped, not raise
        noperm = tmpdir.ensure('noperm', dir=True)
        monkeypatch.setenv("PATH", noperm, prepend=":")
        noperm.chmod(0)
        assert py.path.local.sysfind('jaksdkasldqwe') is None

    def test_sysfind_absolute(self):
        x = py.path.local.sysfind('test')
        assert x.check(file=1)
        # sysfind with an absolute path returns that same file
        y = py.path.local.sysfind(str(x))
        assert y.check(file=1)
        assert y == x

    def test_sysfind_multiple(self, tmpdir, monkeypatch):
        monkeypatch.setenv('PATH',
                           "%s:%s" % (tmpdir.ensure('a'),
                                      tmpdir.join('b')),
                           prepend=":")
        tmpdir.ensure('b', 'a')
        # the checker callback filters among multiple PATH matches
        checker = lambda x: x.dirpath().basename == 'b'
        x = py.path.local.sysfind('a', checker=checker)
        assert x.basename == 'a'
        assert x.dirpath().basename == 'b'
        checker = lambda x: None
        assert py.path.local.sysfind('a', checker=checker) is None

    def test_sysexec(self):
        x = py.path.local.sysfind('ls')
        out = x.sysexec('-a')
        for x in py.path.local().listdir():
            assert out.find(x.basename) != -1

    def test_sysexec_failing(self):
        # a non-zero exit status surfaces as cmdexec.Error
        x = py.path.local.sysfind('false')
        py.test.raises(py.process.cmdexec.Error, """
            x.sysexec('aksjdkasjd')
        """)

    def test_make_numbered_dir(self, tmpdir):
        # a non-numeric suffix must not confuse the numbering
        tmpdir.ensure('base.not_an_int', dir=1)
        for i in range(10):
            numdir = local.make_numbered_dir(prefix='base.', rootdir=tmpdir,
                                             keep=2, lock_timeout=0)
            assert numdir.check()
            assert numdir.basename == 'base.%d' %i
            # keep=2: the two previous dirs survive, older ones are pruned
            if i>=1:
                assert numdir.new(ext=str(i-1)).check()
            if i>=2:
                assert numdir.new(ext=str(i-2)).check()
            if i>=3:
                assert not numdir.new(ext=str(i-3)).check()

    def test_make_numbered_dir_NotImplemented_Error(self, tmpdir, monkeypatch):
        # platforms without os.symlink must still get a working numbered dir
        def notimpl(x, y):
            raise NotImplementedError(42)
        monkeypatch.setattr(py.std.os, 'symlink', notimpl)
        x = tmpdir.make_numbered_dir(rootdir=tmpdir, lock_timeout=0)
        assert x.relto(tmpdir)
        assert x.check()

    def test_locked_make_numbered_dir(self, tmpdir):
        # with locking enabled (default timeout) nothing gets pruned
        for i in range(10):
            numdir = local.make_numbered_dir(prefix='base2.', rootdir=tmpdir,
                                             keep=2)
            assert numdir.check()
            assert numdir.basename == 'base2.%d' %i
            for j in range(i):
                assert numdir.new(ext=str(j)).check()

    def test_error_preservation(self, path1):
        # os-level errors surface as EnvironmentError subclasses
        py.test.raises (EnvironmentError, path1.join('qwoeqiwe').mtime)
        py.test.raises (EnvironmentError, path1.join('qwoeqiwe').read)
#def test_parentdirmatch(self):
# local.parentdirmatch('std', startmodule=__name__)
#
class TestImport:
    """Tests for py.path.local.pyimport() module loading."""

    def test_pyimport(self, path1):
        obj = path1.join('execfile.py').pyimport()
        assert obj.x == 42
        assert obj.__name__ == 'execfile'

    def test_pyimport_renamed_dir_creates_mismatch(self, tmpdir):
        # moving an already-imported package dir must be detected on re-import
        p = tmpdir.ensure("a", "test_x123.py")
        p.pyimport()
        tmpdir.join("a").move(tmpdir.join("b"))
        pytest.raises(tmpdir.ImportMismatchError,
                      lambda: tmpdir.join("b", "test_x123.py").pyimport())

    def test_pyimport_messy_name(self, tmpdir):
        # http://bitbucket.org/hpk42/py-trunk/issue/129
        path = tmpdir.ensure('foo__init__.py')
        obj = path.pyimport()

    def test_pyimport_dir(self, tmpdir):
        # importing a package dir and its __init__.py yield the same module
        p = tmpdir.join("hello_123")
        p_init = p.ensure("__init__.py")
        m = p.pyimport()
        assert m.__name__ == "hello_123"
        m = p_init.pyimport()
        assert m.__name__ == "hello_123"

    def test_pyimport_execfile_different_name(self, path1):
        obj = path1.join('execfile.py').pyimport(modname="0x.y.z")
        assert obj.x == 42
        assert obj.__name__ == '0x.y.z'

    def test_pyimport_a(self, path1):
        otherdir = path1.join('otherdir')
        mod = otherdir.join('a.py').pyimport()
        assert mod.result == "got it"
        assert mod.__name__ == 'otherdir.a'

    def test_pyimport_b(self, path1):
        otherdir = path1.join('otherdir')
        mod = otherdir.join('b.py').pyimport()
        assert mod.stuff == "got it"
        assert mod.__name__ == 'otherdir.b'

    def test_pyimport_c(self, path1):
        otherdir = path1.join('otherdir')
        mod = otherdir.join('c.py').pyimport()
        assert mod.value == "got it"

    def test_pyimport_d(self, path1):
        otherdir = path1.join('otherdir')
        mod = otherdir.join('d.py').pyimport()
        assert mod.value2 == "got it"

    def test_pyimport_and_import(self, tmpdir):
        # pyimport() and a regular import must return the same module object
        tmpdir.ensure('xxxpackage', '__init__.py')
        mod1path = tmpdir.ensure('xxxpackage', 'module1.py')
        mod1 = mod1path.pyimport()
        assert mod1.__name__ == 'xxxpackage.module1'
        from xxxpackage import module1
        assert module1 is mod1

    def test_pyimport_check_filepath_consistency(self, monkeypatch, tmpdir):
        name = 'pointsback123'
        ModuleType = type(py.std.os)
        p = tmpdir.ensure(name + '.py')
        # compiled/derived __file__ endings are considered the same module
        for ending in ('.pyc', '$py.class', '.pyo'):
            mod = ModuleType(name)
            pseudopath = tmpdir.ensure(name+ending)
            mod.__file__ = str(pseudopath)
            monkeypatch.setitem(sys.modules, name, mod)
            newmod = p.pyimport()
            assert mod == newmod
        monkeypatch.undo()
        # a genuinely different __file__ must raise ImportMismatchError
        mod = ModuleType(name)
        pseudopath = tmpdir.ensure(name+"123.py")
        mod.__file__ = str(pseudopath)
        monkeypatch.setitem(sys.modules, name, mod)
        excinfo = py.test.raises(pseudopath.ImportMismatchError,
                                 "p.pyimport()")
        modname, modfile, orig = excinfo.value.args
        assert modname == name
        assert modfile == pseudopath
        assert orig == p
        assert issubclass(pseudopath.ImportMismatchError, ImportError)

    def test_issue131_pyimport_on__init__(self, tmpdir):
        # __init__.py files may be namespace packages, and thus the
        # __file__ of an imported module may not be ourselves
        # see issue
        p1 = tmpdir.ensure("proja", "__init__.py")
        p2 = tmpdir.ensure("sub", "proja", "__init__.py")
        m1 = p1.pyimport()
        m2 = p2.pyimport()
        assert m1 == m2

    def test_ensuresyspath_append(self, tmpdir):
        # ensuresyspath="append" adds the rootdir at the END of sys.path
        root1 = tmpdir.mkdir("root1")
        file1 = root1.ensure("x123.py")
        assert str(root1) not in sys.path
        file1.pyimport(ensuresyspath="append")
        assert str(root1) == sys.path[-1]
        assert str(root1) not in sys.path[:-1]
def test_pypkgdir(tmpdir):
    """pypkgpath() walks up to the outermost importable package dir."""
    pkg = tmpdir.ensure('pkg1', dir=1)
    pkg.ensure("__init__.py")
    sub_init = pkg.ensure("subdir/__init__.py")
    assert pkg.pypkgpath() == pkg
    assert sub_init.pypkgpath() == pkg
def test_pypkgdir_unimportable(tmpdir):
    """A parent dir with a non-identifier name stops the pypkgpath walk."""
    pkg = tmpdir.ensure('pkg1-1', dir=1)  # '-' makes the name unimportable
    pkg.ensure("__init__.py")
    subdir = pkg.ensure("subdir/__init__.py").dirpath()
    assert subdir.pypkgpath() == subdir
    sub_module = subdir.ensure("xyz.py")
    assert sub_module.pypkgpath() == subdir
    assert not pkg.pypkgpath()
def test_isimportable():
    """isimportable() accepts identifier-like names only."""
    from py._path.local import isimportable
    for good in ("x", "x1", "x_1", "_", "_1"):
        assert isimportable(good)
    for bad in ("", "x-1", "x:1"):
        assert not isimportable(bad)
def test_homedir_from_HOME(monkeypatch):
    """_gethomedir() honours the HOME environment variable."""
    cwd = os.getcwd()
    monkeypatch.setenv("HOME", cwd)
    assert py.path.local._gethomedir() == py.path.local(cwd)
def test_homedir_not_exists(monkeypatch):
    """Without HOME or HOMEDRIVE no home directory can be determined."""
    for var in ("HOME", "HOMEDRIVE"):
        monkeypatch.delenv(var, raising=False)
    assert py.path.local._gethomedir() is None
def test_samefile(tmpdir):
    """samefile() is reflexive, relative-path aware, case-blind on win32."""
    assert tmpdir.samefile(tmpdir)
    hello = tmpdir.ensure("hello")
    assert hello.samefile(hello)
    with hello.dirpath().as_cwd():
        # a relative basename resolves against the cwd
        assert hello.samefile(hello.basename)
    if sys.platform == "win32":
        lower = hello.__class__(str(hello).lower())
        upper = hello.__class__(str(hello).upper())
        assert lower.samefile(upper)
def test_listdir_single_arg(tmpdir):
    """listdir() accepts a single fnmatch pattern argument."""
    tmpdir.ensure("hello")
    entries = tmpdir.listdir("hello")
    assert entries[0].basename == "hello"
def test_mkdtemp_rootdir(tmpdir):
    """mkdtemp(rootdir=...) creates the temp dir below the given root."""
    created = local.mkdtemp(rootdir=tmpdir)
    assert tmpdir.listdir() == [created]
class TestWINLocalPath:
    """Windows-specific path semantics (case-insensitivity, separators)."""
    pytestmark = win32only

    def test_owner_group_not_implemented(self, path1):
        # POSIX ownership concepts are unsupported on win32
        py.test.raises(NotImplementedError, "path1.stat().owner")
        py.test.raises(NotImplementedError, "path1.stat().group")

    def test_chmod_simple_int(self, path1):
        py.builtin.print_("path1 is", path1)
        mode = path1.stat().mode
        # Ensure that we actually change the mode to something different.
        path1.chmod(mode == 0 and 1 or 0)
        try:
            print(path1.stat().mode)
            print(mode)
            assert path1.stat().mode != mode
        finally:
            path1.chmod(mode)
            assert path1.stat().mode == mode

    def test_path_comparison_lowercase_mixed(self, path1):
        # path equality on win32 ignores case
        t1 = path1.join("a_path")
        t2 = path1.join("A_path")
        assert t1 == t1
        assert t1 == t2

    def test_relto_with_mixed_case(self, path1):
        t1 = path1.join("a_path", "fiLe")
        t2 = path1.join("A_path")
        assert t1.relto(t2) == "fiLe"

    def test_allow_unix_style_paths(self, path1):
        # forward slashes in join() arguments are converted to backslashes
        t1 = path1.join('a_path')
        assert t1 == str(path1) + '\\a_path'
        t1 = path1.join('a_path/')
        assert t1 == str(path1) + '\\a_path'
        t1 = path1.join('dir/a_path')
        assert t1 == str(path1) + '\\dir\\a_path'

    def test_sysfind_in_currentdir(self, path1):
        cmd = py.path.local.sysfind('cmd')
        root = cmd.new(dirname='', basename='') # c:\ in most installations
        with root.as_cwd():
            x = py.path.local.sysfind(cmd.relto(root))
            assert x.check(file=1)

    def test_fnmatch_file_abspath_posix_pattern_on_win32(self, tmpdir):
        # path-matching patterns might contain a posix path separator '/'
        # Test that we can match that pattern on windows.
        import posixpath
        b = tmpdir.join("a", "b")
        assert b.fnmatch(posixpath.sep.join("ab"))
        pattern = posixpath.sep.join([str(tmpdir), "*", "b"])
        assert b.fnmatch(pattern)
class TestPOSIXLocalPath:
    """POSIX-only behaviour: links, ownership, permissions, common dirs."""
    pytestmark = skiponwin32

    def test_hardlink(self, tmpdir):
        linkpath = tmpdir.join('test')
        filepath = tmpdir.join('file')
        filepath.write("Hello")
        nlink = filepath.stat().nlink
        linkpath.mklinkto(filepath)
        # a hard link bumps the target's link count
        assert filepath.stat().nlink == nlink + 1

    def test_symlink_are_identical(self, tmpdir):
        filepath = tmpdir.join('file')
        filepath.write("Hello")
        linkpath = tmpdir.join('test')
        linkpath.mksymlinkto(filepath)
        assert linkpath.readlink() == str(filepath)

    def test_symlink_isfile(self, tmpdir):
        linkpath = tmpdir.join('test')
        filepath = tmpdir.join('file')
        filepath.write("")
        linkpath.mksymlinkto(filepath)
        # check() follows the link; link=0 excludes symlinks explicitly
        assert linkpath.check(file=1)
        assert not linkpath.check(link=0, file=1)
        assert linkpath.islink()

    def test_symlink_relative(self, tmpdir):
        linkpath = tmpdir.join('test')
        filepath = tmpdir.join('file')
        filepath.write("Hello")
        # absolute=False stores a relative link target
        linkpath.mksymlinkto(filepath, absolute=False)
        assert linkpath.readlink() == "file"
        assert filepath.read() == linkpath.read()

    def test_symlink_not_existing(self, tmpdir):
        linkpath = tmpdir.join('testnotexisting')
        assert not linkpath.check(link=1)
        assert linkpath.check(link=0)

    def test_relto_with_root(self, path1, tmpdir):
        y = path1.join('x').relto(py.path.local('/'))
        assert y[0] == str(path1)[1]

    def test_visit_recursive_symlink(self, tmpdir):
        # a symlink pointing back into the tree must not cause endless recursion
        linkpath = tmpdir.join('test')
        linkpath.mksymlinkto(tmpdir)
        visitor = tmpdir.visit(None, lambda x: x.check(link=0))
        assert list(visitor) == [linkpath]

    def test_symlink_isdir(self, tmpdir):
        linkpath = tmpdir.join('test')
        linkpath.mksymlinkto(tmpdir)
        assert linkpath.check(dir=1)
        assert not linkpath.check(link=0, dir=1)

    def test_symlink_remove(self, tmpdir):
        linkpath = tmpdir.join('test')
        linkpath.mksymlinkto(linkpath) # point to itself
        assert linkpath.check(link=1)
        linkpath.remove()
        assert not linkpath.check()

    def test_realpath_file(self, tmpdir):
        linkpath = tmpdir.join('test')
        filepath = tmpdir.join('file')
        filepath.write("")
        linkpath.mksymlinkto(filepath)
        realpath = linkpath.realpath()
        assert realpath.basename == 'file'

    def test_owner(self, path1, tmpdir):
        from pwd import getpwuid
        from grp import getgrgid
        stat = path1.stat()
        assert stat.path == path1
        uid = stat.uid
        gid = stat.gid
        # stat.owner/group resolve ids to names via pwd/grp
        owner = getpwuid(uid)[0]
        group = getgrgid(gid)[0]
        assert uid == stat.uid
        assert owner == stat.owner
        assert gid == stat.gid
        assert group == stat.group

    def test_stat_helpers(self, tmpdir, monkeypatch):
        path1 = tmpdir.ensure("file")
        stat1 = path1.stat()
        stat2 = tmpdir.stat()
        assert stat1.isfile()
        assert stat2.isdir()
        assert not stat1.islink()
        assert not stat2.islink()

    def test_stat_non_raising(self, tmpdir):
        path1 = tmpdir.join("file")
        pytest.raises(py.error.ENOENT, lambda: path1.stat())
        # raising=False turns the missing-file error into a None result
        res = path1.stat(raising=False)
        assert res is None

    def test_atime(self, tmpdir):
        import time
        path = tmpdir.ensure('samplefile')
        now = time.time()
        atime1 = path.atime()
        # we could wait here but timer resolution is very
        # system dependent
        path.read()
        time.sleep(0.01)
        atime2 = path.atime()
        time.sleep(0.01)
        duration = time.time() - now
        assert (atime2-atime1) <= duration

    def test_commondir(self, path1):
        # XXX This is here in local until we find a way to implement this
        # using the subversion command line api.
        p1 = path1.join('something')
        p2 = path1.join('otherthing')
        assert p1.common(p2) == path1
        assert p2.common(p1) == path1

    def test_commondir_nocommon(self, path1):
        # XXX This is here in local until we find a way to implement this
        # using the subversion command line api.
        p1 = path1.join('something')
        p2 = py.path.local(path1.sep+'blabla')
        assert p1.common(p2) == '/'

    def test_join_to_root(self, path1):
        root = path1.parts()[0]
        assert len(str(root)) == 1
        assert str(root.join('a')) == '//a' # posix allows two slashes

    def test_join_root_to_root_with_no_abs(self, path1):
        nroot = path1.join('/')
        assert str(path1) == str(nroot)
        assert path1 == nroot

    def test_chmod_simple_int(self, path1):
        mode = path1.stat().mode
        # halving the mode guarantees a different value
        path1.chmod(int(mode/2))
        try:
            assert path1.stat().mode != mode
        finally:
            path1.chmod(mode)
            assert path1.stat().mode == mode

    def test_chmod_rec_int(self, path1):
        # XXX fragile test
        recfilter = lambda x: x.check(dotfile=0, link=0)
        oldmodes = {}
        for x in path1.visit(rec=recfilter):
            oldmodes[x] = x.stat().mode
        path1.chmod(int("772", 8), rec=recfilter)
        try:
            for x in path1.visit(rec=recfilter):
                assert x.stat().mode & int("777", 8) == int("772", 8)
        finally:
            # restore the original modes regardless of assertion outcome
            for x,y in oldmodes.items():
                x.chmod(y)

    def test_copy_archiving(self, tmpdir):
        # copy(mode=True) must preserve permission bits, incl. unicode names
        unicode_fn = u"something-\342\200\223.txt"
        f = tmpdir.ensure("a", unicode_fn)
        a = f.dirpath()
        oldmode = f.stat().mode
        newmode = oldmode ^ 1
        f.chmod(newmode)
        b = tmpdir.join("b")
        a.copy(b, mode=True)
        assert b.join(f.basename).stat().mode == newmode

    @failsonjython
    def test_chown_identity(self, path1):
        owner = path1.stat().owner
        group = path1.stat().group
        path1.chown(owner, group)

    @failsonjython
    def test_chown_dangling_link(self, path1):
        # chown(rec=1) must not choke on a symlink with a missing target
        owner = path1.stat().owner
        group = path1.stat().group
        x = path1.join('hello')
        x.mksymlinkto('qlwkejqwlek')
        try:
            path1.chown(owner, group, rec=1)
        finally:
            x.remove(rec=0)

    @failsonjython
    def test_chown_identity_rec_mayfail(self, path1):
        owner = path1.stat().owner
        group = path1.stat().group
        path1.chown(owner, group)
class TestUnicodePy2Py3:
    """Unicode filename handling that must behave the same on py2 and py3.

    The py3 cases need a configured locale (LANG) to encode non-ASCII
    filenames, so they are skipped when LANG is absent.
    """
    def test_join_ensure(self, tmpdir, monkeypatch):
        """ensure() with a non-ASCII name equals a plain join() of it."""
        if sys.version_info >= (3,0) and "LANG" not in os.environ:
            pytest.skip("cannot run test without locale")
        x = py.path.local(tmpdir.strpath)
        part = "hällo"
        y = x.ensure(part)
        assert x.join(part) == y
    def test_listdir(self, tmpdir):
        """listdir() with a non-ASCII glob finds the created entry."""
        if sys.version_info >= (3,0) and "LANG" not in os.environ:
            pytest.skip("cannot run test without locale")
        x = py.path.local(tmpdir.strpath)
        part = "hällo"
        y = x.ensure(part)
        assert x.listdir(part)[0] == y
    @pytest.mark.xfail(reason="changing read/write might break existing usages")
    def test_read_write(self, tmpdir):
        """read()/write() round-trip both text and bytes payloads."""
        x = tmpdir.join("hello")
        part = py.builtin._totext("hällo", "utf8")
        x.write(part)
        assert x.read() == part
        x.write(part.encode(sys.getdefaultencoding()))
        assert x.read() == part.encode(sys.getdefaultencoding())
class TestBinaryAndTextMethods:
    """Round-trip tests for the explicit *_binary and *_text path methods."""
    def test_read_binwrite(self, tmpdir):
        """write_binary() bytes read back via both binary and text APIs."""
        x = tmpdir.join("hello")
        part = py.builtin._totext("hällo", "utf8")
        part_utf8 = part.encode("utf8")
        x.write_binary(part_utf8)
        assert x.read_binary() == part_utf8
        s = x.read_text(encoding="utf8")
        assert s == part
        # read_text() must return the text type, never bytes.
        assert py.builtin._istext(s)
    def test_read_textwrite(self, tmpdir):
        """write_text() with utf8 produces the expected raw bytes on disk."""
        x = tmpdir.join("hello")
        part = py.builtin._totext("hällo", "utf8")
        part_utf8 = part.encode("utf8")
        x.write_text(part, encoding="utf8")
        assert x.read_binary() == part_utf8
        assert x.read_text(encoding="utf8") == part
    def test_default_encoding(self, tmpdir):
        """ASCII-only content round-trips with an explicit ascii encoding."""
        x = tmpdir.join("hello")
        # Can't use UTF8 as the default encoding (ASCII) doesn't support it
        part = py.builtin._totext("hello", "ascii")
        x.write_text(part, "ascii")
        s = x.read_text("ascii")
        assert s == part
        assert type(s) == type(part)
| mpl-2.0 |
spawnedc/MeCanBlog | django/contrib/flatpages/models.py | 410 | 1134 | from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class FlatPage(models.Model):
    """A simple CMS page identified by its URL and attached to sites.

    The template used to render the page is ``template_name`` when set,
    otherwise 'flatpages/default.html' (see the help_text below).
    """
    # Indexed because pages are looked up by URL on every request.
    url = models.CharField(_('URL'), max_length=100, db_index=True)
    title = models.CharField(_('title'), max_length=200)
    content = models.TextField(_('content'), blank=True)
    enable_comments = models.BooleanField(_('enable comments'))
    template_name = models.CharField(_('template name'), max_length=70, blank=True,
        help_text=_("Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use 'flatpages/default.html'."))
    registration_required = models.BooleanField(_('registration required'), help_text=_("If this is checked, only logged-in users will be able to view the page."))
    # A page may be published on several sites at once.
    sites = models.ManyToManyField(Site)
    class Meta:
        db_table = 'django_flatpage'
        verbose_name = _('flat page')
        verbose_name_plural = _('flat pages')
        ordering = ('url',)
    def __unicode__(self):
        return u"%s -- %s" % (self.url, self.title)
    def get_absolute_url(self):
        # The URL field already stores the absolute path of the page.
        return self.url
| bsd-3-clause |
dariemp/odoo | addons/hr_recruitment/__init__.py | 433 | 1145 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_recruitment
import report
import wizard
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
JaneliaSciComp/osgpyplusplus | examples/rough_translated1/osgwidgetshader.py | 1 | 1901 | #!/bin/env python
# Automatically translated python version of
# OpenSceneGraph example program "osgwidgetshader"
# !!! This program will need manual tuning before it will work. !!!
import sys
from osgpypp import osgDB
from osgpypp import osgWidget
# Translated from file 'osgwidgetshader.cpp'
# -*-c++-*- osgWidget - Code by: Jeremy Moles (cubicool) 2007-2008
# $Id: osgwidgetshader.cpp 28 2008-03-26 15:26:48Z cubicool $
#include <osgDB/FileUtils>
#include <osgWidget/Util>
#include <osgWidget/WindowManager>
#include <osgWidget/Canvas>
MASK_2D = 0xF0000000
def createWidget(name, col, layer):
    """Build a 200x200 osgWidget.Widget in a grey tone on the given layer.

    The same value is used for the R, G and B channels, with a mostly
    transparent alpha of 0.2.
    """
    w = osgWidget.Widget(name, 200.0, 200.0)
    w.setColor(col, col, col, 0.2)
    w.setLayer(layer)
    return w
def main(argv):
    """Build a three-widget canvas with a GLSL shader and run the example.

    Returns the exit status of osgWidget.createExample().
    """
    # NOTE(review): this module only imports osgDB and osgWidget at file
    # level, but also uses osg and osgViewer -- without these local imports
    # the function raises NameError (the header warns the auto-translated
    # file "will need manual tuning").
    from osgpypp import osg, osgViewer
    viewer = osgViewer.Viewer()
    # 2D window manager covering a 1280x1024 overlay, selected via MASK_2D.
    wm = osgWidget.WindowManager(
        viewer,
        1280.0,
        1024.0,
        MASK_2D
    )
    canvas = osgWidget.Canvas("canvas")
    canvas.attachMoveCallback()
    canvas.attachScaleCallback()
    # Three widgets laid out side by side, one per depth layer.
    canvas.addWidget(
        createWidget("w1", 0.2, osgWidget.Widget.LAYER_LOW),
        0.0,
        0.0
    )
    canvas.addWidget(
        createWidget("w2", 0.4, osgWidget.Widget.LAYER_MIDDLE),
        200.0,
        0.0
    )
    canvas.addWidget(
        createWidget("w3", 0.6, osgWidget.Widget.LAYER_HIGH),
        400.0,
        0.0
    )
    wm.addChild(canvas)
    # Load the vertex + fragment shaders and attach the program to the
    # canvas geode's state set so the whole canvas is rendered with it.
    program = osg.Program()
    program.addShader(osg.Shader.readShaderFile(
        osg.Shader.VERTEX,
        osgDB.findDataFile("osgWidget/osgwidgetshader-vert.glsl")
    ))
    program.addShader(osg.Shader.readShaderFile(
        osg.Shader.FRAGMENT,
        osgDB.findDataFile("osgWidget/osgwidgetshader-frag.glsl")
    ))
    canvas.getGeode().getOrCreateStateSet().setAttribute(program)
    return osgWidget.createExample(viewer, wm)
if __name__ == "__main__":
    main(sys.argv)
| bsd-3-clause |
orbitfp7/nova | nova/db/sqlalchemy/migrate_repo/versions/245_add_mtu_and_dhcp_server.py | 32 | 3564 | # Copyright (c) 2014 Nebula, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Column, Table
from sqlalchemy import Boolean, Integer
from nova.db.sqlalchemy import types
def upgrade(migrate_engine):
    """Function adds network mtu, dhcp_server, and share_dhcp fields."""
    meta = MetaData(bind=migrate_engine)
    networks = Table('networks', meta, autoload=True)
    shadow_networks = Table('shadow_networks', meta, autoload=True)
    # NOTE(vish): ignore duplicate runs of upgrade so this can
    # be backported
    mtu = Column('mtu', Integer)
    dhcp_server = Column('dhcp_server', types.IPAddress)
    enable_dhcp = Column('enable_dhcp', Boolean, default=True)
    share_address = Column('share_address', Boolean, default=False)
    # Each column is only added when absent (the hasattr checks make the
    # migration idempotent).
    if not hasattr(networks.c, 'mtu'):
        networks.create_column(mtu)
    if not hasattr(networks.c, 'dhcp_server'):
        networks.create_column(dhcp_server)
    if not hasattr(networks.c, 'enable_dhcp'):
        networks.create_column(enable_dhcp)
    if not hasattr(networks.c, 'share_address'):
        networks.create_column(share_address)
    # The shadow table mirrors the live table; Column objects cannot be
    # attached to two tables, hence the .copy() calls.
    if not hasattr(shadow_networks.c, 'mtu'):
        shadow_networks.create_column(mtu.copy())
    if not hasattr(shadow_networks.c, 'dhcp_server'):
        shadow_networks.create_column(dhcp_server.copy())
    if not hasattr(shadow_networks.c, 'enable_dhcp'):
        shadow_networks.create_column(enable_dhcp.copy())
    if not hasattr(shadow_networks.c, 'share_address'):
        shadow_networks.create_column(share_address.copy())
# NOTE(vish): sqlite won't drop bool columns because it leaves a
# constraint behind so work around it.
# NOTE(review): kept at module level so that downgrade() below can call it;
# if this helper were nested inside upgrade(), downgrade() would fail with
# a NameError.
def drop_boolean(column):
    """Drop a Boolean column after detaching SQLite's CHECK constraint.

    SQLite refuses to drop a boolean column while the auto-generated
    CHECK constraint still references it, so remove the constraint whose
    SQL text mentions the column's name first, then drop the column.
    """
    for constraint in column.table.constraints:
        # Only CHECK constraints carry 'sqltext'; others yield '' here.
        # (unicode() is the Python 2 builtin -- this file is py2-only.)
        if column.name in unicode(getattr(constraint, 'sqltext', '')):
            column.table.constraints.remove(constraint)
            break
    column.drop()
def downgrade(migrate_engine):
    """Function removes network mtu, dhcp_server, and share_dhcp fields."""
    meta = MetaData(bind=migrate_engine)
    networks = Table('networks', meta, autoload=True)
    shadow_networks = Table('shadow_networks', meta, autoload=True)
    # NOTE(vish): ignore duplicate runs of upgrade so this can
    # be backported
    # Boolean columns go through drop_boolean() (module scope above) to
    # work around SQLite's leftover CHECK constraint; plain columns are
    # dropped directly.  hasattr checks keep the downgrade idempotent.
    if hasattr(networks.c, 'mtu'):
        networks.c.mtu.drop()
    if hasattr(networks.c, 'dhcp_server'):
        networks.c.dhcp_server.drop()
    if hasattr(networks.c, 'enable_dhcp'):
        drop_boolean(networks.c.enable_dhcp)
    if hasattr(networks.c, 'share_address'):
        drop_boolean(networks.c.share_address)
    if hasattr(shadow_networks.c, 'mtu'):
        shadow_networks.c.mtu.drop()
    if hasattr(shadow_networks.c, 'dhcp_server'):
        shadow_networks.c.dhcp_server.drop()
    if hasattr(shadow_networks.c, 'enable_dhcp'):
        drop_boolean(shadow_networks.c.enable_dhcp)
    if hasattr(shadow_networks.c, 'share_address'):
        drop_boolean(shadow_networks.c.share_address)
| apache-2.0 |
tswsl1989/Minecraft-Overviewer | test/test_regionTrimmer.py | 4 | 2357 | import unittest
from pathlib import Path
from tempfile import TemporaryDirectory
import networkx
import contrib.regionTrimmer as region_trimmer
class TestRegionTrimmer(unittest.TestCase):
    """Unit tests for contrib.regionTrimmer's region-file graph helpers.

    Region files are Minecraft ``r.<x>.<z>.mca`` files; the module maps
    them to (x, z) coordinate nodes in a networkx graph.
    """
    def test_get_nodes(self):
        """get_nodes() extracts one (x, z) node per .mca file found."""
        coords = [(0, 0), (0, -1), (-1, 0), (-1, -1)]
        with TemporaryDirectory() as tmpdirname:
            region_file = Path(tmpdirname)
            for x, z in coords:
                region_fname = "r.{x}.{z}.mca".format(x=x, z=z)
                (region_file / region_fname).touch()
            nodes = region_trimmer.get_nodes(region_file)
            self.assertListEqual(sorted(nodes), sorted(coords))
    def test_get_nodes_returns_empty_list_when_no_region_files(self):
        """Non-.mca files in the directory are ignored entirely."""
        with TemporaryDirectory() as tmpdirname:
            region_file = Path(tmpdirname)
            (region_file / "not_region_file.txt").touch()
            nodes = region_trimmer.get_nodes(region_file)
            self.assertListEqual(nodes, [])
    def test_get_region_file_from_node(self):
        """A node maps back to its r.<x>.<z>.mca path under the regionset."""
        node = (0, 0)
        regionset_path = Path('/path/to/regions')
        self.assertEqual(region_trimmer.get_region_file_from_node(
            regionset_path, node), Path('/path/to/regions/r.0.0.mca'))
    def test_get_graph_bounds(self):
        """ Should return (max_x, min_x, max_z, min_z) of all nodes
        """
        graph = networkx.Graph()
        graph.add_nodes_from([(0, 0), (0, -1), (-1, 0), (-1, -1)])
        self.assertEqual(region_trimmer.get_graph_bounds(graph), (0, -1, 0, -1))
    def test_get_graph_center_by_bounds(self):
        """Center is derived from the bounds tuple returned above."""
        self.assertEqual(region_trimmer.get_graph_center_by_bounds((0, -1, 0, -1)), (-1, -1))
    def test_generate_edges(self):
        """generate_edges() links each node to its adjacent neighbours."""
        graph = networkx.Graph()
        graph.add_nodes_from(
            [(0, 0), (0, -1), (-1, 0), (-1, -1)]
        )
        graph = region_trimmer.generate_edges(graph)
        # Expected adjacency of a 2x2 block of nodes (networkx adj dicts).
        self.assertEqual(
            graph.adj,
            {
                (0, -1): {(0, 0): {}, (-1, -1): {}},
                (0, 0): {
                    (0, -1): {},
                    (-1, 0): {},
                    (-1, -1): {},
                },
                (-1, 0): {(0, 0): {}, (-1, -1): {}},
                (-1, -1): {
                    (0, -1): {},
                    (0, 0): {},
                    (-1, 0): {},
                },
            },
        )
| gpl-3.0 |
datenbetrieb/odoo | addons/crm_project_issue/project_issue.py | 380 | 2373 |
from openerp.osv import osv, fields
class crm_lead_to_project_issue_wizard(osv.TransientModel):
    """ wizard to convert a Lead into a Project Issue and move the Mail Thread """
    _name = "crm.lead2projectissue.wizard"
    _inherit = 'crm.partner.binding'
    _columns = {
        "lead_id": fields.many2one("crm.lead", "Lead", domain=[("type", "=", "lead")]),
        "project_id": fields.many2one("project.project", "Project", domain=[("use_issues", "=", True)])
    }
    _defaults = {
        # The wizard is launched from a lead, so the active record id is it.
        "lead_id": lambda self, cr, uid, context=None: context.get('active_id')
    }
    def action_lead_to_project_issue(self, cr, uid, ids, context=None):
        """Convert each selected lead into a project issue.

        For every wizard record: resolve or create the partner, create a
        project.issue carrying the lead's data, move the mail thread to the
        new issue, delete the lead, and finally return an act_window action
        opening the last created issue's form view.
        """
        # get the wizards and models
        wizards = self.browse(cr, uid, ids, context=context)
        Lead = self.pool["crm.lead"]
        Issue = self.pool["project.issue"]
        for wizard in wizards:
            # get the lead to transform
            lead = wizard.lead_id
            partner = self._find_matching_partner(cr, uid, context=context)
            if not partner and (lead.partner_name or lead.contact_name):
                partner_ids = Lead.handle_partner_assignation(cr, uid, [lead.id], context=context)
                partner = partner_ids[lead.id]
            # create new project.issue
            vals = {
                "name": lead.name,
                "description": lead.description,
                "email_from": lead.email_from,
                "project_id": wizard.project_id.id,
                "partner_id": partner,
                "user_id": None
            }
            # Forward the caller's context (language, company, defaults);
            # the original passed context=None here and on unlink(), which
            # was inconsistent with every other ORM call in this method.
            issue_id = Issue.create(cr, uid, vals, context=context)
            # move the mail thread
            Lead.message_change_thread(cr, uid, lead.id, issue_id, "project.issue", context=context)
            # delete the lead
            Lead.unlink(cr, uid, [lead.id], context=context)
        # return the action to go to the form view of the new Issue
        view_id = self.pool.get('ir.ui.view').search(cr, uid, [('model', '=', 'project.issue'), ('name', '=', 'project_issue_form_view')])
        return {
            'name': 'Issue created',
            'view_type': 'form',
            'view_mode': 'form',
            'view_id': view_id,
            'res_model': 'project.issue',
            'type': 'ir.actions.act_window',
            'res_id': issue_id,
            'context': context
        }
| agpl-3.0 |
entwanne/Rodolphe | rodolphe/utils/middleware/tags.py | 2 | 1082 | from django.core.urlresolvers import resolve, reverse
from django.http import HttpResponsePermanentRedirect
from utils.tags import TagsSet
class TagsMiddleware:
    """Middleware turning add_tag/exclude_tag/del_tag query parameters
    into a redirect to the canonical tag-search URL.

    If the current view is already the tag search, its existing pattern is
    parsed and mutated; otherwise a fresh tag set is started.
    """
    main_view = 'main.views.home'
    view = 'main.views.tag.search'
    def process_request(self, request):
        # Fast exit when none of the tag-manipulation parameters is present.
        if not any(s in request.GET
                   for s in ('add_tag', 'exclude_tag', 'del_tag')):
            return
        add_tag = request.GET.get('add_tag')
        exclude_tag = request.GET.get('exclude_tag')
        del_tag = request.GET.get('del_tag')
        url = resolve(request.path)
        if url.view_name == self.view:
            # Already on the search view: start from its current tag pattern.
            tags = TagsSet.from_string(url.kwargs.get('pattern'))
        else:
            tags = TagsSet()
        if add_tag:
            tags.add(add_tag)
        if exclude_tag:
            tags.exclude(exclude_tag)
        if del_tag:
            tags.remove(del_tag)
        if tags:
            new_url = reverse(self.view, args=(str(tags),))
        else:
            # No tags left: back to the home view.
            new_url = reverse(self.main_view)
        # NOTE(review): a 301 (permanent) redirect may be cached by browsers
        # for these parameter-driven URLs -- confirm a temporary redirect
        # was not intended.
        return HttpResponsePermanentRedirect(new_url)
| bsd-2-clause |
gautamkmr/incubator-mxnet | example/kaggle-ndsb1/training_curves.py | 52 | 1879 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
## based on https://github.com/dmlc/mxnet/issues/1302
## Parses the model fit log file and generates a train/val vs epoch plot
import matplotlib.pyplot as plt
import numpy as np
import re
import argparse
parser = argparse.ArgumentParser(description='Parses log file and generates train/val curves')
parser.add_argument('--log-file', type=str, default="log_tr_va",
                    help='the path of log file')
args = parser.parse_args()
# One accuracy value per epoch, e.g. "...] Train-accuracy=0.95".
# Raw strings so the \s and \d escapes are unambiguous regex classes.
TR_RE = re.compile(r'.*?]\sTrain-accuracy=([\d\.]+)')
VA_RE = re.compile(r'.*?]\sValidation-accuracy=([\d\.]+)')
# Use a context manager so the log file handle is closed deterministically
# (the original leaked it via open(...).read()).
with open(args.log_file) as log_fp:
    log = log_fp.read()
log_tr = [float(x) for x in TR_RE.findall(log)]
log_va = [float(x) for x in VA_RE.findall(log)]
# NOTE(review): an empty/unmatched log leaves idx empty and min()/max()
# below will raise -- same as the original behaviour.
idx = np.arange(len(log_tr))
plt.figure(figsize=(8, 6))
plt.xlabel("Epoch")
plt.ylabel("Accuracy")
plt.plot(idx, log_tr, 'o', linestyle='-', color="r",
         label="Train accuracy")
plt.plot(idx, log_va, 'o', linestyle='-', color="b",
         label="Validation accuracy")
plt.legend(loc="best")
plt.xticks(np.arange(min(idx), max(idx)+1, 5))
plt.yticks(np.arange(0, 1, 0.2))
plt.ylim([0,1])
plt.show()
| apache-2.0 |
jakobworldpeace/scikit-learn | examples/ensemble/plot_gradient_boosting_oob.py | 82 | 4768 | """
======================================
Gradient Boosting Out-of-Bag estimates
======================================
Out-of-bag (OOB) estimates can be a useful heuristic to estimate
the "optimal" number of boosting iterations.
OOB estimates are almost identical to cross-validation estimates but
they can be computed on-the-fly without the need for repeated model
fitting.
OOB estimates are only available for Stochastic Gradient Boosting
(i.e. ``subsample < 1.0``), the estimates are derived from the improvement
in loss based on the examples not included in the bootstrap sample
(the so-called out-of-bag examples).
The OOB estimator is a pessimistic estimator of the true
test loss, but remains a fairly good approximation for a small number of trees.
The figure shows the cumulative sum of the negative OOB improvements
as a function of the boosting iteration. As you can see, it tracks the test
loss for the first hundred iterations but then diverges in a
pessimistic way.
The figure also shows the performance of 3-fold cross validation which
usually gives a better estimate of the test loss
but is computationally more demanding.
"""
print(__doc__)
# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com>
#
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn import ensemble
from sklearn.model_selection import KFold
from sklearn.model_selection import train_test_split
# Generate data (adapted from G. Ridgeway's gbm example)
n_samples = 1000
random_state = np.random.RandomState(13)
x1 = random_state.uniform(size=n_samples)
x2 = random_state.uniform(size=n_samples)
x3 = random_state.randint(0, 4, size=n_samples)
# Bernoulli labels whose probability depends non-linearly on the features.
p = 1 / (1.0 + np.exp(-(np.sin(3 * x1) - 4 * x2 + x3)))
y = random_state.binomial(1, p, size=n_samples)
X = np.c_[x1, x2, x3]
X = X.astype(np.float32)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5,
                                                    random_state=9)
# Fit classifier with out-of-bag estimates
# subsample < 1.0 is what enables the OOB improvement estimates.
params = {'n_estimators': 1200, 'max_depth': 3, 'subsample': 0.5,
          'learning_rate': 0.01, 'min_samples_leaf': 1, 'random_state': 3}
clf = ensemble.GradientBoostingClassifier(**params)
clf.fit(X_train, y_train)
acc = clf.score(X_test, y_test)
print("Accuracy: {:.4f}".format(acc))
n_estimators = params['n_estimators']
# 1-based boosting-iteration axis used by all the plots below.
x = np.arange(n_estimators) + 1
def heldout_score(clf, X_test, y_test):
    """compute deviance scores on ``X_test`` and ``y_test``. """
    # One loss value per boosting stage; relies on the module-level
    # n_estimators matching clf's configured number of stages.
    score = np.zeros((n_estimators,), dtype=np.float64)
    for i, y_pred in enumerate(clf.staged_decision_function(X_test)):
        # clf.loss_ is the fitted model's loss object (deviance here).
        score[i] = clf.loss_(y_test, y_pred)
    return score
def cv_estimate(n_splits=3):
    """Average per-stage held-out loss over a K-fold cross-validation.

    Uses the module-level X_train/y_train, params and n_estimators;
    returns an array of length n_estimators.
    """
    cv = KFold(n_splits=n_splits)
    cv_clf = ensemble.GradientBoostingClassifier(**params)
    val_scores = np.zeros((n_estimators,), dtype=np.float64)
    for train, test in cv.split(X_train, y_train):
        cv_clf.fit(X_train[train], y_train[train])
        val_scores += heldout_score(cv_clf, X_train[test], y_train[test])
    # Mean over folds.
    val_scores /= n_splits
    return val_scores
# Estimate best n_estimator using cross-validation
cv_score = cv_estimate(3)
# Compute best n_estimator for test data
test_score = heldout_score(clf, X_test, y_test)
# negative cumulative sum of oob improvements
cumsum = -np.cumsum(clf.oob_improvement_)
# min loss according to OOB
oob_best_iter = x[np.argmin(cumsum)]
# min loss according to test (normalize such that first loss is 0)
test_score -= test_score[0]
test_best_iter = x[np.argmin(test_score)]
# min loss according to cv (normalize such that first loss is 0)
cv_score -= cv_score[0]
cv_best_iter = x[np.argmin(cv_score)]
# color brew for the three curves
# RGB triples scaled from 0-255 down to matplotlib's 0-1 range.
oob_color = list(map(lambda x: x / 256.0, (190, 174, 212)))
test_color = list(map(lambda x: x / 256.0, (127, 201, 127)))
cv_color = list(map(lambda x: x / 256.0, (253, 192, 134)))
# plot curves and vertical lines for best iterations
plt.plot(x, cumsum, label='OOB loss', color=oob_color)
plt.plot(x, test_score, label='Test loss', color=test_color)
plt.plot(x, cv_score, label='CV loss', color=cv_color)
plt.axvline(x=oob_best_iter, color=oob_color)
plt.axvline(x=test_best_iter, color=test_color)
plt.axvline(x=cv_best_iter, color=cv_color)
# add three vertical lines to xticks
# The extra tick positions are labelled 'OOB'/'CV'/'Test' instead of
# numbers, then everything is re-sorted by position.
xticks = plt.xticks()
xticks_pos = np.array(xticks[0].tolist() +
                      [oob_best_iter, cv_best_iter, test_best_iter])
xticks_label = np.array(list(map(lambda t: int(t), xticks[0])) +
                        ['OOB', 'CV', 'Test'])
ind = np.argsort(xticks_pos)
xticks_pos = xticks_pos[ind]
xticks_label = xticks_label[ind]
plt.xticks(xticks_pos, xticks_label)
plt.legend(loc='upper right')
plt.ylabel('normalized loss')
plt.xlabel('number of iterations')
plt.show()
| bsd-3-clause |
AwesomeTurtle/personfinder | app/admin_dashboard.py | 5 | 4577 | #!/usr/bin/python2.7
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import simplejson
import sys
from model import *
from utils import *
def encode_date(object):
    """Encode datetime objects as specially marked JavaScript strings.

    Returns '<<new Date(y,m,d,h,min)>>' for datetimes; the month is
    decremented because JavaScript Date months are 0-based.  Seconds are
    deliberately not rendered.  Non-datetime values fall through and
    return None, which simplejson serializes as null (preserving the
    original best-effort behaviour of this ``default=`` hook).
    """
    if isinstance(object, datetime):
        # Only year..minute are used; the original unpacked (and ignored)
        # the seconds field as well.
        y, l, d, h, m = object.timetuple()[:5]
        return '<<new Date(%d,%d,%d,%d,%d)>>' % (y, l - 1, d, h, m)
def pack_json(json):
    """Compact the dashboard JSON to save bandwidth (roughly 40% smaller)."""
    # Strip the quoting and spacing around the Visualization API's
    # "c" (cells) and "v" (value) keys, and between adjacent cells.
    replacements = [
        ('{"c": ', '{c:'),
        ('{"v": ', '{v:'),
        ('}, {', '},{'),
    ]
    for old, new in replacements:
        json = json.replace(old, new)
    # Abbreviate every "new Date(...)" to "D(...)", prepending a helper
    # expression that defines D as a Date factory.
    prefix = '(D = function(y,l,d,h,m) {return new Date(y,l,d,h,m);}) && '
    return prefix + json.replace('new Date(', 'D(')
class Handler(BaseHandler):
    """Admin dashboard: renders per-repository counter data as charts."""
    # If a repo is specified, this dashboard shows information about just that
    # repo; otherwise it shows information for all repositories by default.
    repo_required = False
    # Show stats even for deactivated repositories.
    ignore_deactivation = True
    admin_required = True
    def get(self):
        # Determine the time range to display. We currently show the last
        # 7 days of data, which encodes to about 100 kb of JSON text.
        max_time = get_utcnow()
        min_time = max_time - timedelta(7)
        # Gather the data into a table, with a column for each repository. See:
        # http://code.google.com/apis/visualization/documentation/reference.html#dataparam
        active_repos = sorted(Repo.list_active())
        launched_repos = sorted(Repo.list_launched())
        if self.repo:
            active_repos = launched_repos = [self.repo]
        data = {}
        # Per-scan time series: one row per counter sample, one column per
        # repo (the growing 'blanks' list shifts each repo's values right).
        for scan_name in ['person', 'note']:
            data[scan_name] = []
            blanks = []
            for repo in launched_repos:
                query = Counter.all_finished_counters(repo, scan_name)
                counters = query.filter('timestamp >', min_time).fetch(1000)
                data[scan_name] += [
                    {'c': [{'v': c.timestamp}] + blanks + [{'v': c.get('all')}]}
                    for c in counters
                ]
                # Move over one column for the next repository.
                blanks.append({})
        # Gather the counts as well.
        data['counts'] = {}
        counter_names = ['person.all', 'note.all']
        counter_names += ['person.status=' + status
                          for status in [''] + pfif.NOTE_STATUS_VALUES]
        counter_names += ['person.linked_persons=%d' % n for n in range(10)]
        counter_names += ['note.last_known_location', 'note.linked_person']
        counter_names += ['note.status=' + status
                          for status in [''] + pfif.NOTE_STATUS_VALUES]
        for repo in active_repos:
            data['counts'][repo] = dict(
                (name, Counter.get_count(repo, name))
                for name in counter_names)
        # Per-repo breakdown of person/note counts by original domain.
        data['sources'] = {}
        for repo in active_repos:
            counts_by_source = {}
            for kind in ['person', 'note']:
                for name, count in Counter.get_all_counts(repo, kind).items():
                    if name.startswith('original_domain='):
                        source = name.split('=', 1)[1]
                        counts_by_source.setdefault(source, {})[kind] = count
            data['sources'][repo] = sorted(counts_by_source.items())
        # Encode the data as JSON.
        json = simplejson.dumps(data, default=encode_date)
        # Convert the specially marked JavaScript strings to JavaScript dates.
        json = json.replace('"<<', '').replace('>>"', '')
        # Render the page with the JSON data in it.
        self.render('admin_dashboard.html',
                    data_js=pack_json(json),
                    launched_repos_js=simplejson.dumps(launched_repos),
                    active_repos_js=simplejson.dumps(active_repos))
| apache-2.0 |
tinkerthaler/odoo | addons/l10n_mx/__init__.py | 975 | 1058 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
BurntSushi/genecentric | bpm/cmdargs/funcassociateinfo.py | 1 | 1332 | '''
'funcassociateinfo.py' sets up the command line arguments for the
'genecentric-fainfo' program.
'''
import sys
import bpm
import argparse
# Build the command line interface, validate the argument combination
# and publish the parsed options as the package-wide bpm.conf.
parser = argparse.ArgumentParser(
    description='Query Funcassociate for information to use with \'go-enrich\'',
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
aa = parser.add_argument
aa('command', type=str, choices=['species', 'namespaces'],
   metavar='QUERY_COMMAND',
   help='The \'species\' command will ask Funcassociate for a list of '
        'available species to perform GO enrichment with. The \'namespaces\' '
        'command, with a corresponding species name, will ask Funcassociate '
        'for a list of available namespaces to use with the species '
        'specified.')
aa('species', type=str, nargs='?', default=None, metavar='QUERY_SPECIES',
   help='The species to be used when querying for available namespaces. '
        'This should not be used with the \'species\' command.')
aa('-v', '--verbose', dest='verbose', action='store_true',
   help='If set, more output will be shown.')
conf = parser.parse_args()
# 'namespaces' needs a species argument; bail out with an error otherwise.
# (Python 2 print-to-stderr syntax.)
if conf.command == 'namespaces' and conf.species is None:
    print >> sys.stderr, \
        'You must provide a species when using the \'namespace\' command.'
    sys.exit(1)
# Set the global conf variable
bpm.conf = conf
| gpl-2.0 |
bussiere/pypyjs | website/demo/home/rfk/repos/pypy/lib-python/2.7/email/test/test_email.py | 35 | 129419 | # Copyright (C) 2001-2010 Python Software Foundation
# Contact: email-sig@python.org
# email package unit tests
import os
import sys
import time
import base64
import difflib
import unittest
import warnings
from cStringIO import StringIO
import email
from email.Charset import Charset
from email.Header import Header, decode_header, make_header
from email.Parser import Parser, HeaderParser
from email.Generator import Generator, DecodedGenerator
from email.Message import Message
from email.MIMEAudio import MIMEAudio
from email.MIMEText import MIMEText
from email.MIMEImage import MIMEImage
from email.MIMEBase import MIMEBase
from email.MIMEMessage import MIMEMessage
from email.MIMEMultipart import MIMEMultipart
from email import Utils
from email import Errors
from email import Encoders
from email import Iterators
from email import base64MIME
from email import quopriMIME
from test.test_support import findfile, run_unittest
from email.test import __file__ as landmark
NL = '\n'
EMPTYSTRING = ''
SPACE = ' '
def openfile(filename, mode='r'):
    """Open a test-data file from the 'data' dir next to the email.test package."""
    data_dir = os.path.join(os.path.dirname(landmark), 'data')
    return open(os.path.join(data_dir, filename), mode)
# Base test class
# Base test class
class TestEmailBase(unittest.TestCase):
    """Shared helpers for the email package test cases (Python 2)."""
    def ndiffAssertEqual(self, first, second):
        """Like assertEqual except use ndiff for readable output."""
        if first != second:
            sfirst = str(first)
            ssecond = str(second)
            diff = difflib.ndiff(sfirst.splitlines(), ssecond.splitlines())
            fp = StringIO()
            # Python 2 print-to-file syntax; NL joins the diff lines.
            print >> fp, NL, NL.join(diff)
            raise self.failureException, fp.getvalue()
    def _msgobj(self, filename):
        # Parse a message from the named test-data file, always closing
        # the file handle afterwards.
        fp = openfile(findfile(filename))
        try:
            msg = email.message_from_file(fp)
        finally:
            fp.close()
        return msg
# Test various aspects of the Message class's API
class TestMessageAPI(TestEmailBase):
    def test_get_all(self):
        # get_all() returns every value of a repeated header in order;
        # the failobj (2nd argument) is returned for absent headers.
        eq = self.assertEqual
        msg = self._msgobj('msg_20.txt')
        eq(msg.get_all('cc'), ['ccc@zzz.org', 'ddd@zzz.org', 'eee@zzz.org'])
        eq(msg.get_all('xx', 'n/a'), 'n/a')
    def test_getset_charset(self):
        # set_charset() installs MIME-Version/Content-Type/CTE headers;
        # set_charset(None) removes the charset parameter again.
        eq = self.assertEqual
        msg = Message()
        eq(msg.get_charset(), None)
        charset = Charset('iso-8859-1')
        msg.set_charset(charset)
        eq(msg['mime-version'], '1.0')
        eq(msg.get_content_type(), 'text/plain')
        eq(msg['content-type'], 'text/plain; charset="iso-8859-1"')
        eq(msg.get_param('charset'), 'iso-8859-1')
        eq(msg['content-transfer-encoding'], 'quoted-printable')
        eq(msg.get_charset().input_charset, 'iso-8859-1')
        # Remove the charset
        msg.set_charset(None)
        eq(msg.get_charset(), None)
        eq(msg['content-type'], 'text/plain')
        # Try adding a charset when there's already MIME headers present
        msg = Message()
        msg['MIME-Version'] = '2.0'
        msg['Content-Type'] = 'text/x-weird'
        msg['Content-Transfer-Encoding'] = 'quinted-puntable'
        msg.set_charset(charset)
        eq(msg['mime-version'], '2.0')
        eq(msg['content-type'], 'text/x-weird; charset="iso-8859-1"')
        eq(msg['content-transfer-encoding'], 'quinted-puntable')
    def test_set_charset_from_string(self):
        # set_charset() also accepts a charset name instead of a Charset.
        eq = self.assertEqual
        msg = Message()
        msg.set_charset('us-ascii')
        eq(msg.get_charset().input_charset, 'us-ascii')
        eq(msg['content-type'], 'text/plain; charset="us-ascii"')
    def test_set_payload_with_charset(self):
        # set_payload()'s optional charset argument sets the message charset.
        msg = Message()
        charset = Charset('iso-8859-1')
        msg.set_payload('This is a string payload', charset)
        self.assertEqual(msg.get_charset().input_charset, 'iso-8859-1')
    def test_get_charsets(self):
        # get_charsets() lists one charset per message part; the failobj
        # replaces parts that carry no charset.
        eq = self.assertEqual
        msg = self._msgobj('msg_08.txt')
        charsets = msg.get_charsets()
        eq(charsets, [None, 'us-ascii', 'iso-8859-1', 'iso-8859-2', 'koi8-r'])
        msg = self._msgobj('msg_09.txt')
        charsets = msg.get_charsets('dingbat')
        eq(charsets, ['dingbat', 'us-ascii', 'iso-8859-1', 'dingbat',
                      'koi8-r'])
        msg = self._msgobj('msg_12.txt')
        charsets = msg.get_charsets()
        eq(charsets, [None, 'us-ascii', 'iso-8859-1', None, 'iso-8859-2',
                      'iso-8859-3', 'us-ascii', 'koi8-r'])
    def test_get_filename(self):
        # get_filename() reads the Content-Disposition filename parameter.
        eq = self.assertEqual
        msg = self._msgobj('msg_04.txt')
        filenames = [p.get_filename() for p in msg.get_payload()]
        eq(filenames, ['msg.txt', 'msg.txt'])
        msg = self._msgobj('msg_07.txt')
        subpart = msg.get_payload(1)
        eq(subpart.get_filename(), 'dingusfish.gif')
    def test_get_filename_with_name_parameter(self):
        # Falls back to the Content-Type 'name' parameter when the
        # Content-Disposition header has no filename.
        eq = self.assertEqual
        msg = self._msgobj('msg_44.txt')
        filenames = [p.get_filename() for p in msg.get_payload()]
        eq(filenames, ['msg.txt', 'msg.txt'])
    def test_get_boundary(self):
        # get_boundary() returns the boundary parameter with quotes stripped.
        eq = self.assertEqual
        msg = self._msgobj('msg_07.txt')
        # No quotes!
        eq(msg.get_boundary(), 'BOUNDARY')
    def test_set_boundary(self):
        # set_boundary() adds/replaces the boundary parameter in place,
        # preserving the Content-Type header's position among the headers.
        eq = self.assertEqual
        # This one has no existing boundary parameter, but the Content-Type:
        # header appears fifth.
        msg = self._msgobj('msg_01.txt')
        msg.set_boundary('BOUNDARY')
        header, value = msg.items()[4]
        eq(header.lower(), 'content-type')
        eq(value, 'text/plain; charset="us-ascii"; boundary="BOUNDARY"')
        # This one has a Content-Type: header, with a boundary, stuck in the
        # middle of its headers. Make sure the order is preserved; it should
        # be fifth.
        msg = self._msgobj('msg_04.txt')
        msg.set_boundary('BOUNDARY')
        header, value = msg.items()[4]
        eq(header.lower(), 'content-type')
        eq(value, 'multipart/mixed; boundary="BOUNDARY"')
        # And this one has no Content-Type: header at all.
        msg = self._msgobj('msg_03.txt')
        self.assertRaises(Errors.HeaderParseError,
                          msg.set_boundary, 'BOUNDARY')
    def test_make_boundary(self):
        msg = MIMEMultipart('form-data')
        # Note that when the boundary gets created is an implementation
        # detail and might change.
        self.assertEqual(msg.items()[0][1], 'multipart/form-data')
        # Trigger creation of boundary
        msg.as_string()
        # Only the generated prefix is deterministic, so compare a fixed
        # slice rather than the whole (random) boundary value.
        self.assertEqual(msg.items()[0][1][:33],
                         'multipart/form-data; boundary="==')
        # XXX: there ought to be tests of the uniqueness of the boundary, too.
    def test_message_rfc822_only(self):
        # Issue 7970: message/rfc822 not in multipart parsed by
        # HeaderParser caused an exception when flattened.
        fp = openfile(findfile('msg_46.txt'))
        msgdata = fp.read()
        parser = email.Parser.HeaderParser()
        msg = parser.parsestr(msgdata)
        out = StringIO()
        gen = email.Generator.Generator(out, True, 0)
        gen.flatten(msg, False)
        # Flattening must round-trip the original text unchanged.
        self.assertEqual(out.getvalue(), msgdata)
    def test_get_decoded_payload(self):
        # get_payload(decode=True) must undo each subpart's
        # Content-Transfer-Encoding; on a multipart it returns None.
        eq = self.assertEqual
        msg = self._msgobj('msg_10.txt')
        # The outer message is a multipart
        eq(msg.get_payload(decode=True), None)
        # Subpart 1 is 7bit encoded
        eq(msg.get_payload(0).get_payload(decode=True),
           'This is a 7bit encoded message.\n')
        # Subpart 2 is quopri
        eq(msg.get_payload(1).get_payload(decode=True),
           '\xa1This is a Quoted Printable encoded message!\n')
        # Subpart 3 is base64
        eq(msg.get_payload(2).get_payload(decode=True),
           'This is a Base64 encoded message.')
        # Subpart 4 is base64 with a trailing newline, which
        # used to be stripped (issue 7143).
        eq(msg.get_payload(3).get_payload(decode=True),
           'This is a Base64 encoded message.\n')
        # Subpart 5 has no Content-Transfer-Encoding: header.
        eq(msg.get_payload(4).get_payload(decode=True),
           'This has no Content-Transfer-Encoding: header.\n')
    def test_get_decoded_uu_payload(self):
        # decode=True must also understand the non-standard uuencode
        # transfer-encoding spellings; undecodable data comes back as-is.
        eq = self.assertEqual
        msg = Message()
        msg.set_payload('begin 666 -\n+:&5L;&\\@=V]R;&0 \n \nend\n')
        for cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
            msg['content-transfer-encoding'] = cte
            eq(msg.get_payload(decode=True), 'hello world')
        # Now try some bogus data
        msg.set_payload('foo')
        eq(msg.get_payload(decode=True), 'foo')
    def test_decode_bogus_uu_payload_quietly(self):
        # An undecodable uuencoded payload must not write noise to stderr.
        msg = Message()
        msg.set_payload('begin 664 foo.txt\n%<W1F=0000H \n \nend\n')
        msg['Content-Transfer-Encoding'] = 'x-uuencode'
        old_stderr = sys.stderr
        try:
            # Capture anything the decoder might print.
            sys.stderr = sfp = StringIO()
            # We don't care about the payload
            msg.get_payload(decode=True)
        finally:
            # Always restore the real stderr.
            sys.stderr = old_stderr
        self.assertEqual(sfp.getvalue(), '')
    def test_decoded_generator(self):
        # DecodedGenerator output for msg_07.txt must match the
        # pre-computed expected rendering stored in msg_17.txt.
        eq = self.assertEqual
        msg = self._msgobj('msg_07.txt')
        fp = openfile('msg_17.txt')
        try:
            text = fp.read()
        finally:
            fp.close()
        s = StringIO()
        g = DecodedGenerator(s)
        g.flatten(msg)
        eq(s.getvalue(), text)
def test__contains__(self):
msg = Message()
msg['From'] = 'Me'
msg['to'] = 'You'
# Check for case insensitivity
self.assertTrue('from' in msg)
self.assertTrue('From' in msg)
self.assertTrue('FROM' in msg)
self.assertTrue('to' in msg)
self.assertTrue('To' in msg)
self.assertTrue('TO' in msg)
    def test_as_string(self):
        eq = self.assertEqual
        msg = self._msgobj('msg_01.txt')
        fp = openfile('msg_01.txt')
        try:
            # BAW 30-Mar-2009 Evil be here. So, the generator is broken with
            # respect to long line breaking. It's also not idempotent when a
            # header from a parsed message is continued with tabs rather than
            # spaces. Before we fixed bug 1974 it was reversedly broken,
            # i.e. headers that were continued with spaces got continued with
            # tabs. For Python 2.x there's really no good fix and in Python
            # 3.x all this stuff is re-written to be right(er). Chris Withers
            # convinced me that using space as the default continuation
            # character is less bad for more applications.
            text = fp.read().replace('\t', ' ')
        finally:
            fp.close()
        eq(text, msg.as_string())
        # str() of a message prepends a Unix-From envelope line.
        fullrepr = str(msg)
        lines = fullrepr.split('\n')
        self.assertTrue(lines[0].startswith('From '))
        eq(text, NL.join(lines[1:]))
    def test_bad_param(self):
        # A parameter listed without '=value' parses to the empty string.
        msg = email.message_from_string("Content-Type: blarg; baz; boo\n")
        self.assertEqual(msg.get_param('baz'), '')

    def test_missing_filename(self):
        # No Content-Disposition at all -> failobj (None by default).
        msg = email.message_from_string("From: foo\n")
        self.assertEqual(msg.get_filename(), None)

    def test_bogus_filename(self):
        # A bare "filename" token with no value yields the empty string.
        msg = email.message_from_string(
            "Content-Disposition: blarg; filename\n")
        self.assertEqual(msg.get_filename(), '')

    def test_missing_boundary(self):
        # No Content-Type header -> no boundary parameter.
        msg = email.message_from_string("From: foo\n")
        self.assertEqual(msg.get_boundary(), None)
    def test_get_params(self):
        # get_params() returns (name, value) pairs from any header;
        # valueless parameters get '' and surrounding quotes are stripped.
        eq = self.assertEqual
        msg = email.message_from_string(
            'X-Header: foo=one; bar=two; baz=three\n')
        eq(msg.get_params(header='x-header'),
           [('foo', 'one'), ('bar', 'two'), ('baz', 'three')])
        msg = email.message_from_string(
            'X-Header: foo; bar=one; baz=two\n')
        eq(msg.get_params(header='x-header'),
           [('foo', ''), ('bar', 'one'), ('baz', 'two')])
        # The default header is Content-Type, which is absent here.
        eq(msg.get_params(), None)
        msg = email.message_from_string(
            'X-Header: foo; bar="one"; baz=two\n')
        eq(msg.get_params(header='x-header'),
           [('foo', ''), ('bar', 'one'), ('baz', 'two')])
    def test_get_param_liberal(self):
        # Whitespace around '=' in a parameter must still parse (note the
        # header value deliberately includes a stray "Content-Type: "
        # prefix; only the boundary parameter matters here).
        msg = Message()
        msg['Content-Type'] = 'Content-Type: Multipart/mixed; boundary = "CPIMSSMTPC06p5f3tG"'
        self.assertEqual(msg.get_param('boundary'), 'CPIMSSMTPC06p5f3tG')
    def test_get_param(self):
        eq = self.assertEqual
        msg = email.message_from_string(
            "X-Header: foo=one; bar=two; baz=three\n")
        eq(msg.get_param('bar', header='x-header'), 'two')
        # Missing parameters yield the failobj (None by default).
        eq(msg.get_param('quuz', header='x-header'), None)
        eq(msg.get_param('quuz'), None)
        msg = email.message_from_string(
            'X-Header: foo; bar="one"; baz=two\n')
        eq(msg.get_param('foo', header='x-header'), '')
        eq(msg.get_param('bar', header='x-header'), 'one')
        eq(msg.get_param('baz', header='x-header'), 'two')
        # XXX: We are not RFC-2045 compliant! We cannot parse:
        # msg["Content-Type"] = 'text/plain; weird="hey; dolly? [you] @ <\\"home\\">?"'
        # msg.get_param("weird")
        # yet.
    def test_get_param_funky_continuation_lines(self):
        # A parameter split across folded (continuation) header lines
        # must be reassembled into a single value.
        msg = self._msgobj('msg_22.txt')
        self.assertEqual(msg.get_payload(1).get_param('name'), 'wibble.JPG')

    def test_get_param_with_semis_in_quotes(self):
        # unquote=False preserves the surrounding double quotes.
        msg = email.message_from_string(
            'Content-Type: image/pjpeg; name="Jim&&Jill"\n')
        self.assertEqual(msg.get_param('name'), 'Jim&&Jill')
        self.assertEqual(msg.get_param('name', unquote=False),
                         '"Jim&&Jill"')

    def test_get_param_with_quotes(self):
        # RFC 2231 continued parameters (bar*0, bar*1) with escaped
        # quotes must be joined and unescaped correctly.
        msg = email.message_from_string(
            'Content-Type: foo; bar*0="baz\\"foobar"; bar*1="\\"baz"')
        self.assertEqual(msg.get_param('bar'), 'baz"foobar"baz')
        msg = email.message_from_string(
            "Content-Type: foo; bar*0=\"baz\\\"foobar\"; bar*1=\"\\\"baz\"")
        self.assertEqual(msg.get_param('bar'), 'baz"foobar"baz')
def test_has_key(self):
msg = email.message_from_string('Header: exists')
self.assertTrue(msg.has_key('header'))
self.assertTrue(msg.has_key('Header'))
self.assertTrue(msg.has_key('HEADER'))
self.assertFalse(msg.has_key('headeri'))
    def test_set_param(self):
        # set_param() adds or replaces a Content-Type parameter, quoting
        # values that need it; other headers are reachable via header=.
        eq = self.assertEqual
        msg = Message()
        msg.set_param('charset', 'iso-2022-jp')
        eq(msg.get_param('charset'), 'iso-2022-jp')
        msg.set_param('importance', 'high value')
        eq(msg.get_param('importance'), 'high value')
        # The embedded space forces quoting on the wire.
        eq(msg.get_param('importance', unquote=False), '"high value"')
        eq(msg.get_params(), [('text/plain', ''),
                              ('charset', 'iso-2022-jp'),
                              ('importance', 'high value')])
        eq(msg.get_params(unquote=False), [('text/plain', ''),
                                           ('charset', '"iso-2022-jp"'),
                                           ('importance', '"high value"')])
        msg.set_param('charset', 'iso-9999-xx', header='X-Jimmy')
        eq(msg.get_param('charset', header='X-Jimmy'), 'iso-9999-xx')
    def test_del_param(self):
        # del_param() removes one parameter; re-adding it appends at the
        # end of the parameter list rather than its old position.
        eq = self.assertEqual
        msg = self._msgobj('msg_05.txt')
        eq(msg.get_params(),
           [('multipart/report', ''), ('report-type', 'delivery-status'),
            ('boundary', 'D1690A7AC1.996856090/mail.example.com')])
        old_val = msg.get_param("report-type")
        msg.del_param("report-type")
        eq(msg.get_params(),
           [('multipart/report', ''),
            ('boundary', 'D1690A7AC1.996856090/mail.example.com')])
        msg.set_param("report-type", old_val)
        eq(msg.get_params(),
           [('multipart/report', ''),
            ('boundary', 'D1690A7AC1.996856090/mail.example.com'),
            ('report-type', old_val)])

    def test_del_param_on_other_header(self):
        # Deleting the only parameter leaves the bare header value.
        msg = Message()
        msg.add_header('Content-Disposition', 'attachment', filename='bud.gif')
        msg.del_param('filename', 'content-disposition')
        self.assertEqual(msg['content-disposition'], 'attachment')
    def test_set_type(self):
        # set_type() replaces the content type in place, preserving any
        # existing parameters; a type with no '/' is rejected.
        eq = self.assertEqual
        msg = Message()
        self.assertRaises(ValueError, msg.set_type, 'text')
        msg.set_type('text/plain')
        eq(msg['content-type'], 'text/plain')
        msg.set_param('charset', 'us-ascii')
        eq(msg['content-type'], 'text/plain; charset="us-ascii"')
        # Changing the type keeps the charset parameter intact.
        msg.set_type('text/html')
        eq(msg['content-type'], 'text/html; charset="us-ascii"')

    def test_set_type_on_other_header(self):
        # set_type() works on an arbitrary header via its second argument.
        msg = Message()
        msg['X-Content-Type'] = 'text/plain'
        msg.set_type('application/octet-stream', 'X-Content-Type')
        self.assertEqual(msg['x-content-type'], 'application/octet-stream')
    # The get_content_type()/get_content_maintype()/get_content_subtype()
    # family must honor, in order: an explicit Content-Type header, the
    # per-message default type (set_default_type), and the global
    # text/plain fallback.  "implicit" fixtures carry no Content-Type on
    # the subpart (per the test names); "explicit" ones do.

    def test_get_content_type_missing(self):
        msg = Message()
        self.assertEqual(msg.get_content_type(), 'text/plain')

    def test_get_content_type_missing_with_default_type(self):
        msg = Message()
        msg.set_default_type('message/rfc822')
        self.assertEqual(msg.get_content_type(), 'message/rfc822')

    def test_get_content_type_from_message_implicit(self):
        msg = self._msgobj('msg_30.txt')
        self.assertEqual(msg.get_payload(0).get_content_type(),
                         'message/rfc822')

    def test_get_content_type_from_message_explicit(self):
        msg = self._msgobj('msg_28.txt')
        self.assertEqual(msg.get_payload(0).get_content_type(),
                         'message/rfc822')

    def test_get_content_type_from_message_text_plain_implicit(self):
        msg = self._msgobj('msg_03.txt')
        self.assertEqual(msg.get_content_type(), 'text/plain')

    def test_get_content_type_from_message_text_plain_explicit(self):
        msg = self._msgobj('msg_01.txt')
        self.assertEqual(msg.get_content_type(), 'text/plain')

    def test_get_content_maintype_missing(self):
        msg = Message()
        self.assertEqual(msg.get_content_maintype(), 'text')

    def test_get_content_maintype_missing_with_default_type(self):
        msg = Message()
        msg.set_default_type('message/rfc822')
        self.assertEqual(msg.get_content_maintype(), 'message')

    def test_get_content_maintype_from_message_implicit(self):
        msg = self._msgobj('msg_30.txt')
        self.assertEqual(msg.get_payload(0).get_content_maintype(), 'message')

    def test_get_content_maintype_from_message_explicit(self):
        msg = self._msgobj('msg_28.txt')
        self.assertEqual(msg.get_payload(0).get_content_maintype(), 'message')

    def test_get_content_maintype_from_message_text_plain_implicit(self):
        msg = self._msgobj('msg_03.txt')
        self.assertEqual(msg.get_content_maintype(), 'text')

    def test_get_content_maintype_from_message_text_plain_explicit(self):
        msg = self._msgobj('msg_01.txt')
        self.assertEqual(msg.get_content_maintype(), 'text')

    def test_get_content_subtype_missing(self):
        msg = Message()
        self.assertEqual(msg.get_content_subtype(), 'plain')

    def test_get_content_subtype_missing_with_default_type(self):
        msg = Message()
        msg.set_default_type('message/rfc822')
        self.assertEqual(msg.get_content_subtype(), 'rfc822')

    def test_get_content_subtype_from_message_implicit(self):
        msg = self._msgobj('msg_30.txt')
        self.assertEqual(msg.get_payload(0).get_content_subtype(), 'rfc822')

    def test_get_content_subtype_from_message_explicit(self):
        msg = self._msgobj('msg_28.txt')
        self.assertEqual(msg.get_payload(0).get_content_subtype(), 'rfc822')

    def test_get_content_subtype_from_message_text_plain_implicit(self):
        msg = self._msgobj('msg_03.txt')
        self.assertEqual(msg.get_content_subtype(), 'plain')

    def test_get_content_subtype_from_message_text_plain_explicit(self):
        msg = self._msgobj('msg_01.txt')
        self.assertEqual(msg.get_content_subtype(), 'plain')

    def test_get_content_maintype_error(self):
        # A malformed Content-Type (no '/') falls back to the default.
        msg = Message()
        msg['Content-Type'] = 'no-slash-in-this-string'
        self.assertEqual(msg.get_content_maintype(), 'text')

    def test_get_content_subtype_error(self):
        msg = Message()
        msg['Content-Type'] = 'no-slash-in-this-string'
        self.assertEqual(msg.get_content_subtype(), 'plain')
    def test_replace_header(self):
        # replace_header() swaps the value of the *first* matching header
        # in place, preserving overall header order; a missing header
        # name raises KeyError.
        eq = self.assertEqual
        msg = Message()
        msg.add_header('First', 'One')
        msg.add_header('Second', 'Two')
        msg.add_header('Third', 'Three')
        eq(msg.keys(), ['First', 'Second', 'Third'])
        eq(msg.values(), ['One', 'Two', 'Three'])
        msg.replace_header('Second', 'Twenty')
        eq(msg.keys(), ['First', 'Second', 'Third'])
        eq(msg.values(), ['One', 'Twenty', 'Three'])
        # With duplicate headers, only the first occurrence is replaced.
        msg.add_header('First', 'Eleven')
        msg.replace_header('First', 'One Hundred')
        eq(msg.keys(), ['First', 'Second', 'Third', 'First'])
        eq(msg.values(), ['One Hundred', 'Twenty', 'Three', 'Eleven'])
        self.assertRaises(KeyError, msg.replace_header, 'Fourth', 'Missing')
    def test_broken_base64_payload(self):
        # A payload that cannot be base64-decoded (note the bogus '=9'
        # tail) is returned as-is instead of raising.
        x = 'AwDp0P7//y6LwKEAcPa/6Q=9'
        msg = Message()
        msg['content-type'] = 'audio/x-midi'
        msg['content-transfer-encoding'] = 'base64'
        msg.set_payload(x)
        self.assertEqual(msg.get_payload(decode=True), x)
    def test_get_content_charset(self):
        # set_charset() accepts both byte-string and unicode charset
        # names, yielding the same content charset either way.
        msg = Message()
        msg.set_charset('us-ascii')
        self.assertEqual('us-ascii', msg.get_content_charset())
        msg.set_charset(u'us-ascii')
        self.assertEqual('us-ascii', msg.get_content_charset())
    # Issue 5871: reject an attempt to embed a header inside a header value
    # (header injection attack).
    def test_embeded_header_via_Header_rejected(self):
        msg = Message()
        msg['Dummy'] = Header('dummy\nX-Injected-Header: test')
        self.assertRaises(Errors.HeaderParseError, msg.as_string)

    def test_embeded_header_via_string_rejected(self):
        # Same embedded-newline attack, but via a plain string value.
        msg = Message()
        msg['Dummy'] = 'dummy\nX-Injected-Header: test'
        self.assertRaises(Errors.HeaderParseError, msg.as_string)
# Test the email.Encoders module
class TestEncoders(unittest.TestCase):
    """Tests for the Content-Transfer-Encoding chosen by email.Encoders."""

    def test_encode_empty_payload(self):
        # Even an empty payload gets a CTE header once a charset is set.
        eq = self.assertEqual
        msg = Message()
        msg.set_charset('us-ascii')
        eq(msg['content-transfer-encoding'], '7bit')

    def test_default_cte(self):
        eq = self.assertEqual
        # 7bit data and the default us-ascii _charset
        msg = MIMEText('hello world')
        eq(msg['content-transfer-encoding'], '7bit')
        # Similar, but with 8bit data
        msg = MIMEText('hello \xf8 world')
        eq(msg['content-transfer-encoding'], '8bit')
        # And now with a different charset
        msg = MIMEText('hello \xf8 world', _charset='iso-8859-1')
        eq(msg['content-transfer-encoding'], 'quoted-printable')

    def test_encode7or8bit(self):
        # Make sure a charset whose input character set is 8bit but
        # whose output character set is 7bit gets a transfer-encoding
        # of 7bit.
        eq = self.assertEqual
        msg = email.MIMEText.MIMEText('\xca\xb8', _charset='euc-jp')
        eq(msg['content-transfer-encoding'], '7bit')
# Test long header wrapping
class TestLongHeaders(TestEmailBase):
    """Tests of header folding/wrapping for long header values.

    NOTE(review): the source rendering stripped leading whitespace and
    blank lines; continuation lines inside expected strings carry a
    single leading space (or a tab when continuation_ws='\\t') and
    flattened messages end with a blank header/body separator line —
    reconstructed here to match the upstream fixture; verify against the
    original file.
    """

    def test_split_long_continuation(self):
        eq = self.ndiffAssertEqual
        msg = email.message_from_string("""\
Subject: bug demonstration
\t12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
\tmore text

test
""")
        sfp = StringIO()
        g = Generator(sfp)
        g.flatten(msg)
        # Tab continuations are rewritten with space continuation ws.
        eq(sfp.getvalue(), """\
Subject: bug demonstration
 12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
 more text

test
""")

    def test_another_long_almost_unsplittable_header(self):
        eq = self.ndiffAssertEqual
        hstr = """\
bug demonstration
\t12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
\tmore text"""
        # With continuation_ws='\t' the tabs are preserved...
        h = Header(hstr, continuation_ws='\t')
        eq(h.encode(), """\
bug demonstration
\t12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
\tmore text""")
        # ...with the default continuation_ws they become spaces.
        h = Header(hstr)
        eq(h.encode(), """\
bug demonstration
 12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
 more text""")

    def test_long_nonstring(self):
        # A Header built from several (string, charset) chunks must wrap
        # into RFC 2047 encoded-words, one charset per encoded-word run.
        eq = self.ndiffAssertEqual
        g = Charset("iso-8859-1")
        cz = Charset("iso-8859-2")
        utf8 = Charset("utf-8")
        g_head = "Die Mieter treten hier ein werden mit einem Foerderband komfortabel den Korridor entlang, an s\xfcdl\xfcndischen Wandgem\xe4lden vorbei, gegen die rotierenden Klingen bef\xf6rdert. "
        cz_head = "Finan\xe8ni metropole se hroutily pod tlakem jejich d\xf9vtipu.. "
        utf8_head = u"\u6b63\u78ba\u306b\u8a00\u3046\u3068\u7ffb\u8a33\u306f\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002\u4e00\u90e8\u306f\u30c9\u30a4\u30c4\u8a9e\u3067\u3059\u304c\u3001\u3042\u3068\u306f\u3067\u305f\u3089\u3081\u3067\u3059\u3002\u5b9f\u969b\u306b\u306f\u300cWenn ist das Nunstuck git und Slotermeyer? Ja! Beiherhund das Oder die Flipperwaldt gersput.\u300d\u3068\u8a00\u3063\u3066\u3044\u307e\u3059\u3002".encode("utf-8")
        h = Header(g_head, g, header_name='Subject')
        h.append(cz_head, cz)
        h.append(utf8_head, utf8)
        msg = Message()
        msg['Subject'] = h
        sfp = StringIO()
        g = Generator(sfp)
        g.flatten(msg)
        eq(sfp.getvalue(), """\
Subject: =?iso-8859-1?q?Die_Mieter_treten_hier_ein_werden_mit_einem_Foerd?=
 =?iso-8859-1?q?erband_komfortabel_den_Korridor_entlang=2C_an_s=FCdl=FCndi?=
 =?iso-8859-1?q?schen_Wandgem=E4lden_vorbei=2C_gegen_die_rotierenden_Kling?=
 =?iso-8859-1?q?en_bef=F6rdert=2E_?= =?iso-8859-2?q?Finan=E8ni_met?=
 =?iso-8859-2?q?ropole_se_hroutily_pod_tlakem_jejich_d=F9vtipu=2E=2E_?=
 =?utf-8?b?5q2j56K644Gr6KiA44GG44Go57+76Kiz44Gv44GV44KM44Gm44GE?=
 =?utf-8?b?44G+44Gb44KT44CC5LiA6YOo44Gv44OJ44Kk44OE6Kqe44Gn44GZ44GM44CB?=
 =?utf-8?b?44GC44Go44Gv44Gn44Gf44KJ44KB44Gn44GZ44CC5a6f6Zqb44Gr44Gv44CM?=
 =?utf-8?q?Wenn_ist_das_Nunstuck_git_und_Slotermeyer=3F_Ja!_Beiherhund_das?=
 =?utf-8?b?IE9kZXIgZGllIEZsaXBwZXJ3YWxkdCBnZXJzcHV0LuOAjeOBqOiogOOBow==?=
 =?utf-8?b?44Gm44GE44G+44GZ44CC?=

""")
        eq(h.encode(), """\
=?iso-8859-1?q?Die_Mieter_treten_hier_ein_werden_mit_einem_Foerd?=
 =?iso-8859-1?q?erband_komfortabel_den_Korridor_entlang=2C_an_s=FCdl=FCndi?=
 =?iso-8859-1?q?schen_Wandgem=E4lden_vorbei=2C_gegen_die_rotierenden_Kling?=
 =?iso-8859-1?q?en_bef=F6rdert=2E_?= =?iso-8859-2?q?Finan=E8ni_met?=
 =?iso-8859-2?q?ropole_se_hroutily_pod_tlakem_jejich_d=F9vtipu=2E=2E_?=
 =?utf-8?b?5q2j56K644Gr6KiA44GG44Go57+76Kiz44Gv44GV44KM44Gm44GE?=
 =?utf-8?b?44G+44Gb44KT44CC5LiA6YOo44Gv44OJ44Kk44OE6Kqe44Gn44GZ44GM44CB?=
 =?utf-8?b?44GC44Go44Gv44Gn44Gf44KJ44KB44Gn44GZ44CC5a6f6Zqb44Gr44Gv44CM?=
 =?utf-8?q?Wenn_ist_das_Nunstuck_git_und_Slotermeyer=3F_Ja!_Beiherhund_das?=
 =?utf-8?b?IE9kZXIgZGllIEZsaXBwZXJ3YWxkdCBnZXJzcHV0LuOAjeOBqOiogOOBow==?=
 =?utf-8?b?44Gm44GE44G+44GZ44CC?=""")

    def test_long_header_encode(self):
        eq = self.ndiffAssertEqual
        h = Header('wasnipoop; giraffes="very-long-necked-animals"; '
                   'spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"',
                   header_name='X-Foobar-Spoink-Defrobnit')
        eq(h.encode(), '''\
wasnipoop; giraffes="very-long-necked-animals";
 spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"''')

    def test_long_header_encode_with_tab_continuation(self):
        eq = self.ndiffAssertEqual
        h = Header('wasnipoop; giraffes="very-long-necked-animals"; '
                   'spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"',
                   header_name='X-Foobar-Spoink-Defrobnit',
                   continuation_ws='\t')
        eq(h.encode(), '''\
wasnipoop; giraffes="very-long-necked-animals";
\tspooge="yummy"; hippos="gargantuan"; marshmallows="gooey"''')

    def test_header_splitter(self):
        eq = self.ndiffAssertEqual
        msg = MIMEText('')
        # It'd be great if we could use add_header() here, but that doesn't
        # guarantee an order of the parameters.
        msg['X-Foobar-Spoink-Defrobnit'] = (
            'wasnipoop; giraffes="very-long-necked-animals"; '
            'spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"')
        sfp = StringIO()
        g = Generator(sfp)
        g.flatten(msg)
        eq(sfp.getvalue(), '''\
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
X-Foobar-Spoink-Defrobnit: wasnipoop; giraffes="very-long-necked-animals";
 spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"

''')

    def test_no_semis_header_splitter(self):
        # A long header with no semicolons splits at whitespace.
        eq = self.ndiffAssertEqual
        msg = Message()
        msg['From'] = 'test@dom.ain'
        msg['References'] = SPACE.join(['<%d@dom.ain>' % i for i in range(10)])
        msg.set_payload('Test')
        sfp = StringIO()
        g = Generator(sfp)
        g.flatten(msg)
        eq(sfp.getvalue(), """\
From: test@dom.ain
References: <0@dom.ain> <1@dom.ain> <2@dom.ain> <3@dom.ain> <4@dom.ain>
 <5@dom.ain> <6@dom.ain> <7@dom.ain> <8@dom.ain> <9@dom.ain>

Test""")

    def test_no_split_long_header(self):
        # A single token longer than the line limit is left unsplit.
        eq = self.ndiffAssertEqual
        hstr = 'References: ' + 'x' * 80
        h = Header(hstr, continuation_ws='\t')
        eq(h.encode(), """\
References: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx""")

    def test_splitting_multiple_long_lines(self):
        eq = self.ndiffAssertEqual
        hstr = """\
from babylon.socal-raves.org (localhost [127.0.0.1]); by babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81; for <mailman-admin@babylon.socal-raves.org>; Sat, 2 Feb 2002 17:00:06 -0800 (PST)
\tfrom babylon.socal-raves.org (localhost [127.0.0.1]); by babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81; for <mailman-admin@babylon.socal-raves.org>; Sat, 2 Feb 2002 17:00:06 -0800 (PST)
\tfrom babylon.socal-raves.org (localhost [127.0.0.1]); by babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81; for <mailman-admin@babylon.socal-raves.org>; Sat, 2 Feb 2002 17:00:06 -0800 (PST)
"""
        h = Header(hstr, continuation_ws='\t')
        # Each long Received-style line splits at its semicolons.
        eq(h.encode(), """\
from babylon.socal-raves.org (localhost [127.0.0.1]);
\tby babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81;
\tfor <mailman-admin@babylon.socal-raves.org>;
\tSat, 2 Feb 2002 17:00:06 -0800 (PST)
\tfrom babylon.socal-raves.org (localhost [127.0.0.1]);
\tby babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81;
\tfor <mailman-admin@babylon.socal-raves.org>;
\tSat, 2 Feb 2002 17:00:06 -0800 (PST)
\tfrom babylon.socal-raves.org (localhost [127.0.0.1]);
\tby babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81;
\tfor <mailman-admin@babylon.socal-raves.org>;
\tSat, 2 Feb 2002 17:00:06 -0800 (PST)""")

    def test_splitting_first_line_only_is_long(self):
        eq = self.ndiffAssertEqual
        hstr = """\
from modemcable093.139-201-24.que.mc.videotron.ca ([24.201.139.93] helo=cthulhu.gerg.ca)
\tby kronos.mems-exchange.org with esmtp (Exim 4.05)
\tid 17k4h5-00034i-00
\tfor test@mems-exchange.org; Wed, 28 Aug 2002 11:25:20 -0400"""
        h = Header(hstr, maxlinelen=78, header_name='Received',
                   continuation_ws='\t')
        eq(h.encode(), """\
from modemcable093.139-201-24.que.mc.videotron.ca ([24.201.139.93]
\thelo=cthulhu.gerg.ca)
\tby kronos.mems-exchange.org with esmtp (Exim 4.05)
\tid 17k4h5-00034i-00
\tfor test@mems-exchange.org; Wed, 28 Aug 2002 11:25:20 -0400""")

    def test_long_8bit_header(self):
        eq = self.ndiffAssertEqual
        msg = Message()
        h = Header('Britische Regierung gibt', 'iso-8859-1',
                   header_name='Subject')
        h.append('gr\xfcnes Licht f\xfcr Offshore-Windkraftprojekte')
        msg['Subject'] = h
        eq(msg.as_string(), """\
Subject: =?iso-8859-1?q?Britische_Regierung_gibt?= =?iso-8859-1?q?gr=FCnes?=
 =?iso-8859-1?q?_Licht_f=FCr_Offshore-Windkraftprojekte?=

""")

    def test_long_8bit_header_no_charset(self):
        # With no charset, 8-bit data is emitted as-is on one line.
        eq = self.ndiffAssertEqual
        msg = Message()
        msg['Reply-To'] = 'Britische Regierung gibt gr\xfcnes Licht f\xfcr Offshore-Windkraftprojekte <a-very-long-address@example.com>'
        eq(msg.as_string(), """\
Reply-To: Britische Regierung gibt gr\xfcnes Licht f\xfcr Offshore-Windkraftprojekte <a-very-long-address@example.com>

""")

    def test_long_to_header(self):
        # Address headers split after the commas separating addresses.
        eq = self.ndiffAssertEqual
        to = '"Someone Test #A" <someone@eecs.umich.edu>,<someone@eecs.umich.edu>,"Someone Test #B" <someone@umich.edu>, "Someone Test #C" <someone@eecs.umich.edu>, "Someone Test #D" <someone@eecs.umich.edu>'
        msg = Message()
        msg['To'] = to
        eq(msg.as_string(0), '''\
To: "Someone Test #A" <someone@eecs.umich.edu>, <someone@eecs.umich.edu>,
 "Someone Test #B" <someone@umich.edu>,
 "Someone Test #C" <someone@eecs.umich.edu>,
 "Someone Test #D" <someone@eecs.umich.edu>

''')

    def test_long_line_after_append(self):
        eq = self.ndiffAssertEqual
        s = 'This is an example of string which has almost the limit of header length.'
        h = Header(s)
        h.append('Add another line.')
        eq(h.encode(), """\
This is an example of string which has almost the limit of header length.
 Add another line.""")

    def test_shorter_line_with_append(self):
        eq = self.ndiffAssertEqual
        s = 'This is a shorter line.'
        h = Header(s)
        h.append('Add another sentence. (Surprise?)')
        eq(h.encode(),
           'This is a shorter line. Add another sentence. (Surprise?)')

    def test_long_field_name(self):
        eq = self.ndiffAssertEqual
        fn = 'X-Very-Very-Very-Long-Header-Name'
        gs = "Die Mieter treten hier ein werden mit einem Foerderband komfortabel den Korridor entlang, an s\xfcdl\xfcndischen Wandgem\xe4lden vorbei, gegen die rotierenden Klingen bef\xf6rdert. "
        h = Header(gs, 'iso-8859-1', header_name=fn)
        # BAW: this seems broken because the first line is too long
        eq(h.encode(), """\
=?iso-8859-1?q?Die_Mieter_treten_hier_?=
 =?iso-8859-1?q?ein_werden_mit_einem_Foerderband_komfortabel_den_Korridor_?=
 =?iso-8859-1?q?entlang=2C_an_s=FCdl=FCndischen_Wandgem=E4lden_vorbei=2C_g?=
 =?iso-8859-1?q?egen_die_rotierenden_Klingen_bef=F6rdert=2E_?=""")

    def test_long_received_header(self):
        # A Header with tab continuation ws vs. a plain string value:
        # the former wraps with tabs, the latter with spaces.
        h = 'from FOO.TLD (vizworld.acl.foo.tld [123.452.678.9]) by hrothgar.la.mastaler.com (tmda-ofmipd) with ESMTP; Wed, 05 Mar 2003 18:10:18 -0700'
        msg = Message()
        msg['Received-1'] = Header(h, continuation_ws='\t')
        msg['Received-2'] = h
        self.assertEqual(msg.as_string(), """\
Received-1: from FOO.TLD (vizworld.acl.foo.tld [123.452.678.9]) by
\throthgar.la.mastaler.com (tmda-ofmipd) with ESMTP;
\tWed, 05 Mar 2003 18:10:18 -0700
Received-2: from FOO.TLD (vizworld.acl.foo.tld [123.452.678.9]) by
 hrothgar.la.mastaler.com (tmda-ofmipd) with ESMTP;
 Wed, 05 Mar 2003 18:10:18 -0700

""")

    def test_string_headerinst_eq(self):
        # A Header instance and an equivalent string must fold the same
        # modulo the continuation whitespace character.
        h = '<15975.17901.207240.414604@sgigritzmann1.mathematik.tu-muenchen.de> (David Bremner\'s message of "Thu, 6 Mar 2003 13:58:21 +0100")'
        msg = Message()
        msg['Received'] = Header(h, header_name='Received',
                                 continuation_ws='\t')
        msg['Received'] = h
        self.ndiffAssertEqual(msg.as_string(), """\
Received: <15975.17901.207240.414604@sgigritzmann1.mathematik.tu-muenchen.de>
\t(David Bremner's message of "Thu, 6 Mar 2003 13:58:21 +0100")
Received: <15975.17901.207240.414604@sgigritzmann1.mathematik.tu-muenchen.de>
 (David Bremner's message of "Thu, 6 Mar 2003 13:58:21 +0100")

""")

    def test_long_unbreakable_lines_with_continuation(self):
        # Base64-ish lines with no break points are kept whole.
        eq = self.ndiffAssertEqual
        msg = Message()
        t = """\
iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAGFBMVEUAAAAkHiJeRUIcGBi9
 locQDQ4zJykFBAXJfWDjAAACYUlEQVR4nF2TQY/jIAyFc6lydlG5x8Nyp1Y69wj1PN2I5gzp"""
        msg['Face-1'] = t
        msg['Face-2'] = Header(t, header_name='Face-2')
        eq(msg.as_string(), """\
Face-1: iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAGFBMVEUAAAAkHiJeRUIcGBi9
 locQDQ4zJykFBAXJfWDjAAACYUlEQVR4nF2TQY/jIAyFc6lydlG5x8Nyp1Y69wj1PN2I5gzp
Face-2: iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAGFBMVEUAAAAkHiJeRUIcGBi9
 locQDQ4zJykFBAXJfWDjAAACYUlEQVR4nF2TQY/jIAyFc6lydlG5x8Nyp1Y69wj1PN2I5gzp

""")

    def test_another_long_multiline_header(self):
        eq = self.ndiffAssertEqual
        m = '''\
Received: from siimage.com ([172.25.1.3]) by zima.siliconimage.com with Microsoft SMTPSVC(5.0.2195.4905);
        Wed, 16 Oct 2002 07:41:11 -0700'''
        msg = email.message_from_string(m)
        # The header is refolded at the 78-column default.
        eq(msg.as_string(), '''\
Received: from siimage.com ([172.25.1.3]) by zima.siliconimage.com with
 Microsoft SMTPSVC(5.0.2195.4905); Wed, 16 Oct 2002 07:41:11 -0700

''')

    def test_long_lines_with_different_header(self):
        eq = self.ndiffAssertEqual
        h = """\
List-Unsubscribe: <https://lists.sourceforge.net/lists/listinfo/spamassassin-talk>,
        <mailto:spamassassin-talk-request@lists.sourceforge.net?subject=unsubscribe>"""
        msg = Message()
        msg['List'] = h
        msg['List'] = Header(h, header_name='List')
        eq(msg.as_string(), """\
List: List-Unsubscribe: <https://lists.sourceforge.net/lists/listinfo/spamassassin-talk>,
 <mailto:spamassassin-talk-request@lists.sourceforge.net?subject=unsubscribe>
List: List-Unsubscribe: <https://lists.sourceforge.net/lists/listinfo/spamassassin-talk>,
 <mailto:spamassassin-talk-request@lists.sourceforge.net?subject=unsubscribe>

""")
# Test mangling of "From " lines in the body of a message
class TestFromMangling(unittest.TestCase):
    """Generator must optionally escape body lines beginning "From "."""

    def setUp(self):
        self.msg = Message()
        self.msg['From'] = 'aaa@bbb.org'
        self.msg.set_payload("""\
From the desk of A.A.A.:
Blah blah blah
""")

    def test_mangled_from(self):
        s = StringIO()
        g = Generator(s, mangle_from_=True)
        g.flatten(self.msg)
        # Body lines starting with "From " are mbox-escaped to ">From ".
        self.assertEqual(s.getvalue(), """\
From: aaa@bbb.org

>From the desk of A.A.A.:
Blah blah blah
""")

    def test_dont_mangle_from(self):
        s = StringIO()
        g = Generator(s, mangle_from_=False)
        g.flatten(self.msg)
        self.assertEqual(s.getvalue(), """\
From: aaa@bbb.org

From the desk of A.A.A.:
Blah blah blah
""")
# Test the basic MIMEAudio class
class TestMIMEAudio(unittest.TestCase):
    """Basic tests of the MIMEAudio class (type sniffing, encoding,
    parameter handling)."""

    def setUp(self):
        # Make sure we pick up the audiotest.au that lives in email/test/data.
        # In Python, there's an audiotest.au living in Lib/test but that isn't
        # included in some binary distros that don't include the test
        # package. The trailing empty string on the .join() is significant
        # since findfile() will do a dirname().
        datadir = os.path.join(os.path.dirname(landmark), 'data', '')
        fp = open(findfile('audiotest.au', datadir), 'rb')
        try:
            self._audiodata = fp.read()
        finally:
            fp.close()
        self._au = MIMEAudio(self._audiodata)

    def test_guess_minor_type(self):
        # .au data is sniffed as audio/basic.
        self.assertEqual(self._au.get_content_type(), 'audio/basic')

    def test_encoding(self):
        # The payload is base64 encoded and must round-trip.
        payload = self._au.get_payload()
        self.assertEqual(base64.decodestring(payload), self._audiodata)

    def test_checkSetMinor(self):
        # An explicit subtype overrides sniffing.
        au = MIMEAudio(self._audiodata, 'fish')
        self.assertEqual(au.get_content_type(), 'audio/fish')

    def test_add_header(self):
        eq = self.assertEqual
        unless = self.assertTrue
        self._au.add_header('Content-Disposition', 'attachment',
                            filename='audiotest.au')
        eq(self._au['content-disposition'],
           'attachment; filename="audiotest.au"')
        eq(self._au.get_params(header='content-disposition'),
           [('attachment', ''), ('filename', 'audiotest.au')])
        eq(self._au.get_param('filename', header='content-disposition'),
           'audiotest.au')
        missing = []
        # The valueless 'attachment' token parses to the empty string.
        eq(self._au.get_param('attachment', header='content-disposition'), '')
        unless(self._au.get_param('foo', failobj=missing,
                                  header='content-disposition') is missing)
        # Try some missing stuff
        unless(self._au.get_param('foobar', missing) is missing)
        unless(self._au.get_param('attachment', missing,
                                  header='foobar') is missing)
# Test the basic MIMEImage class
class TestMIMEImage(unittest.TestCase):
    """Basic tests of the MIMEImage class; mirrors TestMIMEAudio."""

    def setUp(self):
        fp = openfile('PyBanner048.gif')
        try:
            self._imgdata = fp.read()
        finally:
            fp.close()
        self._im = MIMEImage(self._imgdata)

    def test_guess_minor_type(self):
        # GIF data is sniffed as image/gif.
        self.assertEqual(self._im.get_content_type(), 'image/gif')

    def test_encoding(self):
        # The payload is base64 encoded and must round-trip.
        payload = self._im.get_payload()
        self.assertEqual(base64.decodestring(payload), self._imgdata)

    def test_checkSetMinor(self):
        # An explicit subtype overrides sniffing.
        im = MIMEImage(self._imgdata, 'fish')
        self.assertEqual(im.get_content_type(), 'image/fish')

    def test_add_header(self):
        eq = self.assertEqual
        unless = self.assertTrue
        self._im.add_header('Content-Disposition', 'attachment',
                            filename='dingusfish.gif')
        eq(self._im['content-disposition'],
           'attachment; filename="dingusfish.gif"')
        eq(self._im.get_params(header='content-disposition'),
           [('attachment', ''), ('filename', 'dingusfish.gif')])
        eq(self._im.get_param('filename', header='content-disposition'),
           'dingusfish.gif')
        missing = []
        # The valueless 'attachment' token parses to the empty string.
        eq(self._im.get_param('attachment', header='content-disposition'), '')
        unless(self._im.get_param('foo', failobj=missing,
                                  header='content-disposition') is missing)
        # Try some missing stuff
        unless(self._im.get_param('foobar', missing) is missing)
        unless(self._im.get_param('attachment', missing,
                                  header='foobar') is missing)
# Test the basic MIMEText class
class TestMIMEText(unittest.TestCase):
    """Basic tests of the MIMEText class, including str/unicode inputs."""

    def setUp(self):
        self._msg = MIMEText('hello there')

    def test_types(self):
        # Default content type is text/plain with a us-ascii charset.
        eq = self.assertEqual
        unless = self.assertTrue
        eq(self._msg.get_content_type(), 'text/plain')
        eq(self._msg.get_param('charset'), 'us-ascii')
        missing = []
        unless(self._msg.get_param('foobar', missing) is missing)
        unless(self._msg.get_param('charset', missing, header='foobar')
               is missing)

    def test_payload(self):
        self.assertEqual(self._msg.get_payload(), 'hello there')
        self.assertTrue(not self._msg.is_multipart())

    def test_charset(self):
        eq = self.assertEqual
        msg = MIMEText('hello there', _charset='us-ascii')
        eq(msg.get_charset().input_charset, 'us-ascii')
        eq(msg['content-type'], 'text/plain; charset="us-ascii"')

    def test_7bit_unicode_input(self):
        # Pure-ASCII unicode input is accepted with an explicit charset.
        eq = self.assertEqual
        msg = MIMEText(u'hello there', _charset='us-ascii')
        eq(msg.get_charset().input_charset, 'us-ascii')
        eq(msg['content-type'], 'text/plain; charset="us-ascii"')

    def test_7bit_unicode_input_no_charset(self):
        # ...and without one, us-ascii is inferred.
        eq = self.assertEqual
        msg = MIMEText(u'hello there')
        eq(msg.get_charset(), 'us-ascii')
        eq(msg['content-type'], 'text/plain; charset="us-ascii"')
        self.assertTrue('hello there' in msg.as_string())

    def test_8bit_unicode_input(self):
        # Non-ASCII unicode with an explicit charset is encoded with it.
        teststr = u'\u043a\u0438\u0440\u0438\u043b\u0438\u0446\u0430'
        eq = self.assertEqual
        msg = MIMEText(teststr, _charset='utf-8')
        eq(msg.get_charset().output_charset, 'utf-8')
        eq(msg['content-type'], 'text/plain; charset="utf-8"')
        eq(msg.get_payload(decode=True), teststr.encode('utf-8'))

    def test_8bit_unicode_input_no_charset(self):
        # Without an explicit charset, non-ASCII unicode cannot be
        # encoded with the default us-ascii codec.
        teststr = u'\u043a\u0438\u0440\u0438\u043b\u0438\u0446\u0430'
        self.assertRaises(UnicodeEncodeError, MIMEText, teststr)
# Test complicated multipart/* messages
class TestMultipart(TestEmailBase):
    """Tests for building and parsing complicated multipart/* messages.

    NOTE(review): indentation was lost in this copy of the file and has
    been reconstructed here; blank separator lines and leading whitespace
    inside the triple-quoted expected-output literals also appear to have
    been stripped in transit (e.g. the blank line RFC 2046 requires
    between part headers and body) -- verify these golden strings against
    the upstream test_email.py before relying on them.
    """

    def setUp(self):
        # Build a two-part multipart/mixed container -- a text intro plus
        # a GIF attachment -- with a fixed boundary and a fixed Date
        # header so the generated output is reproducible.
        fp = openfile('PyBanner048.gif')
        try:
            data = fp.read()
        finally:
            fp.close()
        container = MIMEBase('multipart', 'mixed', boundary='BOUNDARY')
        image = MIMEImage(data, name='dingusfish.gif')
        image.add_header('content-disposition', 'attachment',
                         filename='dingusfish.gif')
        intro = MIMEText('''\
Hi there,
This is the dingus fish.
''')
        container.attach(intro)
        container.attach(image)
        container['From'] = 'Barry <barry@digicool.com>'
        container['To'] = 'Dingus Lovers <cravindogs@cravindogs.com>'
        container['Subject'] = 'Here is your dingus fish'
        now = 987809702.54848599
        timetuple = time.localtime(now)
        # Choose standard vs. DST offset based on the tm_isdst flag of the
        # fixed timestamp in the local zone.
        if timetuple[-1] == 0:
            tzsecs = time.timezone
        else:
            tzsecs = time.altzone
        if tzsecs > 0:
            sign = '-'
        else:
            sign = '+'
        # seconds // 36 maps whole-hour offsets to HHMM form (3600 -> 100).
        # NOTE(review): this is only correct for whole-hour zones; a
        # half-hour offset (e.g. +0530) would be rendered wrongly.
        tzoffset = ' %s%04d' % (sign, tzsecs // 36)
        container['Date'] = time.strftime(
            '%a, %d %b %Y %H:%M:%S',
            time.localtime(now)) + tzoffset
        self._msg = container
        self._im = image
        self._txt = intro

    def test_hierarchy(self):
        # convenience
        eq = self.assertEqual
        unless = self.assertTrue
        raises = self.assertRaises
        # tests: the container holds exactly the two attached subparts,
        # in order, and indexing past the end raises IndexError.
        m = self._msg
        unless(m.is_multipart())
        eq(m.get_content_type(), 'multipart/mixed')
        eq(len(m.get_payload()), 2)
        raises(IndexError, m.get_payload, 2)
        m0 = m.get_payload(0)
        m1 = m.get_payload(1)
        unless(m0 is self._txt)
        unless(m1 is self._im)
        eq(m.get_payload(), [m0, m1])
        unless(not m0.is_multipart())
        unless(not m1.is_multipart())

    def test_empty_multipart_idempotent(self):
        # A multipart with no parts must round-trip parse -> generate.
        text = """\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
--BOUNDARY--
"""
        msg = Parser().parsestr(text)
        self.ndiffAssertEqual(text, msg.as_string())

    def test_no_parts_in_a_multipart_with_none_epilogue(self):
        # epilogue left at None: no trailing newline after the final
        # boundary in the generated output.
        outer = MIMEBase('multipart', 'mixed')
        outer['Subject'] = 'A subject'
        outer['To'] = 'aperson@dom.ain'
        outer['From'] = 'bperson@dom.ain'
        outer.set_boundary('BOUNDARY')
        self.ndiffAssertEqual(outer.as_string(), '''\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
--BOUNDARY--''')

    def test_no_parts_in_a_multipart_with_empty_epilogue(self):
        # epilogue set to '': the generated output ends with a newline.
        outer = MIMEBase('multipart', 'mixed')
        outer['Subject'] = 'A subject'
        outer['To'] = 'aperson@dom.ain'
        outer['From'] = 'bperson@dom.ain'
        outer.preamble = ''
        outer.epilogue = ''
        outer.set_boundary('BOUNDARY')
        self.ndiffAssertEqual(outer.as_string(), '''\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
--BOUNDARY--
''')

    def test_one_part_in_a_multipart(self):
        eq = self.ndiffAssertEqual
        outer = MIMEBase('multipart', 'mixed')
        outer['Subject'] = 'A subject'
        outer['To'] = 'aperson@dom.ain'
        outer['From'] = 'bperson@dom.ain'
        outer.set_boundary('BOUNDARY')
        msg = MIMEText('hello world')
        outer.attach(msg)
        eq(outer.as_string(), '''\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
hello world
--BOUNDARY--''')

    def test_seq_parts_in_a_multipart_with_empty_preamble(self):
        eq = self.ndiffAssertEqual
        outer = MIMEBase('multipart', 'mixed')
        outer['Subject'] = 'A subject'
        outer['To'] = 'aperson@dom.ain'
        outer['From'] = 'bperson@dom.ain'
        outer.preamble = ''
        msg = MIMEText('hello world')
        outer.attach(msg)
        outer.set_boundary('BOUNDARY')
        eq(outer.as_string(), '''\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
hello world
--BOUNDARY--''')

    def test_seq_parts_in_a_multipart_with_none_preamble(self):
        eq = self.ndiffAssertEqual
        outer = MIMEBase('multipart', 'mixed')
        outer['Subject'] = 'A subject'
        outer['To'] = 'aperson@dom.ain'
        outer['From'] = 'bperson@dom.ain'
        outer.preamble = None
        msg = MIMEText('hello world')
        outer.attach(msg)
        outer.set_boundary('BOUNDARY')
        eq(outer.as_string(), '''\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
hello world
--BOUNDARY--''')

    def test_seq_parts_in_a_multipart_with_none_epilogue(self):
        eq = self.ndiffAssertEqual
        outer = MIMEBase('multipart', 'mixed')
        outer['Subject'] = 'A subject'
        outer['To'] = 'aperson@dom.ain'
        outer['From'] = 'bperson@dom.ain'
        outer.epilogue = None
        msg = MIMEText('hello world')
        outer.attach(msg)
        outer.set_boundary('BOUNDARY')
        eq(outer.as_string(), '''\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
hello world
--BOUNDARY--''')

    def test_seq_parts_in_a_multipart_with_empty_epilogue(self):
        eq = self.ndiffAssertEqual
        outer = MIMEBase('multipart', 'mixed')
        outer['Subject'] = 'A subject'
        outer['To'] = 'aperson@dom.ain'
        outer['From'] = 'bperson@dom.ain'
        outer.epilogue = ''
        msg = MIMEText('hello world')
        outer.attach(msg)
        outer.set_boundary('BOUNDARY')
        eq(outer.as_string(), '''\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
hello world
--BOUNDARY--
''')

    def test_seq_parts_in_a_multipart_with_nl_epilogue(self):
        eq = self.ndiffAssertEqual
        outer = MIMEBase('multipart', 'mixed')
        outer['Subject'] = 'A subject'
        outer['To'] = 'aperson@dom.ain'
        outer['From'] = 'bperson@dom.ain'
        outer.epilogue = '\n'
        msg = MIMEText('hello world')
        outer.attach(msg)
        outer.set_boundary('BOUNDARY')
        eq(outer.as_string(), '''\
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME-Version: 1.0
Subject: A subject
To: aperson@dom.ain
From: bperson@dom.ain
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
hello world
--BOUNDARY--
''')

    def test_message_external_body(self):
        eq = self.assertEqual
        msg = self._msgobj('msg_36.txt')
        eq(len(msg.get_payload()), 2)
        msg1 = msg.get_payload(1)
        eq(msg1.get_content_type(), 'multipart/alternative')
        eq(len(msg1.get_payload()), 2)
        for subpart in msg1.get_payload():
            eq(subpart.get_content_type(), 'message/external-body')
            eq(len(subpart.get_payload()), 1)
            subsubpart = subpart.get_payload(0)
            eq(subsubpart.get_content_type(), 'text/plain')

    def test_double_boundary(self):
        # msg_37.txt is a multipart that contains two dash-boundary's in a
        # row.  Our interpretation of RFC 2046 calls for ignoring the second
        # and subsequent boundaries.
        msg = self._msgobj('msg_37.txt')
        self.assertEqual(len(msg.get_payload()), 3)

    def test_nested_inner_contains_outer_boundary(self):
        eq = self.ndiffAssertEqual
        # msg_38.txt has an inner part that contains outer boundaries.  My
        # interpretation of RFC 2046 (based on sections 5.1 and 5.1.2) say
        # these are illegal and should be interpreted as unterminated inner
        # parts.
        msg = self._msgobj('msg_38.txt')
        sfp = StringIO()
        Iterators._structure(msg, sfp)
        eq(sfp.getvalue(), """\
multipart/mixed
multipart/mixed
multipart/alternative
text/plain
text/plain
text/plain
text/plain
""")

    def test_nested_with_same_boundary(self):
        eq = self.ndiffAssertEqual
        # msg 39.txt is similarly evil in that it's got inner parts that use
        # the same boundary as outer parts.  Again, I believe the way this is
        # parsed is closest to the spirit of RFC 2046
        msg = self._msgobj('msg_39.txt')
        sfp = StringIO()
        Iterators._structure(msg, sfp)
        eq(sfp.getvalue(), """\
multipart/mixed
multipart/mixed
multipart/alternative
application/octet-stream
application/octet-stream
text/plain
""")

    def test_boundary_in_non_multipart(self):
        # A boundary parameter on a non-multipart type must be ignored.
        msg = self._msgobj('msg_40.txt')
        self.assertEqual(msg.as_string(), '''\
MIME-Version: 1.0
Content-Type: text/html; boundary="--961284236552522269"
----961284236552522269
Content-Type: text/html;
Content-Transfer-Encoding: 7Bit
<html></html>
----961284236552522269--
''')

    def test_boundary_with_leading_space(self):
        # Leading whitespace in the boundary string is significant.
        eq = self.assertEqual
        msg = email.message_from_string('''\
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary=" XXXX"
-- XXXX
Content-Type: text/plain
-- XXXX
Content-Type: text/plain
-- XXXX--
''')
        self.assertTrue(msg.is_multipart())
        eq(msg.get_boundary(), ' XXXX')
        eq(len(msg.get_payload()), 2)

    def test_boundary_without_trailing_newline(self):
        # The closing boundary need not be newline-terminated.
        m = Parser().parsestr("""\
Content-Type: multipart/mixed; boundary="===============0012394164=="
MIME-Version: 1.0
--===============0012394164==
Content-Type: image/file1.jpg
MIME-Version: 1.0
Content-Transfer-Encoding: base64
YXNkZg==
--===============0012394164==--""")
        self.assertEqual(m.get_payload(0).get_payload(), 'YXNkZg==')
# Test some badly formatted messages
class TestNonConformant(TestEmailBase):
    """Tests for parsing badly formatted, non-RFC-conformant messages:
    the parser must degrade gracefully and record defects rather than
    raise.

    NOTE(review): indentation in this copy was lost and has been
    reconstructed; blank lines inside the triple-quoted literals may also
    have been stripped in transit -- verify against upstream
    test_email.py.
    """

    def test_parse_missing_minor_type(self):
        # A bare "text" major type defaults to text/plain.
        eq = self.assertEqual
        msg = self._msgobj('msg_14.txt')
        eq(msg.get_content_type(), 'text/plain')
        eq(msg.get_content_maintype(), 'text')
        eq(msg.get_content_subtype(), 'plain')

    def test_same_boundary_inner_outer(self):
        unless = self.assertTrue
        msg = self._msgobj('msg_15.txt')
        # XXX We can probably eventually do better
        inner = msg.get_payload(0)
        unless(hasattr(inner, 'defects'))
        self.assertEqual(len(inner.defects), 1)
        unless(isinstance(inner.defects[0],
                          Errors.StartBoundaryNotFoundDefect))

    def test_multipart_no_boundary(self):
        # multipart without a boundary param: payload stays a string and
        # two defects are recorded.
        unless = self.assertTrue
        msg = self._msgobj('msg_25.txt')
        unless(isinstance(msg.get_payload(), str))
        self.assertEqual(len(msg.defects), 2)
        unless(isinstance(msg.defects[0], Errors.NoBoundaryInMultipartDefect))
        unless(isinstance(msg.defects[1],
                          Errors.MultipartInvariantViolationDefect))

    def test_invalid_content_type(self):
        eq = self.assertEqual
        neq = self.ndiffAssertEqual
        msg = Message()
        # RFC 2045, $5.2 says invalid yields text/plain
        msg['Content-Type'] = 'text'
        eq(msg.get_content_maintype(), 'text')
        eq(msg.get_content_subtype(), 'plain')
        eq(msg.get_content_type(), 'text/plain')
        # Clear the old value and try something /really/ invalid
        del msg['content-type']
        msg['Content-Type'] = 'foo'
        eq(msg.get_content_maintype(), 'text')
        eq(msg.get_content_subtype(), 'plain')
        eq(msg.get_content_type(), 'text/plain')
        # Still, make sure that the message is idempotently generated
        s = StringIO()
        g = Generator(s)
        g.flatten(msg)
        # NOTE: `neq` is ndiffAssertEqual, i.e. this asserts EQUALITY.
        neq(s.getvalue(), 'Content-Type: foo\n\n')

    def test_no_start_boundary(self):
        # With no start boundary the whole body stays one string payload.
        eq = self.ndiffAssertEqual
        msg = self._msgobj('msg_31.txt')
        eq(msg.get_payload(), """\
--BOUNDARY
Content-Type: text/plain
message 1
--BOUNDARY
Content-Type: text/plain
message 2
--BOUNDARY--
""")

    def test_no_separating_blank_line(self):
        eq = self.ndiffAssertEqual
        msg = self._msgobj('msg_35.txt')
        eq(msg.as_string(), """\
From: aperson@dom.ain
To: bperson@dom.ain
Subject: here's something interesting
counter to RFC 2822, there's no separating newline here
""")

    def test_lying_multipart(self):
        # Claims to be multipart but has no boundary and no parts.
        unless = self.assertTrue
        msg = self._msgobj('msg_41.txt')
        unless(hasattr(msg, 'defects'))
        self.assertEqual(len(msg.defects), 2)
        unless(isinstance(msg.defects[0], Errors.NoBoundaryInMultipartDefect))
        unless(isinstance(msg.defects[1],
                          Errors.MultipartInvariantViolationDefect))

    def test_missing_start_boundary(self):
        outer = self._msgobj('msg_42.txt')
        # The message structure is:
        #
        # multipart/mixed
        #    text/plain
        #    message/rfc822
        #        multipart/mixed [*]
        #
        # [*] This message is missing its start boundary
        bad = outer.get_payload(1).get_payload(0)
        self.assertEqual(len(bad.defects), 1)
        self.assertTrue(isinstance(bad.defects[0],
                                   Errors.StartBoundaryNotFoundDefect))

    def test_first_line_is_continuation_header(self):
        # A leading continuation line yields no headers, a defect, and the
        # remainder as payload.
        eq = self.assertEqual
        m = ' Line 1\nLine 2\nLine 3'
        msg = email.message_from_string(m)
        eq(msg.keys(), [])
        eq(msg.get_payload(), 'Line 2\nLine 3')
        eq(len(msg.defects), 1)
        self.assertTrue(isinstance(msg.defects[0],
                                   Errors.FirstHeaderLineIsContinuationDefect))
        eq(msg.defects[0].line, ' Line 1\n')
# Test RFC 2047 header encoding and decoding
class TestRFC2047(unittest.TestCase):
    """Tests for RFC 2047 encoded-word header decoding and re-encoding.

    NOTE(review): indentation was reconstructed for this copy; folded
    header continuation lines inside the multiline string literals below
    normally begin with a leading space (header folding whitespace) that
    may have been stripped in transit -- verify against upstream
    test_email.py.
    """

    def test_rfc2047_multiline(self):
        # Encoded words split across folded lines decode as one sequence.
        eq = self.assertEqual
        s = """Re: =?mac-iceland?q?r=8Aksm=9Arg=8Cs?= baz
foo bar =?mac-iceland?q?r=8Aksm=9Arg=8Cs?="""
        dh = decode_header(s)
        eq(dh, [
            ('Re:', None),
            ('r\x8aksm\x9arg\x8cs', 'mac-iceland'),
            ('baz foo bar', None),
            ('r\x8aksm\x9arg\x8cs', 'mac-iceland')])
        eq(str(make_header(dh)),
           """Re: =?mac-iceland?q?r=8Aksm=9Arg=8Cs?= baz foo bar
=?mac-iceland?q?r=8Aksm=9Arg=8Cs?=""")

    def test_whitespace_eater_unicode(self):
        # Whitespace between an encoded word and adjacent text is eaten.
        eq = self.assertEqual
        s = '=?ISO-8859-1?Q?Andr=E9?= Pirard <pirard@dom.ain>'
        dh = decode_header(s)
        eq(dh, [('Andr\xe9', 'iso-8859-1'), ('Pirard <pirard@dom.ain>', None)])
        hu = unicode(make_header(dh)).encode('latin-1')
        eq(hu, 'Andr\xe9 Pirard <pirard@dom.ain>')

    def test_whitespace_eater_unicode_2(self):
        eq = self.assertEqual
        s = 'The =?iso-8859-1?b?cXVpY2sgYnJvd24gZm94?= jumped over the =?iso-8859-1?b?bGF6eSBkb2c=?='
        dh = decode_header(s)
        eq(dh, [('The', None), ('quick brown fox', 'iso-8859-1'),
                ('jumped over the', None), ('lazy dog', 'iso-8859-1')])
        hu = make_header(dh).__unicode__()
        eq(hu, u'The quick brown fox jumped over the lazy dog')

    def test_rfc2047_without_whitespace(self):
        # Encoded words not set off by whitespace are left undecoded.
        s = 'Sm=?ISO-8859-1?B?9g==?=rg=?ISO-8859-1?B?5Q==?=sbord'
        dh = decode_header(s)
        self.assertEqual(dh, [(s, None)])

    def test_rfc2047_with_whitespace(self):
        s = 'Sm =?ISO-8859-1?B?9g==?= rg =?ISO-8859-1?B?5Q==?= sbord'
        dh = decode_header(s)
        self.assertEqual(dh, [('Sm', None), ('\xf6', 'iso-8859-1'),
                              ('rg', None), ('\xe5', 'iso-8859-1'),
                              ('sbord', None)])

    def test_rfc2047_B_bad_padding(self):
        # Truncated/short base64 padding must still decode complete bytes.
        s = '=?iso-8859-1?B?%s?='
        data = [                          # only test complete bytes
            ('dm==', 'v'), ('dm=', 'v'), ('dm', 'v'),
            ('dmk=', 'vi'), ('dmk', 'vi')
        ]
        for q, a in data:
            dh = decode_header(s % q)
            self.assertEqual(dh, [(a, 'iso-8859-1')])

    def test_rfc2047_Q_invalid_digits(self):
        # issue 10004: invalid hex digits after '=' pass through verbatim.
        s = '=?iso-8659-1?Q?andr=e9=zz?='
        self.assertEqual(decode_header(s),
                         [(b'andr\xe9=zz', 'iso-8659-1')])
# Test the MIMEMessage class
class TestMIMEMessage(TestEmailBase):
    """Tests for the MIMEMessage class and message/rfc822 handling,
    including delivery-status notifications and multipart containers.

    NOTE(review): indentation was reconstructed; blank separator lines
    inside the triple-quoted expected-output literals appear to have been
    stripped in transit -- verify against upstream test_email.py.
    """

    def setUp(self):
        fp = openfile('msg_11.txt')
        try:
            self._text = fp.read()
        finally:
            fp.close()

    def test_type_error(self):
        # MIMEMessage requires a Message instance, not a string.
        self.assertRaises(TypeError, MIMEMessage, 'a plain string')

    def test_valid_argument(self):
        # Wrapping a Message yields message/rfc822 with a one-element
        # list payload containing the very same object.
        eq = self.assertEqual
        unless = self.assertTrue
        subject = 'A sub-message'
        m = Message()
        m['Subject'] = subject
        r = MIMEMessage(m)
        eq(r.get_content_type(), 'message/rfc822')
        payload = r.get_payload()
        unless(isinstance(payload, list))
        eq(len(payload), 1)
        subpart = payload[0]
        unless(subpart is m)
        eq(subpart['subject'], subject)

    def test_bad_multipart(self):
        # message/rfc822 may hold only one sub-message; attaching a
        # second must raise MultipartConversionError.
        eq = self.assertEqual
        msg1 = Message()
        msg1['Subject'] = 'subpart 1'
        msg2 = Message()
        msg2['Subject'] = 'subpart 2'
        r = MIMEMessage(msg1)
        self.assertRaises(Errors.MultipartConversionError, r.attach, msg2)

    def test_generate(self):
        # First craft the message to be encapsulated
        m = Message()
        m['Subject'] = 'An enclosed message'
        m.set_payload('Here is the body of the message.\n')
        r = MIMEMessage(m)
        r['Subject'] = 'The enclosing message'
        s = StringIO()
        g = Generator(s)
        g.flatten(r)
        self.assertEqual(s.getvalue(), """\
Content-Type: message/rfc822
MIME-Version: 1.0
Subject: The enclosing message
Subject: An enclosed message
Here is the body of the message.
""")

    def test_parse_message_rfc822(self):
        eq = self.assertEqual
        unless = self.assertTrue
        msg = self._msgobj('msg_11.txt')
        eq(msg.get_content_type(), 'message/rfc822')
        payload = msg.get_payload()
        unless(isinstance(payload, list))
        eq(len(payload), 1)
        submsg = payload[0]
        self.assertTrue(isinstance(submsg, Message))
        eq(submsg['subject'], 'An enclosed message')
        eq(submsg.get_payload(), 'Here is the body of the message.\n')

    def test_dsn(self):
        eq = self.assertEqual
        unless = self.assertTrue
        # msg 16 is a Delivery Status Notification, see RFC 1894
        msg = self._msgobj('msg_16.txt')
        eq(msg.get_content_type(), 'multipart/report')
        unless(msg.is_multipart())
        eq(len(msg.get_payload()), 3)
        # Subpart 1 is a text/plain, human readable section
        subpart = msg.get_payload(0)
        eq(subpart.get_content_type(), 'text/plain')
        eq(subpart.get_payload(), """\
This report relates to a message you sent with the following header fields:
Message-id: <002001c144a6$8752e060$56104586@oxy.edu>
Date: Sun, 23 Sep 2001 20:10:55 -0700
From: "Ian T. Henry" <henryi@oxy.edu>
To: SoCal Raves <scr@socal-raves.org>
Subject: [scr] yeah for Ians!!
Your message cannot be delivered to the following recipients:
Recipient address: jangel1@cougar.noc.ucla.edu
Reason: recipient reached disk quota
""")
        # Subpart 2 contains the machine parsable DSN information.  It
        # consists of two blocks of headers, represented by two nested Message
        # objects.
        subpart = msg.get_payload(1)
        eq(subpart.get_content_type(), 'message/delivery-status')
        eq(len(subpart.get_payload()), 2)
        # message/delivery-status should treat each block as a bunch of
        # headers, i.e. a bunch of Message objects.
        dsn1 = subpart.get_payload(0)
        unless(isinstance(dsn1, Message))
        eq(dsn1['original-envelope-id'], '0GK500B4HD0888@cougar.noc.ucla.edu')
        eq(dsn1.get_param('dns', header='reporting-mta'), '')
        # Try a missing one <wink>
        eq(dsn1.get_param('nsd', header='reporting-mta'), None)
        dsn2 = subpart.get_payload(1)
        unless(isinstance(dsn2, Message))
        eq(dsn2['action'], 'failed')
        eq(dsn2.get_params(header='original-recipient'),
           [('rfc822', ''), ('jangel1@cougar.noc.ucla.edu', '')])
        eq(dsn2.get_param('rfc822', header='final-recipient'), '')
        # Subpart 3 is the original message
        subpart = msg.get_payload(2)
        eq(subpart.get_content_type(), 'message/rfc822')
        payload = subpart.get_payload()
        unless(isinstance(payload, list))
        eq(len(payload), 1)
        subsubpart = payload[0]
        unless(isinstance(subsubpart, Message))
        eq(subsubpart.get_content_type(), 'text/plain')
        eq(subsubpart['message-id'],
           '<002001c144a6$8752e060$56104586@oxy.edu>')

    def test_epilogue(self):
        # Generated output with preamble/epilogue must match msg_21.txt.
        eq = self.ndiffAssertEqual
        fp = openfile('msg_21.txt')
        try:
            text = fp.read()
        finally:
            fp.close()
        msg = Message()
        msg['From'] = 'aperson@dom.ain'
        msg['To'] = 'bperson@dom.ain'
        msg['Subject'] = 'Test'
        msg.preamble = 'MIME message'
        msg.epilogue = 'End of MIME message\n'
        msg1 = MIMEText('One')
        msg2 = MIMEText('Two')
        msg.add_header('Content-Type', 'multipart/mixed', boundary='BOUNDARY')
        msg.attach(msg1)
        msg.attach(msg2)
        sfp = StringIO()
        g = Generator(sfp)
        g.flatten(msg)
        eq(sfp.getvalue(), text)

    def test_no_nl_preamble(self):
        eq = self.ndiffAssertEqual
        msg = Message()
        msg['From'] = 'aperson@dom.ain'
        msg['To'] = 'bperson@dom.ain'
        msg['Subject'] = 'Test'
        msg.preamble = 'MIME message'
        msg.epilogue = ''
        msg1 = MIMEText('One')
        msg2 = MIMEText('Two')
        msg.add_header('Content-Type', 'multipart/mixed', boundary='BOUNDARY')
        msg.attach(msg1)
        msg.attach(msg2)
        eq(msg.as_string(), """\
From: aperson@dom.ain
To: bperson@dom.ain
Subject: Test
Content-Type: multipart/mixed; boundary="BOUNDARY"
MIME message
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
One
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Two
--BOUNDARY--
""")

    def test_default_type(self):
        # Inside a multipart/digest, untyped parts default to
        # message/rfc822 rather than text/plain.
        eq = self.assertEqual
        fp = openfile('msg_30.txt')
        try:
            msg = email.message_from_file(fp)
        finally:
            fp.close()
        container1 = msg.get_payload(0)
        eq(container1.get_default_type(), 'message/rfc822')
        eq(container1.get_content_type(), 'message/rfc822')
        container2 = msg.get_payload(1)
        eq(container2.get_default_type(), 'message/rfc822')
        eq(container2.get_content_type(), 'message/rfc822')
        container1a = container1.get_payload(0)
        eq(container1a.get_default_type(), 'text/plain')
        eq(container1a.get_content_type(), 'text/plain')
        container2a = container2.get_payload(0)
        eq(container2a.get_default_type(), 'text/plain')
        eq(container2a.get_content_type(), 'text/plain')

    def test_default_type_with_explicit_container_type(self):
        eq = self.assertEqual
        fp = openfile('msg_28.txt')
        try:
            msg = email.message_from_file(fp)
        finally:
            fp.close()
        container1 = msg.get_payload(0)
        eq(container1.get_default_type(), 'message/rfc822')
        eq(container1.get_content_type(), 'message/rfc822')
        container2 = msg.get_payload(1)
        eq(container2.get_default_type(), 'message/rfc822')
        eq(container2.get_content_type(), 'message/rfc822')
        container1a = container1.get_payload(0)
        eq(container1a.get_default_type(), 'text/plain')
        eq(container1a.get_content_type(), 'text/plain')
        container2a = container2.get_payload(0)
        eq(container2a.get_default_type(), 'text/plain')
        eq(container2a.get_content_type(), 'text/plain')

    def test_default_type_non_parsed(self):
        eq = self.assertEqual
        # NOTE: `neq` is ndiffAssertEqual -- despite the name, it asserts
        # EQUALITY (with an ndiff-style failure message).
        neq = self.ndiffAssertEqual
        # Set up container
        container = MIMEMultipart('digest', 'BOUNDARY')
        container.epilogue = ''
        # Set up subparts
        subpart1a = MIMEText('message 1\n')
        subpart2a = MIMEText('message 2\n')
        subpart1 = MIMEMessage(subpart1a)
        subpart2 = MIMEMessage(subpart2a)
        container.attach(subpart1)
        container.attach(subpart2)
        eq(subpart1.get_content_type(), 'message/rfc822')
        eq(subpart1.get_default_type(), 'message/rfc822')
        eq(subpart2.get_content_type(), 'message/rfc822')
        eq(subpart2.get_default_type(), 'message/rfc822')
        neq(container.as_string(0), '''\
Content-Type: multipart/digest; boundary="BOUNDARY"
MIME-Version: 1.0
--BOUNDARY
Content-Type: message/rfc822
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
message 1
--BOUNDARY
Content-Type: message/rfc822
MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
message 2
--BOUNDARY--
''')
        # After stripping the explicit headers the content type must still
        # be reported as message/rfc822 via the digest default.
        del subpart1['content-type']
        del subpart1['mime-version']
        del subpart2['content-type']
        del subpart2['mime-version']
        eq(subpart1.get_content_type(), 'message/rfc822')
        eq(subpart1.get_default_type(), 'message/rfc822')
        eq(subpart2.get_content_type(), 'message/rfc822')
        eq(subpart2.get_default_type(), 'message/rfc822')
        neq(container.as_string(0), '''\
Content-Type: multipart/digest; boundary="BOUNDARY"
MIME-Version: 1.0
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
message 1
--BOUNDARY
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
message 2
--BOUNDARY--
''')

    def test_mime_attachments_in_constructor(self):
        # MIMEMultipart accepts its subparts directly via _subparts.
        eq = self.assertEqual
        text1 = MIMEText('')
        text2 = MIMEText('')
        msg = MIMEMultipart(_subparts=(text1, text2))
        eq(len(msg.get_payload()), 2)
        eq(msg.get_payload(0), text1)
        eq(msg.get_payload(1), text2)

    def test_default_multipart_constructor(self):
        msg = MIMEMultipart()
        self.assertTrue(msg.is_multipart())
# A general test of parser->model->generator idempotency. IOW, read a message
# in, parse it into a message object tree, then without touching the tree,
# regenerate the plain text. The original text and the transformed text
# should be identical. Note: that we ignore the Unix-From since that may
# contain a changed date.
class TestIdempotent(TestEmailBase):
    """Parser -> model -> generator round-trip tests: regenerating a
    parsed message without touching the tree must reproduce the original
    text exactly (the Unix-From line is ignored since it may carry a
    changed date).

    NOTE(review): indentation in this copy was lost and has been
    reconstructed.
    """

    def _msgobj(self, filename):
        # Unlike the base class helper, return BOTH the parsed Message
        # and the raw text it was parsed from.
        fp = openfile(filename)
        try:
            data = fp.read()
        finally:
            fp.close()
        msg = email.message_from_string(data)
        return msg, data

    def _idempotent(self, msg, text):
        # Regenerate and diff against the original text.  maxheaderlen=0
        # disables header wrapping so the output is verbatim.
        eq = self.ndiffAssertEqual
        s = StringIO()
        g = Generator(s, maxheaderlen=0)
        g.flatten(msg)
        eq(text, s.getvalue())

    def test_parse_text_message(self):
        eq = self.assertEqual
        msg, text = self._msgobj('msg_01.txt')
        eq(msg.get_content_type(), 'text/plain')
        eq(msg.get_content_maintype(), 'text')
        eq(msg.get_content_subtype(), 'plain')
        eq(msg.get_params()[1], ('charset', 'us-ascii'))
        eq(msg.get_param('charset'), 'us-ascii')
        eq(msg.preamble, None)
        eq(msg.epilogue, None)
        self._idempotent(msg, text)

    def test_parse_untyped_message(self):
        eq = self.assertEqual
        msg, text = self._msgobj('msg_03.txt')
        eq(msg.get_content_type(), 'text/plain')
        eq(msg.get_params(), None)
        eq(msg.get_param('charset'), None)
        self._idempotent(msg, text)

    # The following round-trip a variety of fixture messages verbatim.

    def test_simple_multipart(self):
        msg, text = self._msgobj('msg_04.txt')
        self._idempotent(msg, text)

    def test_MIME_digest(self):
        msg, text = self._msgobj('msg_02.txt')
        self._idempotent(msg, text)

    def test_long_header(self):
        msg, text = self._msgobj('msg_27.txt')
        self._idempotent(msg, text)

    def test_MIME_digest_with_part_headers(self):
        msg, text = self._msgobj('msg_28.txt')
        self._idempotent(msg, text)

    def test_mixed_with_image(self):
        msg, text = self._msgobj('msg_06.txt')
        self._idempotent(msg, text)

    def test_multipart_report(self):
        msg, text = self._msgobj('msg_05.txt')
        self._idempotent(msg, text)

    def test_dsn(self):
        msg, text = self._msgobj('msg_16.txt')
        self._idempotent(msg, text)

    def test_preamble_epilogue(self):
        msg, text = self._msgobj('msg_21.txt')
        self._idempotent(msg, text)

    def test_multipart_one_part(self):
        msg, text = self._msgobj('msg_23.txt')
        self._idempotent(msg, text)

    def test_multipart_no_parts(self):
        msg, text = self._msgobj('msg_24.txt')
        self._idempotent(msg, text)

    def test_no_start_boundary(self):
        msg, text = self._msgobj('msg_31.txt')
        self._idempotent(msg, text)

    def test_rfc2231_charset(self):
        msg, text = self._msgobj('msg_32.txt')
        self._idempotent(msg, text)

    def test_more_rfc2231_parameters(self):
        msg, text = self._msgobj('msg_33.txt')
        self._idempotent(msg, text)

    def test_text_plain_in_a_multipart_digest(self):
        msg, text = self._msgobj('msg_34.txt')
        self._idempotent(msg, text)

    def test_nested_multipart_mixeds(self):
        msg, text = self._msgobj('msg_12a.txt')
        self._idempotent(msg, text)

    def test_message_external_body_idempotent(self):
        msg, text = self._msgobj('msg_36.txt')
        self._idempotent(msg, text)

    def test_content_type(self):
        eq = self.assertEqual
        unless = self.assertTrue
        # Get a message object and reset the seek pointer for other tests
        msg, text = self._msgobj('msg_05.txt')
        eq(msg.get_content_type(), 'multipart/report')
        # Test the Content-Type: parameters
        params = {}
        for pk, pv in msg.get_params():
            params[pk] = pv
        eq(params['report-type'], 'delivery-status')
        eq(params['boundary'], 'D1690A7AC1.996856090/mail.example.com')
        eq(msg.preamble, 'This is a MIME-encapsulated message.\n')
        eq(msg.epilogue, '\n')
        eq(len(msg.get_payload()), 3)
        # Make sure the subparts are what we expect
        msg1 = msg.get_payload(0)
        eq(msg1.get_content_type(), 'text/plain')
        eq(msg1.get_payload(), 'Yadda yadda yadda\n')
        msg2 = msg.get_payload(1)
        eq(msg2.get_content_type(), 'text/plain')
        eq(msg2.get_payload(), 'Yadda yadda yadda\n')
        msg3 = msg.get_payload(2)
        eq(msg3.get_content_type(), 'message/rfc822')
        self.assertTrue(isinstance(msg3, Message))
        payload = msg3.get_payload()
        unless(isinstance(payload, list))
        eq(len(payload), 1)
        msg4 = payload[0]
        unless(isinstance(msg4, Message))
        eq(msg4.get_payload(), 'Yadda yadda yadda\n')

    def test_parser(self):
        eq = self.assertEqual
        unless = self.assertTrue
        msg, text = self._msgobj('msg_06.txt')
        # Check some of the outer headers
        eq(msg.get_content_type(), 'message/rfc822')
        # Make sure the payload is a list of exactly one sub-Message, and that
        # that submessage has a type of text/plain
        payload = msg.get_payload()
        unless(isinstance(payload, list))
        eq(len(payload), 1)
        msg1 = payload[0]
        self.assertTrue(isinstance(msg1, Message))
        eq(msg1.get_content_type(), 'text/plain')
        self.assertTrue(isinstance(msg1.get_payload(), str))
        eq(msg1.get_payload(), '\n')
# Test various other bits of the package's functionality
class TestMiscellaneous(TestEmailBase):
def test_message_from_string(self):
fp = openfile('msg_01.txt')
try:
text = fp.read()
finally:
fp.close()
msg = email.message_from_string(text)
s = StringIO()
# Don't wrap/continue long headers since we're trying to test
# idempotency.
g = Generator(s, maxheaderlen=0)
g.flatten(msg)
self.assertEqual(text, s.getvalue())
def test_message_from_file(self):
fp = openfile('msg_01.txt')
try:
text = fp.read()
fp.seek(0)
msg = email.message_from_file(fp)
s = StringIO()
# Don't wrap/continue long headers since we're trying to test
# idempotency.
g = Generator(s, maxheaderlen=0)
g.flatten(msg)
self.assertEqual(text, s.getvalue())
finally:
fp.close()
def test_message_from_string_with_class(self):
unless = self.assertTrue
fp = openfile('msg_01.txt')
try:
text = fp.read()
finally:
fp.close()
# Create a subclass
class MyMessage(Message):
pass
msg = email.message_from_string(text, MyMessage)
unless(isinstance(msg, MyMessage))
# Try something more complicated
fp = openfile('msg_02.txt')
try:
text = fp.read()
finally:
fp.close()
msg = email.message_from_string(text, MyMessage)
for subpart in msg.walk():
unless(isinstance(subpart, MyMessage))
def test_message_from_file_with_class(self):
unless = self.assertTrue
# Create a subclass
class MyMessage(Message):
pass
fp = openfile('msg_01.txt')
try:
msg = email.message_from_file(fp, MyMessage)
finally:
fp.close()
unless(isinstance(msg, MyMessage))
# Try something more complicated
fp = openfile('msg_02.txt')
try:
msg = email.message_from_file(fp, MyMessage)
finally:
fp.close()
for subpart in msg.walk():
unless(isinstance(subpart, MyMessage))
def test__all__(self):
module = __import__('email')
all = module.__all__
all.sort()
self.assertEqual(all, [
# Old names
'Charset', 'Encoders', 'Errors', 'Generator',
'Header', 'Iterators', 'MIMEAudio', 'MIMEBase',
'MIMEImage', 'MIMEMessage', 'MIMEMultipart',
'MIMENonMultipart', 'MIMEText', 'Message',
'Parser', 'Utils', 'base64MIME',
# new names
'base64mime', 'charset', 'encoders', 'errors', 'generator',
'header', 'iterators', 'message', 'message_from_file',
'message_from_string', 'mime', 'parser',
'quopriMIME', 'quoprimime', 'utils',
])
def test_formatdate(self):
now = time.time()
self.assertEqual(Utils.parsedate(Utils.formatdate(now))[:6],
time.gmtime(now)[:6])
def test_formatdate_localtime(self):
now = time.time()
self.assertEqual(
Utils.parsedate(Utils.formatdate(now, localtime=True))[:6],
time.localtime(now)[:6])
def test_formatdate_usegmt(self):
now = time.time()
self.assertEqual(
Utils.formatdate(now, localtime=False),
time.strftime('%a, %d %b %Y %H:%M:%S -0000', time.gmtime(now)))
self.assertEqual(
Utils.formatdate(now, localtime=False, usegmt=True),
time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(now)))
def test_parsedate_none(self):
self.assertEqual(Utils.parsedate(''), None)
def test_parsedate_compact(self):
# The FWS after the comma is optional
self.assertEqual(Utils.parsedate('Wed,3 Apr 2002 14:58:26 +0800'),
Utils.parsedate('Wed, 3 Apr 2002 14:58:26 +0800'))
def test_parsedate_no_dayofweek(self):
eq = self.assertEqual
eq(Utils.parsedate_tz('25 Feb 2003 13:47:26 -0800'),
(2003, 2, 25, 13, 47, 26, 0, 1, -1, -28800))
def test_parsedate_compact_no_dayofweek(self):
eq = self.assertEqual
eq(Utils.parsedate_tz('5 Feb 2003 13:47:26 -0800'),
(2003, 2, 5, 13, 47, 26, 0, 1, -1, -28800))
def test_parsedate_acceptable_to_time_functions(self):
    """Tuples from parsedate()/parsedate_tz() must feed time.mktime/strftime."""
    eq = self.assertEqual
    timetup = Utils.parsedate('5 Feb 2003 13:47:26 -0800')
    t = int(time.mktime(timetup))
    eq(time.localtime(t)[:6], timetup[:6])
    eq(int(time.strftime('%Y', timetup)), 2003)
    # parsedate_tz() returns a 10-tuple; only the first 9 elements form a
    # valid struct_time for the time-module functions.
    timetup = Utils.parsedate_tz('5 Feb 2003 13:47:26 -0800')
    t = int(time.mktime(timetup[:9]))
    eq(time.localtime(t)[:6], timetup[:6])
    eq(int(time.strftime('%Y', timetup[:9])), 2003)
def test_parsedate_y2k(self):
    """Test for parsing a date with a two-digit year.

    Parsing a date with a two-digit year should return the correct
    four-digit year.  RFC822 allows two-digit years, but RFC2822 (which
    obsoletes RFC822) requires four-digit years.
    """
    self.assertEqual(Utils.parsedate_tz('25 Feb 03 13:47:26 -0800'),
                     Utils.parsedate_tz('25 Feb 2003 13:47:26 -0800'))
    self.assertEqual(Utils.parsedate_tz('25 Feb 71 13:47:26 -0800'),
                     Utils.parsedate_tz('25 Feb 1971 13:47:26 -0800'))
def test_parseaddr_empty(self):
    """An empty angle-bracket address parses and re-formats to ''."""
    self.assertEqual(Utils.parseaddr('<>'), ('', ''))
    self.assertEqual(Utils.formataddr(Utils.parseaddr('<>')), '')
def test_noquote_dump(self):
    """A display name with no specials is emitted unquoted."""
    self.assertEqual(
        Utils.formataddr(('A Silly Person', 'person@dom.ain')),
        'A Silly Person <person@dom.ain>')
def test_escape_dump(self):
    """Parentheses in a display name are quoted and backslash-escaped."""
    self.assertEqual(
        Utils.formataddr(('A (Very) Silly Person', 'person@dom.ain')),
        r'"A \(Very\) Silly Person" <person@dom.ain>')
    a = r'A \(Special\) Person'
    b = 'person@dom.ain'
    self.assertEqual(Utils.parseaddr(Utils.formataddr((a, b))), (a, b))
def test_escape_backslashes(self):
    """Literal backslashes in a display name are doubled when quoted."""
    self.assertEqual(
        Utils.formataddr(('Arthur \Backslash\ Foobar', 'person@dom.ain')),
        r'"Arthur \\Backslash\\ Foobar" <person@dom.ain>')
    a = r'Arthur \Backslash\ Foobar'
    b = 'person@dom.ain'
    self.assertEqual(Utils.parseaddr(Utils.formataddr((a, b))), (a, b))
def test_name_with_dot(self):
    """A dotted display name parses either way but formats quoted."""
    x = 'John X. Doe <jxd@example.com>'
    y = '"John X. Doe" <jxd@example.com>'
    a, b = ('John X. Doe', 'jxd@example.com')
    self.assertEqual(Utils.parseaddr(x), (a, b))
    self.assertEqual(Utils.parseaddr(y), (a, b))
    # formataddr() quotes the name if there's a dot in it
    self.assertEqual(Utils.formataddr((a, b)), y)
def test_parseaddr_preserves_quoted_pairs_in_addresses(self):
    """Quoted pairs inside an address must survive parseaddr() (issue 10005)."""
    # issue 10005.  Note that in the third test the second pair of
    # backslashes is not actually a quoted pair because it is not inside a
    # comment or quoted string: the address being parsed has a quoted
    # string containing a quoted backslash, followed by 'example' and two
    # backslashes, followed by another quoted string containing a space and
    # the word 'example'.  parseaddr copies those two backslashes
    # literally.  Per rfc5322 this is not technically correct since a \ may
    # not appear in an address outside of a quoted string.  It is probably
    # a sensible Postel interpretation, though.
    eq = self.assertEqual
    eq(Utils.parseaddr('""example" example"@example.com'),
       ('', '""example" example"@example.com'))
    eq(Utils.parseaddr('"\\"example\\" example"@example.com'),
       ('', '"\\"example\\" example"@example.com'))
    eq(Utils.parseaddr('"\\\\"example\\\\" example"@example.com'),
       ('', '"\\\\"example\\\\" example"@example.com'))
def test_multiline_from_comment(self):
    """A folded (multi-line) display name is unfolded by parseaddr()."""
    x = """\
Foo
\tBar <foo@example.com>"""
    self.assertEqual(Utils.parseaddr(x), ('Foo Bar', 'foo@example.com'))
def test_quote_dump(self):
    """A display name containing a special (';') gets quoted."""
    self.assertEqual(
        Utils.formataddr(('A Silly; Person', 'person@dom.ain')),
        r'"A Silly; Person" <person@dom.ain>')
def test_fix_eols(self):
    """fix_eols() normalizes every line-ending style to CRLF."""
    eq = self.assertEqual
    eq(Utils.fix_eols('hello'), 'hello')
    eq(Utils.fix_eols('hello\n'), 'hello\r\n')
    eq(Utils.fix_eols('hello\r'), 'hello\r\n')
    eq(Utils.fix_eols('hello\r\n'), 'hello\r\n')
    eq(Utils.fix_eols('hello\n\r'), 'hello\r\n\r\n')
def test_charset_richcomparisons(self):
    """Charset compares case-insensitively with strings and other Charsets."""
    eq = self.assertEqual
    ne = self.assertNotEqual
    cset1 = Charset()
    cset2 = Charset()
    eq(cset1, 'us-ascii')
    eq(cset1, 'US-ASCII')
    eq(cset1, 'Us-AsCiI')
    eq('us-ascii', cset1)
    eq('US-ASCII', cset1)
    eq('Us-AsCiI', cset1)
    ne(cset1, 'usascii')
    ne(cset1, 'USASCII')
    ne(cset1, 'UsAsCiI')
    ne('usascii', cset1)
    ne('USASCII', cset1)
    ne('UsAsCiI', cset1)
    eq(cset1, cset2)
    eq(cset2, cset1)
def test_getaddresses(self):
    """getaddresses() handles both comment-style and angle-bracket names."""
    eq = self.assertEqual
    eq(Utils.getaddresses(['aperson@dom.ain (Al Person)',
                           'Bud Person <bperson@dom.ain>']),
       [('Al Person', 'aperson@dom.ain'),
        ('Bud Person', 'bperson@dom.ain')])
def test_getaddresses_nasty(self):
    """getaddresses() degrades gracefully on malformed address lists."""
    eq = self.assertEqual
    eq(Utils.getaddresses(['foo: ;']), [('', '')])
    eq(Utils.getaddresses(
       ['[]*-- =~$']),
       [('', ''), ('', ''), ('', '*--')])
    eq(Utils.getaddresses(
       ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
       [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
def test_getaddresses_embedded_comment(self):
    """Test proper handling of a nested comment"""
    eq = self.assertEqual
    addrs = Utils.getaddresses(['User ((nested comment)) <foo@bar.com>'])
    eq(addrs[0][1], 'foo@bar.com')
def test_utils_quote_unquote(self):
    """A filename with backslash and quote survives the header round-trip."""
    eq = self.assertEqual
    msg = Message()
    msg.add_header('content-disposition', 'attachment',
                   filename='foo\\wacky"name')
    eq(msg.get_filename(), 'foo\\wacky"name')
def test_get_body_encoding_with_bogus_charset(self):
    """An unknown charset falls back to base64 body encoding."""
    charset = Charset('not a charset')
    self.assertEqual(charset.get_body_encoding(), 'base64')
def test_get_body_encoding_with_uppercase_charset(self):
    """Charset names are normalized to lowercase and pick the right CTE."""
    eq = self.assertEqual
    msg = Message()
    msg['Content-Type'] = 'text/plain; charset=UTF-8'
    eq(msg['content-type'], 'text/plain; charset=UTF-8')
    charsets = msg.get_charsets()
    eq(len(charsets), 1)
    eq(charsets[0], 'utf-8')
    charset = Charset(charsets[0])
    eq(charset.get_body_encoding(), 'base64')
    msg.set_payload('hello world', charset=charset)
    eq(msg.get_payload(), 'aGVsbG8gd29ybGQ=\n')
    eq(msg.get_payload(decode=True), 'hello world')
    eq(msg['content-transfer-encoding'], 'base64')
    # Try another one
    msg = Message()
    msg['Content-Type'] = 'text/plain; charset="US-ASCII"'
    charsets = msg.get_charsets()
    eq(len(charsets), 1)
    eq(charsets[0], 'us-ascii')
    charset = Charset(charsets[0])
    eq(charset.get_body_encoding(), Encoders.encode_7or8bit)
    msg.set_payload('hello world', charset=charset)
    eq(msg.get_payload(), 'hello world')
    eq(msg['content-transfer-encoding'], '7bit')
def test_charsets_case_insensitive(self):
    """Upper- and lower-case charset names behave identically."""
    lc = Charset('us-ascii')
    uc = Charset('US-ASCII')
    self.assertEqual(lc.get_body_encoding(), uc.get_body_encoding())
def test_partial_falls_inside_message_delivery_status(self):
    """A parser chunk boundary inside a delivery-status block must not break."""
    eq = self.ndiffAssertEqual
    # The Parser interface provides chunks of data to FeedParser in 8192
    # byte gulps.  SF bug #1076485 found one of those chunks inside
    # message/delivery-status header block, which triggered an
    # unreadline() of NeedMoreData.
    msg = self._msgobj('msg_43.txt')
    sfp = StringIO()
    Iterators._structure(msg, sfp)
    # NOTE(review): the nesting indentation inside this expected literal
    # was reconstructed (the source paste had lost leading whitespace) --
    # verify against msg_43.txt / Iterators._structure output.
    eq(sfp.getvalue(), """\
multipart/report
    text/plain
    message/delivery-status
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
        text/plain
    text/rfc822-headers
""")
# Test the iterator/generators
class TestIterators(TestEmailBase):
    """Tests for the generators in email.Iterators."""

    def test_body_line_iterator(self):
        """body_line_iterator() yields every body line of (multipart) messages."""
        eq = self.assertEqual
        neq = self.ndiffAssertEqual
        # First a simple non-multipart message
        msg = self._msgobj('msg_01.txt')
        it = Iterators.body_line_iterator(msg)
        lines = list(it)
        eq(len(lines), 6)
        neq(EMPTYSTRING.join(lines), msg.get_payload())
        # Now a more complicated multipart
        msg = self._msgobj('msg_02.txt')
        it = Iterators.body_line_iterator(msg)
        lines = list(it)
        eq(len(lines), 43)
        fp = openfile('msg_19.txt')
        try:
            neq(EMPTYSTRING.join(lines), fp.read())
        finally:
            fp.close()

    def test_typed_subpart_iterator(self):
        """typed_subpart_iterator() finds all subparts of a maintype."""
        eq = self.assertEqual
        msg = self._msgobj('msg_04.txt')
        it = Iterators.typed_subpart_iterator(msg, 'text')
        lines = []
        subparts = 0
        for subpart in it:
            subparts += 1
            lines.append(subpart.get_payload())
        eq(subparts, 2)
        # NOTE(review): blank lines inside this expected literal were
        # reconstructed (lost in the whitespace-mangled paste) -- verify
        # against msg_04.txt payloads.
        eq(EMPTYSTRING.join(lines), """\
a simple kind of mirror
to reflect upon our own

a simple kind of mirror
to reflect upon our own

""")

    def test_typed_subpart_iterator_default_type(self):
        """With no explicit subtype the iterator matches the default type."""
        eq = self.assertEqual
        msg = self._msgobj('msg_03.txt')
        it = Iterators.typed_subpart_iterator(msg, 'text', 'plain')
        lines = []
        subparts = 0
        for subpart in it:
            subparts += 1
            lines.append(subpart.get_payload())
        eq(subparts, 1)
        # NOTE(review): blank lines inside this expected literal were
        # reconstructed -- verify against msg_03.txt payload.
        eq(EMPTYSTRING.join(lines), """\

Hi,

Do you like this message?

-Me
""")

    def test_pushCR_LF(self):
        '''FeedParser BufferedSubFile.push() assumed it received complete
        line endings.  A CR ending one push() followed by a LF starting
        the next push() added an empty line.
        '''
        imt = [
            ("a\r \n", 2),
            ("b", 0),
            ("c\n", 1),
            ("", 0),
            ("d\r\n", 1),
            ("e\r", 0),
            ("\nf", 1),
            ("\r\n", 1),
            ]
        from email.feedparser import BufferedSubFile, NeedMoreData
        bsf = BufferedSubFile()
        om = []
        nt = 0
        for il, n in imt:
            bsf.push(il)
            nt += n
            n1 = 0
            while True:
                ol = bsf.readline()
                if ol == NeedMoreData:
                    break
                om.append(ol)
                n1 += 1
            self.assertTrue(n == n1)
        self.assertTrue(len(om) == nt)
        self.assertTrue(''.join([il for il, n in imt]) == ''.join(om))
class TestParsers(TestEmailBase):
    """Tests for Parser/HeaderParser and FeedParser edge cases."""

    def test_header_parser(self):
        """HeaderParser leaves the body unparsed as a flat string."""
        eq = self.assertEqual
        # Parse only the headers of a complex multipart MIME document
        fp = openfile('msg_02.txt')
        try:
            msg = HeaderParser().parse(fp)
        finally:
            fp.close()
        eq(msg['from'], 'ppp-request@zzz.org')
        eq(msg['to'], 'ppp@zzz.org')
        eq(msg.get_content_type(), 'multipart/mixed')
        self.assertFalse(msg.is_multipart())
        self.assertTrue(isinstance(msg.get_payload(), str))

    def test_whitespace_continuation(self):
        """A whitespace-only continuation line is kept as header content."""
        eq = self.assertEqual
        # This message contains a line after the Subject: header that has only
        # whitespace, but it is not empty!
        msg = email.message_from_string("""\
From: aperson@dom.ain
To: bperson@dom.ain
Subject: the next line has a space on it
\x20
Date: Mon, 8 Apr 2002 15:09:19 -0400
Message-ID: spam

Here's the message body
""")
        eq(msg['subject'], 'the next line has a space on it\n ')
        eq(msg['message-id'], 'spam')
        eq(msg.get_payload(), "Here's the message body\n")

    def test_whitespace_continuation_last_header(self):
        """Same as above, but the folded header is the last one."""
        eq = self.assertEqual
        # Like the previous test, but the subject line is the last
        # header.
        msg = email.message_from_string("""\
From: aperson@dom.ain
To: bperson@dom.ain
Date: Mon, 8 Apr 2002 15:09:19 -0400
Message-ID: spam
Subject: the next line has a space on it
\x20

Here's the message body
""")
        eq(msg['subject'], 'the next line has a space on it\n ')
        eq(msg['message-id'], 'spam')
        eq(msg.get_payload(), "Here's the message body\n")

    def test_crlf_separation(self):
        """A CRLF-delimited multipart splits into the right parts."""
        eq = self.assertEqual
        fp = openfile('msg_26.txt', mode='rb')
        try:
            msg = Parser().parse(fp)
        finally:
            fp.close()
        eq(len(msg.get_payload()), 2)
        part1 = msg.get_payload(0)
        eq(part1.get_content_type(), 'text/plain')
        eq(part1.get_payload(), 'Simple email with attachment.\r\n\r\n')
        part2 = msg.get_payload(1)
        eq(part2.get_content_type(), 'application/riscos')

    def test_multipart_digest_with_extra_mime_headers(self):
        """Extra MIME headers inside a digest must not confuse the parser."""
        eq = self.assertEqual
        neq = self.ndiffAssertEqual
        fp = openfile('msg_28.txt')
        try:
            msg = email.message_from_file(fp)
        finally:
            fp.close()
        # Structure is:
        # multipart/digest
        #   message/rfc822
        #     text/plain
        #   message/rfc822
        #     text/plain
        eq(msg.is_multipart(), 1)
        eq(len(msg.get_payload()), 2)
        part1 = msg.get_payload(0)
        eq(part1.get_content_type(), 'message/rfc822')
        eq(part1.is_multipart(), 1)
        eq(len(part1.get_payload()), 1)
        part1a = part1.get_payload(0)
        eq(part1a.is_multipart(), 0)
        eq(part1a.get_content_type(), 'text/plain')
        neq(part1a.get_payload(), 'message 1\n')
        # next message/rfc822
        part2 = msg.get_payload(1)
        eq(part2.get_content_type(), 'message/rfc822')
        eq(part2.is_multipart(), 1)
        eq(len(part2.get_payload()), 1)
        part2a = part2.get_payload(0)
        eq(part2a.is_multipart(), 0)
        eq(part2a.get_content_type(), 'text/plain')
        neq(part2a.get_payload(), 'message 2\n')

    def test_three_lines(self):
        """A headers-only message with no body parses correctly."""
        # A bug report by Andrew McNamara
        lines = ['From: Andrew Person <aperson@dom.ain',
                 'Subject: Test',
                 'Date: Tue, 20 Aug 2002 16:43:45 +1000']
        msg = email.message_from_string(NL.join(lines))
        self.assertEqual(msg['date'], 'Tue, 20 Aug 2002 16:43:45 +1000')

    def test_strip_line_feed_and_carriage_return_in_headers(self):
        """Header values are stripped of trailing CR/LF."""
        eq = self.assertEqual
        # For [ 1002475 ] email message parser doesn't handle \r\n correctly
        value1 = 'text'
        value2 = 'more text'
        m = 'Header: %s\r\nNext-Header: %s\r\n\r\nBody\r\n\r\n' % (
            value1, value2)
        msg = email.message_from_string(m)
        eq(msg.get('Header'), value1)
        eq(msg.get('Next-Header'), value2)

    def test_rfc2822_header_syntax(self):
        """Any printable non-colon characters form a legal header name."""
        eq = self.assertEqual
        m = '>From: foo\nFrom: bar\n!"#QUX;~: zoo\n\nbody'
        msg = email.message_from_string(m)
        eq(len(msg.keys()), 3)
        keys = msg.keys()
        keys.sort()
        eq(keys, ['!"#QUX;~', '>From', 'From'])
        eq(msg.get_payload(), 'body')

    def test_rfc2822_space_not_allowed_in_header(self):
        """A space in the header name makes the whole block invalid headers."""
        eq = self.assertEqual
        m = '>From foo@example.com 11:25:53\nFrom: bar\n!"#QUX;~: zoo\n\nbody'
        msg = email.message_from_string(m)
        eq(len(msg.keys()), 0)

    def test_rfc2822_one_character_header(self):
        """Single-character header names are accepted."""
        eq = self.assertEqual
        m = 'A: first header\nB: second header\nCC: third header\n\nbody'
        msg = email.message_from_string(m)
        headers = msg.keys()
        headers.sort()
        eq(headers, ['A', 'B', 'CC'])
        eq(msg.get_payload(), 'body')

    def test_CRLFLF_at_end_of_part(self):
        # issue 5610: feedparser should not eat two chars from body part ending
        # with "\r\n\n".
        m = (
            "From: foo@bar.com\n"
            "To: baz\n"
            "Mime-Version: 1.0\n"
            "Content-Type: multipart/mixed; boundary=BOUNDARY\n"
            "\n"
            "--BOUNDARY\n"
            "Content-Type: text/plain\n"
            "\n"
            "body ending with CRLF newline\r\n"
            "\n"
            "--BOUNDARY--\n"
          )
        msg = email.message_from_string(m)
        self.assertTrue(msg.get_payload(0).get_payload().endswith('\r\n'))
class TestBase64(unittest.TestCase):
    """Tests for the email.base64MIME helpers."""

    def test_len(self):
        """base64_len() matches the length of an actual encoding."""
        eq = self.assertEqual
        eq(base64MIME.base64_len('hello'),
           len(base64MIME.encode('hello', eol='')))
        for size in range(15):
            if size == 0:
                bsize = 0
            elif size <= 3:
                bsize = 4
            elif size <= 6:
                bsize = 8
            elif size <= 9:
                bsize = 12
            elif size <= 12:
                bsize = 16
            else:
                bsize = 20
            eq(base64MIME.base64_len('x' * size), bsize)

    def test_decode(self):
        """decode() ignores the eol argument and decodes CRLF to it."""
        eq = self.assertEqual
        eq(base64MIME.decode(''), '')
        eq(base64MIME.decode('aGVsbG8='), 'hello')
        eq(base64MIME.decode('aGVsbG8=', 'X'), 'hello')
        eq(base64MIME.decode('aGVsbG8NCndvcmxk\n', 'X'), 'helloXworld')

    def test_encode(self):
        """encode() honors the binary flag, maxlinelen, and eol arguments."""
        eq = self.assertEqual
        eq(base64MIME.encode(''), '')
        eq(base64MIME.encode('hello'), 'aGVsbG8=\n')
        # Test the binary flag
        eq(base64MIME.encode('hello\n'), 'aGVsbG8K\n')
        eq(base64MIME.encode('hello\n', 0), 'aGVsbG8NCg==\n')
        # Test the maxlinelen arg
        eq(base64MIME.encode('xxxx ' * 20, maxlinelen=40), """\
eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg
eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg
eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg
eHh4eCB4eHh4IA==
""")
        # Test the eol argument
        eq(base64MIME.encode('xxxx ' * 20, maxlinelen=40, eol='\r\n'), """\
eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg\r
eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg\r
eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg\r
eHh4eCB4eHh4IA==\r
""")

    def test_header_encode(self):
        """header_encode() produces folded RFC 2047 b-encoded words."""
        eq = self.assertEqual
        he = base64MIME.header_encode
        eq(he('hello'), '=?iso-8859-1?b?aGVsbG8=?=')
        eq(he('hello\nworld'), '=?iso-8859-1?b?aGVsbG8NCndvcmxk?=')
        # Test the charset option
        eq(he('hello', charset='iso-8859-2'), '=?iso-8859-2?b?aGVsbG8=?=')
        # Test the keep_eols flag
        eq(he('hello\nworld', keep_eols=True),
           '=?iso-8859-1?b?aGVsbG8Kd29ybGQ=?=')
        # NOTE(review): the single leading space on each continuation line
        # below was reconstructed (lost in the whitespace-mangled paste) --
        # folded encoded words are joined with '\n '.
        # Test the maxlinelen argument
        eq(he('xxxx ' * 20, maxlinelen=40), """\
=?iso-8859-1?b?eHh4eCB4eHh4IHh4eHggeHg=?=
 =?iso-8859-1?b?eHggeHh4eCB4eHh4IHh4eHg=?=
 =?iso-8859-1?b?IHh4eHggeHh4eCB4eHh4IHg=?=
 =?iso-8859-1?b?eHh4IHh4eHggeHh4eCB4eHg=?=
 =?iso-8859-1?b?eCB4eHh4IHh4eHggeHh4eCA=?=
 =?iso-8859-1?b?eHh4eCB4eHh4IHh4eHgg?=""")
        # Test the eol argument
        eq(he('xxxx ' * 20, maxlinelen=40, eol='\r\n'), """\
=?iso-8859-1?b?eHh4eCB4eHh4IHh4eHggeHg=?=\r
 =?iso-8859-1?b?eHggeHh4eCB4eHh4IHh4eHg=?=\r
 =?iso-8859-1?b?IHh4eHggeHh4eCB4eHh4IHg=?=\r
 =?iso-8859-1?b?eHh4IHh4eHggeHh4eCB4eHg=?=\r
 =?iso-8859-1?b?eCB4eHh4IHh4eHggeHh4eCA=?=\r
 =?iso-8859-1?b?eHh4eCB4eHh4IHh4eHgg?=""")
class TestQuopri(unittest.TestCase):
    """Tests for the email.quopriMIME helpers."""

    def setUp(self):
        # Partition all 256 byte values into the characters that are literal
        # (need no quoting) in q-encoded headers (hlit) vs. those that must
        # be quoted (hnon), and likewise for quoted-printable bodies
        # (blit/bnon).
        self.hlit = [chr(x) for x in range(ord('a'), ord('z') + 1)] + \
                    [chr(x) for x in range(ord('A'), ord('Z') + 1)] + \
                    [chr(x) for x in range(ord('0'), ord('9') + 1)] + \
                    ['!', '*', '+', '-', '/', ' ']
        self.hnon = [chr(x) for x in range(256) if chr(x) not in self.hlit]
        assert len(self.hlit) + len(self.hnon) == 256
        self.blit = [chr(x) for x in range(ord(' '), ord('~') + 1)] + ['\t']
        self.blit.remove('=')
        self.bnon = [chr(x) for x in range(256) if chr(x) not in self.blit]
        assert len(self.blit) + len(self.bnon) == 256

    def test_header_quopri_check(self):
        """header_quopri_check() is True exactly for non-literal header bytes."""
        for c in self.hlit:
            self.assertFalse(quopriMIME.header_quopri_check(c))
        for c in self.hnon:
            self.assertTrue(quopriMIME.header_quopri_check(c))

    def test_body_quopri_check(self):
        """body_quopri_check() is True exactly for non-literal body bytes."""
        for c in self.blit:
            self.assertFalse(quopriMIME.body_quopri_check(c))
        for c in self.bnon:
            self.assertTrue(quopriMIME.body_quopri_check(c))

    def test_header_quopri_len(self):
        """header_quopri_len() counts 1 for literals, 3 for quoted bytes."""
        eq = self.assertEqual
        hql = quopriMIME.header_quopri_len
        enc = quopriMIME.header_encode
        for s in ('hello', 'h@e@l@l@o@'):
            # Empty charset and no line-endings.  7 == RFC chrome
            eq(hql(s), len(enc(s, charset='', eol='')) - 7)
        for c in self.hlit:
            eq(hql(c), 1)
        for c in self.hnon:
            eq(hql(c), 3)

    def test_body_quopri_len(self):
        """body_quopri_len() counts 1 for literals, 3 for quoted bytes."""
        eq = self.assertEqual
        bql = quopriMIME.body_quopri_len
        for c in self.blit:
            eq(bql(c), 1)
        for c in self.bnon:
            eq(bql(c), 3)

    def test_quote_unquote_idempotent(self):
        """unquote(quote(c)) is the identity for every byte value."""
        for x in range(256):
            c = chr(x)
            self.assertEqual(quopriMIME.unquote(quopriMIME.quote(c)), c)

    def test_header_encode(self):
        """header_encode() produces folded RFC 2047 q-encoded words."""
        eq = self.assertEqual
        he = quopriMIME.header_encode
        eq(he('hello'), '=?iso-8859-1?q?hello?=')
        eq(he('hello\nworld'), '=?iso-8859-1?q?hello=0D=0Aworld?=')
        # Test the charset option
        eq(he('hello', charset='iso-8859-2'), '=?iso-8859-2?q?hello?=')
        # Test the keep_eols flag
        eq(he('hello\nworld', keep_eols=True), '=?iso-8859-1?q?hello=0Aworld?=')
        # Test a non-ASCII character
        eq(he('hello\xc7there'), '=?iso-8859-1?q?hello=C7there?=')
        # NOTE(review): the single leading space on each continuation line
        # below was reconstructed (lost in the whitespace-mangled paste).
        # Test the maxlinelen argument
        eq(he('xxxx ' * 20, maxlinelen=40), """\
=?iso-8859-1?q?xxxx_xxxx_xxxx_xxxx_xx?=
 =?iso-8859-1?q?xx_xxxx_xxxx_xxxx_xxxx?=
 =?iso-8859-1?q?_xxxx_xxxx_xxxx_xxxx_x?=
 =?iso-8859-1?q?xxx_xxxx_xxxx_xxxx_xxx?=
 =?iso-8859-1?q?x_xxxx_xxxx_?=""")
        # Test the eol argument
        eq(he('xxxx ' * 20, maxlinelen=40, eol='\r\n'), """\
=?iso-8859-1?q?xxxx_xxxx_xxxx_xxxx_xx?=\r
 =?iso-8859-1?q?xx_xxxx_xxxx_xxxx_xxxx?=\r
 =?iso-8859-1?q?_xxxx_xxxx_xxxx_xxxx_x?=\r
 =?iso-8859-1?q?xxx_xxxx_xxxx_xxxx_xxx?=\r
 =?iso-8859-1?q?x_xxxx_xxxx_?=""")

    def test_decode(self):
        """decode() passes plain text through, mapping newlines to eol."""
        eq = self.assertEqual
        eq(quopriMIME.decode(''), '')
        eq(quopriMIME.decode('hello'), 'hello')
        eq(quopriMIME.decode('hello', 'X'), 'hello')
        eq(quopriMIME.decode('hello\nworld', 'X'), 'helloXworld')

    def test_encode(self):
        """encode() honors the binary flag, maxlinelen, and eol arguments."""
        eq = self.assertEqual
        eq(quopriMIME.encode(''), '')
        eq(quopriMIME.encode('hello'), 'hello')
        # Test the binary flag
        eq(quopriMIME.encode('hello\r\nworld'), 'hello\nworld')
        eq(quopriMIME.encode('hello\r\nworld', 0), 'hello\nworld')
        # NOTE(review): the leading space on the second expected line was
        # reconstructed -- the soft line break falls just before a space in
        # the input.
        # Test the maxlinelen arg
        eq(quopriMIME.encode('xxxx ' * 20, maxlinelen=40), """\
xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxxx=
 xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxx=
x xxxx xxxx xxxx xxxx=20""")
        # Test the eol argument
        eq(quopriMIME.encode('xxxx ' * 20, maxlinelen=40, eol='\r\n'), """\
xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxxx=\r
 xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxx=\r
x xxxx xxxx xxxx xxxx=20""")
        eq(quopriMIME.encode("""\
one line

two line"""), """\
one line

two line""")
# Test the Charset class
class TestCharset(unittest.TestCase):
    """Tests for the email.Charset class."""

    def tearDown(self):
        # Remove the 'fake' charset some tests register so it cannot leak
        # into other tests.
        from email import Charset as CharsetModule
        try:
            del CharsetModule.CHARSETS['fake']
        except KeyError:
            pass

    def test_idempotent(self):
        """to_splittable()/from_splittable() round-trip for us-ascii."""
        eq = self.assertEqual
        # Make sure us-ascii = no Unicode conversion
        c = Charset('us-ascii')
        s = 'Hello World!'
        sp = c.to_splittable(s)
        eq(s, c.from_splittable(sp))
        # test 8-bit idempotency with us-ascii
        s = '\xa4\xa2\xa4\xa4\xa4\xa6\xa4\xa8\xa4\xaa'
        sp = c.to_splittable(s)
        eq(s, c.from_splittable(sp))

    def test_body_encode(self):
        """body_encode() selects QP, base64, or no encoding per charset."""
        eq = self.assertEqual
        # Try a charset with QP body encoding
        c = Charset('iso-8859-1')
        eq('hello w=F6rld', c.body_encode('hello w\xf6rld'))
        # Try a charset with Base64 body encoding
        c = Charset('utf-8')
        eq('aGVsbG8gd29ybGQ=\n', c.body_encode('hello world'))
        # Try a charset with None body encoding
        c = Charset('us-ascii')
        eq('hello world', c.body_encode('hello world'))
        # Try the convert argument, where input codec != output codec
        c = Charset('euc-jp')
        # With apologies to Tokio Kikuchi ;)
        try:
            eq('\x1b$B5FCO;~IW\x1b(B',
               c.body_encode('\xb5\xc6\xc3\xcf\xbb\xfe\xc9\xd7'))
            eq('\xb5\xc6\xc3\xcf\xbb\xfe\xc9\xd7',
               c.body_encode('\xb5\xc6\xc3\xcf\xbb\xfe\xc9\xd7', False))
        except LookupError:
            # We probably don't have the Japanese codecs installed
            pass
        # Testing SF bug #625509, which we have to fake, since there are no
        # built-in encodings where the header encoding is QP but the body
        # encoding is not.
        from email import Charset as CharsetModule
        CharsetModule.add_charset('fake', CharsetModule.QP, None)
        c = Charset('fake')
        eq('hello w\xf6rld', c.body_encode('hello w\xf6rld'))

    def test_unicode_charset_name(self):
        """A unicode charset name is accepted; an invalid one raises."""
        charset = Charset(u'us-ascii')
        self.assertEqual(str(charset), 'us-ascii')
        self.assertRaises(Errors.CharsetError, Charset, 'asc\xffii')

    def test_codecs_aliases_accepted(self):
        """Codec aliases like 'utf8' normalize to the canonical name."""
        charset = Charset('utf8')
        self.assertEqual(str(charset), 'utf-8')
# Test multilingual MIME headers.
class TestHeader(TestEmailBase):
    """Tests for multilingual RFC 2047 header encoding and decoding."""

    def test_simple(self):
        """Appending a chunk that starts with a space yields two spaces."""
        eq = self.ndiffAssertEqual
        h = Header('Hello World!')
        eq(h.encode(), 'Hello World!')
        h.append(' Goodbye World!')
        # NOTE(review): two spaces expected -- chunks are joined with a
        # space and the appended chunk itself starts with one (the paste
        # collapsed the run; verify against Header.append semantics).
        eq(h.encode(), 'Hello World!  Goodbye World!')

    def test_simple_surprise(self):
        """Appending a chunk without a space still gets one separator space."""
        eq = self.ndiffAssertEqual
        h = Header('Hello World!')
        eq(h.encode(), 'Hello World!')
        h.append('Goodbye World!')
        eq(h.encode(), 'Hello World! Goodbye World!')

    def test_header_needs_no_decoding(self):
        """A plain ASCII header decodes to itself with charset None."""
        h = 'no decoding needed'
        self.assertEqual(decode_header(h), [(h, None)])

    def test_long(self):
        """Splitting on spaces keeps every encoded line within maxlinelen."""
        h = Header("I am the very model of a modern Major-General; I've information vegetable, animal, and mineral; I know the kings of England, and I quote the fights historical from Marathon to Waterloo, in order categorical; I'm very well acquainted, too, with matters mathematical; I understand equations, both the simple and quadratical; about binomial theorem I'm teeming with a lot o' news, with many cheerful facts about the square of the hypotenuse.",
                   maxlinelen=76)
        for l in h.encode(splitchars=' ').split('\n '):
            self.assertTrue(len(l) <= 76)

    def test_multilingual(self):
        """Mixed-charset chunks encode, decode, and unicode() correctly."""
        eq = self.ndiffAssertEqual
        g = Charset("iso-8859-1")
        cz = Charset("iso-8859-2")
        utf8 = Charset("utf-8")
        g_head = "Die Mieter treten hier ein werden mit einem Foerderband komfortabel den Korridor entlang, an s\xfcdl\xfcndischen Wandgem\xe4lden vorbei, gegen die rotierenden Klingen bef\xf6rdert. "
        cz_head = "Finan\xe8ni metropole se hroutily pod tlakem jejich d\xf9vtipu.. "
        utf8_head = u"\u6b63\u78ba\u306b\u8a00\u3046\u3068\u7ffb\u8a33\u306f\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002\u4e00\u90e8\u306f\u30c9\u30a4\u30c4\u8a9e\u3067\u3059\u304c\u3001\u3042\u3068\u306f\u3067\u305f\u3089\u3081\u3067\u3059\u3002\u5b9f\u969b\u306b\u306f\u300cWenn ist das Nunstuck git und Slotermeyer? Ja! Beiherhund das Oder die Flipperwaldt gersput.\u300d\u3068\u8a00\u3063\u3066\u3044\u307e\u3059\u3002".encode("utf-8")
        h = Header(g_head, g)
        h.append(cz_head, cz)
        h.append(utf8_head, utf8)
        enc = h.encode()
        # NOTE(review): the single leading space on each continuation line
        # below was reconstructed (lost in the whitespace-mangled paste).
        eq(enc, """\
=?iso-8859-1?q?Die_Mieter_treten_hier_ein_werden_mit_einem_Foerderband_ko?=
 =?iso-8859-1?q?mfortabel_den_Korridor_entlang=2C_an_s=FCdl=FCndischen_Wan?=
 =?iso-8859-1?q?dgem=E4lden_vorbei=2C_gegen_die_rotierenden_Klingen_bef=F6?=
 =?iso-8859-1?q?rdert=2E_?= =?iso-8859-2?q?Finan=E8ni_metropole_se_hroutily?=
 =?iso-8859-2?q?_pod_tlakem_jejich_d=F9vtipu=2E=2E_?= =?utf-8?b?5q2j56K6?=
 =?utf-8?b?44Gr6KiA44GG44Go57+76Kiz44Gv44GV44KM44Gm44GE44G+44Gb44KT44CC?=
 =?utf-8?b?5LiA6YOo44Gv44OJ44Kk44OE6Kqe44Gn44GZ44GM44CB44GC44Go44Gv44Gn?=
 =?utf-8?b?44Gf44KJ44KB44Gn44GZ44CC5a6f6Zqb44Gr44Gv44CMV2VubiBpc3QgZGFz?=
 =?utf-8?q?_Nunstuck_git_und_Slotermeyer=3F_Ja!_Beiherhund_das_Oder_die_Fl?=
 =?utf-8?b?aXBwZXJ3YWxkdCBnZXJzcHV0LuOAjeOBqOiogOOBo+OBpuOBhOOBvuOBmQ==?=
 =?utf-8?b?44CC?=""")
        eq(decode_header(enc),
           [(g_head, "iso-8859-1"), (cz_head, "iso-8859-2"),
            (utf8_head, "utf-8")])
        ustr = unicode(h)
        eq(ustr.encode('utf-8'),
           'Die Mieter treten hier ein werden mit einem Foerderband '
           'komfortabel den Korridor entlang, an s\xc3\xbcdl\xc3\xbcndischen '
           'Wandgem\xc3\xa4lden vorbei, gegen die rotierenden Klingen '
           'bef\xc3\xb6rdert. Finan\xc4\x8dni metropole se hroutily pod '
           'tlakem jejich d\xc5\xafvtipu.. \xe6\xad\xa3\xe7\xa2\xba\xe3\x81'
           '\xab\xe8\xa8\x80\xe3\x81\x86\xe3\x81\xa8\xe7\xbf\xbb\xe8\xa8\xb3'
           '\xe3\x81\xaf\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3'
           '\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x80\x82\xe4\xb8\x80\xe9\x83'
           '\xa8\xe3\x81\xaf\xe3\x83\x89\xe3\x82\xa4\xe3\x83\x84\xe8\xaa\x9e'
           '\xe3\x81\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\xe3\x81\x82\xe3'
           '\x81\xa8\xe3\x81\xaf\xe3\x81\xa7\xe3\x81\x9f\xe3\x82\x89\xe3\x82'
           '\x81\xe3\x81\xa7\xe3\x81\x99\xe3\x80\x82\xe5\xae\x9f\xe9\x9a\x9b'
           '\xe3\x81\xab\xe3\x81\xaf\xe3\x80\x8cWenn ist das Nunstuck git '
           'und Slotermeyer? Ja! Beiherhund das Oder die Flipperwaldt '
           'gersput.\xe3\x80\x8d\xe3\x81\xa8\xe8\xa8\x80\xe3\x81\xa3\xe3\x81'
           '\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80\x82')
        # Test make_header()
        newh = make_header(decode_header(enc))
        eq(newh, enc)

    def test_header_ctor_default_args(self):
        """A Header() built with no arguments starts out empty."""
        eq = self.ndiffAssertEqual
        h = Header()
        eq(h, '')
        h.append('foo', Charset('iso-8859-1'))
        eq(h, '=?iso-8859-1?q?foo?=')

    def test_explicit_maxlinelen(self):
        """maxlinelen/header_name control where a long header folds."""
        eq = self.ndiffAssertEqual
        hstr = 'A very long line that must get split to something other than at the 76th character boundary to test the non-default behavior'
        h = Header(hstr)
        # NOTE(review): the single leading space on each continuation line
        # below was reconstructed (folded headers continue with a space).
        eq(h.encode(), '''\
A very long line that must get split to something other than at the 76th
 character boundary to test the non-default behavior''')
        h = Header(hstr, header_name='Subject')
        eq(h.encode(), '''\
A very long line that must get split to something other than at the
 76th character boundary to test the non-default behavior''')
        h = Header(hstr, maxlinelen=1024, header_name='Subject')
        eq(h.encode(), hstr)

    def test_us_ascii_header(self):
        """decode_header()/make_header() round-trip a pure ASCII header."""
        eq = self.assertEqual
        s = 'hello'
        x = decode_header(s)
        eq(x, [('hello', None)])
        h = make_header(x)
        eq(s, h.encode())

    def test_string_charset(self):
        """append() accepts a charset given as a plain string."""
        eq = self.assertEqual
        h = Header()
        h.append('hello', 'iso-8859-1')
        eq(h, '=?iso-8859-1?q?hello?=')

##    def test_unicode_error(self):
##        raises = self.assertRaises
##        raises(UnicodeError, Header, u'[P\xf6stal]', 'us-ascii')
##        raises(UnicodeError, Header, '[P\xf6stal]', 'us-ascii')
##        h = Header()
##        raises(UnicodeError, h.append, u'[P\xf6stal]', 'us-ascii')
##        raises(UnicodeError, h.append, '[P\xf6stal]', 'us-ascii')
##        raises(UnicodeError, Header, u'\u83ca\u5730\u6642\u592b', 'iso-8859-1')

    def test_utf8_shortest(self):
        """UTF-8 headers pick the shorter of q- and b-encoding."""
        eq = self.assertEqual
        h = Header(u'p\xf6stal', 'utf-8')
        eq(h.encode(), '=?utf-8?q?p=C3=B6stal?=')
        h = Header(u'\u83ca\u5730\u6642\u592b', 'utf-8')
        eq(h.encode(), '=?utf-8?b?6I+K5Zyw5pmC5aSr?=')

    def test_bad_8bit_header(self):
        """Raw 8-bit data raises unless errors='replace' is requested."""
        raises = self.assertRaises
        eq = self.assertEqual
        x = 'Ynwp4dUEbay Auction Semiar- No Charge \x96 Earn Big'
        raises(UnicodeError, Header, x)
        h = Header()
        raises(UnicodeError, h.append, x)
        eq(str(Header(x, errors='replace')), x)
        h.append(x, errors='replace')
        eq(str(h), x)

    def test_encoded_adjacent_nonencoded(self):
        """Encoded and unencoded chunks are joined with a single space."""
        eq = self.assertEqual
        h = Header()
        h.append('hello', 'iso-8859-1')
        h.append('world')
        s = h.encode()
        eq(s, '=?iso-8859-1?q?hello?= world')
        h = make_header(decode_header(s))
        eq(h.encode(), s)

    def test_whitespace_eater(self):
        """Whitespace between adjacent encoded words is eaten on decode."""
        eq = self.assertEqual
        s = 'Subject: =?koi8-r?b?8NLP18XSy8EgzsEgxsnOwczYztk=?= =?koi8-r?q?=CA?= zz.'
        parts = decode_header(s)
        eq(parts, [('Subject:', None), ('\xf0\xd2\xcf\xd7\xc5\xd2\xcb\xc1 \xce\xc1 \xc6\xc9\xce\xc1\xcc\xd8\xce\xd9\xca', 'koi8-r'), ('zz.', None)])
        hdr = make_header(parts)
        eq(hdr.encode(),
           'Subject: =?koi8-r?b?8NLP18XSy8EgzsEgxsnOwczYztnK?= zz.')

    def test_broken_base64_header(self):
        """Undecodable base64 in an encoded word raises HeaderParseError."""
        raises = self.assertRaises
        s = 'Subject: =?EUC-KR?B?CSixpLDtKSC/7Liuvsax4iC6uLmwMcijIKHaILzSwd/H0SC8+LCjwLsgv7W/+Mj3I ?='
        raises(Errors.HeaderParseError, decode_header, s)

    # Issue 1078919
    def test_ascii_add_header(self):
        """add_header() quotes an ASCII filename parameter."""
        msg = Message()
        msg.add_header('Content-Disposition', 'attachment',
                       filename='bud.gif')
        self.assertEqual('attachment; filename="bud.gif"',
                         msg['Content-Disposition'])

    def test_nonascii_add_header_via_triple(self):
        """A (charset, language, value) triple yields an RFC 2231 parameter."""
        msg = Message()
        msg.add_header('Content-Disposition', 'attachment',
                       filename=('iso-8859-1', '', 'Fu\xdfballer.ppt'))
        self.assertEqual(
            'attachment; filename*="iso-8859-1\'\'Fu%DFballer.ppt"',
            msg['Content-Disposition'])

    def test_encode_unaliased_charset(self):
        # Issue 1379416: when the charset has no output conversion,
        # output was accidentally getting coerced to unicode.
        res = Header('abc', 'iso-8859-2').encode()
        self.assertEqual(res, '=?iso-8859-2?q?abc?=')
        self.assertIsInstance(res, str)
# Test RFC 2231 header parameters (en/de)coding
class TestRFC2231(TestEmailBase):
def test_get_param(self):
    """get_param() decodes an RFC 2231 (charset, language, value) triple."""
    eq = self.assertEqual
    msg = self._msgobj('msg_29.txt')
    eq(msg.get_param('title'),
       ('us-ascii', 'en', 'This is even more ***fun*** isn\'t it!'))
    eq(msg.get_param('title', unquote=False),
       ('us-ascii', 'en', '"This is even more ***fun*** isn\'t it!"'))
def test_set_param(self):
    """set_param() writes an RFC 2231-encoded parameter that round-trips."""
    eq = self.assertEqual
    msg = Message()
    msg.set_param('title', 'This is even more ***fun*** isn\'t it!',
                  charset='us-ascii')
    eq(msg.get_param('title'),
       ('us-ascii', '', 'This is even more ***fun*** isn\'t it!'))
    msg.set_param('title', 'This is even more ***fun*** isn\'t it!',
                  charset='us-ascii', language='en')
    eq(msg.get_param('title'),
       ('us-ascii', 'en', 'This is even more ***fun*** isn\'t it!'))
    msg = self._msgobj('msg_01.txt')
    msg.set_param('title', 'This is even more ***fun*** isn\'t it!',
                  charset='us-ascii', language='en')
    # NOTE(review): the header continuation whitespace and the blank lines
    # inside this expected literal were reconstructed (lost in the
    # whitespace-mangled paste) -- verify against msg_01.txt.
    self.ndiffAssertEqual(msg.as_string(), """\
Return-Path: <bbb@zzz.org>
Delivered-To: bbb@zzz.org
Received: by mail.zzz.org (Postfix, from userid 889)
\tid 27CEAD38CC; Fri,  4 May 2001 14:05:44 -0400 (EDT)
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Message-ID: <15090.61304.110929.45684@aaa.zzz.org>
From: bbb@ddd.com (John X. Doe)
To: bbb@zzz.org
Subject: This is a test message
Date: Fri, 4 May 2001 14:05:44 -0400
Content-Type: text/plain; charset=us-ascii;
 title*="us-ascii'en'This%20is%20even%20more%20%2A%2A%2Afun%2A%2A%2A%20isn%27t%20it%21"

Hi,

Do you like this message?

-Me
""")
def test_del_param(self):
    """del_param() removes one parameter and preserves the others."""
    eq = self.ndiffAssertEqual
    msg = self._msgobj('msg_01.txt')
    msg.set_param('foo', 'bar', charset='us-ascii', language='en')
    msg.set_param('title', 'This is even more ***fun*** isn\'t it!',
                  charset='us-ascii', language='en')
    msg.del_param('foo', header='Content-Type')
    # NOTE(review): the header continuation whitespace and the blank lines
    # inside this expected literal were reconstructed (lost in the
    # whitespace-mangled paste) -- verify against msg_01.txt.
    eq(msg.as_string(), """\
Return-Path: <bbb@zzz.org>
Delivered-To: bbb@zzz.org
Received: by mail.zzz.org (Postfix, from userid 889)
\tid 27CEAD38CC; Fri,  4 May 2001 14:05:44 -0400 (EDT)
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Message-ID: <15090.61304.110929.45684@aaa.zzz.org>
From: bbb@ddd.com (John X. Doe)
To: bbb@zzz.org
Subject: This is a test message
Date: Fri, 4 May 2001 14:05:44 -0400
Content-Type: text/plain; charset="us-ascii";
 title*="us-ascii'en'This%20is%20even%20more%20%2A%2A%2Afun%2A%2A%2A%20isn%27t%20it%21"

Hi,

Do you like this message?

-Me
""")
def test_rfc2231_get_content_charset(self):
    """get_content_charset() decodes an RFC 2231-encoded charset param."""
    eq = self.assertEqual
    msg = self._msgobj('msg_32.txt')
    eq(msg.get_content_charset(), 'us-ascii')
def test_rfc2231_no_language_or_charset(self):
    """Continuation segments without charset/language join to a plain str."""
    m = '''\
Content-Transfer-Encoding: 8bit
Content-Disposition: inline; filename="file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEMP_nsmail.htm"
Content-Type: text/html; NAME*0=file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEM; NAME*1=P_nsmail.htm
'''
    msg = email.message_from_string(m)
    param = msg.get_param('NAME')
    self.assertFalse(isinstance(param, tuple))
    self.assertEqual(
        param,
        'file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEMP_nsmail.htm')
    def test_rfc2231_no_language_or_charset_in_filename(self):
        # Extended segments with an empty ''charset'language prefix must
        # still be percent-decoded and joined.
        m = '''\
Content-Disposition: inline;
\tfilename*0*="''This%20is%20even%20more%20";
\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tfilename*2="is it not.pdf"
'''
        msg = email.message_from_string(m)
        self.assertEqual(msg.get_filename(),
                         'This is even more ***fun*** is it not.pdf')
    def test_rfc2231_no_language_or_charset_in_filename_encoded(self):
        # Same as above: mixed encoded (*N*) and plain (*N) segments join
        # into one decoded filename.
        m = '''\
Content-Disposition: inline;
\tfilename*0*="''This%20is%20even%20more%20";
\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tfilename*2="is it not.pdf"
'''
        msg = email.message_from_string(m)
        self.assertEqual(msg.get_filename(),
                         'This is even more ***fun*** is it not.pdf')
    def test_rfc2231_partly_encoded(self):
        # Only segments marked *N* are percent-decoded; the plain *0
        # segment keeps its literal %20 sequences.
        m = '''\
Content-Disposition: inline;
\tfilename*0="''This%20is%20even%20more%20";
\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tfilename*2="is it not.pdf"
'''
        msg = email.message_from_string(m)
        self.assertEqual(
            msg.get_filename(),
            'This%20is%20even%20more%20***fun*** is it not.pdf')
    def test_rfc2231_partly_nonencoded(self):
        # With no *N* (extended) segments at all, nothing is decoded --
        # the raw segments are simply concatenated.
        m = '''\
Content-Disposition: inline;
\tfilename*0="This%20is%20even%20more%20";
\tfilename*1="%2A%2A%2Afun%2A%2A%2A%20";
\tfilename*2="is it not.pdf"
'''
        msg = email.message_from_string(m)
        self.assertEqual(
            msg.get_filename(),
            'This%20is%20even%20more%20%2A%2A%2Afun%2A%2A%2A%20is it not.pdf')
    def test_rfc2231_no_language_or_charset_in_boundary(self):
        # RFC 2231 decoding also applies to the multipart boundary param.
        m = '''\
Content-Type: multipart/alternative;
\tboundary*0*="''This%20is%20even%20more%20";
\tboundary*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tboundary*2="is it not.pdf"
'''
        msg = email.message_from_string(m)
        self.assertEqual(msg.get_boundary(),
                         'This is even more ***fun*** is it not.pdf')
    def test_rfc2231_no_language_or_charset_in_charset(self):
        # This is a nonsensical charset value, but tests the code anyway
        # (note get_content_charset() lowercases the decoded result).
        m = '''\
Content-Type: text/plain;
\tcharset*0*="This%20is%20even%20more%20";
\tcharset*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tcharset*2="is it not.pdf"
'''
        msg = email.message_from_string(m)
        self.assertEqual(msg.get_content_charset(),
                         'this is even more ***fun*** is it not.pdf')
    def test_rfc2231_bad_encoding_in_filename(self):
        # An unknown ('bogus') charset must not prevent percent-decoding
        # and joining of the filename segments.
        m = '''\
Content-Disposition: inline;
\tfilename*0*="bogus'xx'This%20is%20even%20more%20";
\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tfilename*2="is it not.pdf"
'''
        msg = email.message_from_string(m)
        self.assertEqual(msg.get_filename(),
                         'This is even more ***fun*** is it not.pdf')
    def test_rfc2231_bad_encoding_in_charset(self):
        m = """\
Content-Type: text/plain; charset*=bogus''utf-8%E2%80%9D
"""
        msg = email.message_from_string(m)
        # This should return None because non-ascii characters in the charset
        # are not allowed.
        self.assertEqual(msg.get_content_charset(), None)
    def test_rfc2231_bad_character_in_charset(self):
        m = """\
Content-Type: text/plain; charset*=ascii''utf-8%E2%80%9D
"""
        msg = email.message_from_string(m)
        # This should return None because non-ascii characters in the charset
        # are not allowed.
        self.assertEqual(msg.get_content_charset(), None)
    def test_rfc2231_bad_character_in_filename(self):
        # A byte that cannot be decoded in the declared charset is replaced
        # with U+FFFD (the Unicode replacement character).
        m = '''\
Content-Disposition: inline;
\tfilename*0*="ascii'xx'This%20is%20even%20more%20";
\tfilename*1*="%2A%2A%2Afun%2A%2A%2A%20";
\tfilename*2*="is it not.pdf%E2"
'''
        msg = email.message_from_string(m)
        self.assertEqual(msg.get_filename(),
                         u'This is even more ***fun*** is it not.pdf\ufffd')
    def test_rfc2231_unknown_encoding(self):
        # An unrecognized charset token falls back to the raw value.
        m = """\
Content-Transfer-Encoding: 8bit
Content-Disposition: inline; filename*=X-UNKNOWN''myfile.txt
"""
        msg = email.message_from_string(m)
        self.assertEqual(msg.get_filename(), 'myfile.txt')
    def test_rfc2231_single_tick_in_filename_extended(self):
        # A single apostrophe inside an extended value must not be
        # mistaken for the charset'language delimiter.
        eq = self.assertEqual
        m = """\
Content-Type: application/x-foo;
\tname*0*=\"Frank's\"; name*1*=\" Document\"
"""
        msg = email.message_from_string(m)
        charset, language, s = msg.get_param('name')
        eq(charset, None)
        eq(language, None)
        eq(s, "Frank's Document")
    def test_rfc2231_single_tick_in_filename(self):
        # Non-extended segments with an apostrophe stay a plain string.
        m = """\
Content-Type: application/x-foo; name*0=\"Frank's\"; name*1=\" Document\"
"""
        msg = email.message_from_string(m)
        param = msg.get_param('name')
        self.assertFalse(isinstance(param, tuple))
        self.assertEqual(param, "Frank's Document")
    def test_rfc2231_tick_attack_extended(self):
        # In an extended (*0*) segment the charset'language'value prefix
        # IS parsed out.
        eq = self.assertEqual
        m = """\
Content-Type: application/x-foo;
\tname*0*=\"us-ascii'en-us'Frank's\"; name*1*=\" Document\"
"""
        msg = email.message_from_string(m)
        charset, language, s = msg.get_param('name')
        eq(charset, 'us-ascii')
        eq(language, 'en-us')
        eq(s, "Frank's Document")
    def test_rfc2231_tick_attack(self):
        # In a NON-extended segment the charset'language-looking prefix is
        # literal text and must be preserved verbatim.
        m = """\
Content-Type: application/x-foo;
\tname*0=\"us-ascii'en-us'Frank's\"; name*1=\" Document\"
"""
        msg = email.message_from_string(m)
        param = msg.get_param('name')
        self.assertFalse(isinstance(param, tuple))
        self.assertEqual(param, "us-ascii'en-us'Frank's Document")
    def test_rfc2231_no_extended_values(self):
        # An ordinary quoted parameter is returned as-is.
        eq = self.assertEqual
        m = """\
Content-Type: application/x-foo; name=\"Frank's Document\"
"""
        msg = email.message_from_string(m)
        eq(msg.get_param('name'), "Frank's Document")
    def test_rfc2231_encoded_then_unencoded_segments(self):
        # Charset/language come from the first (extended) segment; later
        # plain and extended segments are appended in order.
        eq = self.assertEqual
        m = """\
Content-Type: application/x-foo;
\tname*0*=\"us-ascii'en-us'My\";
\tname*1=\" Document\";
\tname*2*=\" For You\"
"""
        msg = email.message_from_string(m)
        charset, language, s = msg.get_param('name')
        eq(charset, 'us-ascii')
        eq(language, 'en-us')
        eq(s, 'My Document For You')
    def test_rfc2231_unencoded_then_encoded_segments(self):
        # Even when segment *0 is NOT extended, a charset'language prefix
        # in it is honored once later segments are extended.
        eq = self.assertEqual
        m = """\
Content-Type: application/x-foo;
\tname*0=\"us-ascii'en-us'My\";
\tname*1*=\" Document\";
\tname*2*=\" For You\"
"""
        msg = email.message_from_string(m)
        charset, language, s = msg.get_param('name')
        eq(charset, 'us-ascii')
        eq(language, 'en-us')
        eq(s, 'My Document For You')
# Tests to ensure that signed parts of an email are completely preserved, as
# required by RFC1847 section 2.1. Note that these are incomplete, because the
# email package does not currently always preserve the body. See issue 1670765.
class TestSigned(TestEmailBase):
    """Ensure signed message parts survive a parse/serialize round trip
    byte-for-byte, as required by RFC 1847 section 2.1."""

    def _msg_and_obj(self, filename):
        # Return (raw file contents, parsed Message) for a test data file.
        fp = openfile(findfile(filename))
        try:
            original = fp.read()
            msg = email.message_from_string(original)
        finally:
            fp.close()
        return original, msg

    def _signed_parts_eq(self, original, result):
        # Extract the first mime part of each message
        import re
        repart = re.compile(r'^--([^\n]+)\n(.*?)\n--\1$', re.S | re.M)
        inpart = repart.search(original).group(2)
        outpart = repart.search(result).group(2)
        self.assertEqual(outpart, inpart)

    def test_long_headers_as_string(self):
        # as_string() must not refold headers inside the signed part.
        original, msg = self._msg_and_obj('msg_45.txt')
        result = msg.as_string()
        self._signed_parts_eq(original, result)

    def test_long_headers_flatten(self):
        # Generator.flatten() must also preserve the signed part.
        original, msg = self._msg_and_obj('msg_45.txt')
        fp = StringIO()
        Generator(fp).flatten(msg)
        result = fp.getvalue()
        self._signed_parts_eq(original, result)
def _testclasses():
    """Return every Test* class defined in this module."""
    current_module = sys.modules[__name__]
    test_classes = []
    for attr_name in dir(current_module):
        if attr_name.startswith('Test'):
            test_classes.append(getattr(current_module, attr_name))
    return test_classes
def suite():
    """Build a TestSuite aggregating every Test* class in this module."""
    full_suite = unittest.TestSuite()
    for test_class in _testclasses():
        full_suite.addTest(unittest.makeSuite(test_class))
    return full_suite
def test_main():
    """Run every Test* class through the regrtest helper."""
    for test_class in _testclasses():
        run_unittest(test_class)
# Allow running this test file directly; 'suite' aggregates all Test* classes.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| mit |
palmishr/3D_Curve_Reconstruct | helper.py | 1 | 19473 |
# coding: utf-8
# In[10]:
'''
# Planar curve generation - cube method
Divide a unit cube into 4 adjacent sub-cubes and pick one point at random
from each sub-unit within a pre-specified planar volume.
The region sampled within each sub-unit can be chosen based on
the desired expanse of the curve.
The chosen points are then joined by spline interpolation.
'''
# Helper functions for Spline Processing
from __future__ import division, print_function, absolute_import
import sys
import time
import math
import scipy as sp
from scipy.interpolate import splprep, splev
import random
import json
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import LinearLocator
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
from itertools import product, combinations
import random as random
import sklearn.datasets, sklearn.decomposition
import numpy.linalg as linalg
get_ipython().magic('matplotlib notebook')
# Helper functions for Spline Processing
def get_3d_points(X, Y, Z):
    """Pack three equal-length coordinate sequences into a (3, N) array."""
    n_points = len(X)
    flat = np.concatenate((X, Y, Z), axis=0)
    pts = flat.reshape(3, n_points)
    return pts
def add_curve_to_array(x, y, z):
    """Stack three equal-length coordinate arrays into one (3, N) curve.

    Fixes: removed a dead `len(inputs)` statement, and replaced the
    hard-coded `reshape(3, 300)` (the spline sample count) with
    `reshape(3, -1)` so curves of any resolution are accepted; behavior
    for the original 300-sample curves is unchanged.

    :param x: x coordinates of the curve samples
    :param y: y coordinates of the curve samples
    :param z: z coordinates of the curve samples
    :return: numpy array of shape (3, N)
    """
    curve = np.concatenate((x, y, z), axis=0)
    return curve.reshape(3, -1)
# Spline Generation
def spline_generate(pts):
#pts = np.unique(pts)
tck, u = splprep(pts, u=None, s=0.0)
u_new = np.linspace(u.min(), u.max(), 300)
x_new, y_new, z_new = splev(u_new, tck, der=0)
return x_new, y_new, z_new
def get_rot_angle(theta=0):
    """Return (cos(theta), sin(theta)); when theta == 0 a random angle is
    drawn uniformly from [0, 2*pi) instead."""
    if theta == 0:
        theta = np.random.uniform(0, 1) * 2 * np.pi
    return np.cos(theta), np.sin(theta)
def random_rotate(x, y, z, a=0, b=0, g=0):
    """Rotate the point (x, y, z) about the X, Y and Z axes by angles a, b
    and g; any angle given as 0 is replaced by a random one (see
    get_rot_angle). Returns the rotated coordinates as three floats."""
    ca, sa = get_rot_angle(a)
    rot_x = np.matrix([[1, 0, 0], [0, ca, -sa], [0, sa, ca]])
    cb, sb = get_rot_angle(b)
    rot_y = np.matrix([[cb, 0, sb], [0, 1, 0], [-sb, 0, cb]])
    cg, sg = get_rot_angle(g)
    rot_z = np.matrix([[cg, -sg, 0], [sg, cg, 0], [0, 0, 1]])
    combined = np.dot(np.dot(rot_x, rot_y), rot_z)
    rotated = np.dot(combined, np.matrix([[x], [y], [z]]))
    return rotated.item(0), rotated.item(1), rotated.item(2)
def draw_cube(b, axes=None):
    """Draw the edges of a cube given its corner coordinates.

    An edge is drawn between every pair of corners exactly one unit apart
    (L1 distance).

    Fix: the original referenced a module-level ``ax`` that is never
    defined at module scope (NameError when called outside a notebook
    cell). ``axes`` now lets callers pass the target 3D axes explicitly;
    omitting it falls back to the global ``ax`` (original behavior).

    :param b: iterable of corner coordinates (anything np.array accepts)
    :param axes: object with a matplotlib-style ``plot3D`` method
    """
    if axes is None:
        axes = ax  # notebook-global axes, as before
    for start, end in combinations(np.array(b), 2):
        if np.sum(np.abs(start - end)) == 1:
            axes.plot3D(*zip(start, end), color="r")
def create_show_p_curve():
    """Generate random planar curves via the 4-sub-cube method, plot each
    inside the unit cube and collect them in a local ``data`` dict.

    NOTE(review): ``data`` is local and never returned or written to disk
    here -- confirm whether the curves are meant to be persisted.
    """
    # Per-control-point sampling volumes before rotation; all four boxes
    # share y = (0.9, 0.9), so the raw points are coplanar.
    boxes = [
        [(0.5, 0.9), (0.9, 0.9), (0.5, 0.9)], #[(0.1, 0.5), (0.9, 0.9), (0.1, 0.5)],
        [(-0.9, -0.5), (0.9, 0.9), (0.5, 0.9)], #[(-0.5, -0.1), (0.9, 0.9), (0.1, 0.5)],
        [(0.5, 0.9), (0.9, 0.9), (-0.9, -0.5)], #[(0.1, 0.5), (0.9, 0.9), (-0.5, -0.1)],
        [(-0.9, -0.5), (0.9, 0.9), (-0.9, -0.5)] #[(-0.5, -0.1), (0.9, 0.9), (-0.5, -0.1)],
    ]
    X_raw=[]
    Y_raw=[]
    Z_raw=[]
    N=1
    data = {}
    data['planar_curves'] = []
    startTime = time.time()
    for i in range(N):
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        pts=[]
        X_raw=[]
        Y_raw=[]
        Z_raw=[]
        #for all points in this curve
        # One random orientation per curve: the same three angles rotate
        # every control point, keeping them coplanar.
        x_theta = np.random.uniform(0,1)*2*np.pi
        y_theta = np.random.uniform(0,1)*2*np.pi
        z_theta = np.random.uniform(0,1)*2*np.pi
        for b in boxes:
            x = random.uniform(b[0][0]/1, b[0][1]/1)
            y = random.uniform(b[1][0]/1, b[1][1]/1)
            z = random.uniform(b[2][0]/1, b[2][1]/1)
            x,y,z = random_rotate(x,y,z, x_theta, y_theta, z_theta)
            X_raw.append(x)
            Y_raw.append(y)
            Z_raw.append(z)
        # draw cube
        r = [-1, 1]
        for s, e in combinations(np.array(list(product(r, r, r))), 2):
            if np.sum(np.abs(s-e)) == r[1]-r[0]:
                ax.plot3D(*zip(s, e), color="b")
        pts = get_3d_points(X_raw,Y_raw,Z_raw)
        ax.plot(X_raw, Y_raw, Z_raw, 'ro')
        X, Y, Z = spline_generate(pts)
        curve = add_curve_to_array(X, Y, Z)
        ax.plot(X, Y, Z, 'b--')
        ax.set_xlabel('X axis')
        ax.set_ylabel('Y axis')
        ax.set_zlabel('Z axis')
        plt.show()
        data['planar_curves'].append(curve.tolist())
#create_show_p_curve()
# In[12]:
'''
# Non-planar curve generation - cube method
Divide a unit cube into 8 sub-cubes and pick one point at random
from each sub-unit. The chosen points are then joined by spline
interpolation.
The region sampled within each sub-unit can be chosen based on
the desired expanse of the curve.
'''
# Create Test NP Curve
def create_plot_new_np_curve():
    """Generate random non-planar curves via the 8-sub-cube method and
    plot them inside the unit cube (curves are plotted only, not saved)."""
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # draw cube
    r = [-1, 1]
    for s, e in combinations(np.array(list(product(r, r, r))), 2):
        if np.sum(np.abs(s-e)) == r[1]-r[0]:
            ax.plot3D(*zip(s, e), color="b")
    #plt.show()
    # One sampling box per octant of the unit cube -- the 8 control points
    # are therefore genuinely non-coplanar.
    boxes = [[(0.0, 1.0), (0.0, 1.0), (0.0, 1.0)],
             [(-1.0, 0.0), (0.0, 1.0), (0.0, 1.0)],
             [(0.0, 1.0), (-1.0, 0.0), (0.0, 1.0)],
             [(0.0, 1.0), (0.0, 1.0), (-1.0, 0.0)],
             [(-1.0, 0.0), (-1.0, 0.0), (-1.0, 0.0)],
             [(0.0, 1.0), (-1.0, 0.0), (-1.0, 0.0)],
             [(-1.0, 0.0), (-1.0, 0.0), (0.0, 1.0)],
             [(-1.0, 0.0), (0.0, 1.0), (-1.0, 0.0)]]
    import random as random
    X_raw=[]
    Y_raw=[]
    Z_raw=[]
    N=1
    startTime = time.time()
    for i in range(N):
        X_raw=[]
        Y_raw=[]
        Z_raw=[]
        for b in boxes:
            x = random.uniform(b[0][0]/1, b[0][1]/1)
            y = random.uniform(b[1][0]/1, b[1][1]/1)
            z = random.uniform(b[2][0]/1, b[2][1]/1)
            #print(x,y,z)
            X_raw.append(x)
            Y_raw.append(y)
            Z_raw.append(z)
        pts = get_3d_points(X_raw,Y_raw,Z_raw)
        X, Y, Z = spline_generate(pts)
        curve = add_curve_to_array(X, Y, Z)
        ax.plot(X, Y, Z, 'b--')
        ax.plot(X_raw, Y_raw, Z_raw, 'ro')
    plt.show()
#create_plot_new_np_curve()
# In[38]:
# PCA analysis and plot
# Load the previously generated curve datasets from disk.
# NOTE(review): these JSON files must exist in the working directory or
# this module fails at import/cell-execution time.
with open('np_curve_data_cube_method_1489680586.46.json') as infile:
    c = json.load(infile)
n_planar_curves_array = np.asarray(c['non_planar_curves'])
with open('p_curve_data_cube_method_1489173944.8.json') as infile:
    c = json.load(infile)
planar_curves_array = np.asarray(c['planar_curves'])
# Accumulators for the per-curve reconstruction errors.
data = {}
data['planar_curves_error'] = []
data['non_planar_curves_error'] = []
import numpy as np
def pca_err(curves_array):
    """Project each (3, N) curve onto its top-2 principal components,
    reconstruct it, and return the per-curve reconstruction errors.

    Planar curves reconstruct almost perfectly from 2 components, so a
    small error indicates planarity.  The first curve is also visualized
    (original, 2-D projection, reconstruction).
    """
    errors=[]
    im = 0
    for i in range(len(curves_array[:])):
        X = curves_array[i].T
        mu = np.mean(X, axis=0)
        #print("X: ", X.shape)
        #print("mu: ", mu)
        pca = sklearn.decomposition.PCA()
        pca.fit(X)
        #ax1.plot(curves_array[i][0], curves_array[i][1], curves_array[i][2], 'ro')
        nComp = 2
        #print("Transfomed: ", pca.transform(X)[:,:nComp].shape)
        #print("EV: ", pca.components_[:,:][:,:nComp])
        transformed = pca.transform(X)[:,:nComp].T
        if (im < 1):
            # Plot only for the first curve of the batch.
            fig = plt.figure()
            fig.suptitle('Top Left - Original Curve | Top Right - PCA | Bottom Left - Reconstucted Curve', fontsize=10)
            ax1 = fig.add_subplot(221, projection='3d')
            ax2 = fig.add_subplot(222, projection='3d')
            ax3 = fig.add_subplot(223, projection='3d')
            ax1.plot(curves_array[0][0], curves_array[0][1], curves_array[0][2], 'ro')
            ax2.plot(transformed[0], transformed[1], 'ro')
        # Reconstruct from the 2 leading components and undo the centering.
        Xhat = np.dot(pca.transform(X)[:,:nComp], pca.components_[:nComp,:])
        Xhat += mu
        reconstructed_curve = Xhat.T
        if (im < 1):
            ax3.plot(reconstructed_curve[0], reconstructed_curve[1], reconstructed_curve[2], 'ro')
            plt.show()
        #print(Xhat.shape)
        err = 0.5*sum((X-Xhat)**2)
        errors.append(sum(err))
        im = im+1
        #print("Err: ", err)
    return np.asarray(errors)
def plot_PCA_errors():
    """Overlay histograms of the PCA reconstruction errors for non-planar
    (NPE) vs planar (PE) curves."""
    np_pca_err = pca_err(n_planar_curves_array)
    p_pca_err = pca_err(planar_curves_array)
    get_ipython().magic('matplotlib inline')
    bins = np.linspace(0, 50, 50)
    plt.hist(np_pca_err, bins, alpha=0.35, label='NPE')
    plt.hist(p_pca_err, bins, alpha=0.35, label='PE')
    plt.legend(loc='upper right')
    plt.title('Reconstruction Errors Histogram')
    plt.show()
#plot_PCA_errors()
# In[37]:
# PCA weigths initialized auto-encoder
# In[20]:
#Non-Planar Errors
def ae_with_pca_wt_np_errors():
    """Histogram the non-planar-curve errors of the PCA-weight-initialized
    autoencoder run stored in the referenced JSON file."""
    with open('planarity_errors_1490807988.67.json') as infile:
        c = json.load(infile)
    np_errors = np.asarray(c['non_planar_curves_error'])
    p_errors = np.asarray(c['planar_curves_error'])
    # Tag each error with a class label: 1 = non-planar, 0 = planar.
    NPE = np.insert(np_errors, 1, 1, axis=2)
    PE = np.insert(p_errors, 1, 0, axis=2)
    X = np.concatenate((NPE, PE), axis=0)
    X = X.reshape(200,2)
    # Rows 0..99 hold the non-planar errors.
    hist, bins = np.histogram(X[0:100,0], bins=50)
    width = 0.7 * (bins[1] - bins[0])
    center = (bins[:-1] + bins[1:]) / 2
    plt.bar(center, hist, align='center', width=width)
    plt.show()
# In[21]:
#Planar Errors
def ae_with_pca_wt_p_errors():
    """Histogram the planar-curve errors of the PCA-weight-initialized
    autoencoder run stored in the referenced JSON file."""
    with open('planarity_errors_1490807988.67.json') as infile:
        c = json.load(infile)
    np_errors = np.asarray(c['non_planar_curves_error'])
    p_errors = np.asarray(c['planar_curves_error'])
    # Tag each error with a class label: 1 = non-planar, 0 = planar.
    NPE = np.insert(np_errors, 1, 1, axis=2)
    PE = np.insert(p_errors, 1, 0, axis=2)
    X = np.concatenate((NPE, PE), axis=0)
    X = X.reshape(200,2)
    # Rows 100..199 hold the planar errors.
    hist, bins = np.histogram(X[100:200,0], bins=50)
    width = 0.7 * (bins[1] - bins[0])
    center = (bins[:-1] + bins[1:]) / 2
    plt.bar(center, hist, align='center', width=width)
    plt.show()
# In[42]:
#Autoencoder
_debug_verbose = False  # module-wide switch for the verbose trace prints below

class AutoEncoder(object):
    """Minimal fully-connected autoencoder with an identity activation,
    trained by (batch) gradient descent to reproduce its input.

    ``arch`` is the layer-size list, e.g. ``[3, 2, 3]`` for a 3-2-3
    bottleneck.  Weights and biases are Gaussian-initialized.
    """
    def __init__(self, arch):
        self.num_layers = len(arch)
        self.input_layer_size = arch[0]
        self.output_layer_size = arch[-1]
        self.num_hidden_layers = len(arch)-2
        self.costs = []  # per-epoch cost history, appended by cost_function_by_epoch
        # One weight matrix per layer transition, plus a bias column vector.
        self.weights = [np.random.randn(y, x)
                        for x, y in zip(arch[:-1], arch[1:])]
        self.biases = [np.random.randn(y, 1) for y in arch[1:]]
    def getParams(self):
        #Get weights and biases unrolled into vector:
        params = [(x.ravel(), y.ravel()) for x, y in zip(self.weights, self.biases)]
        return params
    def forward(self, X):
        """Propagate X through all layers; returns a (3, 1) column."""
        for b, w in zip(self.biases, self.weights):
            if (_debug_verbose): print("weights: ", w)
            if (_debug_verbose): print("biases: ", b)
            if (_debug_verbose): print("inputs :", X)
            if (_debug_verbose): print("dot product :", np.dot(w, X))
            #print("matrix dot product :", w.dot(X))
            X = self.unit_step(np.dot(w, X) + b)
        return X.reshape(3,1)
    def unit_step(self, z):
        # Identity activation (the step variant is commented out).
        #return (lambda x: 0 if (x) < 0 else 1, z)[1]
        return z
    def unit_step_prime(self, z):
        # Derivative of the identity activation.
        return (1)
    def cost_function(self, X):
        """Squared reconstruction error of a single input column X."""
        self.yHat = self.forward(X)
        if (_debug_verbose): print ("squared error of X:{0} - Xhat:{1} is {2} & sum is {3}\n".format(X, self.yHat, ((X-self.yHat)**2), sum((X-self.yHat)**2)))
        J = 0.5*sum((X-self.yHat)**2)
        #self.costs.append(J)
        return J
    def cost_derivative(self, output_activations, y):
        # dC/da for the squared-error cost.
        return (output_activations-y)
    def cost_function_by_epoch(self, test_data, n_test):
        """Append and return the summed reconstruction cost over test_data."""
        y_hat = [(self.forward(y)) for (y) in test_data[0:n_test]]
        y = [(y) for (y) in test_data[0:n_test]]
        #print([float(a[0][0]) for a in y])
        np.seterr( over='ignore' )
        #costs = []
        costs = [0.5*((a - b)**2) for a, b in zip(y, y_hat)]
        #costs.append([max(math.sqrt(0.5*(round(a[0][0],2) - round(b[0][0],2))**2),1000) for a, b in zip(y, y_hat)])
        #costs.append([0.5*math.sqrt((float(a[1][0]) - float(b[1][0]))**2) for a, b in zip(y, y_hat)])
        #costs.append([0.5*math.sqrt((float(a[2][0]) - float(b[2][0]))**2) for a, b in zip(y, y_hat)])
        self.costs.append(sum(costs)) #/n_test)
        #self.costs.append(sum(costs[:][:]))
        #self.costs.append([sum(costs[0]),sum(costs[1]),sum(costs[2])])
        if (_debug_verbose): print ("Total Cost {1} for Epoch {0} complete".format(len(self.costs), sum(self.costs[-1])))
        if (_debug_verbose): print ("Axis-wise Cost is {0} ".format((self.costs[-1])))
        return self.costs[-1]
    def GD(self, training_data, epochs, learning_rate, test_data=None):
        """Train the neural network using batch-wise
        gradient descent. If ``test_data`` is provided then the
        network will be evaluated against the test data after each
        epoch, and partial progress printed out."""
        if test_data: n_test = len(test_data)
        n = len(training_data)
        for j in range(epochs):
            np.random.shuffle(training_data)
            self.process_batch(training_data, learning_rate)
            if test_data:
                result = self.evaluate(test_data, n_test)
                if (_debug_verbose): print ("Epoch {0}: Score {1} / {2}".format(j, result, n_test))
            else:
                if (_debug_verbose): print ("Epoch {0} complete".format(j))
    def process_batch(self, batch, learning_rate):
        """Update the network's weights by applying
        gradient descent using backpropagation to a single batch.
        """
        base_w = [np.zeros(w.shape) for w in self.weights]
        base_b = [np.zeros(b.shape) for b in self.biases]
        count=0
        for x in batch:
            # Accumulate gradients over the batch.
            delta_error_b , delta_error_w = self.backprop(x)
            updated_b = [nb+dnb for nb, dnb in zip(base_b, delta_error_b)]
            updated_w = [nw+dnw for nw, dnw in zip(base_w, delta_error_w)]
            count=count+1
            #print ("Process {0} inputs backprop ".format(count))
        eta=learning_rate
        # Gradient step, scaled by the batch size.
        self.weights = [w-(eta/len(batch))*nw
                        for w, nw in zip(self.weights, updated_w)]
        self.biases = [b-(eta/len(batch))*nb
                       for b, nb in zip(self.biases, updated_b)]
    def backprop(self, x):
        """Return ``( delta_w)`` representing the
        gradient for the cost function C_x. """
        if (_debug_verbose): print ("input: ", x)
        delta_w = [np.zeros(w.shape) for w in self.weights]
        delta_b = [np.zeros(b.shape) for b in self.biases]
        activation = x
        activations = [x] # list to store all the activations, layer by layer
        zs = [] # list to store all the activation (z) vectors, layer by layer
        for b, w in zip(self.biases, self.weights):
            z = np.dot(w, activation) + b
            zs.append(z)
            activation = self.unit_step(z)
            activations.append(activation)
        if (_debug_verbose): print ("activations: ", activations)
        # backward pass
        delta = self.cost_derivative(activations[-1], x) * self.unit_step_prime(zs[-1])
        delta_b[-1] = delta
        delta_w[-1] = np.dot(delta, activations[-2].transpose())
        if (_debug_verbose): print ("cost derivative: ", self.cost_derivative(activations[-1], x))
        if (_debug_verbose): print ("unit step: ", self.unit_step_prime(zs[-1]))
        if (_debug_verbose): print("delta: ",delta)
        for l in range(2, self.num_layers):
            # Propagate the error one layer back.
            z = zs[-l]
            step1 = np.dot(self.weights[-l+1].transpose(), delta)
            delta = step1 * z
            delta_b[-l] = delta
            delta_w[-l] = np.dot(delta, activations[-l-1].transpose())
        if (_debug_verbose): print ("delta b updated: ", delta_b)
        if (_debug_verbose): print ("delta w updated:", delta_w)
        #print ("delta b: ", delta_b)
        #print ("delta w:", delta_w)
        return (delta_b, delta_w)
    def evaluate(self, test_data, n_test):
        """Return the number of test inputs for which the neural
        network outputs the correct result. Note that the neural
        network's output is assumed to be the index of whichever
        neuron in the final layer has the highest activation."""
        self.cost_function_by_epoch(test_data, n_test)
        test_results = [self.forward(x)
                        for (x) in test_data]
        return sum(((x) - (x_hat))**2 for (x, x_hat) in zip(test_data, test_results))/n_test
    def reconstruct(self, inputs):
        # Forward-pass every input; returns the list of reconstructions.
        return [self.forward(x) for (x) in inputs]
# In[19]:
def rdm_wt_ae_errors():
    """Overlay histograms of planar (PE) vs non-planar (NPE) errors for a
    randomly weight-initialized autoencoder run."""
    import numpy as np
    import matplotlib.pyplot as plt
    with open('planarity_errors_1489714415.76.json') as infile:
        c = json.load(infile)
    np_errors = np.asarray(c['non_planar_curves_error'])
    p_errors = np.asarray(c['planar_curves_error'])
    # clean data
    NPE = np.insert(np_errors, 1, 1, axis=2)
    PE = np.insert(p_errors, 1, 0, axis=2)
    X = np.concatenate((NPE, PE), axis=0)
    X = X.reshape(200,2)
    # Drop rows whose error is NaN (runs that diverged).
    nan_idx = [i for i, x in enumerate(X) if (math.isnan(x[0]) == True)]
    print(nan_idx)
    X_cleaned = np.delete(X, nan_idx, axis=0)
    X_cleaned.shape
    bins = np.linspace(0, 100, 100)
    plt.hist(X_cleaned[0:100,0], bins, alpha=0.25, label='NPE')
    plt.hist(X_cleaned[100:198,0], bins, alpha=0.25, label='PE')
    plt.legend(loc='upper right')
    plt.show()
# In[32]:
def rdm_p_errors():
    """Histogram the planar-curve errors of a random-weight autoencoder run."""
    # planar curves
    import numpy as np
    import matplotlib.pyplot as plt
    with open('planarity_errors_1488999893.39.json') as infile:
        c = json.load(infile)
    np_errors = np.asarray(c['non_planar_curves_error'])
    p_errors = np.asarray(c['planar_curves_error'])
    # clean data
    NPE = np.insert(np_errors, 1, 1, axis=2)
    PE = np.insert(p_errors, 1, 0, axis=2)
    X = np.concatenate((NPE, PE), axis=0)
    X = X.reshape(200,2)
    # Drop rows whose error is NaN (runs that diverged).
    nan_idx = [i for i, x in enumerate(X) if (math.isnan(x[0]) == True)]
    print(nan_idx)
    X_cleaned = np.delete(X, nan_idx, axis=0)
    X_cleaned.shape
    hist, bins = np.histogram(X_cleaned[100:197,0], bins=50)
    width = 0.7 * (bins[1] - bins[0])
    center = (bins[:-1] + bins[1:]) / 2
    plt.bar(center, hist, align='center', width=width)
    plt.show()
def rdm_np_errors():
    """Histogram the non-planar-curve errors of a random-weight autoencoder run."""
    # planar curves
    import numpy as np
    import matplotlib.pyplot as plt
    with open('planarity_errors_1488999893.39.json') as infile:
        c = json.load(infile)
    np_errors = np.asarray(c['non_planar_curves_error'])
    p_errors = np.asarray(c['planar_curves_error'])
    # clean data
    NPE = np.insert(np_errors, 1, 1, axis=2)
    PE = np.insert(p_errors, 1, 0, axis=2)
    X = np.concatenate((NPE, PE), axis=0)
    X = X.reshape(200,2)
    # Drop rows whose error is NaN (runs that diverged).
    nan_idx = [i for i, x in enumerate(X) if (math.isnan(x[0]) == True)]
    print(nan_idx)
    X_cleaned = np.delete(X, nan_idx, axis=0)
    X_cleaned.shape
    hist, bins = np.histogram(X_cleaned[0:100,0], bins=70)
    width = 0.7 * (bins[1] - bins[0])
    center = (bins[:-1] + bins[1:]) / 2
    plt.bar(center, hist, align='center', width=width)
    plt.show()
# In[36]:
# non-planar curves
#hist, bins = np.histogram(X_cleaned[100:199,0], bins=50)
#width = 0.7 * (bins[1] - bins[0])
#center = (bins[:-1] + bins[1:]) / 2
#plt.bar(center, hist, align='center', width=width)
#plt.show()
| mit |
dream1986/you-get | src/you_get/extractors/lizhi.py | 18 | 1710 | #!/usr/bin/env python
__all__ = ['lizhi_download']
import json
from ..common import *
def lizhi_download_playlist(url, output_dir = '.', merge = True, info_only = False):
    """Download every audio episode of a lizhi.fm radio band (playlist).

    The band id is taken from URLs like http://www.lizhi.fm/#/31365/ and
    the episode list comes from the radio_audios API
    (s = start, l = length, band = radio id).

    Fix: removed a stray trailing ``pass`` left inside the loop body.

    :param url: playlist page URL containing '#/<band_id>'
    :param output_dir: directory downloads are written into
    :param info_only: if True only print stream info, download nothing
    """
    band_id = match1(url,r'#/(\d+)')
    # Ask for a very large page (l=65535) so a single API call returns the
    # whole band and no HTML paging has to be parsed.
    api_url = 'http://www.lizhi.fm/api/radio_audios?s=0&l=65535&band='+band_id
    content_json = json.loads(get_content(api_url))
    for sound in content_json:
        title = sound["name"]
        res_url = sound["url"]
        songtype, ext, size = url_info(res_url,faker=True)
        print_info(site_info, title, songtype, size)
        if not info_only:
            #no referer no speed!
            download_urls([res_url], title, ext, size, output_dir, merge=merge ,refer = 'http://www.lizhi.fm',faker=True)
def lizhi_download(url, output_dir = '.', merge = True, info_only = False):
    """Download a single lizhi.fm episode.

    Expected URL shape: http://www.lizhi.fm/#/<band_id>/<audio_id>
    """
    # url like http://www.lizhi.fm/#/549759/18864883431656710
    api_id = match1(url,r'#/(\d+/\d+)')
    api_url = 'http://www.lizhi.fm/api/audio/'+api_id
    content_json = json.loads(get_content(api_url))
    title = content_json["audio"]["name"]
    res_url = content_json["audio"]["url"]
    songtype, ext, size = url_info(res_url,faker=True)
    print_info(site_info, title, songtype, size)
    if not info_only:
        #no referer no speed!
        download_urls([res_url], title, ext, size, output_dir, merge=merge ,refer = 'http://www.lizhi.fm',faker=True)
site_info = "lizhi.fm"
# you-get extractor entry points
download = lizhi_download
download_playlist = lizhi_download_playlist
| mit |
barryrobison/anim-studio-tools | review_tool/sources/reviewTool/api/iosystem.py | 5 | 3782 | ##
# \namespace reviewTool.api.io
#
# \remarks Defines different input/output exporters for saving
# and loading data from files
#
# \author Dr. D Studios
# \date 08/18/11
#
from . import io
class IOSystem(object):
    """Registry-based base class for playlist import/export backends.

    Subclasses implement :meth:`load`/:meth:`save` and are registered via
    :meth:`register`; lookup happens by system name or by file type.
    """
    # name -> IOSystem instance registry shared by all systems
    _systems = {}
    def __init__( self, systemName, fileType = '', imports = False, exports = False ):
        self._systemName = systemName
        self._fileType = fileType
        self._imports = imports
        self._exports = exports
    def exports( self ):
        # Whether this system can save playlist files.
        return self._exports
    def fileType( self ):
        # File extension/type handled by this system.
        return self._fileType
    def imports( self ):
        # Whether this system can load playlist files.
        return self._imports
    def load( self, filename ):
        """
        Restores the playlist context from the given filename.
        Base implementation is a stub and always returns None.
        :param filename:
        :type <str>:
        :return <PlaylistContext> || None:
        """
        return None
    def save( self, filename, clips ):
        """
        Saves the given clips to a file.
        Base implementation is a stub and always returns False.
        :param filename:
        :type <str>:
        :param clips:
        :type <list> [ <Clip>, .. ]:
        :return <bool>: success
        """
        return False
    def systemName( self ):
        # Registered name of this system.
        return self._systemName
    @staticmethod
    def find( systemName ):
        """
        Looks up the given system by name.
        :param systemName:
        :type <str>:
        :return <IOSystem> || None:
        """
        io.init()
        return IOSystem._systems.get(str(systemName))
    @staticmethod
    def findByType( fileType ):
        """
        Looks up the given system by file extension.
        :param fileType:
        :type <str>:
        :return <IOSystem> || None:
        """
        io.init()
        for system in IOSystem._systems.values():
            if ( system.fileType() == fileType ):
                return system
        return None
    @staticmethod
    def register( systemName, cls, fileType = '', imports = False, exports = False ):
        """
        Registers the given IOSystem class as an input/output
        mechanism for loading and saving review tool information.
        :param fileType:
        :type <str>:
        :param cls:
        :type <subclass of IOSystem>:
        :param imports:
        :type <bool>:
        """
        IOSystem._systems[str(systemName)] = cls(systemName,fileType,imports,exports)
    @staticmethod
    def systems():
        # All registered systems (triggers lazy io.init() first).
        io.init()
        return IOSystem._systems.values()
# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios)
#
# This file is part of anim-studio-tools.
#
# anim-studio-tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# anim-studio-tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with anim-studio-tools. If not, see <http://www.gnu.org/licenses/>.
| gpl-3.0 |
kashefy/nideep | nideep/eval/learning_curve.py | 3 | 2226 | '''
Created on Mar 1, 2016
@author: kashefy
'''
import os
import numpy as np
import parse_log as pl
from eval_utils import Phase
import nideep.iow.file_system_utils as fs
from log_utils import is_caffe_info_log
def cleanup_caffe_logname(fname):
    """Strip the substrings 'log' and 'caffe' and all dots from a caffe
    log file name, leaving a compact identifier."""
    for junk in ('log', 'caffe', '.'):
        fname = fname.replace(junk, '')
    return fname
class LearningCurve(object):
    '''
    Wraps a parsed caffe log and exposes its train/test metrics
    (loss, accuracy, ...) as numpy column vectors.
    '''
    def list(self, key, phase=Phase.TEST):
        """Return the values logged for ``key`` in the given phase as an
        (N, 1) array.  For the test phase, any key containing 'loss' or
        'accuracy' is mapped onto the canonical parsed key."""
        if phase.lower() == Phase.TEST.lower():
            if 'loss' in key.lower() and 'loss' in self.test_keys:
                key = 'loss'
            elif 'accuracy' in key.lower() and 'accuracy' in self.test_keys:
                key = 'accuracy'
            return np.array([[x[key]] for x in self.test_dict_list])
        else:
            return np.array([[x[key]] for x in self.train_dict_list])
    def name(self):
        """Return a cleaned-up identifier derived from the log file name."""
        name = os.path.basename(self.path_log)
        name, _ = os.path.splitext(name)
        return cleanup_caffe_logname(name)
    def parse(self):
        """Parse the log and cache the train/test dict lists and their keys.

        Returns (train_keys, test_keys).
        """
        log_data = pl.parse_log(self.path_log)
        # allow for backwards compatibility
        if len(log_data) == 4:
            self.train_dict_list, self.train_keys, self.test_dict_list, self.test_keys = log_data
        else:
            # older parse_log returned only the two dict lists; derive the
            # key sets from the first entry of each (if any)
            self.train_dict_list, self.test_dict_list = log_data
            if len(self.train_dict_list) > 0:
                self.train_keys = self.train_dict_list[0].keys()
            else:
                self.train_keys = []
            if len(self.test_dict_list) > 0:
                self.test_keys = self.test_dict_list[0].keys()
            else:
                self.test_keys = []
        return self.train_keys, self.test_keys
    def __init__(self, path_log):
        '''
        Constructor

        :param path_log: path to a caffe info log file
        '''
        self.path_log = path_log
def LearningCurveFromPath(p):
    """Factory: build a LearningCurve from a log file path, or from the
    most recent caffe info log found under a directory.

    Returns None for a directory that contains no caffe logs; raises
    IOError if ``p`` is neither a file nor a directory.
    """
    if os.path.isfile(p):
        return LearningCurve(p)
    elif os.path.isdir(p):
        log_paths = fs.gen_paths(p, func_filter=is_caffe_info_log)
        if len(log_paths) > 0:
            # gen_paths yields in order; take the last (newest) log
            return LearningCurve(log_paths[-1])
        else:
            return None
    else:
        raise IOError("%s: No such file or directory" % (p,))
| bsd-2-clause |
cklb/PyMoskito | pymoskito/simulation_core.py | 1 | 10541 | # -*- coding: utf-8 -*-
import logging
import sys
import numpy as np
from copy import deepcopy
from PyQt5.QtCore import QObject, pyqtSignal, pyqtSlot
from .simulation_modules import SimulationException
class SimulationStateChange(object):
    """
    Notification object emitted whenever the Simulator changes its state.

    Keyword Args:
        type: Keyword describing the state change; one of

            * `init` Initialisation
            * `start` : Start of Simulation
            * `time` : Accomplishment of new progress step
            * `finish` : Finish of Simulation
            * `abort` : Abortion of Simulation

        data: Data that is emitted on state change.
        info: Further information.
    """

    def __init__(self, **kwargs):
        # a state change without a 'type' is meaningless -- refuse it early
        assert "type" in kwargs
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
class SimulationSettings(object):
    """Plain container for the time grid of a simulation run."""

    def __init__(self, start_time, end_time, step_size, measure_rate):
        self.start_time = start_time
        self.end_time = end_time
        self.step_size = step_size
        self.measure_rate = measure_rate

    def to_dict(self):
        """Export the settings as a plain dict (e.g. for serialization)."""
        keys = ("start time", "end time", "step size", "measure rate")
        values = (self.start_time, self.end_time,
                  self.step_size, self.measure_rate)
        return dict(zip(keys, values))
class Simulator(QObject):
"""
This Class executes the time-step integration.
It forms the Core of the physical simulation and interacts with the GUI
via the :py:class:''SimulationInterface`
Calculated values will be stored every 1 / measure rate seconds.
"""
work_done = pyqtSignal()
state_changed = pyqtSignal(SimulationStateChange)
# list of modules that have to appear in every run
static_module_list = [
"Model",
"Solver"
]
# list of modules that might not always appear but have to be calculated
# in a special order
_dynamic_module_list = [
"Disturbance",
"Sensor",
"ObserverMixer",
"Observer",
"Trajectory",
"Feedforward",
"Controller",
"ModelMixer",
"Limiter",
]
module_list = static_module_list + _dynamic_module_list
    def __init__(self, settings, modules):
        """Set up the simulation core.

        :param settings: SimulationSettings holding the time grid.
        :param modules: dict mapping module names (see module_list) to
            their simulation module instances.
        """
        QObject.__init__(self, None)
        self._run = False
        self._logger = logging.getLogger(self.__class__.__name__)
        assert isinstance(settings, SimulationSettings)
        self._settings = settings
        assert isinstance(modules, dict)
        self._simulation_modules = modules
        self._init_states()
        self._init_settings()
        self.updated_time = 0
        self._storage = dict()  # recorded results of the run
def _init_states(self):
self._input_vector = {}
self._counter = {}
self._current_outputs = {}
self._current_outputs.update(time=self._settings.start_time)
for mod_name, obj in self._simulation_modules.items():
self._counter.update({mod_name: obj.tick_divider})
self._current_outputs.update({mod_name: []})
self._current_outputs[mod_name] = []
# init model output with current state
self._simulation_modules["Solver"].next_output = np.array(
self._simulation_modules["Model"].initial_state)
def _init_settings(self):
""" Initialize module settings that depend on other modules.
"""
# calculate the correct step width for every block
for mod_name, obj in self._simulation_modules.items():
obj.step_width = obj.tick_divider * self._settings.step_size
return
def _calc_module(self, module_name):
""" Calculates the output of a simulation module
"""
if module_name in self._simulation_modules.keys():
if self._counter[module_name] == \
self._simulation_modules[module_name].tick_divider:
self._current_outputs[module_name] = np.atleast_1d(
self._simulation_modules[module_name].calc_output(
self._input_vector))
self._counter[module_name] = 1
else:
self._counter[module_name] += 1
# update input vector
self._input_vector.update(
{module_name: self._current_outputs[module_name]})
def _calc_step(self):
"""
Calculate one step in simulation.
Warn:
Due to the observers need for the last system input, the values of
the last step are kept in the input vector until they are
overridden. Be careful about which value is needed at which place or
otherwise you end up using a value from the last step.
"""
# update time and current state
self._current_outputs["time"] = self._simulation_modules["Solver"].t
self._input_vector.update(
time=self._current_outputs["time"],
system_state=np.atleast_1d(
self._simulation_modules["Solver"].next_output)
)
# apply new output
self._current_outputs["Model"] = np.atleast_1d(
self._simulation_modules["Model"].calc_output(
self._input_vector["system_state"]))
self._input_vector.update(system_output=self._current_outputs["Model"])
# compute all dynamic modules
for mod in self._dynamic_module_list:
self._calc_module(mod)
# integrate model
self._choose_system_input(self._input_vector)
self._calc_module("Solver")
if 0:
# calculate system state changes
self._current_outputs["State_Changes"] = \
self._simulation_modules["Model"]\
.state_function(self._current_outputs["time"],
self._current_outputs["Solver"],
self._current_outputs["ModelMixer"])
return
def _choose_system_input(self, input_vector):
""" This is mainly done for convenience.
"""
if "Limiter" in input_vector:
_input = input_vector["Limiter"]
elif "ModelMixer" in input_vector:
_input = input_vector["ModelMixer"]
elif "Controller" in input_vector:
if "Feedforward" in input_vector:
raise SimulationException(
"Controller and Feedforward present but no"
"ModelMixer. Ambiguous Situation")
_input = input_vector["Controller"]
elif "Feedforward" in input_vector:
_input = input_vector["Feedforward"]
else:
raise SimulationException("No system input given.")
self._input_vector["system_input"] = _input
def _store_values(self):
"""
store all values of finished integration step
"""
for key, val in self._current_outputs.items():
if key in self._storage:
self._storage[key].append(np.array(val))
else:
self._storage.update({key: [np.array(val)]})
return
def _check_time(self):
"""
send update notification every second
"""
t = self._current_outputs["time"]
if t - self.updated_time > 1:
self.state_changed.emit(SimulationStateChange(type="time", t=t))
# self.timeChanged.emit(t)
self.updated_time = t
@pyqtSlot()
def run(self):
"""
Start the simulation.
"""
self._run = True
self.state_changed.emit(SimulationStateChange(type="start"))
first_run = True
rate = 1 / self._settings.measure_rate
solver = self._simulation_modules["Solver"]
while self._current_outputs["time"] < self._settings.end_time:
t = solver.t
dt = 0
while dt < rate:
if not self._run:
self._abort("Simulation aborted by user")
break
try:
self._calc_step()
except Exception as e:
# catch all to avoid loosing data
self._abort(sys.exc_info())
return
dt = solver.t - t
if dt < rate and first_run:
self._store_values()
first_run = False
self._store_values()
self._check_time()
self._finish()
def _abort(self, info):
""" Overwrite end time with reached time.
"""
self._settings.end_time = self._current_outputs["time"]
self._storage.update(finished=False)
end_state = "abort"
self.state_changed.emit(SimulationStateChange(type=end_state,
data=self.output,
info=info))
self.work_done.emit()
def _finish(self):
self._storage.update(finished=True)
end_state = "finish"
self.state_changed.emit(SimulationStateChange(type=end_state,
data=self.output,
info="Success"))
self.work_done.emit()
@pyqtSlot(name="stop")
def stop(self):
""" Stop the simulation. """
self._run = False
@property
def output(self):
# convert storage entries
out = dict(modules={}, simulation={}, results={})
for mod, results in self._storage.items():
# grab module settings
if mod in self._simulation_modules:
out["modules"].update(
{mod: self._simulation_modules[mod].settings})
# grab module data
if not isinstance(results, list):
# flag or string -> nothing to convert
entry = results
elif isinstance(results[0], np.ndarray):
# convert list of 1d-arrays into 2d-array
entry = np.array(results)
else:
# convert list of scalars into 1d-array
entry = np.array(results)
out["results"].update({mod: entry})
# grab simulator settings
out.update({"simulation": self._settings.to_dict()})
return out
@property
def settings(self):
return self._settings.to_dict()
| bsd-3-clause |
DelazJ/QGIS | python/plugins/sagaprovider/__init__.py | 9 | 1134 | # -*- coding: utf-8 -*-
"""
***************************************************************************
__init__.py
---------------------
Date : May 2021
Copyright : (C) 2021 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'May 2021'
__copyright__ = '(C) 2021, Alexander Bruy'
def classFactory(iface):
    """QGIS plugin entry point: create and return the SAGA provider plugin.

    The import is deferred into the function body so that importing this
    package has no side effects until QGIS actually loads the plugin.
    """
    from sagaprovider.SagaProviderPlugin import SagaProviderPlugin
    plugin = SagaProviderPlugin()
    return plugin
| gpl-2.0 |
avanzosc/UPV | onglibre_financial_source/models/account_analytic_line_ext.py | 1 | 17320 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Avanzosc - Avanced Open Source Consulting
# Copyright (C) 2011 - 2014 Avanzosc <http://www.avanzosc.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.addons import decimal_precision as dp
import time
class AccountAnalyticLine(orm.Model):
    """Analytic line extended with financing-source / budgetary bookkeeping.

    Written against the legacy OpenERP 7 ORM API (cr/uid signatures,
    ``_columns`` with ``fields.function``).  Lines appear to form a
    hierarchy -- imputation -> financing source -> budgetary -> project
    line -- linked through the parent fields below (see _get_parent_line);
    TODO confirm against the functional specification (task 2.2.D).
    """
    _inherit = 'account.analytic.line'
    def _get_parent_line(self, cr, uid, ids, name, arg, context=None):
        """fields.function getter for ``parent_line_id``.

        Resolution order: explicit financing-source parent, then explicit
        budgetary parent; for 'budgetary' lines the project line of the same
        analytic account is looked up and lazily created if missing; for
        project lines the chain follows the analytic account's parent.
        """
        res = {}
        journal_obj = self.pool['account.analytic.journal']
        account_obj = self.pool['account.account']
        project_obj = self.pool['project.project']
        for obj in self.browse(cr, uid, ids, context):
            res[obj.id] = False
            if obj.account_analytic_line_financing_source_id:
                res[obj.id] = obj.account_analytic_line_financing_source_id.id
                continue
            if obj.account_analytic_line_budgetary_id:
                res[obj.id] = obj.account_analytic_line_budgetary_id.id
                continue
            if obj.type == 'budgetary':
                cond = [('is_project', '=', True),
                        ('account_id', '=', obj.account_id.id)]
                line_list = self.search(cr, uid, cond, context=context)
                if line_list:
                    res[obj.id] = line_list[0]
                else:
                    cond = [('analytic_account_id', '=', obj.account_id.id)]
                    project_list = project_obj.search(
                        cr, uid, cond, context=context)
                    if project_list:
                        project_id = project_list[0]
                        # NOTE(review): the first search() hit is used as a
                        # placeholder general account / journal -- confirm
                        # this is intended and deterministic.
                        vals = {'is_project': True,
                                'account_id': obj.account_id.id,
                                'name': obj.account_id.name,
                                'active': True,
                                'project_id': project_id,
                                'general_account_id': account_obj.search(
                                    cr, uid, [], context)[0],
                                'journal_id': journal_obj.search(
                                    cr, uid, [], context)[0],
                                'date': time.strftime('%Y-%m-%d')
                                }
                        line_id = self.create(cr, uid, vals, context)
                        res[obj.id] = line_id
                continue
            if obj.is_project:
                if obj.account_id.parent_id:
                    cond = [('is_project', '=', True),
                            ('account_id', '=', obj.account_id.parent_id.id)]
                    line_list = self.search(cr, uid, cond, context=context)
                    if line_list:
                        res[obj.id] = line_list[0]
                    else:
                        cond = [('analytic_account_id', '=',
                                 obj.account_id.parent_id.id)]
                        project_list = project_obj.search(
                            cr, uid, cond, context=context)
                        if project_list:
                            project_id = project_list[0]
                            vals = {'is_project': True,
                                    'account_id': obj.account_id.parent_id.id,
                                    'name': obj.account_id.parent_id.name,
                                    'active': True,
                                    'project_id': project_id,
                                    'journal_id': journal_obj.search(
                                        cr, uid, [], context=context)[0],
                                    'general_account_id': account_obj.search(
                                        cr, uid, [], context=context)[0],
                                    'date': time.strftime('%Y-%m-%d')
                                    }
                            line_id = self.create(cr, uid, vals, context)
                            res[obj.id] = line_id
                continue
        return res
    def _child_compute(self, cr, uid, ids, name, arg, context=None):
        """fields.function getter: ids of the direct child lines (mirror of
        the ``child_ids`` one2many)."""
        result = {}
        if context is None:
            context = {}
        for line in self.browse(cr, uid, ids, context=context):
            result[line.id] = map(lambda x: x.id, [child for child in
                                                   line.child_ids])
        return result
    def _sum_available_expense(self, cr, uid, ids, name, arg, context=None):
        """fields.function getter: remaining spendable amount = updated
        budget + remainder - real expense - compromised - requested."""
        res = {}
        for obj in self.browse(cr, uid, ids, context=context):
            # res[obj.id] = (obj.sum_expense_budget +
            #                obj.sum_updated_expense_budget +
            #                obj.sum_remainder
            #                - obj.sum_real_expense -
            #                obj.sum_expense_compromised -
            #                obj.sum_expense_request)
            res[obj.id] = (obj.sum_updated_expense_budget + obj.sum_remainder
                           - obj.sum_real_expense -
                           obj.sum_expense_compromised -
                           obj.sum_expense_request)
        return res
    # Total assigned amount computation
    def _sum_assigned(self, cr, uid, ids, name, arg, context=None):
        """fields.function getter for ``sum_assigned``.

        - imputation / initial or modified financing source: the line's own
          ``assigned`` value;
        - financing_source: sum of ``assigned`` over all child entries
          (initial, modification and imputation) pointing at this line;
        - budgetary: the same sum, accumulated over every financing source
          that points at this budgetary line;
        - any other type (or none): 0.
        """
        res = {}
        for obj in self.browse(cr, uid, ids, context=context):
            if not obj.type:
                res[obj.id] = 0
            else:
                if obj.type in ('imputation', 'initial_financial_source',
                                'modif_financial_source'):
                    res[obj.id] = obj.assigned
                else:
                    if obj.type == 'financing_source':
                        # accumulate over the three child entry types
                        w_imp = 0
                        cond = [('project_id', '=', obj.project_id.id),
                                ('type', '=', 'initial_financial_source'),
                                ('account_analytic_line_financing_source_id',
                                 '=', obj.id)]
                        modif_financing_ids = self.search(
                            cr, uid, cond, context=context)
                        for modif_financing_id in modif_financing_ids:
                            modif_financing = self.browse(
                                cr, uid, modif_financing_id, context)
                            w_imp = w_imp + modif_financing.assigned
                        cond = [('project_id', '=', obj.project_id.id),
                                ('type', '=', 'modif_financial_source'),
                                ('account_analytic_line_financing_source_id',
                                 '=', obj.id)]
                        modif_financing_ids = self.search(
                            cr, uid, cond, context=context)
                        for modif_financing_id in modif_financing_ids:
                            modif_financing = self.browse(
                                cr, uid, modif_financing_id, context)
                            w_imp = w_imp + modif_financing.assigned
                        cond = [('project_id', '=', obj.project_id.id),
                                ('type', '=', 'imputation'),
                                ('account_analytic_line_financing_source_id',
                                 '=', obj.id)]
                        modif_financing_ids = self.search(
                            cr, uid, cond, context=context)
                        for modif_financing_id in modif_financing_ids:
                            modif_financing = self.browse(
                                cr, uid, modif_financing_id, context)
                            w_imp = w_imp + modif_financing.assigned
                        res[obj.id] = w_imp
                    else:
                        if obj.type == 'budgetary':
                            # same accumulation, but across every financing
                            # source attached to this budgetary line
                            w_imp = 0
                            cond = [('project_id', '=', obj.project_id.id),
                                    ('type', '=', 'financing_source'),
                                    ('account_analytic_line_budgetary_id',
                                     '=', obj.id)]
                            budgetary_ids = self.search(
                                cr, uid, cond, context=context)
                            for budgetary_id in budgetary_ids:
                                budgetary = self.browse(
                                    cr, uid, budgetary_id, context)
                                cond = [('project_id', '=', obj.project_id.id),
                                        ('type', '=',
                                         'initial_financial_source'),
                                        ('account_analytic_line_financing_'
                                         'source_id', '=', budgetary.id)]
                                modif_financing_ids = self.search(
                                    cr, uid, cond, context=context)
                                for modif_financing_id in modif_financing_ids:
                                    modif_financing = self.browse(
                                        cr, uid, modif_financing_id, context)
                                    w_imp = w_imp + modif_financing.assigned
                                cond = [('project_id', '=', obj.project_id.id),
                                        ('type', '=',
                                         'modif_financial_source'),
                                        ('account_analytic_line_financing_'
                                         'source_id', '=', budgetary.id)]
                                modif_financing_ids = self.search(
                                    cr, uid, cond, context=context)
                                for modif_financing_id in modif_financing_ids:
                                    modif_financing = self.browse(
                                        cr, uid, modif_financing_id, context)
                                    w_imp = w_imp + modif_financing.assigned
                                cond = [('project_id', '=', obj.project_id.id),
                                        ('type', '=', 'imputation'),
                                        ('account_analytic_line_financing_'
                                         'source_id', '=', budgetary.id)]
                                modif_financing_ids = self.search(
                                    cr, uid, cond, context=context)
                                for modif_financing_id in modif_financing_ids:
                                    modif_financing = self.browse(
                                        cr, uid, modif_financing_id, context)
                                    w_imp = w_imp + modif_financing.assigned
                            res[obj.id] = w_imp
                        else:
                            res[obj.id] = 0
        return res
    _columns = {
        # FIELDS RELATED TO THE FINANCING SOURCE (TASK 2.2.D)
        # Assigned amount
        'assigned': fields.float('Assigned',
                                 digits_compute=dp.get_precision('Account')),
        # Paid amount
        'paid': fields.float('Paid',
                             digits_compute=dp.get_precision('Account')),
        # Expense area
        'expense_area_id': fields.many2one('expense.area', 'Expense Area'),
        # Entry type
        'type':
            fields.selection([('imputation', 'Imputation'),
                              ('budgetary', 'Budgetary'),
                              ('financing_source', 'Financing Source'),
                              ('justification', 'Justification'),
                              ('initial_budgetary', 'Initial Budgetary'),
                              ('modif_budgetary', 'Modification Budgetary'),
                              ('initial_financial_source',
                               'Initial Financing Source'),
                              ('modif_financial_source',
                               'Modif. Financing Source')], 'Account Type'),
        # Parent entry: financing source
        'account_analytic_line_financing_source_id':
            fields.many2one('account.analytic.line',
                            'Financing Source Parent',
                            domain=[('type', '=', 'financing_source')]),
        # Parent entry: budgetary
        'account_analytic_line_budgetary_id':
            fields.many2one('account.analytic.line', 'Budgetary Parent',
                            domain=[('type', '=', 'budgetary')]),
        # Parent entry: budgetary, read-only mirror (kept in sync by
        # create()/write() below)
        # NOTE(review): 'attrs' is normally a view-level attribute; on a
        # field declaration it is ignored by the ORM -- confirm intended.
        'account_analytic_line_budgetary_readonly_id':
            fields.many2one('account.analytic.line', 'Budgetary Parent',
                            store=False, domain=[('type', '=', 'budgetary')],
                            attrs={'invisible': ['|', ('type', '=',
                                                       'budgetary'),
                                                 ('type', '=',
                                                  'financing_source')],
                                   'readonly': [('type', '=',
                                                 'justification')]}),
        'account_parent_id':
            fields.related('account_id', 'parent_id', type="many2one",
                           relation="account.analytic.account",
                           string="Parent account", store=True, readonly=True),
        'parent_line_id':
            fields.function(_get_parent_line, type="many2one",
                            relation="account.analytic.line",
                            string="Parent line", store=True),
        'child_ids': fields.one2many('account.analytic.line',
                                     'parent_line_id', 'Child Lines'),
        'child_complete_ids':
            fields.function(_child_compute, relation='account.analytic.line',
                            string="Line Hierarchy", type='many2many'),
        'is_project': fields.boolean('Is project'),
        # Aggregate fields
        # Assigned total
        'sum_assigned':
            fields.function(_sum_assigned, string='Sum Assigned', type='float',
                            digits_compute=dp.get_precision('Account'),
                            group_operator="sum"),
        # Available expense
        'sum_available_expense':
            fields.function(_sum_available_expense,
                            string='Sum Available Expense', type='float',
                            digits_compute=dp.get_precision('Account'),
                            readonly=True),
        # Justification dates
        'justification_date_ids':
            fields.many2many('justification.date',
                             'justifdate_analyticline_rel',
                             'account_analytic_line_id',
                             'justification_date_id', 'Justification Dates'),
    }
    _defaults = {'type': lambda self, cr, uid, c: c.get('type', False),
                 }
    def create(self, cr, uid, vals, context=None):
        """Keep the read-only budgetary-parent mirror in sync on creation."""
        if 'account_analytic_line_budgetary_id' in vals:
            vals['account_analytic_line_budgetary_readonly_id'] = (
                vals.get('account_analytic_line_budgetary_id'))
        res = super(AccountAnalyticLine, self).create(cr, uid, vals,
                                                      context=context)
        return res
    def write(self, cr, uid, ids, vals, context=None):
        """Keep the read-only budgetary-parent mirror in sync on update."""
        if 'account_analytic_line_budgetary_id' in vals:
            vals['account_analytic_line_budgetary_readonly_id'] = (
                vals.get('account_analytic_line_budgetary_id'))
        return super(AccountAnalyticLine, self).write(cr, uid, ids, vals,
                                                      context=context)
    # Onchange that assigns the budgetary parent to a justification-type
    # line, derived from the selected financing source
    def onchange_account_analytic_line_financing_source(
            self, cr, uid, ids, account_analytic_line_financing_source_id):
        """Propagate the budgetary parent of the chosen financing source
        into both the editable and the read-only budgetary-parent fields."""
        analytic_line_obj = self.pool['account.analytic.line']
        data = {}
        if account_analytic_line_financing_source_id:
            line = analytic_line_obj.browse(
                cr, uid, account_analytic_line_financing_source_id)
            if line.account_analytic_line_budgetary_id:
                budg_id = line.account_analytic_line_budgetary_id.id
                data = {'account_analytic_line_budgetary_id': budg_id,
                        'account_analytic_line_budgetary_readonly_id': budg_id
                        }
        return {'value': data}
| agpl-3.0 |
emon10005/sympy | bin/sympy_time_cache.py | 113 | 3195 | from __future__ import print_function
import time
import timeit
class TreeNode(object):
    """Node of an import-timing tree.

    Each node records the name of an imported module and the total time
    (in microseconds) spent importing it, including all nested imports it
    triggered (its children).
    """
    def __init__(self, name):
        self._name = name
        self._children = []
        self._time = 0
    def __str__(self):
        return "%s: %s" % (self._name, self._time)
    __repr__ = __str__
    def add_child(self, node):
        """Append *node* to this node's children."""
        self._children.append(node)
    def children(self):
        """Return the list of direct children."""
        return self._children
    def child(self, i):
        """Return the i-th direct child."""
        return self.children()[i]
    def set_time(self, time):
        """Set this node's total (inclusive) time in microseconds."""
        self._time = time
    def time(self):
        """Return this node's total (inclusive) time in microseconds."""
        return self._time
    # Historical alias: total_time() is identical to time().
    total_time = time
    def exclusive_time(self):
        """Return the time spent in this node alone, without its children."""
        return self.total_time() - sum(child.time() for child in self.children())
    def name(self):
        """Return the module name recorded in this node."""
        return self._name
    def linearize(self):
        """Return this node and all its descendants as a pre-order list."""
        res = [self]
        for child in self.children():
            res.extend(child.linearize())
        return res
    def print_tree(self, level=0, max_depth=None):
        """Print the subtree indented by depth, down to *max_depth* levels."""
        print(" "*level + str(self))
        if max_depth is not None and max_depth <= level:
            return
        for child in self.children():
            child.print_tree(level + 1, max_depth=max_depth)
    def print_generic(self, n=50, method="time"):
        """Print the *n* slowest nodes, ordered by *method*
        ("time" or "exclusive_time"), slowest first."""
        slowest = sorted((getattr(node, method)(), node.name()) for node in self.linearize())[-n:]
        for time, name in slowest[::-1]:
            print("%s %s" % (time, name))
    def print_slowest(self, n=50):
        """Print the *n* slowest nodes by total (inclusive) time.

        Bug fix: *n* used to be ignored (50 was always passed on); it is now
        forwarded, matching print_slowest_exclusive().
        """
        self.print_generic(n=n, method="time")
    def print_slowest_exclusive(self, n=50):
        """Print the *n* slowest nodes by exclusive time."""
        self.print_generic(n, method="exclusive_time")
    def write_cachegrind(self, f):
        """Write the subtree in kcachegrind's callgrind format.

        *f* may be a filename (the file is opened, a header written, and the
        file closed here) or an already-open file-like object (left open,
        no header written).
        """
        if isinstance(f, str):
            f = open(f, "w")
            f.write("events: Microseconds\n")
            f.write("fl=sympyallimport\n")
            must_close = True
        else:
            must_close = False
        f.write("fn=%s\n" % self.name())
        f.write("1 %s\n" % self.exclusive_time())
        counter = 2
        for child in self.children():
            f.write("cfn=%s\n" % child.name())
            f.write("calls=1 1\n")
            f.write("%s %s\n" % (counter, child.time()))
            counter += 1
        f.write("\n\n")
        for child in self.children():
            child.write_cachegrind(f)
        if must_close:
            f.close()
# Root of the import-timing tree.  ``pp`` always points at the node whose
# imports are currently being executed (a poor man's call stack).
pp = TreeNode(None)  # We have to use pp since there is a sage function
#called parent that gets imported
# Module names that have already been imported (and therefore timed) once.
seen = set()
def new_import(name, globals={}, locals={}, fromlist=[], *args, **kwargs):
    """Replacement for ``__import__`` that times each first-time import.

    Builds a TreeNode tree rooted at the module-global ``pp``, recording the
    time each module takes to import (in microseconds), children being the
    nested imports it triggers.  Repeated imports are delegated untimed.

    Bug fix: extra positional/keyword arguments (notably ``level``, which
    the interpreter passes for ``absolute_import`` modules) are now
    forwarded to the real import hook instead of raising TypeError.
    The mutable defaults are kept for signature compatibility; they are
    never mutated here.
    """
    global pp
    if name in seen:
        return old_import(name, globals, locals, fromlist, *args, **kwargs)
    seen.add(name)
    node = TreeNode(name)
    pp.add_child(node)
    old_pp = pp
    pp = node
    # Do the actual import, timing it with the highest-resolution timer
    # available on this platform.
    t1 = timeit.default_timer()
    module = old_import(name, globals, locals, fromlist, *args, **kwargs)
    t2 = timeit.default_timer()
    node.set_time(int(1000000*(t2 - t1)))
    pp = old_pp
    return module
# Install the timing hook, then trigger sympy's import chain.
# NOTE(review): ``__builtins__`` is the module here because this runs as a
# script; inside an imported module it would be a dict -- confirm usage.
old_import = __builtins__.__import__
__builtins__.__import__ = new_import
# ``from sympy import *`` clobbers the builtin sum(); save and restore it.
old_sum = sum
from sympy import *
sum = old_sum
# The first child of the root node is the top-level "sympy" import.
# (The name "sageall" is a leftover from the sage version of this script.)
sageall = pp.child(0)
sageall.write_cachegrind("sympy.cachegrind")
print("Timings saved. Do:\n$ kcachegrind sympy.cachegrind")
AnimationInVR/avango | attic/examples/shader/shader.py | 6 | 11788 | # -*- Mode:Python -*-
##########################################################################
# #
# This file is part of AVANGO. #
# #
# Copyright 1997 - 2009 Fraunhofer-Gesellschaft zur Foerderung der #
# angewandten Forschung (FhG), Munich, Germany. #
# #
# AVANGO is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Lesser General Public License as #
# published by the Free Software Foundation, version 3. #
# #
# AVANGO is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU Lesser General Public #
# License along with AVANGO. If not, see <http://www.gnu.org/licenses/>. #
# #
##########################################################################
import math
import avango.osg
import avango.inspector
import avango.display
import sys
# Initialise the avango display system and create a trackball-controlled view.
argv = avango.display.init(sys.argv)
view = avango.display.make_view()
view.EnableTrackball.value = True
###############################################################################
# some light sources -> see also below the NumLights Uniform for the shader
# (the number of LightSource nodes here must match that uniform's value)
light0 = avango.osg.nodes.Light(LightNum=0,
                                Ambient=avango.osg.Vec4(0.2,0.2,0.2,0.2),
                                Diffuse=avango.osg.Vec4(0.7,0.7,0.7,1.0),
                                Specular=avango.osg.Vec4(1.0,1.0,1.0,1.0),
                                Position=avango.osg.Vec4(3.0,3.0,3.0,1.0))
lightsource0 = avango.osg.nodes.LightSource(Light=light0)
light1 = avango.osg.nodes.Light(LightNum=1,
                                Ambient=avango.osg.Vec4(0.2,0.2,0.2,0.2),
                                Diffuse=avango.osg.Vec4(0.7,0.7,0.7,1.0),
                                Specular=avango.osg.Vec4(1.0,1.0,1.0,1.0),
                                Position=avango.osg.Vec4(-3.0,3.0,3.0,1.0))
lightsource1 = avango.osg.nodes.LightSource(Light=light1)
light2 = avango.osg.nodes.Light(LightNum=2,
                                Ambient=avango.osg.Vec4(0.2,0.2,0.2,0.2),
                                Diffuse=avango.osg.Vec4(0.7,0.7,0.7,1.0),
                                Specular=avango.osg.Vec4(1.0,1.0,1.0,1.0),
                                Position=avango.osg.Vec4(0.0,-3.0,3.0,1.0))
lightsource2 = avango.osg.nodes.LightSource(Light=light2)
light3 = avango.osg.nodes.Light(LightNum=3,
                                Ambient=avango.osg.Vec4(0.2,0.2,0.2,0.2),
                                Diffuse=avango.osg.Vec4(0.7,0.7,0.7,1.0),
                                Specular=avango.osg.Vec4(1.0,1.0,1.0,1.0),
                                Position=avango.osg.Vec4(0.0,0.0,-3.0,1.0))
lightsource3 = avango.osg.nodes.LightSource(Light=light3)
###############################################################################
# an example shader program
# (the GLSL sources are loaded from these files in the working directory)
vshaderfile = "vshader.vert"
fshaderfile = "fshader.frag"
vshader = avango.osg.nodes.Shader(Name="VertexShader",
                                  Type=avango.osg.shadertype.VERTEX,
                                  FileName=vshaderfile)
fshader = avango.osg.nodes.Shader(Name="FragmentShader",
                                  Type=avango.osg.shadertype.FRAGMENT,
                                  FileName=fshaderfile)
prog = avango.osg.nodes.Program(ShaderList=[vshader,fshader])
ss = avango.osg.nodes.StateSet(Program = prog)
# Important: Values have to be specified first !!!
ss.Uniforms.value = [avango.osg.nodes.Uniform(
                        Values=[4.0],
                        Type=avango.osg.uniformtype.FLOAT,
                        UniformName="NumLights"
                        )
                    ]
###############################################################################
# some example models
# Each sphere overrides the material uniforms (ambient/diffuse/specular/
# shininess) consumed by the shader program above.
model1 = avango.osg.nodes.Sphere(StateSet = avango.osg.nodes.StateSet(Uniforms = [avango.osg.nodes.Uniform( Values=[0.2,0.0,0.0,1.0],
                                                                                                            Type=avango.osg.uniformtype.FLOAT_VEC4,
                                                                                                            UniformName="ambient"),
                                                                                  avango.osg.nodes.Uniform( Values=[0.7,0.0,0.0,1.0],
                                                                                                            Type=avango.osg.uniformtype.FLOAT_VEC4,
                                                                                                            UniformName="diffuse"),
                                                                                  avango.osg.nodes.Uniform( Values=[1.0,1.0,1.0,1.0],
                                                                                                            Type=avango.osg.uniformtype.FLOAT_VEC4,
                                                                                                            UniformName="specular"),
                                                                                  avango.osg.nodes.Uniform( Values=[256.0],
                                                                                                            Type=avango.osg.uniformtype.FLOAT,
                                                                                                            UniformName="shininess"),
                                                                                  ]))
model1.Matrix.value = avango.osg.make_trans_mat(-5.0, 0.0, 0.0) * avango.osg.make_scale_mat(0.1, 0.1, 0.1)
model2 = avango.osg.nodes.Sphere(StateSet = avango.osg.nodes.StateSet(Uniforms = [avango.osg.nodes.Uniform( Values=[0.0,0.2,0.0,1.0],
                                                                                                            Type=avango.osg.uniformtype.FLOAT_VEC4,
                                                                                                            UniformName="ambient"),
                                                                                  avango.osg.nodes.Uniform( Values=[0.0,0.7,0.0,1.0],
                                                                                                            Type=avango.osg.uniformtype.FLOAT_VEC4,
                                                                                                            UniformName="diffuse"),
                                                                                  avango.osg.nodes.Uniform( Values=[1.0,1.0,1.0,1.0],
                                                                                                            Type=avango.osg.uniformtype.FLOAT_VEC4,
                                                                                                            UniformName="specular"),
                                                                                  avango.osg.nodes.Uniform( Values=[256.0],
                                                                                                            Type=avango.osg.uniformtype.FLOAT,
                                                                                                            UniformName="shininess"),
                                                                                  ]))
model2.Matrix.value = avango.osg.make_trans_mat(2.0, 0.0, 0.0) * avango.osg.make_scale_mat(0.2, 0.2, 0.2)
###############################################################################
# build the scenegraph
# The light sources are chained so all four illuminate the model group.
modelGroup = avango.osg.nodes.Group(StateSet=ss)
modelGroup.Children.value.append(model1)
modelGroup.Children.value.append(model2)
lightsource3.Children.value = [modelGroup]
lightsource2.Children.value = [lightsource3]
lightsource1.Children.value = [lightsource2]
lightsource0.Children.value = [lightsource1]
root = avango.osg.nodes.Group()
root.Children.value = [lightsource0]
###############################################################################
# visualize the Lightsources in Wireframe
# (trivial inline shaders: no lighting, just the material diffuse color)
wirestate = avango.osg.nodes.StateSet(WireframeMode = 1,
                                      Program = avango.osg.nodes.Program(ShaderList=[
    avango.osg.nodes.Shader(Name="VertexShader",Type=avango.osg.shadertype.VERTEX,ShaderSource="void main() { gl_Position = ftransform(); }"),
    avango.osg.nodes.Shader(Name="VertexShader",Type=avango.osg.shadertype.FRAGMENT,ShaderSource="void main() { gl_FragColor = gl_FrontMaterial.diffuse; }")
    ]))
lightcolor = avango.osg.Vec4(1.0,0.8,0.0,1.0)
root.Children.value.append(avango.osg.nodes.Sphere(StateSet = wirestate,
                                                   Color = lightcolor,
                                                   Matrix = avango.osg.make_scale_mat(0.1,0.1,0.1) *
                                                            avango.osg.make_trans_mat(light0.Position.value.x,
                                                                                      light0.Position.value.y,
                                                                                      light0.Position.value.z,)))
root.Children.value.append(avango.osg.nodes.Sphere(StateSet = wirestate,
                                                   Color = lightcolor,
                                                   Matrix = avango.osg.make_scale_mat(0.1,0.1,0.1) *
                                                            avango.osg.make_trans_mat(light1.Position.value.x,
                                                                                      light1.Position.value.y,
                                                                                      light1.Position.value.z,)))
root.Children.value.append(avango.osg.nodes.Sphere(StateSet = wirestate,
                                                   Color = lightcolor,
                                                   Matrix = avango.osg.make_scale_mat(0.1,0.1,0.1) *
                                                            avango.osg.make_trans_mat(light2.Position.value.x,
                                                                                      light2.Position.value.y,
                                                                                      light2.Position.value.z,)))
root.Children.value.append(avango.osg.nodes.Sphere(StateSet = wirestate,
                                                   Color = lightcolor,
                                                   Matrix = avango.osg.make_scale_mat(0.1,0.1,0.1) *
                                                            avango.osg.make_trans_mat(light3.Position.value.x,
                                                                                      light3.Position.value.y,
                                                                                      light3.Position.value.z,)))
###############################################################################
# run the example
view.Root.value = root
avango.display.run()
zhouzhenghui/python-for-android | python-build/python-libs/gdata/src/gdata/oauth/__init__.py | 157 | 19407 | import cgi
import urllib
import time
import random
import urlparse
import hmac
import binascii
# Protocol-level defaults used throughout this module.
VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
# Generic exception class
class OAuthError(RuntimeError):
    """Generic exception raised for OAuth processing errors."""
    def __init__(self, message='OAuth error occured.'):
        # Also pass the text to RuntimeError so that str(exc) and default
        # tracebacks show the message (previously str(exc) was empty).
        RuntimeError.__init__(self, message)
        self.message = message
# optional WWW-Authenticate header (401 error)
def build_authenticate_header(realm=''):
    """Return the WWW-Authenticate header dict for a 401 response."""
    header_value = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': header_value}
# url escape
def escape(s):
    """Percent-encode *s* for OAuth; only '~' is kept unescaped, so '/'
    is encoded too (unlike urllib.quote's default safe='/')."""
    # escape '/' too
    return urllib.quote(s, safe='~')
# util function: current timestamp
# seconds since epoch (UTC)
def generate_timestamp():
    """Return the current epoch time truncated to whole seconds."""
    now = time.time()
    return int(now)
# util function: nonce
# pseudorandom number
def generate_nonce(length=8):
    """Return a pseudorandom string of *length* decimal digits."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
# OAuthConsumer is a data type that represents the identity of the Consumer
# via its shared secret with the Service Provider.
class OAuthConsumer(object):
    """Identity of an OAuth Consumer: its key and shared secret."""
    key = None
    secret = None
    def __init__(self, key, secret):
        # Plain value object; both attributes are read directly by
        # the signing code elsewhere in this module.
        self.key = key
        self.secret = secret
# OAuthToken is a data type that represents an End User via either an access
# or request token.
class OAuthToken(object):
    """An End User token (request token or access token).

    key    -- the token itself
    secret -- the token secret
    """
    key = None
    secret = None
    def __init__(self, key, secret):
        self.key = key
        self.secret = secret
    def to_string(self):
        """Serialize as a URL-encoded query string."""
        return urllib.urlencode(
            {'oauth_token': self.key, 'oauth_token_secret': self.secret})
    def from_string(s):
        """Rebuild a token from a serialized form such as
        'oauth_token_secret=digg&oauth_token=digg'."""
        params = cgi.parse_qs(s, keep_blank_values=False)
        key = params['oauth_token'][0]
        secret = params['oauth_token_secret'][0]
        return OAuthToken(key, secret)
    from_string = staticmethod(from_string)
    def __str__(self):
        return self.to_string()
# OAuthRequest represents the request and can be serialized
class OAuthRequest(object):
'''
OAuth parameters:
- oauth_consumer_key
- oauth_token
- oauth_signature_method
- oauth_signature
- oauth_timestamp
- oauth_nonce
- oauth_version
... any additional parameters, as defined by the Service Provider.
'''
parameters = None # oauth parameters
http_method = HTTP_METHOD
http_url = None
version = VERSION
    def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None):
        """Store the HTTP method, target URL and (possibly empty) parameter
        dict of this request."""
        self.http_method = http_method
        self.http_url = http_url
        self.parameters = parameters or {}
    def set_parameter(self, parameter, value):
        """Set (or overwrite) a single request parameter."""
        self.parameters[parameter] = value
def get_parameter(self, parameter):
try:
return self.parameters[parameter]
except:
raise OAuthError('Parameter not found: %s' % parameter)
    def _get_timestamp_nonce(self):
        """Return the (oauth_timestamp, oauth_nonce) pair of this request."""
        return self.get_parameter('oauth_timestamp'), self.get_parameter('oauth_nonce')
    # get any non-oauth parameters
    def get_nonoauth_parameters(self):
        """Return a dict of the parameters that are not OAuth parameters.

        NOTE(review): ``k.find('oauth_') < 0`` filters out names containing
        'oauth_' anywhere, not only as a prefix -- confirm intended.
        """
        parameters = {}
        for k, v in self.parameters.iteritems():
            # ignore oauth parameters
            if k.find('oauth_') < 0:
                parameters[k] = v
        return parameters
    # serialize as a header for an HTTPAuth request
    def to_header(self, realm=''):
        """Serialize all oauth_-prefixed parameters into an
        ``Authorization: OAuth ...`` header dict."""
        auth_header = 'OAuth realm="%s"' % realm
        # add the oauth parameters
        if self.parameters:
            for k, v in self.parameters.iteritems():
                if k[:6] == 'oauth_':
                    auth_header += ', %s="%s"' % (k, escape(str(v)))
        return {'Authorization': auth_header}
    # serialize as post data for a POST request
    def to_postdata(self):
        """Serialize all parameters as an escaped 'k=v&k=v' body string."""
        return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) for k, v in self.parameters.iteritems()])
    # serialize as a url for a GET request
    def to_url(self):
        """Return the normalized URL with all parameters as a query string."""
        return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())
# return a string that consists of all the parameters that need to be signed
def get_normalized_parameters(self):
params = self.parameters
try:
# exclude the signature if it exists
del params['oauth_signature']
except:
pass
key_values = params.items()
# sort lexicographically, first after key, then after value
key_values.sort()
# combine key value pairs in string and escape
return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) for k, v in key_values])
    # just uppercases the http method
    def get_normalized_http_method(self):
        """Return the HTTP method in upper case, as required for signing."""
        return self.http_method.upper()
    # parses the url and rebuilds it to be scheme://host/path
    def get_normalized_http_url(self):
        """Return the request URL reduced to scheme://netloc/path
        (query string and fragment dropped)."""
        parts = urlparse.urlparse(self.http_url)
        url_string = '%s://%s%s' % (parts[0], parts[1], parts[2]) # scheme, netloc, path
        return url_string
    # set the signature parameter to the result of build_signature
    def sign_request(self, signature_method, consumer, token):
        """Compute and attach oauth_signature_method and oauth_signature
        using the given signature method, consumer and (optional) token."""
        # set the signature method
        self.set_parameter('oauth_signature_method', signature_method.get_name())
        # set the signature
        self.set_parameter('oauth_signature', self.build_signature(signature_method, consumer, token))
    def build_signature(self, signature_method, consumer, token):
        """Delegate signature computation to the given signature method."""
        # call the build signature method within the signature method
        return signature_method.build_signature(self, consumer, token)
    def from_request(http_method, http_url, headers=None, parameters=None, query_string=None):
        """Build an OAuthRequest from an incoming HTTP request.

        Parameters are merged from the Authorization header, the explicit
        query_string, and the URL's own query component (later sources
        override earlier ones). Returns None when no parameters are found.
        """
        # combine multiple parameter sources
        if parameters is None:
            parameters = {}
        # headers
        if headers and 'Authorization' in headers:
            auth_header = headers['Authorization']
            # check that the authorization header is OAuth
            # NOTE(review): str.index raises ValueError when 'OAuth' is
            # absent instead of returning -1 — confirm callers only pass
            # OAuth-style Authorization headers here.
            if auth_header.index('OAuth') > -1:
                try:
                    # get the parameters from the header
                    header_params = OAuthRequest._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    raise OAuthError('Unable to parse OAuth parameters from Authorization header.')
        # GET or POST query string
        if query_string:
            query_params = OAuthRequest._split_url_string(query_string)
            parameters.update(query_params)
        # URL parameters
        param_str = urlparse.urlparse(http_url)[4] # query
        url_params = OAuthRequest._split_url_string(param_str)
        parameters.update(url_params)
        if parameters:
            return OAuthRequest(http_method, http_url, parameters)
        return None
    # pre-decorator style staticmethod registration
    from_request = staticmethod(from_request)
def from_consumer_and_token(oauth_consumer, token=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
if not parameters:
parameters = {}
defaults = {
'oauth_consumer_key': oauth_consumer.key,
'oauth_timestamp': generate_timestamp(),
'oauth_nonce': generate_nonce(),
'oauth_version': OAuthRequest.version,
}
defaults.update(parameters)
parameters = defaults
if token:
parameters['oauth_token'] = token.key
return OAuthRequest(http_method, http_url, parameters)
from_consumer_and_token = staticmethod(from_consumer_and_token)
def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
if not parameters:
parameters = {}
parameters['oauth_token'] = token.key
if callback:
parameters['oauth_callback'] = callback
return OAuthRequest(http_method, http_url, parameters)
from_token_and_callback = staticmethod(from_token_and_callback)
# util function: turn Authorization: header into parameters, has to do some unescaping
def _split_header(header):
params = {}
parts = header.split(',')
for param in parts:
# ignore realm parameter
if param.find('OAuth realm') > -1:
continue
# remove whitespace
param = param.strip()
# split key-value
param_parts = param.split('=', 1)
# remove quotes and unescape the value
params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
return params
_split_header = staticmethod(_split_header)
# util function: turn url string into parameters, has to do some unescaping
def _split_url_string(param_str):
parameters = cgi.parse_qs(param_str, keep_blank_values=False)
for k, v in parameters.iteritems():
parameters[k] = urllib.unquote(v[0])
return parameters
_split_url_string = staticmethod(_split_url_string)
# OAuthServer is a worker to check a requests validity against a data store
class OAuthServer(object):
    """Server-side OAuth 1.0 helper.

    Validates incoming requests (version, consumer, token, timestamp,
    nonce and signature) against a pluggable OAuthDataStore and issues
    request/access tokens.
    """
    timestamp_threshold = 300 # in seconds, five minutes
    version = VERSION
    signature_methods = None
    data_store = None
    def __init__(self, data_store=None, signature_methods=None):
        self.data_store = data_store
        self.signature_methods = signature_methods or {}
    def set_data_store(self, oauth_data_store):
        # FIX: the original assigned the undefined name 'data_store',
        # raising NameError whenever this setter was called.
        self.data_store = oauth_data_store
    def get_data_store(self):
        return self.data_store
    def add_signature_method(self, signature_method):
        """Register a signature method under its wire name (e.g. HMAC-SHA1)."""
        self.signature_methods[signature_method.get_name()] = signature_method
        return self.signature_methods
    # process a request_token request
    # returns the request token on success
    def fetch_request_token(self, oauth_request):
        try:
            # get the request token for authorization
            token = self._get_token(oauth_request, 'request')
        except OAuthError:
            # no token required for the initial token request
            version = self._get_version(oauth_request)
            consumer = self._get_consumer(oauth_request)
            self._check_signature(oauth_request, consumer, None)
            # fetch a new token
            token = self.data_store.fetch_request_token(consumer)
        return token
    # process an access_token request
    # returns the access token on success
    def fetch_access_token(self, oauth_request):
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        # get the request token
        token = self._get_token(oauth_request, 'request')
        self._check_signature(oauth_request, consumer, token)
        new_token = self.data_store.fetch_access_token(consumer, token)
        return new_token
    # verify an api call, checks all the parameters
    def verify_request(self, oauth_request):
        """Return (consumer, token, non-oauth parameters) or raise OAuthError."""
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        # get the access token
        token = self._get_token(oauth_request, 'access')
        self._check_signature(oauth_request, consumer, token)
        parameters = oauth_request.get_nonoauth_parameters()
        return consumer, token, parameters
    # authorize a request token
    def authorize_token(self, token, user):
        return self.data_store.authorize_request_token(token, user)
    # get the callback url
    def get_callback(self, oauth_request):
        return oauth_request.get_parameter('oauth_callback')
    # optional support for the authenticate header
    def build_authenticate_header(self, realm=''):
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
    # verify the correct version request for this server
    def _get_version(self, oauth_request):
        try:
            version = oauth_request.get_parameter('oauth_version')
        except:
            # a missing oauth_version defaults to the supported version
            version = VERSION
        if version and version != self.version:
            raise OAuthError('OAuth version %s not supported.' % str(version))
        return version
    # figure out the signature with some defaults
    def _get_signature_method(self, oauth_request):
        try:
            signature_method = oauth_request.get_parameter('oauth_signature_method')
        except:
            signature_method = SIGNATURE_METHOD
        try:
            # get the signature method object
            signature_method = self.signature_methods[signature_method]
        except:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise OAuthError('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
        return signature_method
    def _get_consumer(self, oauth_request):
        consumer_key = oauth_request.get_parameter('oauth_consumer_key')
        if not consumer_key:
            raise OAuthError('Invalid consumer key.')
        consumer = self.data_store.lookup_consumer(consumer_key)
        if not consumer:
            raise OAuthError('Invalid consumer.')
        return consumer
    # try to find the token for the provided request token key
    def _get_token(self, oauth_request, token_type='access'):
        token_field = oauth_request.get_parameter('oauth_token')
        token = self.data_store.lookup_token(token_type, token_field)
        if not token:
            raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
        return token
    def _check_signature(self, oauth_request, consumer, token):
        """Validate timestamp, nonce and signature; raise OAuthError on failure."""
        timestamp, nonce = oauth_request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        self._check_nonce(consumer, token, nonce)
        signature_method = self._get_signature_method(oauth_request)
        try:
            signature = oauth_request.get_parameter('oauth_signature')
        except:
            raise OAuthError('Missing signature.')
        # validate the signature
        valid_sig = signature_method.check_signature(oauth_request, consumer, token, signature)
        if not valid_sig:
            key, base = signature_method.build_signature_base_string(oauth_request, consumer, token)
            raise OAuthError('Invalid signature. Expected signature base string: %s' % base)
        # FIX: removed an unreachable signature re-computation that followed
        # the 'raise' above in the original.
    def _check_timestamp(self, timestamp):
        # verify that timestamp is recentish
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise OAuthError('Expired timestamp: given %d and now %s has a greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold))
    def _check_nonce(self, consumer, token, nonce):
        # verify that the nonce is uniqueish
        nonce = self.data_store.lookup_nonce(consumer, token, nonce)
        if nonce:
            raise OAuthError('Nonce already used: %s' % str(nonce))
# OAuthClient is a worker to attempt to execute a request
class OAuthClient(object):
    """Abstract OAuth client; subclasses supply the actual HTTP transport."""
    consumer = None
    token = None

    def __init__(self, oauth_consumer, oauth_token):
        self.consumer = oauth_consumer
        self.token = oauth_token

    def get_consumer(self):
        """Return the consumer credentials this client acts for."""
        return self.consumer

    def get_token(self):
        """Return the current request/access token."""
        return self.token

    def fetch_request_token(self, oauth_request):
        """Exchange a signed request for a request token. -> OAuthToken"""
        raise NotImplementedError

    def fetch_access_token(self, oauth_request):
        """Exchange an authorized request token for an access token. -> OAuthToken"""
        raise NotImplementedError

    def access_resource(self, oauth_request):
        """Fetch some protected resource using the access token."""
        raise NotImplementedError
# OAuthDataStore is a database abstraction used to lookup consumers and tokens
class OAuthDataStore(object):
    """Abstract storage backend for consumers, tokens and nonces.

    NOTE(review): OAuthServer calls lookup_token(token_type, token) with two
    arguments while this stub declares three — implementations should follow
    the server's calling convention.
    """

    def lookup_consumer(self, key):
        """Find the consumer for a consumer key. -> OAuthConsumer"""
        raise NotImplementedError

    def lookup_token(self, oauth_consumer, token_type, token_token):
        """Find a request/access token. -> OAuthToken"""
        raise NotImplementedError

    def lookup_nonce(self, oauth_consumer, oauth_token, nonce, timestamp):
        """Return a previously-seen nonce, if any. -> OAuthToken"""
        raise NotImplementedError

    def fetch_request_token(self, oauth_consumer):
        """Create a new request token for the consumer. -> OAuthToken"""
        raise NotImplementedError

    def fetch_access_token(self, oauth_consumer, oauth_token):
        """Trade an authorized request token for an access token. -> OAuthToken"""
        raise NotImplementedError

    def authorize_request_token(self, oauth_token, user):
        """Mark a request token as authorized by the user. -> OAuthToken"""
        raise NotImplementedError
# OAuthSignatureMethod is a strategy class that implements a signature method
class OAuthSignatureMethod(object):
    """Strategy interface for OAuth signature algorithms."""

    def get_name(self):
        """Return the wire name of this method, e.g. 'HMAC-SHA1'. -> str"""
        raise NotImplementedError

    def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token):
        """Return the (key, raw) pair the signature is computed from."""
        raise NotImplementedError

    def build_signature(self, oauth_request, oauth_consumer, oauth_token):
        """Return the signature string for the request. -> str"""
        raise NotImplementedError

    def check_signature(self, oauth_request, consumer, token, signature):
        """Recompute the signature and compare it with the supplied one."""
        expected = self.build_signature(oauth_request, consumer, token)
        return expected == signature
class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod):
    """HMAC-SHA1 signature method (OAuth Core 1.0, section 9.2).

    The key is 'escape(consumer_secret)&escape(token_secret)'; the raw
    text is 'METHOD&url&normalized_parameters'. Written for Python 2:
    hmac.new() is fed str objects, which would need bytes on Python 3.
    """
    def get_name(self):
        return 'HMAC-SHA1'
    def build_signature_base_string(self, oauth_request, consumer, token):
        # the three escaped components of the signature base string
        sig = (
            escape(oauth_request.get_normalized_http_method()),
            escape(oauth_request.get_normalized_http_url()),
            escape(oauth_request.get_normalized_parameters()),
        )
        # key always ends with '&' even when there is no token secret
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        raw = '&'.join(sig)
        return key, raw
    def build_signature(self, oauth_request, consumer, token):
        # build the base signature string
        key, raw = self.build_signature_base_string(oauth_request, consumer, token)
        # hmac object
        try:
            import hashlib # 2.5
            hashed = hmac.new(key, raw, hashlib.sha1)
        except:
            import sha # deprecated
            hashed = hmac.new(key, raw, sha)
        # calculate the digest base 64 (strip the trailing newline)
        return binascii.b2a_base64(hashed.digest())[:-1]
class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod):
    """PLAINTEXT signature: the escaped shared secrets joined by '&'."""

    def get_name(self):
        return 'PLAINTEXT'

    def build_signature_base_string(self, oauth_request, consumer, token):
        # consumer secret first, then (optionally) the token secret
        parts = escape(consumer.secret) + '&'
        if token:
            parts += escape(token.secret)
        return parts

    def build_signature(self, oauth_request, consumer, token):
        # for PLAINTEXT the signature *is* the base string
        return self.build_signature_base_string(oauth_request, consumer, token)
| apache-2.0 |
gulopine/steel | examples/mods/mod.py | 1 | 3038 | import steel
from steel import bits
class FineTune(bits.Structure):
    # One byte: the high nibble is unused padding, the low nibble holds
    # the sample's finetune value.
    bits.Reserved(size=4)
    value = bits.Integer(size=4)
class SampleLength(bits.Integer):
    """Integer field stored on disk as a count of 16-bit words.

    MOD files store sample lengths halved; encode/decode convert between
    the in-memory byte count and the on-disk word count.
    """
    def encode(self, value):
        # bytes -> words (truncating; lengths are expected non-negative)
        return int(value / 2)
    def decode(self, value):
        # words -> bytes
        return value * 2
class Sample(steel.Structure):
    # 30-byte sample header from the MOD file.
    name = steel.String(size=22, encoding='ascii')
    size = SampleLength(size=2)
    finetune = steel.SubStructure(FineTune)
    volume = steel.Integer(size=1)
    loop_start = SampleLength(size=2, default=0)
    loop_length = SampleLength(size=2, default=0)
    @property
    def loop_end(self):
        # first byte position after the looped region
        return self.loop_start + self.loop_length
    @property
    def data(self):
        # the raw sample bytes live in the parent MOD's sample_data blob,
        # at the same index this header occupies in MOD.samples
        index = self.get_parent().samples.index(self)
        return self.get_parent().sample_data[index]
    def __unicode__(self):
        return self.name
class Note(bits.Structure):
    """One channel cell: the sample index is split across two nibbles
    surrounding the 12-bit period, followed by a 12-bit effect."""
    sample_hi = bits.Integer(size=4)
    period = bits.Integer(size=12)
    sample_lo = bits.Integer(size=4)
    effect = bits.Integer(size=12)
    @property
    def sample(self):
        # FIX: '+' binds tighter than '<<', so the original
        # 'sample_hi << 4 + sample_lo' computed sample_hi << (4 + sample_lo)
        # and indexed the wrong sample. The setter below shows the intended
        # layout: index == (hi << 4) + lo.
        index = (self.sample_hi << 4) + self.sample_lo
        return self.get_parent().samples[index]
    @sample.setter
    def sample(self, sample):
        index = self.get_parent().samples.index(sample)
        self.sample_hi = index >> 4
        self.sample_lo = index & 0xF
class Row(steel.Structure):
    """One pattern row: a list of notes, one per channel."""
    notes = steel.List(Note, size=lambda self: self.get_parent().channels)
    def __iter__(self):
        # FIX: Row has no 'rows' attribute (copy/paste from Pattern);
        # iterating a row must yield its notes.
        return iter(self.notes)
class Pattern(steel.Structure):
    # A pattern is a fixed block of 64 rows; iteration yields the rows.
    rows = steel.List(Row, size=64)
    def __iter__(self):
        return iter(self.rows)
class MOD(steel.Structure, endianness=steel.BigEndian):
    """A 4-channel ProTracker-style MOD module ('M.K.' marker)."""
    channels = 4
    title = steel.String(size=20, encoding='ascii')
    samples = steel.List(Sample, size=15)
    order_count = steel.Integer(size=1)
    restart_position = steel.Integer(size=1)
    pattern_order = steel.List(steel.Integer(size=1), size=128)
    marker = steel.FixedString('M.K.')
    patterns = steel.List(Pattern, size=lambda self: max(self.pattern_order) + 1)
    sample_data = steel.Bytes(size=steel.Remainder)
    @property
    def pattern_count(self):
        # FIX: the original read nonexistent 'self.order' (AttributeError);
        # the play order field is pattern_order, matching the lambda used
        # for the patterns list size above.
        return max(self.pattern_order) + 1
    @sample_data.getter
    def sample_data(self, data):
        # slice the trailing blob into one chunk per sample, using each
        # sample header's decoded byte length
        offset = 0
        output = []
        for info in self.samples:
            output.append(data[offset:offset + info.size])
            offset += info.size
        return output
    @sample_data.setter
    def sample_data(self, data_list):
        return ''.join(data_list)
    def __iter__(self):
        # yield patterns in play order (indices may repeat)
        for index in self.pattern_order:
            yield self.patterns[index]
    def __unicode__(self):
        return self.title
if __name__ == '__main__':
    # FIX: sys was used below but never imported anywhere in this module.
    import sys
    for format in (MOD,):
        track = format(open(sys.argv[1], 'rb'))
        print('%s: %s' % (format.__name__, track.title))
| bsd-3-clause |
spaceof7/QGIS | tests/src/python/test_qgslayoutpolyline.py | 13 | 12207 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutItemPolyline.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2016 by Paul Blottiere'
__date__ = '14/03/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
from qgis.PyQt.QtGui import QPolygonF
from qgis.PyQt.QtCore import QPointF
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import (QgsLayoutItemPolyline,
QgsLayoutItemRegistry,
QgsLayout,
QgsLineSymbol,
QgsProject,
QgsReadWriteContext)
from qgis.testing import (start_app,
unittest
)
from utilities import unitTestDataPath
from qgslayoutchecker import QgsLayoutChecker
from test_qgslayoutitem import LayoutItemTestCase
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsLayoutPolyline(unittest.TestCase, LayoutItemTestCase):
    """Unit tests for QgsLayoutItemPolyline.

    Rendering checks compare against reference images under the
    'composer_polyline' control path via QgsLayoutChecker; the remaining
    tests exercise node manipulation and XML (de)serialization.
    """

    @classmethod
    def setUpClass(cls):
        # item class exercised by the shared LayoutItemTestCase mixin
        cls.item_class = QgsLayoutItemPolyline

    def __init__(self, methodName):
        """Run once on class initialization."""
        unittest.TestCase.__init__(self, methodName)

        # create composition
        self.layout = QgsLayout(QgsProject.instance())
        self.layout.initializeDefaults()

        # create a 4-node polyline shared by most rendering tests
        polygon = QPolygonF()
        polygon.append(QPointF(0.0, 0.0))
        polygon.append(QPointF(100.0, 0.0))
        polygon.append(QPointF(200.0, 100.0))
        polygon.append(QPointF(100.0, 200.0))

        self.polyline = QgsLayoutItemPolyline(
            polygon, self.layout)
        self.layout.addLayoutItem(self.polyline)

        # style: plain 10mm black line with square caps
        props = {}
        props["color"] = "0,0,0,255"
        props["width"] = "10.0"
        props["capstyle"] = "square"

        style = QgsLineSymbol.createSimple(props)
        self.polyline.setSymbol(style)

    def testNodes(self):
        # nodes() must reflect both the construction polygon and setNodes()
        polygon = QPolygonF()
        polygon.append(QPointF(0.0, 0.0))
        polygon.append(QPointF(100.0, 0.0))
        polygon.append(QPointF(200.0, 100.0))
        polygon.append(QPointF(100.0, 200.0))
        p = QgsLayoutItemPolyline(polygon, self.layout)

        self.assertEqual(p.nodes(), polygon)

        polygon = QPolygonF()
        polygon.append(QPointF(0.0, 0.0))
        polygon.append(QPointF(1000.0, 0.0))
        polygon.append(QPointF(2000.0, 100.0))
        polygon.append(QPointF(1000.0, 200.0))
        p.setNodes(polygon)

        self.assertEqual(p.nodes(), polygon)

    def testDisplayName(self):
        """Test if displayName is valid"""
        self.assertEqual(self.polyline.displayName(), "<Polyline>")

    def testType(self):
        """Test if type is valid"""
        self.assertEqual(
            self.polyline.type(), QgsLayoutItemRegistry.LayoutPolyline)

    def testDefaultStyle(self):
        """Test polygon rendering with default style."""
        self.polyline.setDisplayNodes(False)
        checker = QgsLayoutChecker(
            'composerpolyline_defaultstyle', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

    def testDisplayNodes(self):
        """Test displayNodes method"""
        self.polyline.setDisplayNodes(True)
        checker = QgsLayoutChecker(
            'composerpolyline_displaynodes', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

        self.polyline.setDisplayNodes(False)
        checker = QgsLayoutChecker(
            'composerpolyline_defaultstyle', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

    def testSelectedNode(self):
        """Test selectedNode and deselectNode methods"""
        self.polyline.setDisplayNodes(True)

        self.polyline.setSelectedNode(3)
        checker = QgsLayoutChecker(
            'composerpolyline_selectednode', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

        self.polyline.deselectNode()
        self.polyline.setDisplayNodes(False)
        checker = QgsLayoutChecker(
            'composerpolyline_defaultstyle', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

    def testRemoveNode(self):
        """Test removeNode method"""
        # out-of-range index must fail and leave the shape untouched
        rc = self.polyline.removeNode(100)
        self.assertEqual(rc, False)
        checker = QgsLayoutChecker(
            'composerpolyline_defaultstyle', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

        self.assertEqual(self.polyline.nodesSize(), 4)
        rc = self.polyline.removeNode(3)
        self.assertEqual(rc, True)
        self.assertEqual(self.polyline.nodesSize(), 3)

        checker = QgsLayoutChecker(
            'composerpolyline_removednode', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

    def testAddNode(self):
        """Test addNode method"""
        # default searching radius is 10
        self.assertEqual(self.polyline.nodesSize(), 4)
        rc = self.polyline.addNode(QPointF(50.0, 10.0))
        self.assertEqual(rc, False)

        # default searching radius is 10
        self.assertEqual(self.polyline.nodesSize(), 4)
        rc = self.polyline.addNode(QPointF(50.0, 9.99))
        self.assertEqual(rc, True)
        self.assertEqual(self.polyline.nodesSize(), 5)

    def testAddNodeCustomRadius(self):
        """Test addNode with custom radius"""
        # default searching radius is 10
        self.assertEqual(self.polyline.nodesSize(), 4)
        rc = self.polyline.addNode(QPointF(50.0, 8.1), True, 8.0)
        self.assertEqual(rc, False)
        self.assertEqual(self.polyline.nodesSize(), 4)

        # default searching radius is 10
        rc = self.polyline.addNode(QPointF(50.0, 7.9), True, 8.0)
        self.assertEqual(rc, True)
        self.assertEqual(self.polyline.nodesSize(), 5)

    def testAddNodeWithoutCheckingArea(self):
        """Test addNode without checking the maximum distance allowed"""
        # default searching radius is 10
        self.assertEqual(self.polyline.nodesSize(), 4)
        rc = self.polyline.addNode(QPointF(50.0, 20.0))
        self.assertEqual(rc, False)
        self.assertEqual(self.polyline.nodesSize(), 4)

        # default searching radius is 10
        self.assertEqual(self.polyline.nodesSize(), 4)
        rc = self.polyline.addNode(QPointF(50.0, 20.0), False)
        self.assertEqual(rc, True)
        self.assertEqual(self.polyline.nodesSize(), 5)

        checker = QgsLayoutChecker(
            'composerpolyline_addnode', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

    def testMoveNode(self):
        """Test moveNode method"""
        rc = self.polyline.moveNode(30, QPointF(100.0, 300.0))
        self.assertEqual(rc, False)

        rc = self.polyline.moveNode(3, QPointF(100.0, 150.0))
        self.assertEqual(rc, True)

        checker = QgsLayoutChecker(
            'composerpolyline_movenode', self.layout)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

    def testNodeAtPosition(self):
        """Test nodeAtPosition method"""
        # default searching radius is 10
        rc = self.polyline.nodeAtPosition(QPointF(100.0, 210.0))
        self.assertEqual(rc, -1)

        # default searching radius is 10
        rc = self.polyline.nodeAtPosition(
            QPointF(100.0, 210.0), False)
        self.assertEqual(rc, 3)

        # default searching radius is 10
        rc = self.polyline.nodeAtPosition(
            QPointF(100.0, 210.0), True, 10.1)
        self.assertEqual(rc, 3)

    def testReadWriteXml(self):
        # round-trip an item through its XML representation
        pr = QgsProject()
        l = QgsLayout(pr)

        p = QPolygonF()
        p.append(QPointF(0.0, 0.0))
        p.append(QPointF(100.0, 0.0))
        p.append(QPointF(200.0, 100.0))
        shape = QgsLayoutItemPolyline(p, l)

        props = {}
        props["color"] = "255,0,0,255"
        props["width"] = "10.0"
        props["capstyle"] = "square"

        style = QgsLineSymbol.createSimple(props)
        shape.setSymbol(style)

        #save original item to xml
        doc = QDomDocument("testdoc")
        elem = doc.createElement("test")
        self.assertTrue(shape.writeXml(elem, doc, QgsReadWriteContext()))

        shape2 = QgsLayoutItemPolyline(l)
        self.assertTrue(shape2.readXml(elem.firstChildElement(), doc, QgsReadWriteContext()))

        self.assertEqual(shape2.nodes(), shape.nodes())
        self.assertEqual(shape2.symbol().symbolLayer(0).color().name(), '#ff0000')

    def testBounds(self):
        pr = QgsProject()
        l = QgsLayout(pr)

        p = QPolygonF()
        p.append(QPointF(50.0, 30.0))
        p.append(QPointF(100.0, 10.0))
        p.append(QPointF(200.0, 100.0))
        shape = QgsLayoutItemPolyline(p, l)

        props = {}
        props["color"] = "255,0,0,255"
        props["width"] = "6.0"
        props["capstyle"] = "square"

        style = QgsLineSymbol.createSimple(props)
        shape.setSymbol(style)

        # scene bounding rect should include symbol outline
        bounds = shape.sceneBoundingRect()
        self.assertEqual(bounds.left(), 47.0)
        self.assertEqual(bounds.right(), 203.0)
        self.assertEqual(bounds.top(), 7.0)
        self.assertEqual(bounds.bottom(), 103.0)

        # rectWithFrame should include symbol outline too
        bounds = shape.rectWithFrame()
        self.assertEqual(bounds.left(), -3.0)
        self.assertEqual(bounds.right(), 153.0)
        self.assertEqual(bounds.top(), -3.0)
        self.assertEqual(bounds.bottom(), 93.0)

    def testHorizontalLine(self):
        # degenerate (zero-height) line must still render correctly
        pr = QgsProject()
        l = QgsLayout(pr)
        l.initializeDefaults()

        p = QPolygonF()
        p.append(QPointF(50.0, 100.0))
        p.append(QPointF(100.0, 100.0))
        shape = QgsLayoutItemPolyline(p, l)
        l.addLayoutItem(shape)

        props = {}
        props["color"] = "0,0,0,255"
        props["width"] = "10.0"
        props["capstyle"] = "square"

        style = QgsLineSymbol.createSimple(props)
        shape.setSymbol(style)

        checker = QgsLayoutChecker(
            'composerpolyline_hozline', l)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage

    def testVerticalLine(self):
        # degenerate (zero-width) line must still render correctly
        pr = QgsProject()
        l = QgsLayout(pr)
        l.initializeDefaults()

        p = QPolygonF()
        p.append(QPointF(100.0, 50.0))
        p.append(QPointF(100.0, 100.0))
        shape = QgsLayoutItemPolyline(p, l)
        l.addLayoutItem(shape)

        props = {}
        props["color"] = "0,0,0,255"
        props["width"] = "10.0"
        props["capstyle"] = "square"

        style = QgsLineSymbol.createSimple(props)
        shape.setSymbol(style)

        checker = QgsLayoutChecker(
            'composerpolyline_vertline', l)
        checker.setControlPathPrefix("composer_polyline")
        myTestResult, myMessage = checker.testLayout()
        assert myTestResult, myMessage
# Allow running this test module directly, outside the QGIS test harness.
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
dethos/cloudroutes-service | src/bridge/releasescripts/rel7-29-14.py | 6 | 3233 | #!/usr/bin/python
#####################################################################
# Cloud Routes Bridge
# ------------------------------------------------------------------
# Description:
# ------------------------------------------------------------------
# This is a bridge application between the web interface of
# cloudrout.es and the backend cloud routes availability maanger.
# This will gather queue tasks from rethinkdb and create/delete
# the appropriate monitor in the action processes.
# ------------------------------------------------------------------
# Version: Alpha.20140306
# Original Author: Benjamin J. Cane - madflojo@cloudrout.es
# Contributors:
# - your name here
#####################################################################
# Imports
# ------------------------------------------------------------------
# Clean Paths for All
import sys
import yaml
import rethinkdb as r
from rethinkdb.errors import RqlDriverError
import redis
import signal
import syslog
# Load Configuration
# ------------------------------------------------------------------
if len(sys.argv) != 2:
    print("Hey, thats not how you launch this...")
    # FIX: format the string *before* printing. The original
    # 'print("%s <config file>") % sys.argv[0]' only works as a Python 2
    # print statement; under Python 3 it applies '%' to print()'s None
    # return value and raises TypeError.
    print("%s <config file>" % sys.argv[0])
    sys.exit(1)

# Open Config File and Parse Config Data
configfile = sys.argv[1]
# context manager guarantees the handle is closed even if parsing fails
with open(configfile, "r") as cfh:
    config = yaml.safe_load(cfh)
# Open External Connections
# ------------------------------------------------------------------
# Open Syslog (all further status messages go to LOCAL0 with our PID)
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_LOCAL0)
# Redis Server
# NOTE(review): redis.Redis() typically connects lazily, so this except
# branch may not catch bad credentials until first use — confirm.
try:
    r_server = redis.Redis(
        host=config['redis_host'], port=config['redis_port'],
        db=config['redis_db'], password=config['redis_password'])
    line = "Connecting to redis"
    syslog.syslog(syslog.LOG_INFO, line)
except:
    line = "Cannot connect to redis, shutting down"
    syslog.syslog(syslog.LOG_ERR, line)
    sys.exit(1)
# RethinkDB Server (connection is global; used by the migration loop below)
try:
    rdb_server = r.connect(
        host=config['rethink_host'], port=config['rethink_port'],
        auth_key=config['rethink_authkey'], db=config['rethink_db'])
    line = "Connecting to RethinkDB"
    syslog.syslog(syslog.LOG_INFO, line)
except RqlDriverError:
    line = "Cannot connect to rethinkdb, shutting down"
    syslog.syslog(syslog.LOG_ERR, line)
    sys.exit(1)
# Handle Kill Signals Cleanly
# ------------------------------------------------------------------
def killhandle(signum, frame):
    ''' This will close connections cleanly '''
    line = "SIGTERM detected, shutting down"
    syslog.syslog(syslog.LOG_INFO, line)
    rdb_server.close()
    # FIX: the original called zsend.close() here, but no 'zsend' socket
    # exists in this script (copied from a ZeroMQ-based sibling script),
    # so the handler raised NameError during shutdown.
    syslog.closelog()
    sys.exit(0)
signal.signal(signal.SIGTERM, killhandle)
# Helper Functions
# ------------------------------------------------------------------
# Run For Loop
# ------------------------------------------------------------------
# One-off release migration: reset every monitor's failure counter and
# every reaction's trigger bookkeeping, one row update per document.
results = r.table('monitors').run(rdb_server)
for item in results:
    r.table('monitors').get(item['id']).update(
        {'failcount': 0}).run(rdb_server)
results = r.table('reactions').run(rdb_server)
for item in results:
    r.table('reactions').get(item['id']).update(
        {'trigger': 0, 'frequency': 0, 'lastrun': 0}).run(rdb_server)
| agpl-3.0 |
zstackio/zstack-utility | cephbackupstorage/cephbackupstorage/cephagent.py | 1 | 39258 | __author__ = 'frank'
import os
import os.path
import pprint
import traceback
import urllib2
import urlparse
import tempfile
import zstacklib.utils.daemon as daemon
import zstacklib.utils.http as http
import zstacklib.utils.jsonobject as jsonobject
from zstacklib.utils import lock
from zstacklib.utils import linux
from zstacklib.utils import thread
from zstacklib.utils.bash import *
from zstacklib.utils.report import Report
from zstacklib.utils import shell
from zstacklib.utils import ceph
from zstacklib.utils import qemu_img
from zstacklib.utils import traceable_shell
from zstacklib.utils.rollback import rollback, rollbackable
logger = log.get_logger(__name__)
class CephPoolCapacity(object):
    """Capacity report for a single ceph pool."""

    def __init__(self, name, availableCapacity, replicatedSize, used, totalCapacity):
        self.name = name
        self.availableCapacity = availableCapacity
        self.replicatedSize = replicatedSize
        # note: the attribute is usedCapacity even though the ctor
        # parameter is named 'used'
        self.usedCapacity = used
        self.totalCapacity = totalCapacity
class AgentCommand(object):
    # Marker base class for commands handled by this agent.
    def __init__(self):
        pass
class AgentResponse(object):
    """Base response for agent calls: success/error plus the capacity
    fields that handlers commonly fill in before serialization."""

    def __init__(self, success=True, error=None):
        self.success = success
        self.error = error or ''
        self.totalCapacity = None
        self.availableCapacity = None
        self.poolCapacities = None
        self.type = None
class InitRsp(AgentResponse):
    # Response to pool initialization; carries the ceph cluster fsid.
    def __init__(self):
        super(InitRsp, self).__init__()
        self.fsid = None
class DownloadRsp(AgentResponse):
    # Response to an image download; reports virtual and actual sizes.
    def __init__(self):
        super(DownloadRsp, self).__init__()
        self.size = None
        self.actualSize = None
class CephToCephMigrateImageCmd(AgentCommand):
    # Command describing an image migration to another ceph cluster;
    # the destination monitor's SSH password is masked in logs.
    @log.sensitive_fields("dstMonSshPassword")
    def __init__(self):
        super(CephToCephMigrateImageCmd, self).__init__()
        self.imageUuid = None
        self.imageSize = None  # type:long
        self.srcInstallPath = None
        self.dstInstallPath = None
        self.dstMonHostname = None
        self.dstMonSshUsername = None
        self.dstMonSshPassword = None
        self.dstMonSshPort = None  # type:int
class UploadProgressRsp(AgentResponse):
    # Polling response for an in-flight upload (see UploadTask fields).
    def __init__(self):
        super(UploadProgressRsp, self).__init__()
        self.completed = False
        self.progress = 0
        self.size = 0
        self.actualSize = 0
        self.installPath = None
class GetImageSizeRsp(AgentResponse):
    # Virtual and actual size of a stored image.
    def __init__(self):
        super(GetImageSizeRsp, self).__init__()
        self.size = None
        self.actualSize = None
class PingRsp(AgentResponse):
    # Health-check response; 'failure' describes what went wrong, if anything.
    def __init__(self):
        super(PingRsp, self).__init__()
        self.failure = None
class GetFactsRsp(AgentResponse):
    # Cluster facts: fsid and a monitor address.
    def __init__(self):
        super(GetFactsRsp, self).__init__()
        self.fsid = None
        self.monAddr = None
class DeleteImageMetaDataResponse(AgentResponse):
    # 'ret' carries the result of deleting an image's metadata entry.
    def __init__(self):
        super(DeleteImageMetaDataResponse,self).__init__()
        self.ret = None
class WriteImageMetaDataResponse(AgentResponse):
    # Plain success/error response for writing image metadata.
    def __init__(self):
        super(WriteImageMetaDataResponse,self).__init__()
class GetImageMetaDataResponse(AgentResponse):
    # Carries the stored image metadata payload.
    def __init__(self):
        super(GetImageMetaDataResponse,self).__init__()
        self.imagesMetadata= None
class DumpImageMetaDataToFileResponse(AgentResponse):
    # Plain success/error response for dumping metadata to a file.
    def __init__(self):
        super(DumpImageMetaDataToFileResponse,self).__init__()
class CheckImageMetaDataFileExistResponse(AgentResponse):
    # Reports whether the backup storage metadata file exists, and its name.
    def __init__(self):
        super(CheckImageMetaDataFileExistResponse, self).__init__()
        self.backupStorageMetaFileName = None
        self.exist = None
class GetLocalFileSizeRsp(AgentResponse):
    # Size in bytes of a local file.
    def __init__(self):
        super(GetLocalFileSizeRsp, self).__init__()
        self.size = None
def replyerror(func):
    """Decorator for agent handlers: convert any exception into a
    serialized failure AgentResponse instead of letting it propagate."""
    @functools.wraps(func)
    def wrap(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            # log the message, full traceback and call arguments
            details = '%s\n%s\nargs:%s' % (
                str(e), traceback.format_exc(), pprint.pformat([args, kwargs]))
            rsp = AgentResponse()
            rsp.success = False
            rsp.error = str(e)
            logger.warn(details)
            return jsonobject.dumps(rsp)
    return wrap
class UploadTask(object):
    """Bookkeeping for one image upload into ceph."""
    def __init__(self, imageUuid, installPath, dstPath, tmpPath):
        self.completed = False
        self.imageUuid = imageUuid
        self.installPath = installPath
        self.dstPath = dstPath # without 'ceph://'
        self.tmpPath = tmpPath # where image firstly imported to
        self.expectedSize = 0
        self.downloadedSize = 0
        self.progress = 0
        self.lastError = None
        self.lastOpTime = linux.get_current_timestamp()
        self.image_format = "raw"
    def fail(self, reason):
        # mark the task finished with an error and refresh its timestamp
        self.completed = True
        self.lastError = reason
        self.lastOpTime = linux.get_current_timestamp()
        logger.info('task failed for %s: %s' % (self.imageUuid, reason))
    def success(self):
        # mark the task finished successfully at 100% progress
        self.completed = True
        self.progress = 100
        self.lastOpTime = linux.get_current_timestamp()
    def is_started(self):
        return self.progress > 0
    def is_running(self):
        # True only for tasks that are neither completed nor started.
        # NOTE(review): a mid-transfer task (progress > 0, not completed)
        # is reported as NOT running, which makes it eligible for expunge
        # in UploadTasks._expunge_oldest_task — confirm this is intended.
        return not(self.completed or self.is_started())
class UploadTasks(object):
    """Bounded, lock-protected registry of UploadTask records by image UUID."""
    MAX_RECORDS = 80
    def __init__(self):
        self.tasks = {}
    def _expunge_oldest_task(self):
        # drop the record with the oldest lastOpTime, skipping tasks that
        # report themselves as running; no-op when nothing is eligible
        key, ts = '', linux.get_current_timestamp()
        for k in self.tasks:
            task = self.tasks[k]
            if task.is_running():
                continue
            if task.lastOpTime < ts:
                key, ts = k, task.lastOpTime
        if key != '': del(self.tasks[key])
    @lock.lock('ceph-upload-task')
    def add_task(self, t):
        # evict one old record first when the registry is over capacity
        if len(self.tasks) > self.MAX_RECORDS:
            self._expunge_oldest_task()
        self.tasks[t.imageUuid] = t
    @lock.lock('ceph-upload-task')
    def get_task(self, imageUuid):
        # returns None when no task is recorded for the uuid
        return self.tasks.get(imageUuid)
# ------------------------------------------------------------------ #
class ProgressedFileWriter(object):
    """File-like wrapper that forwards writes to `wfd` and reports the
    cumulative byte count through the `pfunc` callback."""

    def __init__(self, wfd, pfunc):
        self.wfd = wfd
        self.pfunc = pfunc
        self.bytesWritten = 0

    def write(self, s):
        # write first, then account and report progress
        self.wfd.write(s)
        self.bytesWritten += len(s)
        self.pfunc(self.bytesWritten)

    def seek(self, offset, whence=None):
        # deliberately a no-op: the underlying stream (a fifo) is not seekable
        pass
import cherrypy
class CustomPart(cherrypy._cpreqbody.Part):
    """Multipart body part that streams its payload through a fifo with progress reporting."""

    # Force cherrypy to always spill the part to a "file" (our fifo writer)
    # instead of keeping any of it in RAM.
    maxrambytes = 0

    def __init__(self, fp, headers, boundary, fifopath, pfunc):
        super(CustomPart, self).__init__(fp, headers, boundary)
        self.wfd = None
        self.file = None
        self.value = None
        self.fifopath = fifopath  # named pipe the upload is streamed into
        self.pfunc = pfunc        # progress callback, receives bytes written

    def make_file(self):
        # Called by cherrypy when the part exceeds maxrambytes (always, since
        # maxrambytes is 0); route all writes through the fifo.
        self.wfd = open(self.fifopath, 'w')
        return ProgressedFileWriter(self.wfd, self.pfunc)
def get_boundary(entity):
    """Extract and validate the multipart boundary, then consume the preamble.

    Returns the boundary marker bytes (b'--<boundary>') once the first marker
    line is seen, or None if the stream ends before any marker.
    Raises ValueError when the boundary parameter is missing or malformed.
    """
    boundary = ""
    params = entity.content_type.params
    if 'boundary' in params:
        # http://tools.ietf.org/html/rfc2046#section-5.1.1
        # "The grammar for parameters on the Content-type field is such that it
        # is often necessary to enclose the boundary parameter values in quotes
        # on the Content-type line"
        boundary = params['boundary'].strip('"')
    if not re.match("^[ -~]{0,200}[!-~]$", boundary):
        raise ValueError('Invalid boundary in multipart form: %r' % (boundary,))
    marker = ('--' + boundary).encode('ascii')

    # Skip everything before the first boundary marker.
    while True:
        line = entity.readline()
        if not line:
            return None
        if line.strip() == marker:
            return marker
def get_image_format_from_buf(qhdr):
    """Guess the disk-image format from a header buffer.

    Returns 'qcow2', 'derivedQcow2' (qcow2 with a backing file), 'iso', or 'raw'.
    """
    if qhdr[:4] == 'QFI\xfb':
        # QCOW2 magic; all-zero bytes at offset 16 mean no backing file
        if qhdr[16:20] == '\x00\x00\x00\00':
            return "qcow2"
        return "derivedQcow2"
    # ISO9660 'CD001' signature may appear at any of three offsets
    for offset in (0x8001, 0x8801, 0x9001):
        if qhdr[offset:offset + 5] == 'CD001':
            return 'iso'
    return "raw"
def stream_body(task, fpath, entity, boundary):
    """Consume a multipart upload into the fifo at `fpath` and import it into ceph.

    The producer (cherrypy part processing) writes the payload into the fifo
    while an async consumer pipes it into `task.tmpPath` via 'rbd import'.
    On success the image is converted/renamed to `task.dstPath` and the task
    is marked successful; on any failure the task is failed and the tmp image
    is removed.
    """
    def _progress_consumer(total):
        task.downloadedSize = total

    @thread.AsyncThread
    def _do_import(task, fpath):
        # Consumer side of the fifo: stream the uploaded bytes into ceph.
        shell.check_run("cat %s | rbd import --image-format 2 - %s" % (fpath, task.tmpPath))

    while True:
        headers = cherrypy._cpreqbody.Part.read_headers(entity.fp)
        p = CustomPart(entity.fp, headers, boundary, fpath, _progress_consumer)
        if not p.filename:
            # skip non-file form fields until the file part arrives
            continue
        # start consumer
        _do_import(task, fpath)
        try:
            p.process()
        except Exception as e:
            logger.warn('process image %s failed: %s' % (task.imageUuid, str(e)))
        finally:
            if p.wfd is not None:
                p.wfd.close()
        break

    if task.downloadedSize != task.expectedSize:
        task.fail('incomplete upload, got %d, expect %d' % (task.downloadedSize, task.expectedSize))
        shell.run('rbd rm %s' % task.tmpPath)
        return

    file_format = None
    try:
        file_format = linux.get_img_fmt('rbd:' + task.tmpPath)
    except Exception as e:
        task.fail('upload image %s failed: %s' % (task.imageUuid, str(e)))
        return

    if file_format == 'qcow2':
        # qcow2 with a backing file cannot be flattened here - reject it
        if linux.qcow2_get_backing_file('rbd:' + task.tmpPath):
            task.fail('Qcow2 image %s has backing file' % task.imageUuid)
            shell.run('rbd rm %s' % task.tmpPath)
            return

        conf_path = None
        try:
            with open('/etc/ceph/ceph.conf', 'r') as fd:
                conf = fd.read()
            conf = '%s\n%s\n' % (conf, 'rbd default format = 2')
            conf_path = linux.write_to_temp_file(conf)
            shell.check_run('%s -f qcow2 -O rbd rbd:%s rbd:%s:conf=%s' %
                            (qemu_img.subcmd('convert'), task.tmpPath, task.dstPath, conf_path))
        except Exception as e:
            task.fail('cannot convert Qcow2 image %s to rbd' % task.imageUuid)
            # BUG FIX: was logger.warn(fmt, (uuid, err)) which passes a single
            # tuple to a two-placeholder format and breaks log rendering
            logger.warn('convert image %s failed: %s' % (task.imageUuid, str(e)))
            return
        finally:
            shell.run('rbd rm %s' % task.tmpPath)
            if conf_path:
                os.remove(conf_path)
    else:
        # raw image: a simple rename puts it in its final place
        shell.check_run('rbd mv %s %s' % (task.tmpPath, task.dstPath))
    task.success()
# ------------------------------------------------------------------ #
class CephAgent(object):
    """HTTP agent exposing ceph backup-storage operations to the management node."""

    # URI paths served by this agent
    INIT_PATH = "/ceph/backupstorage/init"
    DOWNLOAD_IMAGE_PATH = "/ceph/backupstorage/image/download"
    JOB_CANCEL = "/job/cancel"
    UPLOAD_IMAGE_PATH = "/ceph/backupstorage/image/upload"
    UPLOAD_PROGRESS_PATH = "/ceph/backupstorage/image/progress"
    DELETE_IMAGE_PATH = "/ceph/backupstorage/image/delete"
    PING_PATH = "/ceph/backupstorage/ping"
    ECHO_PATH = "/ceph/backupstorage/echo"
    GET_IMAGE_SIZE_PATH = "/ceph/backupstorage/image/getsize"
    GET_FACTS = "/ceph/backupstorage/facts"
    GET_IMAGES_METADATA = "/ceph/backupstorage/getimagesmetadata"
    DELETE_IMAGES_METADATA = "/ceph/backupstorage/deleteimagesmetadata"
    DUMP_IMAGE_METADATA_TO_FILE = "/ceph/backupstorage/dumpimagemetadatatofile"
    CHECK_IMAGE_METADATA_FILE_EXIST = "/ceph/backupstorage/checkimagemetadatafileexist"
    CHECK_POOL_PATH = "/ceph/backupstorage/checkpool"
    GET_LOCAL_FILE_SIZE = "/ceph/backupstorage/getlocalfilesize/"
    MIGRATE_IMAGE_PATH = "/ceph/backupstorage/image/migrate"

    # name of the rados object holding image metadata in the backup pool
    CEPH_METADATA_FILE = "bs_ceph_info.json"
    # pseudo-scheme marking a download URL that is actually a user upload
    UPLOAD_PROTO = "upload://"
    LENGTH_OF_UUID = 32

    # class-level (shared) embedded HTTP server and upload-task registry
    http_server = http.HttpServer(port=7761)
    http_server.logfile_path = log.get_logfile_path()
    upload_tasks = UploadTasks()

    def __init__(self):
        """Register every URI handler on the embedded HTTP server."""
        self.http_server.register_async_uri(self.INIT_PATH, self.init)
        self.http_server.register_async_uri(self.DOWNLOAD_IMAGE_PATH, self.download)
        # upload is a raw handler: it streams the multipart body itself
        self.http_server.register_raw_uri(self.UPLOAD_IMAGE_PATH, self.upload)
        self.http_server.register_async_uri(self.UPLOAD_PROGRESS_PATH, self.get_upload_progress)
        self.http_server.register_async_uri(self.DELETE_IMAGE_PATH, self.delete)
        self.http_server.register_async_uri(self.JOB_CANCEL, self.cancel)
        self.http_server.register_async_uri(self.PING_PATH, self.ping)
        self.http_server.register_async_uri(self.GET_IMAGE_SIZE_PATH, self.get_image_size)
        self.http_server.register_async_uri(self.GET_FACTS, self.get_facts)
        self.http_server.register_sync_uri(self.ECHO_PATH, self.echo)
        self.http_server.register_async_uri(self.GET_IMAGES_METADATA, self.get_images_metadata)
        self.http_server.register_async_uri(self.CHECK_IMAGE_METADATA_FILE_EXIST, self.check_image_metadata_file_exist)
        self.http_server.register_async_uri(self.DUMP_IMAGE_METADATA_TO_FILE, self.dump_image_metadata_to_file)
        self.http_server.register_async_uri(self.DELETE_IMAGES_METADATA, self.delete_image_metadata_from_file)
        self.http_server.register_async_uri(self.CHECK_POOL_PATH, self.check_pool)
        self.http_server.register_async_uri(self.GET_LOCAL_FILE_SIZE, self.get_local_file_size)
        self.http_server.register_async_uri(self.MIGRATE_IMAGE_PATH, self.migrate_image, cmd=CephToCephMigrateImageCmd())
    def _get_capacity(self):
        """Return (total_bytes, avail_bytes, per-pool capacities, is_xsky) from 'ceph df'."""
        o = shell.call('ceph df -f json')
        df = jsonobject.loads(o)

        # Newer ceph reports *_bytes; older reports total_space/total_avail in KB.
        # NOTE(review): attribute suffixes mix '__' (presence test) and '_' (read);
        # presumably both resolve the same field in jsonobject - confirm.
        if df.stats.total_bytes__ is not None :
            total = long(df.stats.total_bytes_)
        elif df.stats.total_space__ is not None:
            total = long(df.stats.total_space__) * 1024
        else:
            raise Exception('unknown ceph df output: %s' % o)

        if df.stats.total_avail_bytes__ is not None:
            avail = long(df.stats.total_avail_bytes_)
        elif df.stats.total_avail__ is not None:
            avail = long(df.stats.total_avail_) * 1024
        else:
            raise Exception('unknown ceph df output: %s' % o)

        poolCapacities = []

        # xsky detection: presence of the xms-cli tool on this host
        xsky = True
        try:
            shell.call('which xms-cli')
        except:
            xsky = False

        if not df.pools:
            return total, avail, poolCapacities, xsky

        pools = ceph.getCephPoolsCapacity()
        if not pools:
            return total, avail, poolCapacities, xsky

        for pool in pools:
            poolCapacity = CephPoolCapacity(pool.poolName, pool.availableCapacity, pool.replicatedSize, pool.usedCapacity, pool.poolTotalSize)
            poolCapacities.append(poolCapacity)

        return total, avail, poolCapacities, xsky
def _set_capacity_to_response(self, rsp):
total, avail, poolCapacities, xsky = self._get_capacity()
rsp.totalCapacity = total
rsp.availableCapacity = avail
rsp.poolCapacities = poolCapacities
if xsky:
rsp.type = "xsky"
    @replyerror
    def echo(self, req):
        """Liveness probe: log the call and return an empty body."""
        logger.debug('get echoed')
        return ''
def _normalize_install_path(self, path):
return path.lstrip('ceph:').lstrip('//')
def _get_file_size(self, path):
o = shell.call('rbd --format json info %s' % path)
o = jsonobject.loads(o)
return long(o.size_)
@replyerror
def get_image_size(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = GetImageSizeRsp()
path = self._normalize_install_path(cmd.installPath)
rsp.size = self._get_file_size(path)
return jsonobject.dumps(rsp)
def _read_file_content(self, path):
with open(path) as f:
return f.read()
    @in_bash
    @replyerror
    def get_images_metadata(self, req):
        """Return metadata lines only for images whose rbd image still exists.

        Fetches the metadata object from the backup pool to /tmp, validates
        each JSON line with 'rbd info', and returns the surviving lines
        (newest kept first, consecutive duplicates by installPath skipped).
        """
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        pool_name = cmd.poolName
        # backup-storage uuid is the last dash-separated token of the pool name
        bs_uuid = pool_name.split("-")[-1]
        valid_images_info = ""
        self.get_metadata_file(bs_uuid, self.CEPH_METADATA_FILE)
        last_image_install_path = ""
        bs_ceph_info_file = "/tmp/%s" % self.CEPH_METADATA_FILE
        with open(bs_ceph_info_file) as fd:
            images_info = fd.read()
        for image_info in images_info.split('\n'):
            if image_info != '':
                image_json = jsonobject.loads(image_info)
                # todo support multiple bs
                image_uuid = image_json['uuid']
                image_install_path = image_json["backupStorageRefs"][0]["installPath"]
                # 'rbd info' succeeds only if the image still exists
                ret = bash_r("rbd info %s" % image_install_path.split("//")[1])
                if ret == 0 :
                    logger.info("Check image %s install path %s successfully!" % (image_uuid, image_install_path))
                    # skip consecutive duplicates of the same install path
                    if image_install_path != last_image_install_path:
                        valid_images_info = image_info + '\n' + valid_images_info
                        last_image_install_path = image_install_path
                else:
                    logger.warn("Image %s install path %s is invalid!" % (image_uuid, image_install_path))
        self.put_metadata_file(bs_uuid, self.CEPH_METADATA_FILE)
        rsp = GetImageMetaDataResponse()
        rsp.imagesMetadata= valid_images_info
        return jsonobject.dumps(rsp)
@in_bash
@replyerror
def check_image_metadata_file_exist(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
pool_name = cmd.poolName
bs_uuid = pool_name.split("-")[-1]
rsp = CheckImageMetaDataFileExistResponse()
rsp.backupStorageMetaFileName = self.CEPH_METADATA_FILE
ret, output = bash_ro("rados -p bak-t-%s stat %s" % (bs_uuid,self.CEPH_METADATA_FILE))
if ret == 0:
rsp.exist = True
else:
rsp.exist = False
return jsonobject.dumps(rsp)
def get_metadata_file(self, bs_uuid, file_name):
local_file_name = "/tmp/%s" % file_name
linux.rm_file_force(local_file_name)
bash_ro("rados -p bak-t-%s get %s %s" % (bs_uuid, file_name, local_file_name))
def put_metadata_file(self, bs_uuid, file_name):
local_file_name = "/tmp/%s" % file_name
ret, output = bash_ro("rados -p bak-t-%s put %s %s" % (bs_uuid, file_name, local_file_name))
if ret == 0:
linux.rm_file_force(local_file_name)
    @in_bash
    @replyerror
    def dump_image_metadata_to_file(self, req):
        """Append (or rewrite) image metadata into the pool's metadata file.

        cmd.imageMetaData may be a single JSON object string or a
        '[{...},{...}]' list string; cmd.dumpAllMetaData True means replace
        the whole file, False means append to the existing one.
        """
        def _write_info_to_metadata_file(fd):
            # split the bracketed list on '},' and restore the trailing brace
            # that the split removed, writing one JSON object per line
            strip_list_content = content[1:-1]
            data_list = strip_list_content.split('},')
            for item in data_list:
                if item.endswith("}") is not True:
                    item = item + "}"
                fd.write(item + '\n')
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        pool_name = cmd.poolName
        bs_uuid = pool_name.split("-")[-1]
        content = cmd.imageMetaData
        dump_all_metadata = cmd.dumpAllMetaData
        if dump_all_metadata is True:
            # this means no metadata exist in ceph
            bash_r("touch /tmp/%s" % self.CEPH_METADATA_FILE)
        else:
            self.get_metadata_file(bs_uuid, self.CEPH_METADATA_FILE)
        bs_ceph_info_file = "/tmp/%s" % self.CEPH_METADATA_FILE
        if content is not None:
            if '[' == content[0] and ']' == content[-1]:
                # list of image-info objects
                if dump_all_metadata is True:
                    with open(bs_ceph_info_file, 'w') as fd:
                        _write_info_to_metadata_file(fd)
                else:
                    with open(bs_ceph_info_file, 'a') as fd:
                        _write_info_to_metadata_file(fd)
            else:
                # one image info
                if dump_all_metadata is True:
                    with open(bs_ceph_info_file, 'w') as fd:
                        fd.write(content + '\n')
                else:
                    with open(bs_ceph_info_file, 'a') as fd:
                        fd.write(content + '\n')
        self.put_metadata_file(bs_uuid, self.CEPH_METADATA_FILE)
        rsp = DumpImageMetaDataToFileResponse()
        return jsonobject.dumps(rsp)
@in_bash
@replyerror
def delete_image_metadata_from_file(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
image_uuid = cmd.imageUuid
pool_name = cmd.poolName
bs_uuid = pool_name.split("-")[-1]
self.get_metadata_file(bs_uuid, self.CEPH_METADATA_FILE)
bs_ceph_info_file = "/tmp/%s" % self.CEPH_METADATA_FILE
ret, output = bash_ro("sed -i.bak '/%s/d' %s" % (image_uuid, bs_ceph_info_file))
self.put_metadata_file(bs_uuid, self.CEPH_METADATA_FILE)
rsp = DeleteImageMetaDataResponse()
rsp.ret = ret
return jsonobject.dumps(rsp)
    @replyerror
    @in_bash
    def get_facts(self, req):
        """Return the cluster fsid and the mon address that is local to this host."""
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        o = bash_o('ceph mon_status')
        mon_status = jsonobject.loads(o)
        fsid = mon_status.monmap.fsid_

        rsp = GetFactsRsp()
        facts = bash_o('ceph -s -f json')
        mon_facts = jsonobject.loads(facts)
        # pick the mon whose IP appears in this host's routing table;
        # NOTE(review): {{ADDR}} is presumably interpolated by @in_bash - confirm
        for mon in mon_facts.monmap.mons:
            ADDR = mon.addr.split(':')[0]
            if bash_r('ip route | grep -w {{ADDR}} > /dev/null') == 0:
                rsp.monAddr = ADDR
                break

        if not rsp.monAddr:
            raise Exception('cannot find mon address of the mon server[%s]' % cmd.monUuid)

        rsp.fsid = fsid
        return jsonobject.dumps(rsp)
    @replyerror
    def init(self, req):
        """Create any missing, non-predefined pools and return the cluster fsid."""
        cmd = jsonobject.loads(req[http.REQUEST_BODY])

        o = shell.call('ceph mon_status')
        mon_status = jsonobject.loads(o)
        fsid = mon_status.monmap.fsid_

        existing_pools = shell.call('ceph osd lspools')
        for pool in cmd.pools:
            # NOTE(review): substring test against the raw lspools output can
            # false-positive on pool-name prefixes - confirm acceptable
            if pool.name in existing_pools:
                continue

            # predefined pools must be created by the operator, never by us
            if pool.predefined:
                raise Exception('cannot find pool[%s] in the ceph cluster, you must create it manually' % pool.name)
            if ceph.is_xsky() or ceph.is_sandstone():
                raise Exception(
                    'The ceph storage type to be added does not support auto initialize pool, please create it manually')

            shell.call('ceph osd pool create %s 128' % pool.name)

        rsp = InitRsp()
        rsp.fsid = fsid
        self._set_capacity_to_response(rsp)
        return jsonobject.dumps(rsp)
def _parse_install_path(self, path):
return path.lstrip('ceph:').lstrip('//').split('/')
    def _fail_task(self, task, reason):
        """Mark the upload task failed and propagate the reason as an exception."""
        task.fail(reason)
        raise Exception(reason)
def _get_fifopath(self, uu):
import tempfile
d = tempfile.gettempdir()
return os.path.join(d, uu)
# handler for multipart upload, requires:
# - header X-IMAGE-UUID
# - header X-IMAGE-SIZE
def upload(self, req):
imageUuid = req.headers['X-IMAGE-UUID']
imageSize = req.headers['X-IMAGE-SIZE']
task = self.upload_tasks.get_task(imageUuid)
if task is None:
raise Exception('image not found %s' % imageUuid)
task.expectedSize = long(imageSize)
total, avail, poolCapacities, xsky = self._get_capacity()
if avail <= task.expectedSize:
self._fail_task(task, 'capacity not enough for size: ' + imageSize)
entity = req.body
boundary = get_boundary(entity)
if not boundary:
self._fail_task(task, 'unexpected post form')
try:
# prepare the fifo to save image upload
fpath = self._get_fifopath(imageUuid)
linux.rm_file_force(fpath)
os.mkfifo(fpath)
stream_body(task, fpath, entity, boundary)
except Exception as e:
self._fail_task(task, str(e))
finally:
linux.rm_file_force(fpath)
def _prepare_upload(self, cmd):
start = len(self.UPLOAD_PROTO)
imageUuid = cmd.url[start:start+self.LENGTH_OF_UUID]
dstPath = self._normalize_install_path(cmd.installPath)
pool, image_name = self._parse_install_path(cmd.installPath)
tmp_image_name = 'tmp-%s' % image_name
tmpPath = '%s/%s' % (pool, tmp_image_name)
task = UploadTask(imageUuid, cmd.installPath, dstPath, tmpPath)
self.upload_tasks.add_task(task)
def _get_upload_path(self, req):
host = req[http.REQUEST_HEADER]['Host']
return 'http://' + host + self.UPLOAD_IMAGE_PATH
@replyerror
def get_upload_progress(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
task = self.upload_tasks.get_task(cmd.imageUuid)
if task is None:
raise Exception('image not found %s' % cmd.imageUuid)
rsp = UploadProgressRsp()
rsp.completed = task.completed
rsp.installPath = task.installPath
rsp.size = task.expectedSize
rsp.actualSize = task.expectedSize
if task.expectedSize == 0:
rsp.progress = 0
elif task.completed:
rsp.progress = 100
else:
rsp.progress = task.downloadedSize * 90 / task.expectedSize
if task.lastError is not None:
rsp.success = False
rsp.error = task.lastError
return jsonobject.dumps(rsp)
    @replyerror
    @rollback
    def download(self, req):
        """Download an image (http/https/ftp, sftp, file, or upload://) into ceph.

        Imports the source into a temporary rbd image, converts qcow2 to raw
        (rbd) or renames raw in place, and returns size/actualSize/format.
        Progress is reported at 0-90% during transfer and 100% on finish.
        """
        rsp = DownloadRsp()

        def _get_origin_format(path):
            # sniff the source header to detect qcow2/iso/raw before importing;
            # 0x9007 bytes covers the deepest ISO9660 signature offset checked
            qcow2_length = 0x9007
            if path.startswith('http://') or path.startswith('https://') or path.startswith('ftp://'):
                resp = urllib2.urlopen(path)
                qhdr = resp.read(qcow2_length)
                resp.close()
            elif path.startswith('sftp://'):
                # pull just the header through the scp pipe, then kill the producer
                fd, tmp_file = tempfile.mkstemp()
                get_header_from_pipe_cmd = "timeout 60 head --bytes=%d %s > %s" % (qcow2_length, pipe_path, tmp_file)
                clean_cmd = "pkill -f %s" % pipe_path
                shell.run('%s & %s && %s' % (scp_to_pipe_cmd, get_header_from_pipe_cmd, clean_cmd))
                qhdr = os.read(fd, qcow2_length)
                if os.path.exists(tmp_file):
                    os.remove(tmp_file)
            else:
                resp = open(path)
                qhdr = resp.read(qcow2_length)
                resp.close()
            if len(qhdr) < qcow2_length:
                return "raw"
            return get_image_format_from_buf(qhdr)

        def get_origin_format(fpath, fail_if_has_backing_file=True):
            image_format = _get_origin_format(fpath)
            if image_format == "derivedQcow2" and fail_if_has_backing_file:
                raise Exception('image has backing file or %s is not exist!' % fpath)
            return image_format

        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        # shadows the module-level shell with a cancellable, traceable one
        shell = traceable_shell.get_shell(cmd)
        pool, image_name = self._parse_install_path(cmd.installPath)
        tmp_image_name = 'tmp-%s' % image_name

        @rollbackable
        def _1():
            # rollback: remove the temporary rbd image on failure
            shell.check_run('rbd rm %s/%s' % (pool, tmp_image_name))

        def _getRealSize(length):
            '''length looks like: 10245K'''
            logger.debug(length)
            if not length[-1].isalpha():
                return length
            units = {
                "g": lambda x: x * 1024 * 1024 * 1024,
                "m": lambda x: x * 1024 * 1024,
                "k": lambda x: x * 1024,
            }
            try:
                if not length[-1].isalpha():
                    return length
                return units[length[-1].lower()](int(length[:-1]))
            except:
                logger.warn(linux.get_exception_stacktrace())
                return length

        # whether we have an upload request
        if cmd.url.startswith(self.UPLOAD_PROTO):
            self._prepare_upload(cmd)
            rsp.size = 0
            rsp.uploadPath = self._get_upload_path(req)
            self._set_capacity_to_response(rsp)
            return jsonobject.dumps(rsp)

        if cmd.sendCommandUrl:
            Report.url = cmd.sendCommandUrl

        report = Report(cmd.threadContext, cmd.threadContextStack)
        report.processType = "AddImage"
        report.resourceUuid = cmd.imageUuid
        report.progress_report("0", "start")

        url = urlparse.urlparse(cmd.url)
        if url.scheme in ('http', 'https', 'ftp'):
            image_format = get_origin_format(cmd.url, True)
            cmd.url = linux.shellquote(cmd.url)
            # roll back tmp ceph file after import it
            _1()
            _, PFILE = tempfile.mkstemp()
            content_length = shell.call("""curl -sLI %s|awk '/[cC]ontent-[lL]ength/{print $NF}'""" % cmd.url).splitlines()[-1]
            total = _getRealSize(content_length)

            def _getProgress(synced):
                # wget writes its progress to PFILE; parse the last line
                last = linux.tail_1(PFILE).strip()
                if not last or len(last.split()) < 1 or 'HTTP request sent, awaiting response' in last:
                    return synced
                logger.debug("last synced: %s" % last)
                written = _getRealSize(last.split()[0])
                if total > 0 and synced < written:
                    synced = written
                    if synced < total:
                        percent = int(round(float(synced) / float(total) * 90))
                        report.progress_report(percent, "report")
                return synced

            logger.debug("content-length is: %s" % total)
            _, _, err = shell.bash_progress_1('set -o pipefail;wget --no-check-certificate -O - %s 2>%s| rbd import --image-format 2 - %s/%s'
                                              % (cmd.url, PFILE, pool, tmp_image_name), _getProgress)
            if err:
                raise err
            actual_size = linux.get_file_size_by_http_head(cmd.url)

            if os.path.exists(PFILE):
                os.remove(PFILE)

        elif url.scheme == 'sftp':
            port = (url.port, 22)[url.port is None]
            _, PFILE = tempfile.mkstemp()
            ssh_pswd_file = None
            pipe_path = PFILE + "fifo"
            scp_to_pipe_cmd = "scp -P %d -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null %s@%s:%s %s" % (port, url.username, url.hostname, url.path, pipe_path)
            sftp_command = "sftp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=no -P %s -b /dev/stdin %s@%s" % (port, url.username, url.hostname) + " <<EOF\n%s\nEOF\n"
            if url.password is not None:
                ssh_pswd_file = linux.write_to_temp_file(url.password)
                scp_to_pipe_cmd = 'sshpass -f %s %s' % (ssh_pswd_file, scp_to_pipe_cmd)
                sftp_command = 'sshpass -f %s %s' % (ssh_pswd_file, sftp_command)
            # remote size comes from the 5th column of 'ls -l' over sftp
            actual_size = shell.call(sftp_command % ("ls -l " + url.path)).splitlines()[1].strip().split()[4]
            os.mkfifo(pipe_path)
            image_format = get_origin_format(cmd.url, True)
            cmd.url = linux.shellquote(cmd.url)
            # roll back tmp ceph file after import it
            _1()

            def _get_progress(synced):
                # pv -n writes an integer percentage to PFILE
                if not os.path.exists(PFILE):
                    return synced
                last = linux.tail_1(PFILE).strip()
                if not last or not last.isdigit():
                    return synced
                report.progress_report(int(last)*90/100, "report")
                return synced

            get_content_from_pipe_cmd = "pv -s %s -n %s 2>%s" % (actual_size, pipe_path, PFILE)
            import_from_pipe_cmd = "rbd import --image-format 2 - %s/%s" % (pool, tmp_image_name)
            _, _, err = shell.bash_progress_1('set -o pipefail; %s & %s | %s' %
                                              (scp_to_pipe_cmd, get_content_from_pipe_cmd, import_from_pipe_cmd), _get_progress)
            if ssh_pswd_file:
                linux.rm_file_force(ssh_pswd_file)
            linux.rm_file_force(PFILE)
            linux.rm_file_force(pipe_path)
            if err:
                raise err

        elif url.scheme == 'file':
            src_path = cmd.url.lstrip('file:')
            src_path = os.path.normpath(src_path)
            if not os.path.isfile(src_path):
                raise Exception('cannot find the file[%s]' % src_path)
            image_format = get_origin_format(src_path, True)
            # roll back tmp ceph file after import it
            _1()
            shell.check_run("rbd import --image-format 2 %s %s/%s" % (src_path, pool, tmp_image_name))
            actual_size = os.path.getsize(src_path)
        else:
            raise Exception('unknown url[%s]' % cmd.url)

        # double-check the on-ceph format with qemu-img before finalizing
        file_format = shell.call("set -o pipefail; %s rbd:%s/%s | grep 'file format' | cut -d ':' -f 2" %
                                 (qemu_img.subcmd('info'), pool, tmp_image_name))
        file_format = file_format.strip()
        if file_format not in ['qcow2', 'raw']:
            raise Exception('unknown image format: %s' % file_format)

        if file_format == 'qcow2':
            conf_path = None
            try:
                with open('/etc/ceph/ceph.conf', 'r') as fd:
                    conf = fd.read()
                conf = '%s\n%s\n' % (conf, 'rbd default format = 2')
                conf_path = linux.write_to_temp_file(conf)
                # convert qcow2 -> raw rbd at the final install path
                shell.check_run('%s -f qcow2 -O rbd rbd:%s/%s rbd:%s/%s:conf=%s' %
                                (qemu_img.subcmd('convert'), pool, tmp_image_name, pool, image_name, conf_path))
                shell.check_run('rbd rm %s/%s' % (pool, tmp_image_name))
            finally:
                if conf_path:
                    os.remove(conf_path)
        else:
            shell.check_run('rbd mv %s/%s %s/%s' % (pool, tmp_image_name, pool, image_name))
        report.progress_report("100", "finish")

        @rollbackable
        def _2():
            # rollback: remove the finalized image if a later step fails
            shell.check_run('rbd rm %s/%s' % (pool, image_name))
        _2()

        o = shell.call('rbd --format json info %s/%s' % (pool, image_name))
        image_stats = jsonobject.loads(o)

        rsp.size = long(image_stats.size_)
        rsp.actualSize = actual_size
        # qcow2 sources were converted, so the stored format is raw
        if image_format == "qcow2":
            rsp.format = "raw"
        else:
            rsp.format = image_format
        self._set_capacity_to_response(rsp)
        return jsonobject.dumps(rsp)
@replyerror
def ping(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = PingRsp()
facts = bash_o('ceph -s -f json')
mon_facts = jsonobject.loads(facts)
found = False
for mon in mon_facts.monmap.mons:
if cmd.monAddr in mon.addr:
found = True
break
if not found:
rsp.success = False
rsp.failure = "MonAddrChanged"
rsp.error = 'The mon addr is changed on the mon server[uuid:%s], not %s anymore.' \
'Reconnect the ceph primary storage' \
' may solve this issue' % (cmd.monUuid, cmd.monAddr)
return jsonobject.dumps(rsp)
pool, objname = cmd.testImagePath.split('/')
create_img = shell.ShellCmd("echo zstack | rados -p '%s' put '%s' -" % (pool, objname))
create_img(False)
if create_img.return_code != 0:
rsp.success = False
rsp.failure = 'UnableToCreateFile'
rsp.error = "%s %s" % (create_img.stderr, create_img.stdout)
else:
shell.run("rados -p '%s' rm '%s'" % (pool, objname))
linux.write_uuids("cephmonbs", "cephmonbs=%s" % cmd.monUuid)
return jsonobject.dumps(rsp)
    @replyerror
    def delete(self, req):
        """Delete an rbd image, retrying for up to 30s while watchers time out."""
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        pool, image_name = self._parse_install_path(cmd.installPath)

        def delete_image(_):
            # in case image is deleted, we don't have to wait for timeout
            img = "%s/%s" % (pool, image_name)
            shell.check_run('rbd info %s && rbd rm %s' % (img, img))
            return True

        # 'rbd rm' might fail due to client crash. We wait for 30 seconds as suggested by 'rbd'.
        #
        # rbd: error: image still has watchers
        # This means the image is still open or the client using it crashed. Try again after
        # closing/unmapping it or waiting 30s for the crashed client to timeout.
        linux.wait_callback_success(delete_image, interval=5, timeout=30, ignore_exception_in_callback=True)

        rsp = AgentResponse()
        self._set_capacity_to_response(rsp)
        return jsonobject.dumps(rsp)
@replyerror
def check_pool(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
existing_pools = shell.call('ceph osd lspools')
for pool in cmd.pools:
if pool.name not in existing_pools:
raise Exception('cannot find pool[%s] in the ceph cluster, you must create it manually' % pool.name)
return jsonobject.dumps(AgentResponse())
    @replyerror
    def get_local_file_size(self, req):
        """Handler: return the size of the local file at cmd.path."""
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = GetLocalFileSizeRsp()
        rsp.size = linux.get_local_file_size(cmd.path)
        return jsonobject.dumps(rsp)
    def _migrate_image(self, image_uuid, image_size, src_install_path, dst_install_path, dst_mon_addr, dst_mon_user, dst_mon_passwd, dst_mon_port):
        """Copy an rbd image to a remote ceph over ssh, verifying md5 on both ends.

        Returns 0 on success, non-zero on transfer failure, -1 on md5 mismatch.
        `image_size` is accepted but not used by the current implementation.
        """
        src_install_path = self._normalize_install_path(src_install_path)
        dst_install_path = self._normalize_install_path(dst_install_path)

        # pipeline: rbd export | tee(local md5) | ssh -> tee(remote md5) | rbd import
        ssh_cmd, tmp_file = linux.build_sshpass_cmd(dst_mon_addr, dst_mon_passwd, 'tee >(md5sum >/tmp/%s_dst_md5) | rbd import - %s' % (image_uuid, dst_install_path), dst_mon_user, dst_mon_port)
        rst = shell.run("rbd export %s - | tee >(md5sum >/tmp/%s_src_md5) | %s" % (src_install_path, image_uuid, ssh_cmd))
        linux.rm_file_force(tmp_file)
        if rst != 0:
            return rst

        # compare the md5 recorded on each side of the pipe
        src_md5 = self._read_file_content('/tmp/%s_src_md5' % image_uuid)
        dst_md5 = linux.sshpass_call(dst_mon_addr, dst_mon_passwd, 'cat /tmp/%s_dst_md5' % image_uuid, dst_mon_user, dst_mon_port)
        if src_md5 != dst_md5:
            return -1
        else:
            return 0
@replyerror
@in_bash
def migrate_image(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = AgentResponse()
rst = self._migrate_image(cmd.imageUuid, cmd.imageSize, cmd.srcInstallPath, cmd.dstInstallPath, cmd.dstMonHostname, cmd.dstMonSshUsername, cmd.dstMonSshPassword, cmd.dstMonSshPort)
if rst != 0:
rsp.success = False
rsp.error = "Failed to migrate image from one ceph backup storage to another."
self._set_capacity_to_response(rsp)
return jsonobject.dumps(rsp)
@replyerror
def cancel(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = AgentResponse()
if not traceable_shell.cancel_job(cmd):
rsp.success = False
rsp.error = "no matched job to cancel"
return jsonobject.dumps(rsp)
class CephDaemon(daemon.Daemon):
    """Daemon wrapper that runs the CephAgent's HTTP server."""

    def __init__(self, pidfile, py_process_name):
        super(CephDaemon, self).__init__(pidfile, py_process_name)

    def run(self):
        # blocks here serving HTTP requests until the daemon is stopped
        self.agent = CephAgent()
        self.agent.http_server.start()
| apache-2.0 |
kshehadeh/robotframework-selenium2library | demo/demoapp/server.py | 48 | 2648 | #!/usr/bin/env python
# Copyright 2008-2011 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple HTTP server requiring only Python and no other preconditions.
Server is started by running this script with argument 'start' and
optional port number (default port 7272). Server root is the same
directory where this script is situated. Server can be stopped either
using Ctrl-C or running this script with argument 'stop' and same port
number as when starting it.
"""
import os
import sys
import httplib
import BaseHTTPServer
import SimpleHTTPServer
DEFAULT_PORT = 7272
DEFAULT_HOST = 'localhost'
class StoppableHttpServer(BaseHTTPServer.HTTPServer):
    """HTTP server whose serve loop exits when its ``stop`` flag is set."""

    def serve_forever(self):
        """Handle requests one at a time until stopped or interrupted."""
        self.stop = False
        while True:
            if self.stop:
                break
            try:
                self.handle_request()
            except KeyboardInterrupt:
                break
class StoppableHttpRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    """Request handler adding a custom QUIT verb (server shutdown) and POST support."""

    def do_QUIT(self):
        # custom verb sent by stop_server() to end the serve loop
        self.send_response(200)
        self.end_headers()
        self.server.stop = True

    def do_POST(self):
        # We could also process parameters here using something like below.
        # length = self.headers['Content-Length']
        # print self.rfile.read(int(length))
        self.do_GET()
def start_server(host=DEFAULT_HOST, port=DEFAULT_PORT):
    """Serve the demo app from this script's directory until stopped."""
    print "Demo application starting on port %s" % port
    # serve files relative to the directory containing this script
    root = os.path.dirname(os.path.abspath(__file__))
    os.chdir(root)
    server = StoppableHttpServer((host, int(port)), StoppableHttpRequestHandler)
    server.serve_forever()
def stop_server(host=DEFAULT_HOST, port=DEFAULT_PORT):
    """Send the custom QUIT verb to shut down a running demo server."""
    print "Demo application on port %s stopping" % port
    conn = httplib.HTTPConnection("%s:%s" % (host, port))
    conn.request("QUIT", "/")
    conn.getresponse()
def print_help():
    """Print the module docstring as usage help."""
    print __doc__
if __name__ == '__main__':
    # Dispatch on the first CLI argument ('start'/'stop'/'help'); any
    # remaining arguments (host/port) are passed through to the handler.
    try:
        {'start': start_server,
         'stop': stop_server,
         'help': print_help}[sys.argv[1]](*sys.argv[2:])
    except (IndexError, KeyError, TypeError):
        # missing/unknown subcommand or bad argument count
        print 'Usage: %s start|stop|help [port]' % os.path.basename(sys.argv[0])
| apache-2.0 |
amaurywalbert/twitter | evaluation/without_ground_truth/old/hashmap_calc_metrics_without_ego_full_snap_v3.0.py | 1 | 13898 | # -*- coding: latin1 -*-
################################################################################################
import snap,datetime, sys, time, json, os, os.path, shutil, time, struct, random
import metrics_v2,calc
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
######################################################################################################################################################################
## Status - Versão 1 - Calcular métrica definida abaixo para avaliação sem ground truth - Usando a Biblioteca SNAP
## Versão 2 - Calcular todas as métricas métrica definidas abaixo para avaliação sem ground truth
## Versaõ 3 - Usa calc metrics_v2 - com correções... e salvar os dados dos calculos em arquivo texto.
##
## SALVA ARQUIVOS NOS DIRETÒRIOS:
## RAW: conforme calculado -
## SEPARATE BY METRICS
##
## # INPUT: Arquivos com as comunidades detectadas, rede e o ground truth
##
## # OUTPUT:
## Resultados separados por métrica
######################################################################################################################################################################
######################################################################################################################################################################
#
# Recebe arquivo e devolve dicionário com as comunidades
#
######################################################################################################################################################################
def prepare_communities(community_file):
    """Parse a detected-communities file into a dictionary.

    Each line of `community_file` (any iterable of strings) holds one
    community as space-separated member ids. Returns
    {'com1': [ids...], 'com2': [ids...], ...} keyed in file order.

    BUG FIX: replaced the Python-2-only long() with int(), which behaves
    identically in Python 2 (ints auto-promote) and also works on Python 3.
    """
    communities = {}
    for i, line in enumerate(community_file, start=1):
        key = "com" + str(i)  # dictionary key for this community
        members = []
        for item in line.split(' '):
            if item != "\n":  # skip the bare trailing-newline token
                # int() tolerates an attached trailing '\n' on the last id
                members.append(int(item))
        communities[key] = members
    return communities
######################################################################################################################################################################
#
# Criar diretórios
#
######################################################################################################################################################################
def create_dirs(out_ad, out_c, out_cut_r, out_d, out_e, out_normal_cut, out_s):
    """Create each per-metric output directory if it does not exist yet.

    Refactor: the original repeated the same exists/makedirs pair seven
    times; a single loop over the arguments is equivalent.
    """
    for directory in (out_ad, out_c, out_cut_r, out_d, out_e, out_normal_cut, out_s):
        if not os.path.exists(directory):
            os.makedirs(directory)
######################################################################################################################################################################
#
# Cálculos iniciais sobre o conjunto de dados lidos.
#
######################################################################################################################################################################
def calculate_alg(singletons,net,uw,ud,g_type,alg):
    """Compute community-quality metrics for one network/algorithm pair.

    For every threshold directory under the detected-communities tree, load
    each ego's community file plus its edge list (via `snap`), compute seven
    metrics with ``metrics_v2.calc_metrics`` and dump one JSON file per
    metric per threshold under ``output_dir`` (module-level global).

    :param singletons: "full" or "without_singletons" variant of the data
    :param net: network id, e.g. "n1"
    :param uw: True when the graph is unweighted -- NOTE(review): never used
               in this function body; confirm it is intentional.
    :param ud: True when the graph is undirected (selects PUNGraph/PNGraph)
    :param g_type: "graphs_with_ego" or "graphs_without_ego"
    :param alg: community-detection algorithm name (selects the graphs dir)
    """
    # Root of the detected communities for this configuration.
    communities = "/home/amaury/communities_hashmap/"+str(g_type)+"/"+str(alg)+"/"+str(singletons)+"/"+str(net)+"/"
    # The infomap variants keep their edge lists in dedicated trees.
    if alg == "infomap":
        graphs = "/home/amaury/graphs_hashmap_infomap/"+str(net)+"/"+str(g_type)+"/"
    elif alg == "infomap_without_weight":
        graphs = "/home/amaury/graphs_hashmap_infomap_without_weight/"+str(net)+"/"+str(g_type)+"/"
    else:
        graphs = "/home/amaury/graphs_hashmap/"+str(net)+"/"+str(g_type)+"/"
    # One output directory per metric.
    out_ad = str(output_dir)+"average_degree/"+str(g_type)+"/"+str(alg)+"/"+str(singletons)+"/"+str(net)+"/"
    out_c = str(output_dir)+"conductance/"+str(g_type)+"/"+str(alg)+"/"+str(singletons)+"/"+str(net)+"/"
    out_cut_r = str(output_dir)+"cut_ratio/"+str(g_type)+"/"+str(alg)+"/"+str(singletons)+"/"+str(net)+"/"
    out_d = str(output_dir)+"density/"+str(g_type)+"/"+str(alg)+"/"+str(singletons)+"/"+str(net)+"/"
    out_e = str(output_dir)+"expansion/"+str(g_type)+"/"+str(alg)+"/"+str(singletons)+"/"+str(net)+"/"
    out_normal_cut = str(output_dir)+"normalized_cut/"+str(g_type)+"/"+str(alg)+"/"+str(singletons)+"/"+str(net)+"/"
    out_s = str(output_dir)+"separability/"+str(g_type)+"/"+str(alg)+"/"+str(singletons)+"/"+str(net)+"/"
    _avg_time = []  # per-threshold wall-clock times, averaged at the end
    if not os.path.exists(communities):
        print ("Diretório com as comunidades não encontrado: "+str(communities)+"\n")
    else:
        print("\n######################################################################")
        for threshold in os.listdir(communities):
            if not os.path.isdir(str(communities)+str(threshold)+"/"):
                print ("Threshold para a rede "+str(net)+" não encontrado: "+str(threshold))
            else:
                partial_start = time.time()
                create_dirs(out_ad,out_c,out_cut_r,out_d,out_e,out_normal_cut,out_s)
                # Skip the whole threshold only when ALL seven metric files
                # already exist (cheap resume after interruption).
                if os.path.exists(str(out_ad)+str(threshold)+".json") and os.path.exists(str(out_c)+str(threshold)+".json") and os.path.exists(str(out_cut_r)+str(threshold)+".json") and os.path.exists(str(out_d)+str(threshold)+".json") and os.path.exists(str(out_e)+str(threshold)+".json") and os.path.exists(str(out_normal_cut)+str(threshold)+".json") and os.path.exists(str(out_s)+str(threshold)+".json"):
                    print ("Arquivo de destino já existe: "+str(threshold)+".json")
                else:
                    print("######################################################################")
                    # One dict per metric, keyed by ego id.
                    average_degree = {}
                    conductance = {}
                    cut_ratio = {}
                    density = {}
                    expansion = {}
                    normalized_cut = {}
                    separability = {}
                    i=0 # index of the current ego (progress display only)
                    for file in os.listdir(str(communities)+str(threshold)+"/"):
                        if os.path.isfile(str(communities)+str(threshold)+"/"+file):
                            # Community files are named "<ego_id>.txt".
                            ego_id = file.split(".txt")
                            ego_id = long(ego_id[0])
                            i+=1
                            if not os.path.isfile(str(graphs)+str(ego_id)+".edge_list"):
                                print ("ERROR - EGO: "+str(i)+" - Arquivo com lista de arestas não encontrado:" +str(graphs)+str(ego_id)+".edge_list")
                            else:
                                with open(str(communities)+str(threshold)+"/"+file, 'r') as community_file:
                                    # Load the ego graph: directed (PNGraph) or
                                    # undirected (PUNGraph) depending on ``ud``.
                                    if ud is False:
                                        G = snap.LoadEdgeList(snap.PNGraph, str(graphs)+str(ego_id)+".edge_list", 0, 1) # load from a text file - may require a separator: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
                                    else:
                                        G = snap.LoadEdgeList(snap.PUNGraph, str(graphs)+str(ego_id)+".edge_list", 0, 1) # load from a text file - may require a separator: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
                                    print(str(g_type)+" - "+str(alg)+" - "+str(singletons)+" - Rede: "+str(net)+" - THRESHOLD: "+str(threshold)+" - ego("+str(i)+"): "+str(file))
                                    communities_dict = prepare_communities(community_file) # build the {comN: [ids]} dict from the file
                                    avg_ad,avg_c,avg_cut_r,avg_d,avg_e,avg_normal_cut,avg_s = metrics_v2.calc_metrics(communities_dict,G,ud) # compute the seven metrics
                                    average_degree[ego_id] = avg_ad
                                    conductance[ego_id] = avg_c
                                    cut_ratio[ego_id] = avg_cut_r
                                    density[ego_id] = avg_d
                                    expansion[ego_id] = avg_e
                                    normalized_cut[ego_id] = avg_normal_cut
                                    separability[ego_id] = avg_s
                                    print ("Average Degree: "+str(avg_ad['media'])+" - Conductance: "+str(avg_c['media'])+" - Cut Ratio: "+str(avg_cut_r['media'])+" - Density: "+str(avg_d['media']))
                                    print ("Expansion: "+str(avg_e['media'])+" - Normalized Cut: "+str(avg_normal_cut['media'])+" - Separability: "+str(avg_s['media']))
                                    print
                    print("######################################################################")
                    # Persist one JSON file per metric for this threshold.
                    with open(str(out_ad)+str(threshold)+".json", "w") as f:
                        f.write(json.dumps(average_degree))
                    with open(str(out_c)+str(threshold)+".json", "w") as f:
                        f.write(json.dumps(conductance))
                    with open(str(out_cut_r)+str(threshold)+".json", "w") as f:
                        f.write(json.dumps(cut_ratio))
                    with open(str(out_d)+str(threshold)+".json", "w") as f:
                        f.write(json.dumps(density))
                    with open(str(out_e)+str(threshold)+".json", "w") as f:
                        f.write(json.dumps(expansion))
                    with open(str(out_normal_cut)+str(threshold)+".json", "w") as f:
                        f.write(json.dumps(normalized_cut))
                    with open(str(out_s)+str(threshold)+".json", "w") as f:
                        f.write(json.dumps(separability))
                partial_end = time.time()
                partial_time_exec = partial_end - partial_start
                print ("\nTempo de execução para o threshold "+str(threshold)+": "+str(partial_time_exec)+"\n")
                _avg_time.append(partial_time_exec)
        avg_time = calc.calcular(_avg_time)
        print ("\nTempo de médio de execução em cada threshold: "+str(avg_time)+"\n")
        print("\n######################################################################\n")
######################################################################################################################################################################
#
# Método principal do programa.
# Realiza teste e coleta dos dados de cada user especificado no arquivo.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
    """Interactive entry point (Python 2).

    Asks for the network (op) and the detection algorithm (op2) on stdin,
    derives the directed/undirected (``ud``) and weighted (``uw``) flags,
    then runs :func:`calculate_alg` for the "graphs_without_ego" variant.
    """
    os.system('clear')
    print "################################################################################"
    print" "
    print" Avaliação de Comunidades - Amaury's Software "
    print" "
    print"#################################################################################"
    print
    print
    print" 1 - Follow"
    print" 9 - Follwowers"
    print" 2 - Retweets"
    print" 3 - Likes"
    print" 4 - Mentions"
    print " "
    print" 5 - Co-Follow"
    print" 10 - Co-Followers"
    print" 6 - Co-Retweets"
    print" 7 - Co-Likes"
    print" 8 - Co-Mentions"
    print
    op = int(raw_input("Escolha uma opção acima: "))
    if op in (5,6,7,8,10): # co-interaction networks are undirected
        ud = True
    elif op in (1,2,3,4,9):
        ud = False
    else:
        print("Opção inválida! Saindo...")
        sys.exit()
    if op == 1 or op == 9: # follow/followers networks are unweighted
        uw = True
    else:
        uw = False
    print
    print ("\n")
    ######################################################################
    net = "n"+str(op)
    #######################################################################
    #######################################################################
    print("######################################################################")
    print
    print "Algoritmo utilizado na detecção das comunidades"
    print
    print
    print" 1 - COPRA"
    print" 2 - OSLOM"
    print" 3 - GN"
    print" 4 - COPRA - Partition"
    print" 5 - INFOMAP - Partition"
    print" 6 - INFOMAP - Partition - Without Weight"
    print
    op2 = int(raw_input("Escolha uma opção acima: "))
    if op2 == 1:
        alg = "copra"
    elif op2 == 2:
        alg = "oslom"
    elif op2 == 3:
        alg = "gn"
    elif op2 == 4:
        alg = "copra_partition"
    elif op2 == 5:
        alg = "infomap"
    elif op2 == 6:
        alg = "infomap_without_weight"
    else:
        alg = ""
        print("Opção inválida! Saindo...")
        sys.exit()
    print
    print ("\n")
    #######################################################################
    #######################################################################
    print
    print ("Opção escolhida: "+str(net)+" - "+str(alg))
    print ("Aguarde...")
    time.sleep(5)
    ######################################################################################################################
    g_type1 = "graphs_with_ego"
    g_type2 = "graphs_without_ego"
    singletons1 = "full"
    singletons2 = "without_singletons"
    ######################################################################################################################
    os.system('clear')
    start = time.time()
    # The "with ego" run is currently disabled; only "without ego" is computed.
    # print ("Calculando métricas nas comunidades detectadas na rede: "+str(net)+" - "+str(g_type1)+" - Algoritmo: "+str(alg)+" - "+str(singletons1))
    # calculate_alg(singletons1,net,uw,ud,g_type1,alg)
    print ("Calculando métricas nas comunidades detectadas na rede: "+str(net)+" - "+str(g_type2)+" - Algoritmo: "+str(alg)+" - "+str(singletons1))
    calculate_alg(singletons1,net,uw,ud,g_type2,alg)
    end = time.time()
    time_exec = end - start
    ######################################################################################################################
    print("######################################################################")
    print("\nScript finalizado! Tempo de execução: "+str(time_exec)+"\n")
    print("######################################################################\n")
######################################################################################################################################################################
#
# INÍCIO DO PROGRAMA
#
######################################################################################################################################################################
# Base directory under which every metric JSON file is written.
output_dir = "/home/amaury/Dropbox/evaluation_hashmap/without_ground_truth/"
######################################################################################################################
if __name__ == "__main__": main()
| gpl-3.0 |
EvaSDK/pyshop | pyshop/models.py | 4 | 29585 | #-*- coding: utf-8 -*-
"""
PyShop models
Describe the sql schema of PyShop using SQLAlchemy.
PyShop uses with SQLAlchemy with the sqlite backend.
"""
import re
import sys
import logging
# from distutils.util import get_platform
try:
import ldap
except ImportError:
# means that python-ldap is not installed
ldap = None
from pyramid.settings import asbool
import cryptacular.bcrypt
import transaction
from pkg_resources import parse_version
from sqlalchemy import (Table, Column, ForeignKey, Index,
Integer, Boolean, Unicode, UnicodeText,
DateTime, Enum)
from sqlalchemy.orm import relationship, synonym, backref
from sqlalchemy.sql.expression import func, or_, and_
from sqlalchemy.ext.declarative import declared_attr
from pyshop.compat import unicode
from .helpers.sqla import (Database, SessionFactory, ModelError,
create_engine as create_engine_base,
dispose_engine as dispose_engine_base
)
log = logging.getLogger(__file__)
crypt = cryptacular.bcrypt.BCRYPTPasswordManager()
Base = Database.register('pyshop')
DBSession = lambda: SessionFactory.get('pyshop')()
re_email = re.compile(r'^[^@]+@[a-z0-9]+[-.a-z0-9]+\.[a-z]+$', re.I)
def _whlify(filename):
    """Derive a wheel file name from an sdist archive name.

    Strips a known sdist extension (``.tar.gz``, ``.tar.bz2`` or ``.zip``)
    and appends the ``-py<major><minor>-none-any.whl`` tag for the running
    interpreter.

    :param filename: sdist archive file name
    :return: corresponding wheel file name
    :raise NotImplementedError: when the extension is not supported
    """
    for extension in ('.tar.gz', '.tar.bz2', '.zip'):
        if filename.endswith(extension):
            stem = filename[:-len(extension)]
            break
    else:
        raise NotImplementedError('filename %s not supported' % filename)
    major = sys.version_info[0]
    minor = sys.version_info[1]
    # XXX platform tag is hard-coded to 'any' (should use get_platform()).
    return u'{pkg}-py{pyvermax}{pyvermin}-none-any.whl'.format(
        pkg=stem, pyvermax=major, pyvermin=minor)
def create_engine(settings, prefix='sqlalchemy.', scoped=False):
    """
    Create the SQLAlchemy engine from the paste settings.

    Thin wrapper binding the shared helper to the 'pyshop' database name.

    :param settings: WSGI Paste parameters from the ini file.
    :type settings: dict
    :param prefix: SQLAlchemy engine configuration key prefix
    :type prefix: unicode
    :param scoped: True if the created engine configure a scoped session.
    :type scoped: bool
    :return: SQLAlchemy created engine
    :rtype: :class:`sqlalchemy.Engine`
    """
    return create_engine_base('pyshop', settings, prefix, scoped)
def dispose_engine():
    """Dispose the pyshop SQLAlchemy engine (counterpart of create_engine)."""
    dispose_engine_base('pyshop')
class Permission(Base):
    """Describe a user permission."""
    # Unique human-readable permission name; granted to users via groups.
    name = Column(Unicode(255), nullable=False, unique=True)
# Many-to-many association between Group and Permission.
group__permission = Table('group__permission', Base.metadata,
                          Column('group_id', Integer, ForeignKey('group.id')),
                          Column('permission_id',
                                 Integer, ForeignKey('permission.id'))
                          )
class Group(Base):
    """
    Describe user's groups.

    Groups aggregate permissions; users obtain permissions through
    group membership only.
    """
    name = Column(Unicode(255), nullable=False, unique=True)
    permissions = relationship(Permission, secondary=group__permission,
                               lazy='select')
    @classmethod
    def by_name(cls, session, name):
        """
        Get a group from a given name.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param name: name of the group
        :type name: unicode
        :return: group instance, or None when not found
        :rtype: :class:`pyshop.models.Group`
        """
        return cls.first(session, where=(cls.name == name,))
# Many-to-many association between User and Group.
user__group = Table('user__group', Base.metadata,
                    Column('group_id', Integer, ForeignKey('group.id')),
                    Column('user_id', Integer, ForeignKey('user.id'))
                    )
class User(Base):
    """
    Describe a user.

    This model handles `local` users granted to access pyshop and
    mirrored users from PyPI (``local=False``).
    """
    @declared_attr
    def __table_args__(cls):
        # A login may exist twice: once as a local account and once as a
        # mirrored PyPI account -- hence the composite unique index.
        return (Index('idx_%s_login_local' % cls.__tablename__,
                      'login', 'local', unique=True),
                {'mysql_engine': 'InnoDB',
                 'mysql_charset': 'utf8',
                 }
                )
    login = Column(Unicode(255), nullable=False)
    _password = Column('password', Unicode(60), nullable=True)  # bcrypt hash
    firstname = Column(Unicode(255), nullable=True)
    lastname = Column(Unicode(255), nullable=True)
    email = Column(Unicode(255), nullable=True)
    groups = relationship(Group, secondary=user__group, lazy='joined',
                          backref='users')
    local = Column(Boolean, nullable=False, default=True)
    @property
    def name(self):
        # Full name when both parts are set, otherwise fall back to login.
        return u'%s %s' % (self.firstname, self.lastname)\
            if self.firstname and self.lastname else self.login
    def _get_password(self):
        return self._password
    def _set_password(self, password):
        # Only the bcrypt hash of the clear-text password is ever stored.
        self._password = unicode(crypt.encode(password))
    password = property(_get_password, _set_password)
    password = synonym('_password', descriptor=password)
    @property
    def permissions(self):
        """Permission names aggregated (deduplicated) from all groups."""
        result = set()
        for group in self.groups:
            result = result.union([perm.name for perm in group.permissions])
        return list(result)
    @classmethod
    def by_login(cls, session, login, local=True):
        """
        Get a user from a given login.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param login: the user login
        :type login: unicode
        :param local: restrict the lookup to local (True) or mirrored users
        :type local: bool
        :return: the associated user, or None
        :rtype: :class:`pyshop.models.User`
        """
        user = cls.first(session,
                         where=((cls.login == login),
                                (cls.local == local),)
                         )
        # XXX it's appear that this is not case sensitive !
        # Re-check the login in Python to enforce case sensitivity.
        return user if user and user.login == login else None
    @classmethod
    def by_credentials(cls, session, login, password):
        """
        Get a user from given credentials (local users only).

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param login: username
        :type login: unicode
        :param password: user password (clear text, checked against hash)
        :type password: unicode
        :return: associated user, or None on mismatch
        :rtype: :class:`pyshop.models.User`
        """
        user = cls.by_login(session, login, local=True)
        if not user:
            return None
        if crypt.check(user.password, password):
            return user
    @classmethod
    def by_ldap_credentials(cls, session, login, password, settings):
        """Authenticate *login*/*password* against the configured LDAP.

        On success, a matching local user is created on the fly (with the
        "developer" and "installer" groups) if it does not exist yet.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param login: username
        :type login: unicode
        :param password: user password
        :type password: unicode
        :param settings: settings from self.request.registry.settings in views
        :type settings: dict
        :return: associated user, or None on any failure
        :rtype: :class:`pyshop.models.User`
        """
        if not asbool(settings.get('pyshop.ldap.use_for_auth','False')):
            return None
        if ldap is None:
            raise ImportError("no module name ldap. Install python-ldap package")
        try:
            if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'):
                ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, '/etc/openldap/cacerts')
            ldap.set_option(ldap.OPT_REFERRALS, ldap.OPT_OFF)
            ldap.set_option(ldap.OPT_RESTART, ldap.OPT_ON)
            ldap.set_option(ldap.OPT_TIMEOUT, 20)
            ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10)
            ldap.set_option(ldap.OPT_TIMELIMIT, 15)
            ldap_server_type = settings.get('pyshop.ldap.type', 'ldap')
            host=settings['pyshop.ldap.host'].strip()
            # NOTE(review): get() returns None when the port setting is
            # absent, and None.strip() raises AttributeError -- confirm a
            # port is always configured, or guard this call.
            port = settings.get('pyshop.ldap.port', None).strip()
            if ldap_server_type in ["ldaps", "start_tls"]:
                port = port or 689
                ldap_type = "ldaps"
                certreq = settings.get('pyshop.ldap.certreq', 'DEMAND').strip()
                if certreq not in ['DEMAND', 'ALLOW', 'HARD', 'TRY', 'NEVER']:
                    certreq = 'DEMAND'
                tls_cert = getattr(ldap, 'OPT_X_TLS_%s' % certreq)
                ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_cert)
            else:
                port = port or 389
                ldap_type = 'ldap'
            server_url = "{ldap_type}://{host}:{port}".format(ldap_type=ldap_type,
                                                              host=host,
                                                              port=port)
            server = ldap.initialize(server_url)
            if ldap_server_type == "start_tls":
                server.start_tls_s()
            server.protocol = ldap.VERSION3
            # bind the account if needed
            if settings['pyshop.ldap.account'] and settings['pyshop.ldap.password']:
                server.simple_bind_s(settings['pyshop.ldap.account'],
                                     settings['pyshop.ldap.password'])
            filter_ = settings['pyshop.ldap.search_filter'].format(username=login)
            results = server.search_ext_s(settings['pyshop.ldap.bind_dn'],
                                          getattr(ldap,"SCOPE_%s"%settings['pyshop.ldap.search_scope']),
                                          filter_)
            if results is None:
                log.debug("LDAP rejected password for user %s" % (login))
                return None
            # Bind as the first DN found; simple_bind_s raises on bad
            # password, which is caught by the LDAPError handler below.
            for (dn, _attrs) in results:
                if dn is None:
                    continue
                log.debug('Trying simple bind with %s' % dn)
                server.simple_bind_s(dn, password)
                attrs = server.search_ext_s(dn, ldap.SCOPE_BASE, '(objectClass=*)')[0][1]
                break
            else:
                log.debug("No matching LDAP objects for authentication of '%s'", login)
                return None
            log.debug('LDAP authentication OK')
            # we may create a new user if it don't exist
            user_ldap = User.by_login(session, login)
            if user_ldap is None:
                log.debug('create user %s'%login)
                user_ldap = User()
                user_ldap.login = login
                user_ldap.password = password
                user_ldap.local = True
                user_ldap.firstname = attrs[settings['pyshop.ldap.first_name_attr']][0]
                user_ldap.lastname = attrs[settings['pyshop.ldap.last_name_attr']][0]
                user_ldap.email = attrs[settings['pyshop.ldap.email_attr']][0]
                for groupname in ["developer","installer"]:
                    user_ldap.groups.append(Group.by_name(session, groupname))
                if user_ldap.validate(session):
                    session.add(user_ldap)
                    log.debug('User "{}" added'.format(login))
                    transaction.commit()
            # its OK
            return user_ldap
        except ldap.NO_SUCH_OBJECT:
            log.debug("LDAP says no such user '%s'" % (login))
        except ldap.SERVER_DOWN:
            log.error("LDAP can't access authentication server")
        except ldap.LDAPError:
            log.error('ERROR while using LDAP connection')
        except Exception as exc:
            log.error('Unmanaged exception %s' % exc, exc_info=True)
        return None
    @classmethod
    def get_locals(cls, session, **kwargs):
        """
        Get all local users, sorted by login.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :return: local users
        :rtype: generator of :class:`pyshop.models.User`
        """
        return cls.find(session,
                        where=(cls.local == True,),
                        order_by=cls.login,
                        **kwargs)
    def has_permission(self, permission):
        # True when any of the user's groups grants *permission*.
        return permission in self.permissions
    def validate(self, session):
        """
        Validate that the current user can be saved.

        Checks login uniqueness (among local users), presence of the
        password, and a syntactically valid email address.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :return: ``True``
        :rtype: bool
        :raise: :class:`pyshop.helpers.sqla.ModelError` if user is not valid
        """
        errors = []
        if not self.login:
            errors.append(u'login is required')
        else:
            other = User.by_login(session, self.login)
            if other and other.id != self.id:
                errors.append(u'duplicate login %s' % self.login)
        if not self.password:
            errors.append(u'password is required')
        if not self.email:
            errors.append(u'email is required')
        elif not re_email.match(self.email):
            errors.append(u'%s is not a valid email' % self.email)
        if len(errors):
            raise ModelError(errors)
        return True
class Classifier(Base):
    """
    Describe a Python Package Classifier.

    Classifiers form a tree: ``Topic :: Internet :: WWW`` is a child of
    ``Topic :: Internet``, whose root category is ``Topic``.
    """
    @declared_attr
    def __table_args__(cls):
        return (Index('idx_%s_category_name' % cls.__tablename__,
                      'category', 'name', unique=True),
                {'mysql_engine': 'InnoDB',
                 'mysql_charset': 'utf8',
                 }
                )
    name = Column(Unicode(255), nullable=False, unique=True)
    parent_id = Column(Integer, ForeignKey(u'classifier.id'))
    # Root category of the tree, i.e. the first "::"-separated segment.
    category = Column(Unicode(80), nullable=False)
    parent = relationship(u'Classifier', remote_side=u'Classifier.id',
                          backref=u'childs')
    @property
    def shortname(self):
        """
        Last part of the classifier.
        """
        return self.name.rsplit(u'::', 1)[-1].strip()
    @classmethod
    def by_name(cls, session, name, **kwargs):
        """
        Get a classifier from a given name.

        With ``create_if_not_exists=True`` the classifier -- and every
        missing ancestor in its "::" chain -- is created on the fly.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param name: name of the classifier
        :type name: unicode
        :return: classifier instance
        :rtype: :class:`pyshop.models.Classifier`
        """
        classifier = cls.first(session, where=(cls.name == name,))
        if not kwargs.get('create_if_not_exists', False):
            return classifier
        if not classifier:
            # Build the list of ancestor names, shortest first, and create
            # each missing node so parent_id links stay consistent.
            splitted_names = [n.strip() for n in name.split(u'::')]
            classifiers = [u' :: '.join(splitted_names[:i + 1])
                           for i in range(len(splitted_names))]
            parent_id = None
            category = splitted_names[0]
            for c in classifiers:
                classifier = cls.first(session, where=(cls.name == c,))
                if not classifier:
                    classifier = Classifier(name=c, parent_id=parent_id,
                                            category=category)
                    session.add(classifier)
                    session.flush()
                parent_id = classifier.id
        return classifier
# Many-to-many association between Package and its owner users.
package__owner = Table('package__owner', Base.metadata,
                       Column('package_id', Integer, ForeignKey('package.id')),
                       Column('owner_id', Integer, ForeignKey('user.id'))
                       )
# Many-to-many association between Package and its maintainer users.
package__maintainer = Table('package__maintainer', Base.metadata,
                            Column('package_id',
                                   Integer, ForeignKey('package.id')),
                            Column('maintainer_id',
                                   Integer, ForeignKey('user.id'))
                            )
# Many-to-many association between Classifier and Package.
classifier__package = Table('classifier__package', Base.metadata,
                            Column('classifier_id',
                                   Integer, ForeignKey('classifier.id')),
                            Column('package_id',
                                   Integer, ForeignKey('package.id'))
                            )
class Package(Base):
    """
    Describe a Python Package.

    A package is either local (uploaded to pyshop) or mirrored from PyPI.
    """
    update_at = Column(DateTime, default=func.now())
    name = Column(Unicode(200), unique=True)
    local = Column(Boolean, nullable=False, default=False)
    owners = relationship(User, secondary=package__owner,
                          backref='owned_packages')
    downloads = Column(Integer, default=0)
    maintainers = relationship(User, secondary=package__maintainer,
                               backref='maintained_packages')
    classifiers = relationship(Classifier, secondary=classifier__package,
                               lazy='dynamic', backref='packages')
    @property
    def versions(self):
        """
        Available versions, most recent first.
        """
        return [r.version for r in self.sorted_releases]
    @property
    def sorted_releases(self):
        """
        Releases sorted by version, most recent first.
        """
        # NOTE(review): when two releases parse to the same version the
        # tuple sort falls back to comparing Release objects, which raises
        # TypeError on Python 3 -- confirm target interpreter / use a key=.
        releases = [(parse_version(release.version), release)
                    for release in self.releases]
        releases.sort(reverse=True)
        return [release[1] for release in releases]
    @classmethod
    def by_name(cls, session, name):
        """
        Get a package from a given name (case-insensitive; '-' and '_'
        are treated as equivalent on the fallback lookup).

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param name: name of the package
        :type name: unicode
        :return: package instance
        :rtype: :class:`pyshop.models.Package`
        """
        # XXX the field "name" should be created with a
        # case insensitive collation.
        pkg = cls.first(session, where=(cls.name.like(name),))
        if not pkg:
            name = name.replace(u'-', u'_').upper()
            pkg = cls.first(session,
                            where=(cls.name.like(name),))
            # XXX _ is a like operator
            # LIKE treats '_' as a wildcard, so double-check the match.
            if pkg and pkg.name.upper().replace(u'-', u'_') != name:
                pkg = None
        return pkg
    @classmethod
    def by_filter(cls, session, opts, **kwargs):
        """
        Get packages from given filters.

        Supported keys in *opts*: ``local_only`` (bool), ``names`` (list of
        names), ``classifiers`` (list of Classifier instances; a package
        must carry ALL of them to match).

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param opts: filtering options
        :type opts: dict
        :return: package instances
        :rtype: generator of :class:`pyshop.models.Package`
        """
        where = []
        if opts.get('local_only'):
            where.append(cls.local == True)
        if opts.get('names'):
            where.append(cls.name.in_(opts['names']))
        if opts.get('classifiers'):
            # Keep only packages matching every requested classifier
            # (count of matches per package >= number of classifiers).
            ids = [c.id for c in opts.get('classifiers')]
            cls_pkg = classifier__package
            qry = session.query(cls_pkg.c.package_id,
                                func.count('*'))
            qry = qry.filter(cls_pkg.c.classifier_id.in_(ids))
            qry = qry.group_by(cls_pkg.c.package_id)
            qry = qry.having(func.count('*') >= len(ids))
            where.append(cls.id.in_([r[0] for r in qry.all()]))
        return cls.find(session, where=where, **kwargs)
    @classmethod
    def by_owner(cls, session, owner_name):
        """
        Get packages from a given owner username.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param owner_name: owner username
        :type owner_name: unicode
        :return: package instances
        :rtype: generator of :class:`pyshop.models.Package`
        """
        return cls.find(session,
                        join=(cls.owners),
                        where=(User.login == owner_name,),
                        order_by=cls.name)
    @classmethod
    def by_maintainer(cls, session, maintainer_name):
        """
        Get packages from a given maintainer username.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param maintainer_name: maintainer username
        :type maintainer_name: unicode
        :return: package instances
        :rtype: generator of :class:`pyshop.models.Package`
        """
        return cls.find(session,
                        join=(cls.maintainers),
                        where=(User.login == maintainer_name,),
                        order_by=cls.name)
    @classmethod
    def get_locals(cls, session):
        """
        Get all local packages.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :return: package instances
        :rtype: generator of :class:`pyshop.models.Package`
        """
        return cls.find(session,
                        where=(cls.local == True,))
    @classmethod
    def get_mirrored(cls, session):
        """
        Get all mirrored packages.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :return: package instances
        :rtype: generator of :class:`pyshop.models.Package`
        """
        return cls.find(session,
                        where=(cls.local == False,))
# Many-to-many association between Classifier and Release.
classifier__release = Table('classifier__release', Base.metadata,
                            Column('classifier_id', Integer,
                                   ForeignKey('classifier.id', ondelete='cascade')),
                            Column('release_id',
                                   Integer, ForeignKey('release.id'))
                            )
class Release(Base):
    """
    Describe a Python Package Release.
    """
    @declared_attr
    def __table_args__(cls):
        # One release row per (package, version) pair.
        return (Index('idx_%s_package_id_version' % cls.__tablename__,
                      'package_id', 'version', unique=True),
                {'mysql_engine': 'InnoDB',
                 'mysql_charset': 'utf8',
                 }
                )
    version = Column(Unicode(60), nullable=False)
    summary = Column(Unicode(255))
    downloads = Column(Integer, default=0)
    package_id = Column(Integer, ForeignKey(Package.id),
                        nullable=False)
    author_id = Column(Integer, ForeignKey(User.id))
    maintainer_id = Column(Integer, ForeignKey(User.id))
    stable_version = Column(Unicode(60))
    home_page = Column(Unicode(255))
    license = Column(UnicodeText())
    description = Column(UnicodeText())
    keywords = Column(Unicode(255))
    platform = Column(Unicode(80))
    download_url = Column(Unicode(800))
    bugtrack_url = Column(Unicode(800))
    docs_url = Column(Unicode(800))
    classifiers = relationship(Classifier, secondary=classifier__release,
                               lazy='dynamic', cascade='all, delete')
    package = relationship(Package, lazy='joined',
                           backref=backref('releases',
                                           cascade='all, delete-orphan'))
    author = relationship(User, primaryjoin=author_id == User.id)
    maintainer = relationship(User, primaryjoin=maintainer_id == User.id)
    @property
    def download_url_file(self):
        """
        Filename of the download_url if any, None otherwise.
        """
        url = self.download_url
        return url.rsplit('/', 1).pop() if url else None
    @property
    def can_download_url_whl(self):
        """
        True when ``download_url`` points to an sdist archive whose name
        can be converted to a wheel filename (see :func:`_whlify`).
        """
        url_file = self.download_url_file
        if not url_file:
            # No download URL: nothing to whlify (previously this raised
            # AttributeError on None).
            return False
        # Drop any '#md5=...' fragment before inspecting the extension.
        filename = url_file.split('#').pop(0)
        # Bug fix: the original tested ``self.filename`` -- an attribute
        # that exists on ReleaseFile, not on Release -- instead of the
        # local ``filename`` computed above.
        return filename.endswith(('.tar.gz', '.tar.bz2', '.zip'))
    @property
    def whlify_download_url_file(self):
        """Wheel filename derived from the download_url filename."""
        return _whlify(self.download_url_file.split('#').pop(0))
    @classmethod
    def by_version(cls, session, package_name, version):
        """
        Get release for a given version.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param package_name: package name
        :type package_name: unicode
        :param version: version
        :type version: unicode
        :return: release instance
        :rtype: :class:`pyshop.models.Release`
        """
        return cls.first(session,
                         join=(Package,),
                         where=((Package.name == package_name),
                                (cls.version == version)))
    @classmethod
    def by_classifiers(cls, session, classifiers):
        """
        Get releases for given classifier names.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param classifiers: classifier names
        :type classifiers: list of unicode
        :return: release instances
        :rtype: generator of :class:`pyshop.models.Release`
        """
        return cls.find(session,
                        join=(cls.classifiers,),
                        where=(Classifier.name.in_(classifiers),),
                        )
    @classmethod
    def search(cls, session, opts, operator):
        """
        Get releases for given filters.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param opts: filtering options, mapping a searchable field name to
                     one value or a list of values (matched with LIKE)
        :type opts: dict
        :param operator: filtering options joining operator ('and' or 'or')
        :type operator: basestring
        :return: release instances
        :rtype: generator of :class:`pyshop.models.Release`
        """
        # Map public search-field names to model columns.
        available = {'name': Package.name,
                     'version': cls.version,
                     'author': User.login,
                     'author_email': User.email,
                     'maintainer': User.login,
                     'maintainer_email': User.email,
                     'home_page': cls.home_page,
                     'license': cls.license,
                     'summary': cls.summary,
                     'description': cls.description,
                     'keywords': cls.keywords,
                     'platform': cls.platform,
                     'download_url': cls.download_url
                     }
        oper = {'or': or_, 'and': and_}
        # Fields that require joining another table.
        join_map = {'name': Package,
                    'author': cls.author,
                    'author_email': cls.author,
                    'maintainer': cls.maintainer,
                    'maintainer_email': cls.maintainer,
                    }
        where = []
        join = []
        for opt, val in opts.items():
            field = available[opt]
            if hasattr(val, '__iter__') and len(val) > 1:
                # Several values for one field are always OR-ed together.
                stmt = or_(*[field.like(u'%%%s%%' % v) for v in val])
            else:
                stmt = field.like(u'%%%s%%' % val)
            where.append(stmt)
            if opt in join_map:
                join.append(join_map[opt])
        return cls.find(session, join=join,
                        where=(oper[operator](*where),))
class ReleaseFile(Base):
    """
    Describe a release file (one downloadable artifact of a Release).
    """
    release_id = Column(Integer, ForeignKey(Release.id),
                        nullable=False)
    filename = Column(Unicode(200), unique=True, nullable=False)
    md5_digest = Column(Unicode(50))
    size = Column(Integer)  # size in bytes -- presumably; TODO confirm
    package_type = Column(Enum(u'sdist', u'bdist_egg', u'bdist_msi',
                               u'bdist_dmg', u'bdist_rpm', u'bdist_dumb',
                               u'bdist_wininst',
                               u'bdist_wheel',
                               name='enum_release_file_package_type'),
                          nullable=False)
    python_version = Column(Unicode(25))
    url = Column(Unicode(1024))
    downloads = Column(Integer, default=0)
    has_sig = Column(Boolean, default=False)
    comment_text = Column(UnicodeText())
    release = relationship(Release, lazy='joined',
                           backref=backref('files',
                                           cascade='all, delete-orphan'))
    @property
    def filename_whlified(self):
        """Wheel-style filename derived from this sdist archive name."""
        # Only sdist archives can be converted to a wheel name.
        assert self.package_type == 'sdist'
        return _whlify(self.filename)
    @classmethod
    def by_release(cls, session, package_name, version):
        """
        Get release files for a given package
        name and for a given version.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param package_name: package name
        :type package_name: unicode
        :param version: version
        :type version: unicode
        :return: release files
        :rtype: generator of :class:`pyshop.models.ReleaseFile`
        """
        return cls.find(session,
                        join=(Release, Package),
                        where=(Package.name == package_name,
                               Release.version == version,
                               ))
    @classmethod
    def by_filename(cls, session, release, filename):
        """
        Get a release file for a given release and a given filename.

        :param session: SQLAlchemy session
        :type session: :class:`sqlalchemy.Session`
        :param release: release
        :type release: :class:`pyshop.models.Release`
        :param filename: filename of the release file
        :type filename: unicode
        :return: release file
        :rtype: :class:`pyshop.models.ReleaseFile`
        """
        return cls.first(session,
                         where=(ReleaseFile.release_id == release.id,
                                ReleaseFile.filename == filename,
                                ))
| bsd-3-clause |
jeffmcnd/tfrs | server/test_api_complex.py | 1 | 5553 | """
REST API Documentation for the NRS TFRS Credit Trading Application
The Transportation Fuels Reporting System is being designed to streamline compliance reporting for transportation fuel suppliers in accordance with the Renewable & Low Carbon Fuel Requirements Regulation.
OpenAPI spec version: v1
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.utils import timezone
import json
from django.test import TestCase
from django.test import Client
import django
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework import status
from . import fakedata
from .models.Audit import Audit
from .serializers import AuditSerializer
from .models.CreditTrade import CreditTrade
from .serializers import CreditTradeSerializer
from .models.CreditTradeHistory import CreditTradeHistory
from .serializers import CreditTradeHistorySerializer
from .models.CreditTradeStatus import CreditTradeStatus
from .serializers import CreditTradeStatusSerializer
from .models.CreditTradeType import CreditTradeType
from .serializers import CreditTradeTypeSerializer
from .models.CurrentUserViewModel import CurrentUserViewModel
from .serializers import CurrentUserViewModelSerializer
from .models.FuelSupplier import FuelSupplier
from .serializers import FuelSupplierSerializer
from .models.FuelSupplierActionsType import FuelSupplierActionsType
from .serializers import FuelSupplierActionsTypeSerializer
from .models.FuelSupplierAttachment import FuelSupplierAttachment
from .serializers import FuelSupplierAttachmentSerializer
from .models.FuelSupplierAttachmentTag import FuelSupplierAttachmentTag
from .serializers import FuelSupplierAttachmentTagSerializer
from .models.FuelSupplierBalance import FuelSupplierBalance
from .serializers import FuelSupplierBalanceSerializer
from .models.FuelSupplierCCData import FuelSupplierCCData
from .serializers import FuelSupplierCCDataSerializer
from .models.FuelSupplierContact import FuelSupplierContact
from .serializers import FuelSupplierContactSerializer
from .models.FuelSupplierContactRole import FuelSupplierContactRole
from .serializers import FuelSupplierContactRoleSerializer
from .models.FuelSupplierHistory import FuelSupplierHistory
from .serializers import FuelSupplierHistorySerializer
from .models.FuelSupplierStatus import FuelSupplierStatus
from .serializers import FuelSupplierStatusSerializer
from .models.FuelSupplierType import FuelSupplierType
from .serializers import FuelSupplierTypeSerializer
from .models.Notification import Notification
from .serializers import NotificationSerializer
from .models.NotificationEvent import NotificationEvent
from .serializers import NotificationEventSerializer
from .models.NotificationType import NotificationType
from .serializers import NotificationTypeSerializer
from .models.NotificationViewModel import NotificationViewModel
from .serializers import NotificationViewModelSerializer
from .models.Opportunity import Opportunity
from .serializers import OpportunitySerializer
from .models.OpportunityHistory import OpportunityHistory
from .serializers import OpportunityHistorySerializer
from .models.OpportunityStatus import OpportunityStatus
from .serializers import OpportunityStatusSerializer
from .models.Permission import Permission
from .serializers import PermissionSerializer
from .models.PermissionViewModel import PermissionViewModel
from .serializers import PermissionViewModelSerializer
from .models.Role import Role
from .serializers import RoleSerializer
from .models.RolePermission import RolePermission
from .serializers import RolePermissionSerializer
from .models.RolePermissionViewModel import RolePermissionViewModel
from .serializers import RolePermissionViewModelSerializer
from .models.RoleViewModel import RoleViewModel
from .serializers import RoleViewModelSerializer
from .models.User import User
from .serializers import UserSerializer
from .models.UserDetailsViewModel import UserDetailsViewModel
from .serializers import UserDetailsViewModelSerializer
from .models.UserFavourite import UserFavourite
from .serializers import UserFavouriteSerializer
from .models.UserFavouriteViewModel import UserFavouriteViewModel
from .serializers import UserFavouriteViewModelSerializer
from .models.UserRole import UserRole
from .serializers import UserRoleSerializer
from .models.UserRoleViewModel import UserRoleViewModel
from .serializers import UserRoleViewModelSerializer
from .models.UserViewModel import UserViewModel
from .serializers import UserViewModelSerializer
# Complex API test cases.
# If an API operation contains generated code and requires a complex model object
# (containing child items) then it is tested in this file.
#
# This file will have to be edited by hand.
class Test_Api_Complex(TestCase):
    """Hand-written tests for API operations whose generated code needs a
    complex model object (one containing child items)."""
    def setUp(self):
        """Give every test a fresh HTTP client and an initialised Django."""
        self.client = Client()
        django.setup()
if __name__ == '__main__':
    # BUG FIX: 'unittest' was used without ever being imported in this file,
    # so running the module directly failed with NameError. Import it locally
    # here, where it is actually needed.
    import unittest
    unittest.main()
| apache-2.0 |
jody-frankowski/ansible | lib/ansible/runner/filter_plugins/core.py | 2 | 9090 | # (c) 2012, Jeroen Hoekx <jeroen@hoekx.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import base64
import json
import os.path
import yaml
import types
import pipes
import glob
import re
import collections
import operator as py_operator
from ansible import errors
from ansible.utils import md5s, checksum_s
from distutils.version import LooseVersion, StrictVersion
from random import SystemRandom
from jinja2.filters import environmentfilter
def to_nice_yaml(*a, **kw):
    '''Render the value as verbose, human readable, block-style YAML.'''
    return yaml.safe_dump(*a,
                          indent=4,
                          allow_unicode=True,
                          default_flow_style=False,
                          **kw)
def to_json(a, *args, **kw):
    ''' Convert the value to JSON '''
    # Thin pass-through: extra positional/keyword arguments are forwarded
    # unchanged so callers can tune json.dumps serialization options.
    return json.dumps(a, *args, **kw)
def to_nice_json(a, *args, **kw):
    '''Serialize the value as pretty-printed JSON with sorted keys.'''
    return json.dumps(a, *args, indent=4, sort_keys=True, **kw)
def failed(*a, **kw):
    ''' Test whether a registered task result represents a failure. '''
    result = a[0]
    if type(result) != dict:
        raise errors.AnsibleFilterError("|failed expects a dictionary")
    # A non-zero return code or an explicit 'failed' flag means failure.
    if result.get('rc', 0) != 0 or result.get('failed', False):
        return True
    return False
def success(*a, **kw):
    ''' Test if task result yields success '''
    # Success is defined as the negation of the |failed test above.
    return not failed(*a, **kw)
def changed(*a, **kw):
    ''' Test whether a registered task result reports a change. '''
    item = a[0]
    if type(item) != dict:
        raise errors.AnsibleFilterError("|changed expects a dictionary")
    # A top-level 'changed' key wins outright.
    if 'changed' in item:
        return item.get('changed', False)
    # Some modules (loops) return a 'results' list of per-item dicts instead;
    # the task counts as changed when any item does.
    state = False
    if ('results' in item
            and type(item['results']) == list
            and type(item['results'][0]) == dict):
        for result in item['results']:
            state = state or result.get('changed', False)
    return state
def skipped(*a, **kw):
    ''' Test whether a registered task result was skipped. '''
    item = a[0]
    if type(item) != dict:
        raise errors.AnsibleFilterError("|skipped expects a dictionary")
    return item.get('skipped', False)
def mandatory(a):
    ''' Make a variable mandatory: raise if it is undefined, else pass it through. '''
    # BUG FIX: the original wrapped a bare reference to the parameter in
    # try/except NameError, but referencing a bound function parameter can
    # never raise NameError, so the filter silently accepted every value and
    # never enforced anything.  Jinja2 hands undefined template variables to
    # filters as Undefined instances, so detect that explicitly instead.
    from jinja2.runtime import Undefined
    if isinstance(a, Undefined):
        raise errors.AnsibleFilterError('Mandatory variable not defined.')
    return a
def bool(a):
''' return a bool for the arg '''
if a is None or type(a) == bool:
return a
if type(a) in types.StringTypes:
a = a.lower()
if a in ['yes', 'on', '1', 'true', 1]:
return True
else:
return False
def quote(a):
    ''' return its argument quoted for shell usage '''
    # Delegates to pipes.quote (the predecessor of shlex.quote) so the value
    # is safe to interpolate into a shell command line.
    return pipes.quote(a)
def fileglob(pathname):
    ''' Return the list of filesystem paths matching the glob pattern. '''
    matches = glob.glob(pathname)
    return matches
def regex(value='', pattern='', ignorecase=False, match_type='search'):
    ''' Expose `re` as a boolean filter using the `search` method by default.
    This is likely only useful for `search` and `match` which already
    have their own filters.
    '''
    flags = re.I if ignorecase else 0
    compiled = re.compile(pattern, flags=flags)
    # The builtin bool is shadowed by the |bool filter defined in this
    # module, so fetch the real one from __builtins__.
    _bool = __builtins__.get('bool')
    return _bool(getattr(compiled, match_type, 'search')(value))
def match(value, pattern='', ignorecase=False):
    ''' Perform a `re.match` returning a boolean '''
    # Anchored-at-start variant of the generic regex() helper above.
    return regex(value, pattern, ignorecase, 'match')
def search(value, pattern='', ignorecase=False):
    ''' Perform a `re.search` returning a boolean '''
    # Match-anywhere variant of the generic regex() helper above.
    return regex(value, pattern, ignorecase, 'search')
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
    ''' Perform a `re.sub` returning a string. Non-string values are coerced
    with str() before substitution. '''
    # BUG FIX: `basestring` only exists on Python 2 and raised NameError on
    # Python 3; fall back to `str` there while keeping Python 2 behaviour
    # (str + unicode both accepted) intact.
    try:
        text_types = basestring
    except NameError:
        text_types = str
    if not isinstance(value, text_types):
        value = str(value)
    if ignorecase:
        flags = re.I
    else:
        flags = 0
    _re = re.compile(pattern, flags=flags)
    return _re.sub(replacement, value)
def unique(a):
    ''' Return the distinct elements of `a`: a set when `a` itself is
    hashable (e.g. string/tuple), otherwise an order-preserving list. '''
    # BUG FIX: collections.Hashable is a deprecated alias that was removed in
    # Python 3.10; import from collections.abc when available.
    try:
        from collections.abc import Hashable
    except ImportError:  # Python 2
        from collections import Hashable
    if isinstance(a, Hashable):
        c = set(a)
    else:
        c = []
        for x in a:
            if x not in c:
                c.append(x)
    return c
def intersect(a, b):
    ''' Return the intersection of `a` and `b`: a set when both are hashable,
    otherwise an order-preserving list of elements of `a` found in `b`. '''
    # BUG FIX: collections.Hashable was removed in Python 3.10 (use
    # collections.abc), and passing a lazy `filter` object to unique() on
    # Python 3 treated the iterator itself as a hashable element; a list
    # comprehension is equivalent on Python 2 and correct on Python 3.
    try:
        from collections.abc import Hashable
    except ImportError:  # Python 2
        from collections import Hashable
    if isinstance(a, Hashable) and isinstance(b, Hashable):
        c = set(a) & set(b)
    else:
        c = unique([x for x in a if x in b])
    return c
def difference(a, b):
    ''' Return the elements of `a` not in `b`: a set when both are hashable,
    otherwise an order-preserving list. '''
    # BUG FIX: collections.Hashable removed in Python 3.10; lazy filter()
    # result broke unique() on Python 3 (see intersect()).
    try:
        from collections.abc import Hashable
    except ImportError:  # Python 2
        from collections import Hashable
    if isinstance(a, Hashable) and isinstance(b, Hashable):
        c = set(a) - set(b)
    else:
        c = unique([x for x in a if x not in b])
    return c
def symmetric_difference(a, b):
    ''' Return elements in exactly one of `a` and `b`: a set when both are
    hashable, otherwise an order-preserving list. '''
    # BUG FIX: collections.Hashable removed in Python 3.10; lazy filter()
    # result broke unique() on Python 3 (see intersect()).  The intersection
    # is also computed once instead of per element.
    try:
        from collections.abc import Hashable
    except ImportError:  # Python 2
        from collections import Hashable
    if isinstance(a, Hashable) and isinstance(b, Hashable):
        c = set(a) ^ set(b)
    else:
        common = intersect(a, b)
        c = unique([x for x in union(a, b) if x not in common])
    return c
def union(a, b):
    ''' Return the union of `a` and `b`: a set when both are hashable,
    otherwise an order-preserving list of distinct elements. '''
    # BUG FIX: collections.Hashable is a deprecated alias removed in
    # Python 3.10; import from collections.abc when available.
    try:
        from collections.abc import Hashable
    except ImportError:  # Python 2
        from collections import Hashable
    if isinstance(a, Hashable) and isinstance(b, Hashable):
        c = set(a) | set(b)
    else:
        c = unique(a + b)
    return c
def min(a):
    ''' Return the smallest element of the sequence. '''
    # The builtin min is shadowed by this filter, so fetch the real one.
    _min = __builtins__.get('min')
    return _min(a)
def max(a):
    ''' Return the largest element of the sequence. '''
    # The builtin max is shadowed by this filter, so fetch the real one.
    _max = __builtins__.get('max')
    return _max(a)
def version_compare(value, version, operator='eq', strict=False):
    ''' Perform a version comparison on a value.

    Both operands are coerced to str and compared via distutils'
    LooseVersion (or StrictVersion when strict=True) using the given
    operator, which may be symbolic ('>=') or named ('ge').  Raises
    AnsibleFilterError for unknown operators or unparsable versions.
    '''
    op_map = {
        '==': 'eq', '=': 'eq', 'eq': 'eq',
        '<': 'lt', 'lt': 'lt',
        '<=': 'le', 'le': 'le',
        '>': 'gt', 'gt': 'gt',
        '>=': 'ge', 'ge': 'ge',
        '!=': 'ne', '<>': 'ne', 'ne': 'ne'
    }
    if strict:
        Version = StrictVersion
    else:
        Version = LooseVersion
    if operator in op_map:
        operator = op_map[operator]
    else:
        raise errors.AnsibleFilterError('Invalid operator type')
    try:
        method = getattr(py_operator, operator)
        return method(Version(str(value)), Version(str(version)))
    # BUG FIX: the original used 'except Exception, e', which is Python-2-only
    # syntax and a SyntaxError on Python 3; the 'as' form is equivalent and
    # valid on Python 2.6+ as well.
    except Exception as e:
        raise errors.AnsibleFilterError('Version comparison: %s' % e)
@environmentfilter
def rand(environment, end, start=None, step=None):
    ''' Return a random value drawn from a cryptographic source.

    With an integer `end`, acts like randrange(start or 0, end, step or 1);
    with an iterable, picks one element.  start/step only apply to integers.
    '''
    rng = SystemRandom()
    # NOTE: `long` exists on Python 2 only; this module targets Python 2.
    if isinstance(end, (int, long)):
        if not start:
            start = 0
        if not step:
            step = 1
        return rng.randrange(start, end, step)
    elif hasattr(end, '__iter__'):
        if start or step:
            raise errors.AnsibleFilterError('start and step can only be used with integer values')
        return rng.choice(end)
    else:
        raise errors.AnsibleFilterError('random can only be used on sequences and integers')
class FilterModule(object):
    ''' Ansible core jinja2 filters '''
    def filters(self):
        '''Return the mapping of jinja2 filter names to the callables above.'''
        return {
            # base 64
            'b64decode': base64.b64decode,
            'b64encode': base64.b64encode,
            # json
            'to_json': to_json,
            'to_nice_json': to_nice_json,
            'from_json': json.loads,
            # yaml
            'to_yaml': yaml.safe_dump,
            'to_nice_yaml': to_nice_yaml,
            'from_yaml': yaml.safe_load,
            # path
            'basename': os.path.basename,
            'dirname': os.path.dirname,
            'expanduser': os.path.expanduser,
            'realpath': os.path.realpath,
            'relpath': os.path.relpath,
            # failure testing
            'failed' : failed,
            'success' : success,
            # changed testing
            'changed' : changed,
            # skip testing
            'skipped' : skipped,
            # variable existence
            'mandatory': mandatory,
            # value as boolean
            'bool': bool,
            # quote string for shell usage
            'quote': quote,
            # hash filters
            # md5 hex digest of string
            'md5': md5s,
            # sha1 hex digest of string
            'sha1': checksum_s,
            # checksum of string as used by ansible for checksumming files
            'checksum': checksum_s,
            # file glob
            'fileglob': fileglob,
            # regex
            'match': match,
            'search': search,
            'regex': regex,
            'regex_replace': regex_replace,
            # list
            'unique' : unique,
            'intersect': intersect,
            'difference': difference,
            'symmetric_difference': symmetric_difference,
            'union': union,
            'min' : min,
            'max' : max,
            # version comparison
            'version_compare': version_compare,
            # random numbers
            'random': rand,
        }
| gpl-3.0 |
zahodi/ansible | lib/ansible/modules/monitoring/zabbix_host.py | 25 | 22640 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013-2014, Epic Games, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: zabbix_host
short_description: Zabbix host creates/updates/deletes
description:
- This module allows you to create, modify and delete Zabbix host entries and associated group and template data.
version_added: "2.0"
author:
- "(@cove)"
- "Tony Minfei Ding"
- "Harrison Gu (@harrisongu)"
requirements:
- "python >= 2.6"
- zabbix-api
options:
server_url:
description:
- Url of Zabbix server, with protocol (http or https).
required: true
aliases: [ "url" ]
login_user:
description:
- Zabbix user name, used to authenticate against the server.
required: true
login_password:
description:
- Zabbix user password.
required: true
http_login_user:
description:
- Basic Auth login
required: false
default: None
version_added: "2.1"
http_login_password:
description:
- Basic Auth password
required: false
default: None
version_added: "2.1"
host_name:
description:
- Name of the host in Zabbix.
- host_name is the unique identifier used and cannot be updated using this module.
required: true
visible_name:
description:
- Visible name of the host in Zabbix.
required: false
version_added: '2.3'
host_groups:
description:
- List of host groups the host is part of.
required: false
link_templates:
description:
- List of templates linked to the host.
required: false
default: None
inventory_mode:
description:
- Configure the inventory mode.
choices: ['automatic', 'manual', 'disabled']
required: false
default: None
version_added: '2.1'
status:
description:
- Monitoring status of the host.
required: false
choices: ['enabled', 'disabled']
default: "enabled"
state:
description:
- State of the host.
- On C(present), it will create if host does not exist or update the host if the associated data is different.
- On C(absent) will remove a host if it exists.
required: false
choices: ['present', 'absent']
default: "present"
timeout:
description:
- The timeout of API request (seconds).
default: 10
proxy:
description:
- The name of the Zabbix Proxy to be used
default: None
interfaces:
description:
- List of interfaces to be created for the host (see example below).
- 'Available values are: dns, ip, main, port, type and useip.'
- Please review the interface documentation for more information on the supported properties
- 'https://www.zabbix.com/documentation/2.0/manual/appendix/api/hostinterface/definitions#host_interface'
required: false
default: []
force:
description:
- Overwrite the host configuration, even if already present
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "2.0"
'''
EXAMPLES = '''
- name: Create a new host or update an existing host's info
local_action:
module: zabbix_host
server_url: http://monitor.example.com
login_user: username
login_password: password
host_name: ExampleHost
visible_name: ExampleName
host_groups:
- Example group1
- Example group2
link_templates:
- Example template1
- Example template2
status: enabled
state: present
inventory_mode: automatic
interfaces:
- type: 1
main: 1
useip: 1
ip: 10.xx.xx.xx
dns: ""
port: 10050
- type: 4
main: 1
useip: 1
ip: 10.xx.xx.xx
dns: ""
port: 12345
proxy: a.zabbix.proxy
'''
import logging
import copy
try:
    from zabbix_api import ZabbixAPI, ZabbixAPISubClass
    # Extend the ZabbixAPI.
    # The zabbix-api python module is too old (version 1.0, no higher version
    # so far) and does not support the 'hostinterface' api calls, so we
    # inherit from ZabbixAPI to add 'hostinterface' support ourselves.
    class ZabbixAPIExtends(ZabbixAPI):
        # populated in __init__ with a sub-client for the hostinterface API
        hostinterface = None
        def __init__(self, server, timeout, user, passwd, **kwargs):
            ZabbixAPI.__init__(self, server, timeout=timeout, user=user, passwd=passwd)
            self.hostinterface = ZabbixAPISubClass(self, dict({"prefix": "hostinterface"}, **kwargs))
    HAS_ZABBIX_API = True
except ImportError:
    # flag checked in main() so a clean error is reported instead of a crash
    HAS_ZABBIX_API = False
class Host(object):
    """Helper wrapping the Zabbix JSON-RPC API for host management.

    Holds the AnsibleModule (for fail/exit reporting and check mode) and an
    authenticated ZabbixAPI client; all lookups and mutations go through it.
    """
    def __init__(self, module, zbx):
        # module: AnsibleModule instance; zbx: authenticated API client
        self._module = module
        self._zapi = zbx
    # exist host
    def is_host_exist(self, host_name):
        """Return the (possibly empty) list of hosts matching host_name."""
        result = self._zapi.host.get({'filter': {'host': host_name}})
        return result
    # check if host group exists
    def check_host_group_exist(self, group_names):
        """Fail the module unless every named host group exists; else True."""
        for group_name in group_names:
            result = self._zapi.hostgroup.get({'filter': {'name': group_name}})
            if not result:
                self._module.fail_json(msg="Hostgroup not found: %s" % group_name)
        return True
    def get_template_ids(self, template_list):
        """Resolve template names to template IDs, failing on unknown names."""
        template_ids = []
        if template_list is None or len(template_list) == 0:
            return template_ids
        for template in template_list:
            # NOTE(review): rebinding 'template_list' inside the loop works
            # because the for-statement already captured its iterator, but
            # the shadowing is confusing.
            template_list = self._zapi.template.get({'output': 'extend', 'filter': {'host': template}})
            if len(template_list) < 1:
                self._module.fail_json(msg="Template not found: %s" % template)
            else:
                template_id = template_list[0]['templateid']
                template_ids.append(template_id)
        return template_ids
    def add_host(self, host_name, group_ids, status, interfaces, proxy_id, visible_name):
        """Create a host and return its new ID (exits early in check mode)."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            parameters = {'host': host_name, 'interfaces': interfaces, 'groups': group_ids, 'status': status}
            if proxy_id:
                parameters['proxy_hostid'] = proxy_id
            if visible_name:
                parameters['name'] = visible_name
            host_list = self._zapi.host.create(parameters)
            if len(host_list) >= 1:
                return host_list['hostids'][0]
        except Exception as e:
            self._module.fail_json(msg="Failed to create host %s: %s" % (host_name, e))
    def update_host(self, host_name, group_ids, status, host_id, interfaces, exist_interface_list, proxy_id, visible_name):
        """Update host attributes and reconcile interfaces: update entries of
        matching type, create missing ones and delete leftovers."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            parameters = {'hostid': host_id, 'groups': group_ids, 'status': status}
            if proxy_id:
                parameters['proxy_hostid'] = proxy_id
            if visible_name:
                parameters['name'] = visible_name
            self._zapi.host.update(parameters)
            # NOTE(review): this aliases (not copies) the existing list, so
            # matched entries removed below also disappear from the caller's
            # list — presumably intended; verify against callers.
            interface_list_copy = exist_interface_list
            if interfaces:
                for interface in interfaces:
                    flag = False
                    interface_str = interface
                    for exist_interface in exist_interface_list:
                        interface_type = interface['type']
                        exist_interface_type = int(exist_interface['type'])
                        if interface_type == exist_interface_type:
                            # update
                            interface_str['interfaceid'] = exist_interface['interfaceid']
                            self._zapi.hostinterface.update(interface_str)
                            flag = True
                            interface_list_copy.remove(exist_interface)
                            break
                    if not flag:
                        # add
                        interface_str['hostid'] = host_id
                        self._zapi.hostinterface.create(interface_str)
            # remove interfaces that were not matched by any desired entry
            remove_interface_ids = []
            for remove_interface in interface_list_copy:
                interface_id = remove_interface['interfaceid']
                remove_interface_ids.append(interface_id)
            if len(remove_interface_ids) > 0:
                self._zapi.hostinterface.delete(remove_interface_ids)
        except Exception as e:
            self._module.fail_json(msg="Failed to update host %s: %s" % (host_name, e))
    def delete_host(self, host_id, host_name):
        """Delete the host by ID (exits early in check mode)."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.host.delete([host_id])
        except Exception as e:
            self._module.fail_json(msg="Failed to delete host %s: %s" % (host_name, e))
    # get host by host name
    def get_host_by_host_name(self, host_name):
        """Return the host object for host_name, failing if it is missing."""
        host_list = self._zapi.host.get({'output': 'extend', 'filter': {'host': [host_name]}})
        if len(host_list) < 1:
            self._module.fail_json(msg="Host not found: %s" % host_name)
        else:
            return host_list[0]
    # get proxyid by proxy name
    def get_proxyid_by_proxy_name(self, proxy_name):
        """Return the proxy ID for proxy_name, failing if it is missing."""
        proxy_list = self._zapi.proxy.get({'output': 'extend', 'filter': {'host': [proxy_name]}})
        if len(proxy_list) < 1:
            self._module.fail_json(msg="Proxy not found: %s" % proxy_name)
        else:
            return proxy_list[0]['proxyid']
    # get group ids by group names
    def get_group_ids_by_group_names(self, group_names):
        """Return [{'groupid': id}, ...] for the given group names."""
        group_ids = []
        if self.check_host_group_exist(group_names):
            group_list = self._zapi.hostgroup.get({'output': 'extend', 'filter': {'name': group_names}})
            for group in group_list:
                group_id = group['groupid']
                group_ids.append({'groupid': group_id})
        return group_ids
    # get host templates by host id
    def get_host_templates_by_host_id(self, host_id):
        """Return the IDs of all templates currently linked to the host."""
        template_ids = []
        template_list = self._zapi.template.get({'output': 'extend', 'hostids': host_id})
        for template in template_list:
            template_ids.append(template['templateid'])
        return template_ids
    # get host groups by host id
    def get_host_groups_by_host_id(self, host_id):
        """Return the names of all host groups the host belongs to."""
        exist_host_groups = []
        host_groups_list = self._zapi.hostgroup.get({'output': 'extend', 'hostids': host_id})
        if len(host_groups_list) >= 1:
            for host_groups_name in host_groups_list:
                exist_host_groups.append(host_groups_name['name'])
        return exist_host_groups
    # check the exist_interfaces whether it equals the interfaces or not
    def check_interface_properties(self, exist_interface_list, interfaces):
        """Return True when desired interfaces differ from the existing ones
        (compared by port set first, then property-by-property)."""
        interfaces_port_list = []
        if interfaces is not None:
            if len(interfaces) >= 1:
                for interface in interfaces:
                    interfaces_port_list.append(int(interface['port']))
        exist_interface_ports = []
        if len(exist_interface_list) >= 1:
            for exist_interface in exist_interface_list:
                exist_interface_ports.append(int(exist_interface['port']))
        if set(interfaces_port_list) != set(exist_interface_ports):
            return True
        for exist_interface in exist_interface_list:
            exit_interface_port = int(exist_interface['port'])
            for interface in interfaces:
                interface_port = int(interface['port'])
                if interface_port == exit_interface_port:
                    for key in interface.keys():
                        if str(exist_interface[key]) != str(interface[key]):
                            return True
        return False
    # get the status of host by host
    def get_host_status_by_host(self, host):
        """Return the monitoring status field of a host object."""
        return host['status']
    # check all the properties before link or clear template
    def check_all_properties(self, host_id, host_groups, status, interfaces, template_ids,
                             exist_interfaces, host, proxy_id, visible_name):
        """Return True when any of groups, status, interfaces, templates,
        proxy or visible name differ from the current host configuration."""
        # get the existing host's groups
        exist_host_groups = self.get_host_groups_by_host_id(host_id)
        if set(host_groups) != set(exist_host_groups):
            return True
        # get the existing status
        exist_status = self.get_host_status_by_host(host)
        if int(status) != int(exist_status):
            return True
        # check the exist_interfaces whether it equals the interfaces or not
        if self.check_interface_properties(exist_interfaces, interfaces):
            return True
        # get the existing templates
        exist_template_ids = self.get_host_templates_by_host_id(host_id)
        if set(list(template_ids)) != set(exist_template_ids):
            return True
        if host['proxy_hostid'] != proxy_id:
            return True
        if host['name'] != visible_name:
            return True
        return False
    # link or clear template of the host
    def link_or_clear_template(self, host_id, template_id_list):
        """Link the given templates to the host and clear any currently
        linked templates that are not in the desired list."""
        # get host's exist template ids
        exist_template_id_list = self.get_host_templates_by_host_id(host_id)
        exist_template_ids = set(exist_template_id_list)
        template_ids = set(template_id_list)
        template_id_list = list(template_ids)
        # get unlink and clear templates
        templates_clear = exist_template_ids.difference(template_ids)
        templates_clear_list = list(templates_clear)
        request_str = {'hostid': host_id, 'templates': template_id_list, 'templates_clear': templates_clear_list}
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.host.update(request_str)
        except Exception as e:
            self._module.fail_json(msg="Failed to link template to host: %s" % e)
    # Update the host inventory_mode
    def update_inventory_mode(self, host_id, inventory_mode):
        """Set the host inventory mode; no-op when inventory_mode is empty."""
        # nothing was set, do nothing
        if not inventory_mode:
            return
        if inventory_mode == "automatic":
            inventory_mode = int(1)
        elif inventory_mode == "manual":
            inventory_mode = int(0)
        elif inventory_mode == "disabled":
            inventory_mode = int(-1)
        # watch for - https://support.zabbix.com/browse/ZBX-6033
        request_str = {'hostid': host_id, 'inventory_mode': inventory_mode}
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.host.update(request_str)
        except Exception as e:
            self._module.fail_json(msg="Failed to set inventory_mode to host: %s" % e)
def main():
    """Module entry point: parse parameters, log in to Zabbix and converge
    the host to the requested state (create/update/delete)."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            host_name=dict(type='str', required=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            host_groups=dict(type='list', required=False),
            link_templates=dict(type='list', required=False),
            status=dict(default="enabled", choices=['enabled', 'disabled']),
            state=dict(default="present", choices=['present', 'absent']),
            inventory_mode=dict(required=False, choices=['automatic', 'manual', 'disabled']),
            timeout=dict(type='int', default=10),
            interfaces=dict(type='list', required=False),
            force=dict(type='bool', default=True),
            proxy=dict(type='str', required=False),
            visible_name=dict(type='str', required=False)
        ),
        supports_check_mode=True
    )
    if not HAS_ZABBIX_API:
        module.fail_json(msg="Missing requried zabbix-api module (check docs or install with: pip install zabbix-api)")
    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    host_name = module.params['host_name']
    visible_name = module.params['visible_name']
    host_groups = module.params['host_groups']
    link_templates = module.params['link_templates']
    inventory_mode = module.params['inventory_mode']
    status = module.params['status']
    state = module.params['state']
    timeout = module.params['timeout']
    interfaces = module.params['interfaces']
    force = module.params['force']
    proxy = module.params['proxy']
    # convert enabled to 0; disabled to 1 (Zabbix status encoding)
    status = 1 if status == "disabled" else 0
    zbx = None
    # login to zabbix
    try:
        zbx = ZabbixAPIExtends(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password)
        zbx.login(login_user, login_password)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)
    host = Host(module, zbx)
    template_ids = []
    if link_templates:
        template_ids = host.get_template_ids(link_templates)
    group_ids = []
    if host_groups:
        group_ids = host.get_group_ids_by_group_names(host_groups)
    # remember the agent interface IP purely for the result message
    ip = ""
    if interfaces:
        for interface in interfaces:
            if interface['type'] == 1:
                ip = interface['ip']
    # check if host exist
    is_host_exist = host.is_host_exist(host_name)
    if is_host_exist:
        # Use proxy specified, or set to None when updating host
        if proxy:
            proxy_id = host.get_proxyid_by_proxy_name(proxy)
        else:
            proxy_id = None
        # get host id by host name
        zabbix_host_obj = host.get_host_by_host_name(host_name)
        host_id = zabbix_host_obj['hostid']
        if state == "absent":
            # remove host
            host.delete_host(host_id, host_name)
            module.exit_json(changed=True, result="Successfully delete host %s" % host_name)
        else:
            if not group_ids:
                module.fail_json(msg="Specify at least one group for updating host '%s'." % host_name)
            if not force:
                module.fail_json(changed=False, result="Host present, Can't update configuration without force")
            # get exist host's interfaces
            exist_interfaces = host._zapi.hostinterface.get({'output': 'extend', 'hostids': host_id})
            exist_interfaces_copy = copy.deepcopy(exist_interfaces)
            # update host
            interfaces_len = len(interfaces) if interfaces else 0
            # when there are more existing interfaces than desired ones, the
            # update must run before linking templates (and vice versa below)
            if len(exist_interfaces) > interfaces_len:
                if host.check_all_properties(host_id, host_groups, status, interfaces, template_ids,
                                             exist_interfaces, zabbix_host_obj, proxy_id, visible_name):
                    host.link_or_clear_template(host_id, template_ids)
                    host.update_host(host_name, group_ids, status, host_id,
                                     interfaces, exist_interfaces, proxy_id, visible_name)
                    module.exit_json(changed=True,
                                     result="Successfully update host %s (%s) and linked with template '%s'"
                                            % (host_name, ip, link_templates))
                else:
                    module.exit_json(changed=False)
            else:
                if host.check_all_properties(host_id, host_groups, status, interfaces, template_ids,
                                             exist_interfaces_copy, zabbix_host_obj, proxy_id, visible_name):
                    host.update_host(host_name, group_ids, status, host_id, interfaces, exist_interfaces, proxy_id, visible_name)
                    host.link_or_clear_template(host_id, template_ids)
                    host.update_inventory_mode(host_id, inventory_mode)
                    module.exit_json(changed=True,
                                     result="Successfully update host %s (%s) and linked with template '%s'"
                                            % (host_name, ip, link_templates))
                else:
                    module.exit_json(changed=False)
    else:
        if state == "absent":
            # the host is already deleted.
            module.exit_json(changed=False)
        # Use proxy specified, or set to 0 when adding new host
        if proxy:
            proxy_id = host.get_proxyid_by_proxy_name(proxy)
        else:
            proxy_id = 0
        if not group_ids:
            module.fail_json(msg="Specify at least one group for creating host '%s'." % host_name)
        if not interfaces or (interfaces and len(interfaces) == 0):
            module.fail_json(msg="Specify at least one interface for creating host '%s'." % host_name)
        # create host
        host_id = host.add_host(host_name, group_ids, status, interfaces, proxy_id, visible_name)
        host.link_or_clear_template(host_id, template_ids)
        host.update_inventory_mode(host_id, inventory_mode)
        module.exit_json(changed=True, result="Successfully added host %s (%s) and linked with template '%s'" % (
            host_name, ip, link_templates))
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
srvelivela/ansibledoc | lib/ansible/plugins/connection/ssh.py | 7 | 27787 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fcntl
import os
import pipes
import pty
import select
import shlex
import subprocess
import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connection import ConnectionBase
from ansible.utils.path import unfrackpath, makedirs_safe
from ansible.utils.unicode import to_bytes, to_unicode
SSHPASS_AVAILABLE = None
class Connection(ConnectionBase):
''' ssh based connections '''
transport = 'ssh'
has_pipelining = True
become_methods = frozenset(C.BECOME_METHODS).difference(['runas'])
    def __init__(self, *args, **kwargs):
        """Initialise the ssh connection plugin; the target host comes from
        the play context."""
        super(Connection, self).__init__(*args, **kwargs)
        self.host = self._play_context.remote_addr
# The connection is created by running ssh/scp/sftp from the exec_command,
# put_file, and fetch_file methods, so we don't need to do any connection
# management here.
    def _connect(self):
        # No persistent connection object is created here: ssh/scp/sftp are
        # spawned per operation (see the class comment above), so "connecting"
        # just flags the state and returns self.
        self._connected = True
        return self
@staticmethod
def _sshpass_available():
global SSHPASS_AVAILABLE
# We test once if sshpass is available, and remember the result. It
# would be nice to use distutils.spawn.find_executable for this, but
# distutils isn't always available; shutils.which() is Python3-only.
if SSHPASS_AVAILABLE is None:
try:
p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
SSHPASS_AVAILABLE = True
except OSError:
SSHPASS_AVAILABLE = False
return SSHPASS_AVAILABLE
@staticmethod
def _persistence_controls(command):
'''
Takes a command array and scans it for ControlPersist and ControlPath
settings and returns two booleans indicating whether either was found.
This could be smarter, e.g. returning false if ControlPersist is 'no',
but for now we do it simple way.
'''
controlpersist = False
controlpath = False
for arg in command:
if 'controlpersist' in arg.lower():
controlpersist = True
elif 'controlpath' in arg.lower():
controlpath = True
return controlpersist, controlpath
@staticmethod
def _split_args(argstring):
"""
Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a
list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to
the argument list. The list will not contain any empty elements.
"""
return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()]
def _add_args(self, explanation, args):
"""
Adds the given args to self._command and displays a caller-supplied
explanation of why they were added.
"""
self._command += args
self._display.vvvvv('SSH: ' + explanation + ': (%s)' % ')('.join(args), host=self._play_context.remote_addr)
    def _build_command(self, binary, *other_args):
        '''
        Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
        a command line as an array that can be passed to subprocess.Popen.

        NOTE: argument order matters here — sshpass must come first, options
        must precede the caller-supplied positional extras.
        '''
        self._command = []
        ## First, the command name.
        # If we want to use password authentication, we have to set up a pipe to
        # write the password to sshpass.  The read end's fd number is passed to
        # sshpass via -d; the password itself is written in _run().
        if self._play_context.password:
            if not self._sshpass_available():
                raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")
            self.sshpass_pipe = os.pipe()
            self._command += ['sshpass', '-d{0}'.format(self.sshpass_pipe[0])]
        self._command += [binary]
        ## Next, additional arguments based on the configuration.
        # sftp batch mode allows us to correctly catch failed transfers, but can
        # be disabled if the client side doesn't support the option.
        if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
            self._command += ['-b', '-']
        self._command += ['-C']
        if self._play_context.verbosity > 3:
            self._command += ['-vvv']
        elif binary == 'ssh':
            # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q.
            self._command += ['-q']
        # Next, we add [ssh_connection]ssh_args from ansible.cfg.
        if self._play_context.ssh_args:
            args = self._split_args(self._play_context.ssh_args)
            self._add_args("ansible.cfg set ssh_args", args)
        # Now we add various arguments controlled by configuration file settings
        # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
        # a combination thereof.
        if not C.HOST_KEY_CHECKING:
            self._add_args(
                "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
                ("-o", "StrictHostKeyChecking=no")
            )
        if self._play_context.port is not None:
            self._add_args(
                "ANSIBLE_REMOTE_PORT/remote_port/ansible_port set",
                ("-o", "Port={0}".format(self._play_context.port))
            )
        key = self._play_context.private_key_file
        if key:
            self._add_args(
                "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
                ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key)))
            )
        # When no password is supplied, explicitly disable the interactive
        # authentication methods so ssh fails fast instead of prompting.
        if not self._play_context.password:
            self._add_args(
                "ansible_password/ansible_ssh_pass not set", (
                    "-o", "KbdInteractiveAuthentication=no",
                    "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
                    "-o", "PasswordAuthentication=no"
                )
            )
        user = self._play_context.remote_user
        if user:
            self._add_args(
                "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set",
                ("-o", "User={0}".format(self._play_context.remote_user))
            )
        self._add_args(
            "ANSIBLE_TIMEOUT/timeout set",
            ("-o", "ConnectTimeout={0}".format(self._play_context.timeout))
        )
        # Add in any common or binary-specific arguments from the PlayContext
        # (i.e. inventory or task settings or overrides on the command line).
        for opt in ['ssh_common_args', binary + '_extra_args']:
            attr = getattr(self._play_context, opt, None)
            if attr is not None:
                args = self._split_args(attr)
                self._add_args("PlayContext set %s" % opt, args)
        # Check if ControlPersist is enabled and add a ControlPath if one hasn't
        # already been set.
        controlpersist, controlpath = self._persistence_controls(self._command)
        if controlpersist:
            self._persistent = True
            if not controlpath:
                cpdir = unfrackpath('$HOME/.ansible/cp')
                # The directory must exist and be writable.
                makedirs_safe(cpdir, 0o700)
                if not os.access(cpdir, os.W_OK):
                    raise AnsibleError("Cannot write to ControlPath %s" % cpdir)
                args = ("-o", "ControlPath={0}".format(
                    C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir))
                )
                self._add_args("found only ControlPersist; added ControlPath", args)
        ## Finally, we add any caller-supplied extras.
        if other_args:
            self._command += other_args
        return self._command
    def _send_initial_data(self, fh, in_data):
        '''
        Writes initial data to the stdin filehandle of the subprocess and closes
        it. (The handle must be closed; otherwise, for example, "sftp -b -" will
        just hang forever waiting for more commands.)

        Raises AnsibleConnectionFailure if the write or close fails, which
        usually means the remote end went away before accepting the data.
        '''
        self._display.debug('Sending initial data')
        try:
            fh.write(in_data)
            fh.close()
        except (OSError, IOError):
            raise AnsibleConnectionFailure('SSH Error: data could not be sent to the remote host. Make sure this host can be reached over ssh')
        self._display.debug('Sent initial data (%d bytes)' % len(in_data))
    # Used by _run() to kill processes on failures
    @staticmethod
    def _terminate_process(p):
        """ Terminate a process, ignoring errors (e.g. it already exited). """
        try:
            p.terminate()
        except (OSError, IOError):
            pass
    # This is separate from _run() because we need to do the same thing for stdout
    # and stderr.
    def _examine_output(self, source, state, chunk, sudoable):
        '''
        Takes a string, extracts complete lines from it, tests to see if they
        are a prompt, error message, etc., and sets appropriate flags in self.
        Prompt and success lines are removed.
        Returns the processed (i.e. possibly-edited) output and the unprocessed
        remainder (to be processed with the next chunk) as strings.
        '''
        output = []
        # splitlines(True) keeps the line terminators so the output can be
        # re-joined without losing newlines.
        for l in chunk.splitlines(True):
            suppress_output = False
            # self._display.debug("Examining line (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
            if self._play_context.prompt and self.check_password_prompt(l):
                self._display.debug("become_prompt: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
                self._flags['become_prompt'] = True
                suppress_output = True
            elif self._play_context.success_key and self.check_become_success(l):
                self._display.debug("become_success: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
                self._flags['become_success'] = True
                suppress_output = True
            elif sudoable and self.check_incorrect_password(l):
                self._display.debug("become_error: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
                self._flags['become_error'] = True
            elif sudoable and self.check_missing_password(l):
                self._display.debug("become_nopasswd_error: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
                self._flags['become_nopasswd_error'] = True
            if not suppress_output:
                output.append(l)
        # The chunk we read was most likely a series of complete lines, but just
        # in case the last line was incomplete (and not a prompt, which we would
        # have removed from the output), we retain it to be processed with the
        # next chunk.
        remainder = ''
        if output and not output[-1].endswith('\n'):
            remainder = output[-1]
            output = output[:-1]
        return ''.join(output), remainder
    def _run(self, cmd, in_data, sudoable=True):
        '''
        Starts the command and communicates with it until it ends.

        Returns a (returncode, stdout, stderr) tuple.  Drives a small state
        machine to negotiate privilege-escalation prompts before sending any
        pipelined input.
        '''
        # NOTE: map() returning a list is Python-2 behaviour; this line would
        # need list(map(...)) on Python 3.
        display_cmd = map(pipes.quote, cmd[:-1]) + [cmd[-1]]
        self._display.vvv('SSH: EXEC {0}'.format(' '.join(display_cmd)), host=self.host)
        # Start the given command. If we don't need to pipeline data, we can try
        # to use a pseudo-tty (ssh will have been invoked with -tt). If we are
        # pipelining data, or can't create a pty, we fall back to using plain
        # old pipes.
        p = None
        if not in_data:
            try:
                # Make sure stdin is a proper pty to avoid tcgetattr errors
                master, slave = pty.openpty()
                p = subprocess.Popen(cmd, stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                # bufsize 0 = unbuffered, so prompt responses are written
                # immediately.
                stdin = os.fdopen(master, 'w', 0)
                os.close(slave)
            except (OSError, IOError):
                p = None
        if not p:
            p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdin = p.stdin
        # If we are using SSH password authentication, write the password into
        # the pipe we opened in _build_command.
        if self._play_context.password:
            os.close(self.sshpass_pipe[0])
            os.write(self.sshpass_pipe[1], "{0}\n".format(self._play_context.password))
            os.close(self.sshpass_pipe[1])
        ## SSH state machine
        #
        # Now we read and accumulate output from the running process until it
        # exits. Depending on the circumstances, we may also need to write an
        # escalation password and/or pipelined input to the process.
        states = [
            'awaiting_prompt', 'awaiting_escalation', 'ready_to_send', 'awaiting_exit'
        ]
        # Are we requesting privilege escalation? Right now, we may be invoked
        # to execute sftp/scp with sudoable=True, but we can request escalation
        # only when using ssh. Otherwise we can send initial data straightaway.
        state = states.index('ready_to_send')
        if 'ssh' in cmd:
            if self._play_context.prompt:
                # We're requesting escalation with a password, so we have to
                # wait for a password prompt.
                state = states.index('awaiting_prompt')
                self._display.debug('Initial state: %s: %s' % (states[state], self._play_context.prompt))
            elif self._play_context.become and self._play_context.success_key:
                # We're requesting escalation without a password, so we have to
                # detect success/failure before sending any initial data.
                state = states.index('awaiting_escalation')
                self._display.debug('Initial state: %s: %s' % (states[state], self._play_context.success_key))
        # We store accumulated stdout and stderr output from the process here,
        # but strip any privilege escalation prompt/confirmation lines first.
        # Output is accumulated into tmp_*, complete lines are extracted into
        # an array, then checked and removed or copied to stdout or stderr. We
        # set any flags based on examining the output in self._flags.
        stdout = stderr = ''
        tmp_stdout = tmp_stderr = ''
        self._flags = dict(
            become_prompt=False, become_success=False,
            become_error=False, become_nopasswd_error=False
        )
        timeout = self._play_context.timeout
        rpipes = [p.stdout, p.stderr]
        # Non-blocking reads let us drain whatever is available after select().
        for fd in rpipes:
            fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
        # If we can send initial data without waiting for anything, we do so
        # before we call select.
        if states[state] == 'ready_to_send' and in_data:
            self._send_initial_data(stdin, in_data)
            state += 1
        while True:
            rfd, wfd, efd = select.select(rpipes, [], [], timeout)
            # We pay attention to timeouts only while negotiating a prompt.
            if not rfd:
                if state <= states.index('awaiting_escalation'):
                    self._terminate_process(p)
                    raise AnsibleError('Timeout (%ds) waiting for privilege escalation prompt: %s' % (timeout, stdout))
            # Read whatever output is available on stdout and stderr, and stop
            # listening to the pipe if it's been closed.
            if p.stdout in rfd:
                chunk = p.stdout.read()
                if chunk == '':
                    rpipes.remove(p.stdout)
                tmp_stdout += chunk
                self._display.debug("stdout chunk (state=%s):\n>>>%s<<<\n" % (state, chunk))
            if p.stderr in rfd:
                chunk = p.stderr.read()
                if chunk == '':
                    rpipes.remove(p.stderr)
                tmp_stderr += chunk
                self._display.debug("stderr chunk (state=%s):\n>>>%s<<<\n" % (state, chunk))
            # We examine the output line-by-line until we have negotiated any
            # privilege escalation prompt and subsequent success/error message.
            # Afterwards, we can accumulate output without looking at it.
            if state < states.index('ready_to_send'):
                if tmp_stdout:
                    output, unprocessed = self._examine_output('stdout', states[state], tmp_stdout, sudoable)
                    stdout += output
                    tmp_stdout = unprocessed
                if tmp_stderr:
                    output, unprocessed = self._examine_output('stderr', states[state], tmp_stderr, sudoable)
                    stderr += output
                    tmp_stderr = unprocessed
            else:
                stdout += tmp_stdout
                stderr += tmp_stderr
                tmp_stdout = tmp_stderr = ''
            # If we see a privilege escalation prompt, we send the password.
            if states[state] == 'awaiting_prompt' and self._flags['become_prompt']:
                self._display.debug('Sending become_pass in response to prompt')
                stdin.write(self._play_context.become_pass + '\n')
                self._flags['become_prompt'] = False
                state += 1
            # We've requested escalation (with or without a password), now we
            # wait for an error message or a successful escalation.
            if states[state] == 'awaiting_escalation':
                if self._flags['become_success']:
                    self._display.debug('Escalation succeeded')
                    self._flags['become_success'] = False
                    state += 1
                elif self._flags['become_error']:
                    self._display.debug('Escalation failed')
                    self._terminate_process(p)
                    self._flags['become_error'] = False
                    raise AnsibleError('Incorrect %s password' % self._play_context.become_method)
                elif self._flags['become_nopasswd_error']:
                    self._display.debug('Escalation requires password')
                    self._terminate_process(p)
                    self._flags['become_nopasswd_error'] = False
                    raise AnsibleError('Missing %s password' % self._play_context.become_method)
                elif self._flags['become_prompt']:
                    # This shouldn't happen, because we should see the "Sorry,
                    # try again" message first.
                    self._display.debug('Escalation prompt repeated')
                    self._terminate_process(p)
                    self._flags['become_prompt'] = False
                    raise AnsibleError('Incorrect %s password' % self._play_context.become_method)
            # Once we're sure that the privilege escalation prompt, if any, has
            # been dealt with, we can send any initial data and start waiting
            # for output.
            if states[state] == 'ready_to_send':
                if in_data:
                    self._send_initial_data(stdin, in_data)
                state += 1
            # Now we're awaiting_exit: has the child process exited? If it has,
            # and we've read all available output from it, we're done.
            if p.poll() is not None:
                if not rpipes or not rfd:
                    break
                # When ssh has ControlMaster (+ControlPath/Persist) enabled, the
                # first connection goes into the background and we never see EOF
                # on stderr. If we see EOF on stdout and the process has exited,
                # we're probably done. We call select again with a zero timeout,
                # just to make certain we don't miss anything that may have been
                # written to stderr between the time we called select() and when
                # we learned that the process had finished.
                if not p.stdout in rpipes:
                    timeout = 0
                    continue
            # If the process has not yet exited, but we've already read EOF from
            # its stdout and stderr (and thus removed both from rpipes), we can
            # just wait for it to exit.
            elif not rpipes:
                p.wait()
                break
            # Otherwise there may still be outstanding data to read.
        # close stdin after process is terminated and stdout/stderr are read
        # completely (see also issue #848)
        stdin.close()
        if C.HOST_KEY_CHECKING:
            # sshpass exits with 6 when it cannot answer a host-key prompt.
            if cmd[0] == "sshpass" and p.returncode == 6:
                raise AnsibleError('Using a SSH password instead of a key is not possible because Host Key checking is enabled and sshpass does not support this. Please add this host\'s fingerprint to your known_hosts file to manage this host.')
        controlpersisterror = 'Bad configuration option: ControlPersist' in stderr or 'unknown configuration option: ControlPersist' in stderr
        if p.returncode != 0 and controlpersisterror:
            raise AnsibleError('using -c ssh on certain older ssh versions may not support ControlPersist, set ANSIBLE_SSH_ARGS="" (or ssh_args in [ssh_connection] section of the config file) before running again')
        if p.returncode == 255 and in_data:
            raise AnsibleConnectionFailure('SSH Error: data could not be sent to the remote host. Make sure this host can be reached over ssh')
        return (p.returncode, stdout, stderr)
def _exec_command(self, cmd, in_data=None, sudoable=True):
''' run a command on the remote host '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
self._display.vvv("ESTABLISH SSH CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr)
# we can only use tty when we are not pipelining the modules. piping
# data into /usr/bin/python inside a tty automatically invokes the
# python interactive-mode but the modules are not compatible with the
# interactive-mode ("unexpected indent" mainly because of empty lines)
if in_data:
cmd = self._build_command('ssh', self.host, cmd)
else:
cmd = self._build_command('ssh', '-tt', self.host, cmd)
(returncode, stdout, stderr) = self._run(cmd, in_data, sudoable=sudoable)
return (returncode, stdout, stderr)
#
# Main public methods
#
def exec_command(self, *args, **kwargs):
"""
Wrapper around _exec_command to retry in the case of an ssh failure
Will retry if:
* an exception is caught
* ssh returns 255
Will not retry if
* remaining_tries is <2
* retries limit reached
"""
remaining_tries = int(C.ANSIBLE_SSH_RETRIES) + 1
cmd_summary = "%s..." % args[0]
for attempt in xrange(remaining_tries):
try:
return_tuple = self._exec_command(*args, **kwargs)
# 0 = success
# 1-254 = remote command return code
# 255 = failure from the ssh command itself
if return_tuple[0] != 255 or attempt == (remaining_tries - 1):
break
else:
raise AnsibleConnectionFailure("Failed to connect to the host via ssh.")
except (AnsibleConnectionFailure, Exception) as e:
if attempt == remaining_tries - 1:
raise e
else:
pause = 2 ** attempt - 1
if pause > 30:
pause = 30
if isinstance(e, AnsibleConnectionFailure):
msg = "ssh_retry: attempt: %d, ssh return code is 255. cmd (%s), pausing for %d seconds" % (attempt, cmd_summary, pause)
else:
msg = "ssh_retry: attempt: %d, caught exception(%s) from cmd (%s), pausing for %d seconds" % (attempt, e, cmd_summary, pause)
self._display.vv(msg)
time.sleep(pause)
continue
return return_tuple
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
super(Connection, self).put_file(in_path, out_path)
self._display.vvv("PUT {0} TO {1}".format(in_path, out_path), host=self.host)
if not os.path.exists(in_path):
raise AnsibleFileNotFound("file or module does not exist: {0}".format(in_path))
# scp and sftp require square brackets for IPv6 addresses, but
# accept them for hostnames and IPv4 addresses too.
host = '[%s]' % self.host
if C.DEFAULT_SCP_IF_SSH:
cmd = self._build_command('scp', in_path, '{0}:{1}'.format(host, pipes.quote(out_path)))
in_data = None
else:
cmd = self._build_command('sftp', host)
in_data = "put {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
(returncode, stdout, stderr) = self._run(cmd, in_data)
if returncode != 0:
raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}".format(out_path, stdout, stderr))
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
super(Connection, self).fetch_file(in_path, out_path)
self._display.vvv("FETCH {0} TO {1}".format(in_path, out_path), host=self.host)
# scp and sftp require square brackets for IPv6 addresses, but
# accept them for hostnames and IPv4 addresses too.
host = '[%s]' % self.host
if C.DEFAULT_SCP_IF_SSH:
cmd = self._build_command('scp', '{0}:{1}'.format(host, pipes.quote(in_path)), out_path)
in_data = None
else:
cmd = self._build_command('sftp', host)
in_data = "get {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
(returncode, stdout, stderr) = self._run(cmd, in_data)
if returncode != 0:
raise AnsibleError("failed to transfer file from {0}:\n{1}\n{2}".format(in_path, stdout, stderr))
    def close(self):
        # If we have a persistent ssh connection (ControlPersist), we can ask it
        # to stop listening. Otherwise, there's nothing to do here.
        # TODO: reenable once winrm issues are fixed
        # temporarily disabled as we are forced to currently close connections after every task because of winrm
        # if self._connected and self._persistent:
        #     cmd = self._build_command('ssh', '-O', 'stop', self.host)
        #     p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        #     stdout, stderr = p.communicate()
        self._connected = False
| gpl-3.0 |
jakesyl/pychess | lib/pychess/Utils/lutils/strateval.py | 20 | 25089 | """
This module differs from leval in that it is not optimized for speed.
It checks differences between last and current board, and returns not
scores, but strings describing the differences.
Can be used for commenting on board changes.
"""
from __future__ import absolute_import
from .ldata import *
from pychess.Utils.const import *
from pychess.Utils.lutils.attack import staticExchangeEvaluate, getAttacks, \
defends
from pychess.Utils.lutils.lmove import TCORD, FCORD, FLAG, PROMOTE_PIECE, toSAN
from pychess.Utils.lutils.lmovegen import genCaptures, genAllMoves, newMove
from pychess.Utils.lutils.validator import validateMove
from pychess.Utils.repr import reprColor, reprPiece
from . import leval
def join(items):
    """Join strings in natural-language style.

    A single item is returned unchanged; otherwise the final two items are
    glued with a localized "and" and any earlier items are comma-separated.
    """
    if len(items) == 1:
        return items[0]
    tail = "%s %s %s" % (items[-2], _("and"), items[-1])
    if len(items) <= 2:
        return tail
    return ", ".join(items[:-2] + [tail])
#
# Functions can be of types:
# * Final: Will be shown alone: "mates", "draws"
# * Moves (s): Will always be shown: "put into *"
# * Prefix: Will always be shown: "castles", "promotes"
# * Attack: Will always be shown: "threaten", "preassures", "defendes"
# * Simple: (s) Max one will be shown: "develops", "activity"
# * State: (s) Will always be shown: "new *"
# * Tip: (s) Will sometimes be shown: "pawn storm", "cramped position"
#
def final_status (model, ply, phase):
    """Yield a terminal-position comment ("draws"/"mates"), only at game end."""
    # Only the very last ply of the game can carry a final status.
    if ply != model.ply:
        return
    if model.status == DRAW:
        yield _("draws")
    elif model.status == WHITEWON or model.status == BLACKWON:
        yield _("mates")
def offencive_moves_check (model, ply, phase):
    """Comment when the last move gives check."""
    board = model.getBoardAtPly(ply).board
    if board.isChecked():
        yield _("puts opponent in check")
def defencive_moves_safety (model, ply, phase):
    """Comment on king-safety improvements after a king move."""
    board = model.getBoardAtPly(ply).board
    oldboard = model.getBoardAtPly(ply-1).board
    lastmove = model.getMoveAtPly(ply-1).move
    # Only judged when the piece that just moved is the king.
    if board.arBoard[TCORD(lastmove)] != KING:
        return
    color = oldboard.color
    opcolor = 1-color
    king_delta = leval.evalKing(board, color, phase) - leval.evalKing(oldboard, color, phase)
    # PyChess points tropism to queen for phase <= 3. Thus we force a high phase.
    tropism_delta = leval.evalKingTropism(board, opcolor, 10) - leval.evalKingTropism(oldboard, opcolor, 10)
    # Notice that tropism was negative, hence the subtraction.
    safety_score = king_delta - tropism_delta/2
    if safety_score > 35:
        yield _("improves king safety")
    elif safety_score > 15:
        yield _("slightly improves king safety")
def offencive_moves_rook (model, ply, phase):
    """Comment when a rook lands on an open or half-open file.

    Castling is treated as a rook move too: the rook's origin and
    destination squares are derived from the castling flag.
    """
    move = model.getMoveAtPly(ply-1).move
    fcord = FCORD(move)
    tcord = TCORD(move)
    board = model.getBoardAtPly(ply).board
    color = 1-board.color
    opcolor = 1-color
    # We also detect rook-to-open castlings
    if board.arBoard[tcord] == KING:
        if FLAG(move) == QUEEN_CASTLE:
            fcord = board.ini_rooks[color][0]
            tcord = tcord+1
        elif FLAG(move) == KING_CASTLE:
            fcord = board.ini_rooks[color][1]
            tcord = tcord-1
    if board.arBoard[tcord] != ROOK:
        return
    pawns = board.boards[color][PAWN]
    oppawns = board.boards[opcolor][PAWN]
    # BUGFIX: use the (possibly castling-adjusted) rook origin 'fcord' rather
    # than the raw FCORD(move), which for castling is the king's square.
    ffile = fileBits[FILE(fcord)]
    tfile = fileBits[FILE(tcord)]
    if ffile & pawns and not tfile & pawns and bin(pawns).count("1") >= 3:
        if not tfile & oppawns:
            yield _("moves a rook to an open file")
        else:
            yield _("moves a rook to a half-open file")
def offencive_moves_fianchetto (model, ply, phase):
    """Comment when a bishop settles on a fianchetto square (g2/b2/g7/b7)
    while the matching castling right is still available."""
    board = model.getBoardAtPly(ply).board
    tcord = TCORD(model.getMoveAtPly(ply-1).move)
    movingcolor = 1-board.color
    if movingcolor == WHITE:
        candidates = ((W_OO, G2, "g2"), (W_OOO, B2, "b2"))
    else:
        candidates = ((B_OO, G7, "g7"), (B_OOO, B7, "b7"))
    for right, cord, name in candidates:
        if board.castling & right and tcord == cord:
            yield _("moves bishop into fianchetto: %s") % name
            return
def prefix_type (model, ply, phase):
    """Describe special move types of the last move: castling or promotion."""
    flag = FLAG(model.getMoveAtPly(ply-1).move)
    # Castle flags and promotion flags are disjoint, so order is irrelevant.
    if flag in (KING_CASTLE, QUEEN_CASTLE):
        yield _("castles")
    elif flag in PROMOTIONS:
        yield _("promotes a Pawn to a %s") % reprPiece[PROMOTE_PIECE(flag)]
def attack_type (model, ply, phase):
    """Describe the capture nature of the last move: recapture, sacrifice,
    exchange or plain capture, judged by static exchange evaluation."""
    # We set bishop value down to knight value, as it is what most people expect
    bishopBackup = PIECE_VALUES[BISHOP]
    PIECE_VALUES[BISHOP] = PIECE_VALUES[KNIGHT]
    try:
        board = model.getBoardAtPly(ply).board
        oldboard = model.getBoardAtPly(ply-1).board
        if ply - model.lowply >= 2:
            oldmove = model.getMoveAtPly(ply-2).move
            oldboard3 = model.getBoardAtPly(ply-2).board
        else:
            oldmove = None
        move = model.getMoveAtPly(ply-1).move
        tcord = TCORD(move)
        if oldboard.arBoard[tcord] != EMPTY:
            # In Fischer random chess a castling king can land on its own
            # rook's square, which is not a capture.
            if not (board.variant == FISCHERRANDOMCHESS and
                    FLAG(move) in (KING_CASTLE, QUEEN_CASTLE)):
                if oldmove and oldboard3.arBoard[TCORD(oldmove)] != EMPTY and \
                        TCORD(oldmove) == tcord:
                    yield _("takes back material")
                else:
                    see = staticExchangeEvaluate(oldboard, move)
                    if see < 0:
                        yield _("sacrifies material")
                    elif see == 0:
                        yield _("exchanges material")
                    elif see > 0:
                        yield _("captures material")
    finally:
        # BUGFIX: restore the bishop value even when the generator is closed
        # before exhaustion or an evaluation raises; previously an early exit
        # left the shared PIECE_VALUES table permanently altered.
        PIECE_VALUES[BISHOP] = bishopBackup
def defencive_moves_tactic (model, ply, phase):
    """Comment on new threats, increased pressure, new defenses, and rescues
    created by the last move, using static exchange evaluation (SEE)."""
    # ------------------------------------------------------------------------ #
    # Test if we threat something, or at least put more pressure on it         #
    # ------------------------------------------------------------------------ #
    # We set bishop value down to knight value, as it is what most people expect
    # NOTE(review): the value is only restored at the end of the generator
    # body — an early close() leaves PIECE_VALUES altered; verify callers
    # always exhaust this generator.
    bishopBackup = PIECE_VALUES[BISHOP]
    PIECE_VALUES[BISHOP] = PIECE_VALUES[KNIGHT]
    board = model.getBoardAtPly(ply).board
    oldboard = model.getBoardAtPly(ply-1).board
    move = model.getMoveAtPly(ply-1).move
    fcord = FCORD(move)
    tcord = TCORD(move)
    piece = board.arBoard[tcord]
    found_threatens = []
    found_increases = []
    # What do we attack now?  Flip the side to move so genCaptures produces
    # our own capture moves.
    board.setColor(1-board.color)
    for ncap in genCaptures(board):
        # getCaptures also generate promotions
        if FLAG(ncap) in PROMOTIONS:
            continue
        # We are only interested in the attacks of the piece we just moved
        if FCORD(ncap) != TCORD (move):
            continue
        # We don't want to move back
        if TCORD(ncap) == FCORD(move):
            continue
        # We don't thread the king. We check him! (in another function)
        if board.arBoard[TCORD(ncap)] == KING:
            continue
        # If we also was able to attack that cord last time, we don't care
        if validateMove(oldboard, newMove(FCORD(move), TCORD(ncap))):
            continue
        # Test if we threats our enemy, at least more than before
        see0 = staticExchangeEvaluate(oldboard, TCORD(ncap), 1-oldboard.color)
        see1 = staticExchangeEvaluate(board, TCORD(ncap), 1-oldboard.color)
        if see1 > see0:
            # If a new winning capture has been created
            if see1 > 0:
                # Find the easiest attack (cheapest attacker wins ties)
                attacks = getAttacks (board, TCORD(ncap), board.color)
                v, cord = min((PIECE_VALUES[board.arBoard[fc]],fc)
                              for fc in iterBits(attacks))
                easiestAttack = newMove(cord, TCORD(ncap))
                found_threatens.append(toSAN(board,easiestAttack, True))
            # Even though we might not yet be strong enough, we might still
            # have strengthened another friendly attack
            else:
                found_increases.append(reprCord[TCORD(ncap)])
    # Restore the side to move that we flipped above.
    board.setColor(1-board.color)
    # -------------------------------------------------------------------- #
    # Test if we defend one of our pieces                                  #
    # -------------------------------------------------------------------- #
    found_defends = []
    # Test which pieces were under attack
    used = []
    for ncap in genCaptures(board):
        # getCaptures also generate promotions
        if FLAG(ncap) in PROMOTIONS:
            continue
        # We don't want to know about the same cord more than once
        if TCORD(ncap) in used:
            continue
        used.append(TCORD(ncap))
        # If the attack was poining on the piece we just moved, we ignore it
        if TCORD(ncap) == FCORD(move) or TCORD(ncap) == TCORD(move):
            continue
        # If we were already defending the piece, we don't send a new
        # message
        if defends(oldboard, FCORD(move), TCORD(ncap)):
            continue
        # If the attack was not strong, we ignore it
        see = staticExchangeEvaluate(oldboard, ncap)
        if see < 0: continue
        v = defends(board, TCORD(move), TCORD(ncap))
        # If the defend didn't help, it doesn't matter. Like defending a
        # bishop, threatened by a pawn, with a queen.
        # But on the other hand - it might still be a defend...
        # newsee = staticExchangeEvaluate(board, ncap)
        # if newsee <= see: continue
        if v:
            found_defends.append(reprCord[TCORD(ncap)])
    # ------------------------------------------------------------------------ #
    # Test if we are rescuing an otherwise exposed piece                       #
    # ------------------------------------------------------------------------ #
    # Rescuing is only an option, if our own move wasn't an attack
    if oldboard.arBoard[tcord] == EMPTY:
        see0 = staticExchangeEvaluate(oldboard, fcord, oldboard.color)
        see1 = staticExchangeEvaluate(board, tcord, oldboard.color)
        if see1 > see0 and see1 > 0:
            yield _("rescues a %s") % reprPiece[board.arBoard[tcord]].lower()
    if found_threatens:
        yield _("threatens to win material by %s") % join(found_threatens)
    if found_increases:
        yield _("increases the pressure on %s") % join(found_increases)
    if found_defends:
        yield _("defends %s") % join(found_defends)
    PIECE_VALUES[BISHOP] = bishopBackup
def offencive_moves_pin (model, ply, phase):
    """Comment when the moved slider (bishop/rook/queen) creates a new pin
    against a more valuable enemy piece."""
    board = model.getBoardAtPly(ply).board
    move = model.getMoveAtPly(ply-1).move
    fcord = FCORD(move)
    tcord = TCORD(move)
    piece = board.arBoard[tcord]
    # Collect only rays that are newly covered from the destination square
    # (i.e. not already covered from the origin square).
    ray = 0
    if piece in (BISHOP, QUEEN):
        ray |= (ray45[tcord] | ray135[tcord]) & ~(ray45[fcord] | ray135[fcord])
    if piece in (ROOK, QUEEN):
        ray |= (ray00[tcord] | ray90[tcord]) & ~(ray00[fcord] | ray90[fcord])
    if ray:
        for c in iterBits(ray & board.friends[board.color]):
            # We don't pin on pieces that are less worth than us
            if not PIECE_VALUES[piece] < PIECE_VALUES[board.arBoard[c]]:
                continue
            # There should be zero friendly pieces in between
            # (note: 'ray' is reused here as the cord-to-cord ray mask)
            ray = fromToRay[tcord][c]
            if ray & board.friends[1-board.color]:
                continue
            # There should be exactly one opponent piece in between
            op = clearBit(ray & board.friends[board.color], c)
            if bin(op).count("1") != 1:
                continue
            # The king can't be pinned
            pinned = lastBit(op)
            oppiece = board.arBoard[pinned]
            if oppiece == KING:
                continue
            # Yield
            yield _("pins an enemy %(oppiece)s on the %(piece)s at %(cord)s") % {
                'oppiece': reprPiece[oppiece].lower(),
                'piece': reprPiece[board.arBoard[c]].lower(),
                'cord': reprCord[c]}
def state_outpost (model, ply, phase):
    """Yield (score, text) pairs when a minor piece newly occupies an outpost
    square that enemy pawns can no longer challenge."""
    if phase >= 6:
        # Doesn't make sense in endgame
        return
    board = model.getBoardAtPly(ply).board
    oldboard = model.getBoardAtPly(ply-1).board
    color = 1-board.color
    opcolor = 1-color
    wpawns = board.boards[WHITE][PAWN]
    oldwpawns = oldboard.boards[WHITE][PAWN]
    bpawns = board.boards[BLACK][PAWN]
    oldbpawns = oldboard.boards[BLACK][PAWN]
    # "Pieces" here means the minor pieces only (bishops and knights).
    wpieces = board.boards[WHITE][BISHOP] | board.boards[WHITE][KNIGHT]
    oldwpieces = oldboard.boards[WHITE][BISHOP] | oldboard.boards[WHITE][KNIGHT]
    bpieces = board.boards[BLACK][BISHOP] | board.boards[BLACK][KNIGHT]
    oldbpieces = oldboard.boards[BLACK][BISHOP] | oldboard.boards[BLACK][KNIGHT]
    for cord in iterBits(wpieces):
        sides = isolaniMask[FILE(cord)]
        front = passedPawnMask[WHITE][cord]
        # Report either a piece newly arriving on the outpost, or an existing
        # piece whose square just became an outpost (enemy pawn left).
        if outpost[WHITE][cord] and not bpawns & sides & front and \
           (not oldwpieces & bitPosArray[cord] or \
            oldbpawns & sides & front):
            yield 35, _("White has a new piece in outpost: %s") % reprCord[cord]
    for cord in iterBits(bpieces):
        sides = isolaniMask[FILE(cord)]
        front = passedPawnMask[BLACK][cord]
        if outpost[BLACK][cord] and not wpawns & sides & front and \
           (not oldbpieces & bitPosArray[cord] or \
            oldwpawns & sides & front):
            yield 35, _("Black has a new piece in outpost: %s") % reprCord[cord]
def state_pawn (model, ply, phase):
    """Yield (score, text) pairs for pawn-structure changes caused by the
    move that led to *ply*: new passed pawns, new double pawns, new
    isolated pawns, and a completed stonewall formation."""
    board = model.getBoardAtPly(ply).board
    oldboard = model.getBoardAtPly(ply-1).board
    # The side that just moved.
    color = 1-board.color
    opcolor = 1-color
    move = model.getMoveAtPly(ply-1).move

    pawns = board.boards[color][PAWN]
    oppawns = board.boards[opcolor][PAWN]
    oldpawns = oldboard.boards[color][PAWN]
    oldoppawns = oldboard.boards[opcolor][PAWN]

    # Passed pawns
    for cord in iterBits(pawns):
        if not oppawns & passedPawnMask[color][cord]:
            # The squares straight ahead of the pawn up to the back rank.
            if color == WHITE:
                frontCords = fromToRay[cord][cord|56]
            else: frontCords = fromToRay[cord][cord&7]
            if frontCords & pawns:
                continue

            # Was this a passed pawn before?
            if oldpawns & bitPosArray[cord] and \
                    not oldoppawns & passedPawnMask[color][cord] and \
                    not frontCords & oldpawns:
                continue

            # Is this just a passed pawn that has been moved?
            if TCORD(move) == cord:
                frontCords |= bitPosArray[cord]
                if not frontCords & oldpawns and \
                        not oldoppawns & passedPawnMask[color][FCORD(move)]:
                    continue

            # cord>>3 is the pawn's rank; the bonus grows with game phase.
            score = (passedScores[color][cord>>3] * phase)
            yield score, _("%(color)s has a new passed pawn on %(cord)s") % {
                'color': reprColor[color], 'cord': reprCord[cord]}

    # Double pawns
    found_doubles = []
    found_halfopen_doubles = []
    found_white_isolates = []
    found_black_isolates = []

    for file in range(8):
        bits = fileBits[file]
        count = bin(pawns & bits).count("1")
        oldcount = bin(oldpawns & bits).count("1")
        opcount = bin(oppawns & bits).count("1")
        oldopcount = bin(oldoppawns & bits).count("1")

        # Single pawn -> double pawns
        if count > oldcount >= 1:
            if not opcount:
                found_halfopen_doubles.append(reprFile[file])
            else: found_doubles.append(reprFile[file])
        # Closed file double pawn -> half-open file double pawn
        elif count > 1 and opcount == 0 and oldopcount > 0:
            found_halfopen_doubles.append(reprFile[file])

        # Isolated pawns
        # Map "ours/theirs" back onto concrete white/black bitboards for
        # the isolani tests below.
        if color == WHITE:
            wpawns = pawns
            oldwpawns = oldpawns
            bpawns = oppawns
            oldbpawns = oldoppawns
        else:
            bpawns = pawns
            oldbpawns = oldpawns
            wpawns = oppawns
            oldwpawns = oldoppawns
        # A pawn is isolated when no friendly pawn stands on a neighbour
        # file; only report files that newly became isolated.
        if wpawns & bits and not wpawns & isolaniMask[file] and \
                (not oldwpawns & bits or oldwpawns & isolaniMask[file]):
            found_white_isolates.append(reprFile[file])
        if bpawns & bits and not bpawns & isolaniMask[file] and \
                (not oldbpawns & bits or oldbpawns & isolaniMask[file]):
            found_black_isolates.append(reprFile[file])

    # We need to take care of 'worstcases' like: "got new double pawns in the a
    # file, in the half-open b, c and d files and in the open e and f files"
    doubles_count = len(found_doubles) + len(found_halfopen_doubles)
    if doubles_count > 0:
        parts = []
        for type_, list_ in (("", found_doubles),
                             (_("half-open")+" ", found_halfopen_doubles)):
            if len(list_) == 1:
                parts.append(_("in the %(x)s%(y)s file") % {'x': type_, 'y': list_[0]})
            elif len(list_) >= 2:
                parts.append(_("in the %(x)s%(y)s files") % {'x': type_, 'y': join(list_)})
        if doubles_count == 1:
            s = _("%(color)s got a double pawn %(place)s")
        else: s = _("%(color)s got new double pawns %(place)s")
        yield (8+phase)*2*doubles_count, s % {'color': reprColor[color], 'place': join(parts)}

    for (color_, list_) in ((WHITE, found_white_isolates),
                            (BLACK, found_black_isolates)):
        if list_:
            yield 20*len(list_), ngettext("%(color)s got an isolated pawn in the %(x)s file",
                                          "%(color)s got isolated pawns in the %(x)s files",
                                          len(list_)) % {'color': reprColor[color_], 'x': join(list_)}

    # Stone wall
    if stonewall[color] & pawns == stonewall[color] and \
            stonewall[color] & oldpawns != stonewall[color]:
        yield 10, _("%s moves pawns into stonewall formation") % reprColor[color]
def state_destroysCastling (model, ply, phase):
    """ Does the move destroy the castling ability of the opponent """
    # A castling move obviously changes castling rights; nobody cares
    # about reporting that, so skip it.
    if FLAG(model.getMoveAtPly(ply-1).move) in (QUEEN_CASTLE, KING_CASTLE):
        return

    before = model.getBoardAtPly(ply-1).board.castling
    after = model.getBoardAtPly(ply).board.castling

    # Check white first, then black, exactly as the per-side rights flags
    # describe them.
    for side, queenFlag, kingFlag in ((WHITE, W_OOO, W_OO),
                                      (BLACK, B_OOO, B_OO)):
        lostQueenside = before & queenFlag and not after & queenFlag
        lostKingside = before & kingFlag and not after & kingFlag
        if lostQueenside:
            if lostKingside:
                yield 900/phase, _("%s can no longer castle") % reprColor[side]
            else:
                yield 400/phase, _("%s can no longer castle in queenside") % reprColor[side]
        elif lostKingside:
            yield 500/phase, _("%s can no longer castle in kingside") % reprColor[side]
def state_trappedBishops (model, ply, phase):
    """ Check for bishops trapped at A2/H2/A7/H7 """
    board = model.getBoardAtPly(ply).board
    oldboard = model.getBoardAtPly(ply-1).board
    opcolor = board.color
    color = 1-opcolor
    move = model.getMoveAtPly(ply-1).move
    tcord = TCORD(move)

    # Only a pawn is able to trap a bishop
    if board.arBoard[tcord] != PAWN:
        return

    # A pawn arriving on one of these four squares can shut a bishop into
    # the corner square just behind it.
    trapSquares = {B3: A2, G3: H2, B6: A7, G6: H7}
    if tcord not in trapSquares:
        return
    cord = trapSquares[tcord]

    s = leval.evalTrappedBishops (board, opcolor)
    olds = leval.evalTrappedBishops (oldboard, opcolor)

    # We have got more points -> We have trapped a bishop
    if s > olds:
        yield 300/phase, _("%(opcolor)s has a new trapped bishop on %(cord)s") % {
            'opcolor': reprColor[opcolor], 'cord': reprCord[cord]}
def simple_tropism (model, ply, phase):
    """Yield (score, text) for a move that developed a piece or brought it
    closer to the enemy king (king tropism); pawn moves get a small fixed
    development bonus instead."""
    board = model.getBoardAtPly(ply).board
    oldboard = model.getBoardAtPly(ply-1).board
    color = oldboard.color
    move = model.getMoveAtPly(ply-1).move
    fcord = FCORD(move)
    tcord = TCORD(move)

    arBoard = board.arBoard
    if arBoard[tcord] != PAWN:
        # Piece move: compare king tropism before and after the move.
        score = leval.evalKingTropism(board, color, phase)
        oldscore = leval.evalKingTropism(oldboard, color, phase)
    else:
        # Pawn move: fixed bonus depending on whether it left rank 2/3
        # (seen from the mover's side).
        if color == WHITE:
            rank23 = brank67[BLACK]
        else: rank23 = brank67[WHITE]
        if bitPosArray[fcord] & rank23:
            yield 2, _("develops a pawn: %s") % reprCord[tcord]
        else: yield 1, _("brings a pawn closer to the backrow: %s") % \
                reprCord[tcord]
        return

    king = board.kings[color]
    opking = board.kings[1-color]

    if score > oldscore:
        # in FISCHERRANDOMCHESS unusual casting case the tcord is
        # the involved rook's position, not the king's destination!
        flag = move >> 12
        if flag in (KING_CASTLE, QUEEN_CASTLE):
            piece = KING
        else:
            piece = arBoard[tcord]

        # In the endgame (phase >= 5), or when the piece moved towards the
        # enemy king, call it an attack; otherwise call it development.
        if phase >= 5 or distance[piece][fcord][opking] < \
                distance[piece][fcord][king]:
            yield score-oldscore, _("brings a %(piece)s closer to enemy king: %(cord)s") % {
                'piece': reprPiece[piece], 'cord': reprCord[tcord]}
        else:
            yield (score-oldscore)*2, _("develops a %(piece)s: %(cord)s") % {
                'piece': reprPiece[piece].lower(), 'cord': reprCord[tcord]}
def simple_activity (model, ply, phase):
    """Yield a bonus when the moved piece has more legal moves from its new
    square than it had from its old one."""
    board = model.getBoardAtPly(ply).board
    oldboard = model.getBoardAtPly(ply-1).board
    color = 1-board.color
    move = model.getMoveAtPly(ply-1).move
    fcord = FCORD(move)
    tcord = TCORD(move)

    # Temporarily flip the side to move so genAllMoves generates moves for
    # the side that just moved; restore the color afterwards.
    board.setColor(1-board.color)
    moves = len([m for m in genAllMoves(board) if FCORD(m) == tcord])
    board.setColor(1-board.color)
    oldmoves = len([m for m in genAllMoves(oldboard) if FCORD(m) == fcord])

    if moves > oldmoves:
        # NOTE(review): '/' here is integer division under Python 2 —
        # confirm intended behavior if this file is ever ported to Python 3.
        yield (moves-oldmoves)/2, _("places a %(piece)s more active: %(cord)s") % {
            'piece': reprPiece[board.arBoard[tcord]].lower(), 'cord': reprCord[tcord]}
def tip_pawnStorm (model, ply, phase):
    """ If players are castled in different directions we should storm in
        opponent side """
    if phase >= 6:
        # We don't use this in endgame
        return

    board = model.getBoardAtPly(ply).board

    #if not board.hasCastled[WHITE] or not board.hasCastled[BLACK]:
    #    # Only applies after castling for both sides
    #    return

    wking = board.boards[WHITE][KING]
    bking = board.boards[BLACK][KING]
    # Pawn counts on each wing ('left'/'right' are file masks defined
    # elsewhere in this module).
    wleft = bin(board.boards[WHITE][PAWN] & left).count("1")
    wright = bin(board.boards[WHITE][PAWN] & right).count("1")
    bleft = bin(board.boards[BLACK][PAWN] & left).count("1")
    bright = bin(board.boards[BLACK][PAWN] & right).count("1")

    if wking & left and bking & right:
        # White storms the right wing (towards the black king); black
        # storms the left wing (towards the white king).
        if wright > bright:
            yield (wright+3-bright)*10, _("White should do pawn storm in right")
        elif bleft > wleft:
            # BUGFIX: score black's storm with the pawn counts of the wing
            # actually stormed (left); previously used bright/wright.
            yield (bleft+3-wleft)*10, _("Black should do pawn storm in left")

    if wking & right and bking & left:
        if wleft > bleft:
            yield (wleft+3-bleft)*10, _("White should do pawn storm in left")
        # NOTE(review): this mirrored branch uses 'if' where the one above
        # uses 'elif' — possibly intentional; left unchanged.
        if bright > wright:
            # BUGFIX: score black's storm with the right-wing pawn counts;
            # previously used bleft/wleft.
            yield (bright+3-wright)*10, _("Black should do pawn storm in right")
def tip_mobility (model, ply, phase):
    """Yield a hint when one side's pieces are much more mobile than the
    other's (i.e. the other side has a cramped position)."""
    board = model.getBoardAtPly(ply).board
    colorBackup = board.color

    # People need a chance to get developed
    #if model.ply < 16:
    #    return

    # Count 'safe' piece moves (knight..queen) into the opponent's half
    # (brank48) that don't lose material per static exchange evaluation.
    board.setColor(WHITE)
    wmoves = len([move for move in genAllMoves(board) if \
            KNIGHT <= board.arBoard[FCORD(move)] <= QUEEN and \
            bitPosArray[TCORD(move)] & brank48[WHITE] and \
            staticExchangeEvaluate(board, move) >= 0])
    board.setColor(BLACK)
    bmoves = len([move for move in genAllMoves(board) if \
            KNIGHT <= board.arBoard[FCORD(move)] <= QUEEN and \
            bitPosArray[TCORD(move)] & brank48[BLACK] and \
            staticExchangeEvaluate(board, move) >= 0])
    board.setColor(colorBackup)

    if wmoves-phase >= (bmoves+1)*7:
        yield wmoves-bmoves, _("Black has a rather cramped position")
    elif wmoves-phase >= (bmoves+1)*3:
        yield wmoves-bmoves, _("Black has a slightly cramped position")
    # NOTE(review): the two branches below also yield wmoves-bmoves, which
    # is negative when white is cramped — possibly bmoves-wmoves was
    # intended; left unchanged.
    elif bmoves-phase >= (wmoves+1)*7:
        yield wmoves-bmoves, _("White has a rather cramped position")
    elif bmoves-phase >= (wmoves+1)*3:
        yield wmoves-bmoves, _("White has a slightly cramped position")
| gpl-3.0 |
amir-qayyum-khan/edx-platform | common/djangoapps/edxmako/request_context.py | 25 | 2455 | # Copyright (c) 2008 Mikeal Rogers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file has been modified by edX.org
"""
Methods for creating RequestContext for using with Mako templates.
"""
from django.conf import settings
from django.template import RequestContext
from django.template.context import _builtin_context_processors
from django.utils.module_loading import import_string
from util.request import safe_get_host
from crum import get_current_request
import request_cache
def get_template_context_processors():
    """
    Returns the context processors defined in settings.TEMPLATES.
    """
    # Start from Django's built-ins, append the processors configured for
    # the default template engine, then resolve every dotted path.
    paths = list(_builtin_context_processors)
    paths += settings.DEFAULT_TEMPLATE_ENGINE['OPTIONS']['context_processors']
    return tuple(import_string(path) for path in paths)
def get_template_request_context(request=None):
    """
    Returns the template processing context to use for the current request,
    or returns None if there is not a current request.
    """
    if request is None:
        request = get_current_request()

    if request is None:
        return None

    # Cache the built context per-request so repeated template renders
    # during one request don't re-run every context processor.
    request_cache_dict = request_cache.get_cache('edxmako')
    cache_key = "request_context"
    if cache_key in request_cache_dict:
        return request_cache_dict[cache_key]

    context = RequestContext(request)

    context['is_secure'] = request.is_secure()
    context['site'] = safe_get_host(request)

    # This used to happen when a RequestContext object was initialized but was
    # moved to a different part of the logic when template engines were introduced.
    # Since we are not using template engines we do this here.
    # https://github.com/django/django/commit/37505b6397058bcc3460f23d48a7de9641cd6ef0
    for processor in get_template_context_processors():
        context.update(processor(request))

    request_cache_dict[cache_key] = context

    return context
| agpl-3.0 |
akhuia/Capstone_SeaFlow | Code/Step 2 - For 50 files/Step_PCA_New.py | 1 | 1871 | import glob
import os.path
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pprint
def CreateList(Locale):
    """Return a list of filesystem paths matching the glob pattern *Locale*."""
    # glob.glob already yields the matches as a list, so there is no need
    # to accumulate them by hand.
    return list(glob.glob(Locale))
def Run_PCA(f):
    """Project the CSV file *f* onto its top two principal components and
    scatter-plot the result.

    Returns a tuple: (the two leading eigenpairs concatenated as
    (value1, vector1, value2, vector2), a short label derived from the
    filename).
    """
    FigFile = f[:-3]+"png"
    # Files with "Tok" in the name are drawn in red, everything else blue.
    if FigFile.find("Tok") == -1:
        col = "blue"
    else:
        col = "red"
    source = pd.read_csv(f)
    # Center the data before computing the covariance matrix.
    newsource = source - source.mean()
    datacov = newsource.cov()
    eig_val_cov, eig_vec_cov = np.linalg.eig(datacov)
    eig_pairs = [(np.abs(eig_val_cov[i]), eig_vec_cov[:, i]) for i in range(len(eig_val_cov))]
    # BUGFIX: sort by eigenvalue explicitly. The previous plain
    # eig_pairs.sort(); eig_pairs.reverse() falls back to comparing the
    # numpy eigenvector arrays whenever two eigenvalues tie, which raises
    # a ValueError (truth value of an array is ambiguous).
    eig_pairs.sort(key=lambda pair: pair[0], reverse=True)
    # NOTE: reshape(3, 1) assumes the CSV has exactly three columns —
    # inherited from the original code; confirm against the input files.
    matrix_w = np.hstack((eig_pairs[0][1].reshape(3, 1), eig_pairs[1][1].reshape(3, 1)))
    # BUGFIX/compat: DataFrame.as_matrix() was removed in pandas 1.0;
    # .values is the long-supported equivalent and works on old pandas too.
    transformed = newsource.values.dot(matrix_w)
    plt.plot(transformed[0:len(source), 0], transformed[0:len(source), 1],
             'o', markersize=7, color=col, alpha=0.5, label='class1')
    return ((eig_pairs[0]+eig_pairs[1]), f[f.find("armb"):])
def main():
    """Run PCA on every file in the data directory and print a summary
    table of the two leading eigenpairs per file."""
    Images = []
    MatrixDict = {}
    MatrixDict['File'] = []
    MatrixDict['Eigenvalue1'] = []
    MatrixDict['Eigenvector1'] = []
    MatrixDict['Eigenvalue2'] = []
    MatrixDict['Eigenvector2'] = []
    # NOTE(review): hard-coded, machine-specific input directory.
    # BUGFIX: use a raw string — in Python 3 the old "C:\Users..." literal
    # is a SyntaxError because \U starts a unicode escape. The raw string
    # has the identical value under Python 2 as well.
    path = r"C:\Users\NYU\SeaFlow2\*"
    FileList = CreateList(path)
    for i in FileList:
        Images.append(Run_PCA(i))
    # Each entry of Images is ((value1, vector1, value2, vector2), label).
    for i, j in enumerate(Images):
        MatrixDict['File'].append(Images[i][1])
        MatrixDict['Eigenvalue1'].append(Images[i][0][0])
        MatrixDict['Eigenvector1'].append(Images[i][0][1])
        MatrixDict['Eigenvalue2'].append(Images[i][0][2])
        MatrixDict['Eigenvector2'].append(Images[i][0][3])
    FinalMatrix = pd.DataFrame(MatrixDict, columns=['File', 'Eigenvalue1', 'Eigenvalue2', 'Eigenvector1', 'Eigenvector2'])
    # BUGFIX: call print() as a function so the module also runs on
    # Python 3 (prints identically on Python 2 for a single argument).
    print(FinalMatrix)
if __name__ == "__main__":
main()
| mit |
apaleyes/mxnet | example/reinforcement-learning/a3c/launcher.py | 15 | 4541 | """Submission job for local jobs."""
# pylint: disable=invalid-name
from __future__ import absolute_import
import sys
import os
import subprocess
import logging
from threading import Thread
import argparse
import signal
sys.path.append(os.path.join(os.environ['HOME'], "mxnet/dmlc-core/tracker"))
sys.path.append(os.path.join('/scratch', "mxnet/dmlc-core/tracker"))
from dmlc_tracker import tracker
keepalive = """
nrep=0
rc=254
while [ $rc -ne 0 ];
do
export DMLC_NUM_ATTEMPT=$nrep
%s
rc=$?;
nrep=$((nrep+1));
done
"""
def exec_cmd(cmd, role, taskid, pass_env):
    """Execute the command line command.

    Runs *cmd* (a list of argv tokens) as the given DMLC *role* ('worker'
    or 'server') with *pass_env* merged into the environment; blocks until
    the command exits.
    """
    # Prefix './' so a bare executable name in the CWD resolves on POSIX.
    if cmd[0].find('/') == -1 and os.path.exists(cmd[0]) and os.name != 'nt':
        cmd[0] = './' + cmd[0]
    cmd = ' '.join(cmd)
    env = os.environ.copy()
    for k, v in pass_env.items():
        env[k] = str(v)

    env['DMLC_TASK_ID'] = str(taskid)
    env['DMLC_ROLE'] = role
    env['DMLC_JOB_CLUSTER'] = 'local'

    ntrial = 0
    while True:
        if os.name == 'nt':
            # Windows: retry forever on failure, exporting the attempt
            # number to the child each time.
            env['DMLC_NUM_ATTEMPT'] = str(ntrial)
            ret = subprocess.call(cmd, shell=True, env=env)
            if ret != 0:
                ntrial += 1
                continue
        else:
            bash = cmd
            ret = subprocess.call(bash, shell=True, executable='bash', env=env)
        if ret == 0:
            logging.debug('Thread %d exit with 0', taskid)
            return
        else:
            # Non-zero exit on POSIX: surface the failure to the caller.
            # (The nt branch above retries, so sys.exit here is effectively
            # unreachable on Windows.)
            if os.name == 'nt':
                sys.exit(-1)
            else:
                raise RuntimeError('Get nonzero return code=%d' % ret)
def submit(args):
    # NOTE(review): the string literal below sits after the first
    # statement, so it is NOT this function's docstring — it's a no-op
    # expression. Moving it above the assignment would make it a docstring.
    gpus = args.gpus.strip().split(',')
    """Submit function of local jobs."""
    def mthread_submit(nworker, nserver, envs):
        """
        customized submit script, that submit nslave jobs, each must contain args as parameter
        note this can be a lambda function containing additional parameters in input

        Parameters
        ----------
        nworker: number of slave process to start up
        nserver: number of server nodes to start up
        envs: enviroment variables to be added to the starting programs
        """
        procs = {}
        # Start one daemon thread per (gpu, thread) pair running the worker
        # command. NOTE(review): procs[i] is overwritten on every inner
        # iteration, so only the last thread per gpu stays referenced — the
        # threads are all started regardless, so behavior is unaffected.
        for i, gpu in enumerate(gpus):
            for j in range(args.num_threads):
                procs[i] = Thread(target=exec_cmd, args=(args.command + ['--gpus=%s'%gpu], 'worker', i*args.num_threads+j, envs))
                procs[i].setDaemon(True)
                procs[i].start()
        # Server threads get task ids following all the worker ids.
        for i in range(len(gpus)*args.num_threads, len(gpus)*args.num_threads + nserver):
            procs[i] = Thread(target=exec_cmd, args=(args.command, 'server', i, envs))
            procs[i].setDaemon(True)
            procs[i].start()

    # call submit, with nslave, the commands to run each job and submit function
    tracker.submit(args.num_threads*len(gpus), args.num_servers, fun_submit=mthread_submit,
                   pscmd=(' '.join(args.command)))
def signal_handler(signal, frame):
    """SIGINT handler: log that the launcher is stopping, then exit 0."""
    # The 'signal' and 'frame' arguments are required by the handler
    # protocol but not needed here.
    message = 'Stop launcher'
    logging.info(message)
    sys.exit(0)
if __name__ == '__main__':
    # Command-line entry point: parse launcher options, install the SIGINT
    # handler and hand the remaining command to submit().
    parser = argparse.ArgumentParser(description='Launch a distributed job')
    parser.add_argument('--gpus', type=str, help='the gpus will be used, e.g "0,1,2,3"')
    parser.add_argument('-n', '--num-threads', required=True, type=int,
                        help = 'number of threads per gpu')
    parser.add_argument('-s', '--num-servers', type=int,
                        help = 'number of server nodes to be launched, \
                        in default it is equal to NUM_WORKERS')
    parser.add_argument('-H', '--hostfile', type=str,
                        help = 'the hostfile of slave machines which will run \
                        the job. Required for ssh and mpi launcher')
    parser.add_argument('--sync-dst-dir', type=str,
                        help = 'if specificed, it will sync the current \
                        directory into slave machines\'s SYNC_DST_DIR if ssh \
                        launcher is used')
    parser.add_argument('--launcher', type=str, default='local',
                        choices = ['local', 'ssh', 'mpi', 'sge', 'yarn'],
                        help = 'the launcher to use')
    parser.add_argument('command', nargs='+',
                        help = 'command for launching the program')
    # Unknown options are forwarded to the launched command untouched.
    args, unknown = parser.parse_known_args()
    args.command += unknown

    # Default the server count to the total worker count (threads x gpus).
    if args.num_servers is None:
        args.num_servers = args.num_threads * len(args.gpus.strip().split(','))

    signal.signal(signal.SIGINT, signal_handler)
    submit(args)
| apache-2.0 |
yephper/django | django/contrib/gis/gdal/datasource.py | 1 | 5006 | """
DataSource is a wrapper for the OGR Data Source object, which provides
an interface for reading vector geometry data from many different file
formats (including ESRI shapefiles).
When instantiating a DataSource object, use the filename of a
GDAL-supported data source. For example, a SHP file or a
TIGER/Line file from the government.
The ds_driver keyword is used internally when a ctypes pointer
is passed in directly.
Example:
ds = DataSource('/home/foo/bar.shp')
for layer in ds:
for feature in layer:
# Getting the geometry for the feature.
g = feature.geom
# Getting the 'description' field for the feature.
desc = feature['description']
# We can also increment through all of the fields
# attached to this feature.
for field in feature:
# Get the name of the field (e.g. 'description')
nm = field.name
# Get the type (integer) of the field, e.g. 0 => OFTInteger
t = field.type
# Returns the value the field; OFTIntegers return ints,
# OFTReal returns floats, all else returns string.
val = field.value
"""
from ctypes import byref
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.error import GDALException, OGRIndexError
from django.contrib.gis.gdal.layer import Layer
from django.contrib.gis.gdal.prototypes import ds as capi
from django.utils import six
from django.utils.encoding import force_bytes, force_text
from django.utils.six.moves import range
# For more information, see the OGR C API source code:
# http://www.gdal.org/ogr/ogr__api_8h.html
#
# The OGR_DS_* routines are relevant here.
class DataSource(GDALBase):
    "Wraps an OGR Data Source object."

    def __init__(self, ds_input, ds_driver=False, write=False, encoding='utf-8'):
        """Open a data source from a path string, or wrap an existing OGR
        data source pointer (then *ds_driver* must be a Driver pointer)."""
        # The write flag.
        if write:
            self._write = 1
        else:
            self._write = 0
        # See also http://trac.osgeo.org/gdal/wiki/rfc23_ogr_unicode
        self.encoding = encoding

        Driver.ensure_registered()

        if isinstance(ds_input, six.string_types):
            # The data source driver is a void pointer.
            ds_driver = Driver.ptr_type()
            try:
                # OGROpen will auto-detect the data source type.
                ds = capi.open_ds(force_bytes(ds_input), self._write, byref(ds_driver))
            except GDALException:
                # Making the error message more clear rather than something
                # like "Invalid pointer returned from OGROpen".
                raise GDALException('Could not open the datasource at "%s"' % ds_input)
        elif isinstance(ds_input, self.ptr_type) and isinstance(ds_driver, Driver.ptr_type):
            ds = ds_input
        else:
            raise GDALException('Invalid data source input type: %s' % type(ds_input))

        if ds:
            self.ptr = ds
            self.driver = Driver(ds_driver)
        else:
            # Raise an exception if the returned pointer is NULL
            raise GDALException('Invalid data source file "%s"' % ds_input)

    def __del__(self):
        "Destroys this DataStructure object."
        try:
            capi.destroy_ds(self._ptr)
        except (AttributeError, TypeError):
            pass  # Some part might already have been garbage collected

    def __iter__(self):
        "Allows for iteration over the layers in a data source."
        for i in range(self.layer_count):
            yield self[i]

    def __getitem__(self, index):
        "Allows use of the index [] operator to get a layer at the index."
        # Layers can be looked up either by name (string) or position (int).
        if isinstance(index, six.string_types):
            l = capi.get_layer_by_name(self.ptr, force_bytes(index))
            if not l:
                raise OGRIndexError('invalid OGR Layer name given: "%s"' % index)
        elif isinstance(index, int):
            if index < 0 or index >= self.layer_count:
                raise OGRIndexError('index out of range')
            l = capi.get_layer(self._ptr, index)
        else:
            raise TypeError('Invalid index type: %s' % type(index))
        return Layer(l, self)

    def __len__(self):
        "Returns the number of layers within the data source."
        return self.layer_count

    def __str__(self):
        "Returns OGR GetName and Driver for the Data Source."
        return '%s (%s)' % (self.name, str(self.driver))

    @property
    def layer_count(self):
        "Returns the number of layers in the data source."
        return capi.get_layer_count(self._ptr)

    @property
    def name(self):
        "Returns the name of the data source."
        name = capi.get_ds_name(self._ptr)
        return force_text(name, self.encoding, strings_only=True)
| bsd-3-clause |
akatsoulas/mozmoderator | moderator/moderate/forms.py | 2 | 4783 | from dal import autocomplete
from django import forms
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.validators import MaxLengthValidator, MinLengthValidator
from .models import Event, Question
QUESTION = "Ask your question in 280 characters"
ANSWER = "Reply to question in 280 characters"
CONTACT_INFO = "Optional: Please supply a valid email address."
REJECTION_REASON = (
"Reply to the submitter on why this question was moderated in 512 characters."
)
class QuestionForm(forms.ModelForm):
    """Question Form."""

    question = forms.CharField(
        validators=[MaxLengthValidator(280), MinLengthValidator(10)],
        max_length=280,
        widget=forms.TextInput(
            attrs={
                "placeholder": QUESTION,
                "class": "form-control",
                "required": "required",
            }
        ),
    )
    answer = forms.CharField(
        validators=[MaxLengthValidator(280)],
        required=False,
        max_length=280,
        widget=forms.TextInput(attrs={"placeholder": ANSWER, "class": "form-control"}),
    )
    submitter_contact_info = forms.CharField(
        required=False,
        widget=forms.TextInput(
            attrs={"placeholder": CONTACT_INFO, "class": "form-control"}
        ),
    )
    rejection_reason = forms.CharField(
        required=False,
        widget=forms.Textarea(
            attrs={"placeholder": REJECTION_REASON, "class": "form-control"}
        ),
    )

    def __init__(self, *args, **kwargs):
        # 'is_locked' marks a question locked for moderation; it gates the
        # moderation-only fields in clean().
        self.is_locked = kwargs.pop("is_locked", False)
        super(QuestionForm, self).__init__(*args, **kwargs)
        # Existing questions keep their text; it is re-applied in clean().
        if self.instance.id:
            self.fields["question"].required = False

    def clean(self):
        """Validate moderation fields against the lock state and keep the
        original question text on edits."""
        cdata = super(QuestionForm, self).clean()

        # Locked questions may only be moderated by event moderators.
        if self.is_locked and (
            cdata.get("is_approved") or cdata.get("rejection_reason")
        ):
            raise ValidationError(
                "The question can only be moderated by event moderators"
            )

        if self.instance.id:
            # Never let an edit change the original question text.
            cdata["question"] = self.instance.question
            # Raise an error if there is no answer
            if not cdata["answer"] and self.is_locked:
                msg = "Please provide a reply."
                self._errors["answer"] = self.error_class([msg])
            return cdata
        # Force an empty answer when saving a new form
        cdata["answer"] = ""
        return cdata

    class Meta:
        model = Question
        fields = [
            "question",
            "answer",
            "is_anonymous",
            "submitter_contact_info",
            "rejection_reason",
        ]
        widgets = {"is_anonymous": forms.CheckboxInput()}
class EventForm(forms.ModelForm):
    """Event creation/edit form.

    Superusers may manage moderation status and moderators; for other
    users those values are forced server-side in clean(). NDA events may
    only be created by NDA group members.
    """

    moderators = forms.ModelMultipleChoiceField(
        queryset=User.objects.all(),
        widget=autocomplete.ModelSelect2Multiple(url="users-autocomplete"),
        required=False,
    )

    def __init__(self, *args, **kwargs):
        # The requesting user drives the permission checks in clean().
        self.user = kwargs.pop("user", None)
        super(EventForm, self).__init__(*args, **kwargs)
        if self.instance.id:
            self.fields["name"].required = True
        else:
            # New events default to the creator as moderator.
            self.fields["moderators"].initial = User.objects.filter(id=self.user.pk)

    def clean(self):
        """
        Clean method to check post data for nda events,
        and moderated events with no moderators.
        """
        cdata = super(EventForm, self).clean()

        # Do not allow non-nda members to submit NDA events.
        if not self.user.userprofile.is_nda_member and cdata["is_nda"]:
            msg = "Only members of the NDA group can create NDA events."
            raise forms.ValidationError(msg)

        # Don't allow non-superusers to modify moderation status or moderators
        if not self.user.is_superuser:
            if self.instance.id:
                cdata["is_moderated"] = self.instance.is_moderated
                cdata["moderators"] = self.instance.moderators.all()
            else:
                # New event by a regular user: not moderated; 'moderators'
                # is removed (safe, since is_moderated is False the check
                # below short-circuits before reading it).
                cdata["is_moderated"] = False
                del cdata["moderators"]

        # Require moderators to be set if the event is moderated
        if cdata["is_moderated"] and not cdata["moderators"]:
            msg = "A moderated event requires moderators."
            raise forms.ValidationError(msg)
        return cdata

    class Meta:
        model = Event
        fields = ["name", "is_nda", "body", "is_moderated", "moderators"]
        # BUGFIX: 'widgets' was previously a one-element tuple wrapping
        # this dict (stray parentheses plus a trailing comma). Django's
        # fields_for_model expects a dict and silently ignored the tuple,
        # so the checkbox widgets were never applied.
        widgets = {
            "is_nda": forms.CheckboxInput(),
            "is_moderated": forms.CheckboxInput(),
        }
        labels = {
            "name": "Event title",
        }
gnuradio/gnuradio | gnuradio-runtime/python/gnuradio/gr/top_block.py | 5 | 4167 | #
# Copyright 2007,2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from .gr_python import (top_block_pb,
top_block_wait_unlocked, top_block_run_unlocked,
top_block_start_unlocked, top_block_stop_unlocked,
top_block_unlock_unlocked) #, dot_graph_tb)
from .hier_block2 import hier_block2
import threading
from .hier_block2 import hier_block2
class _top_block_waiter(threading.Thread):
    """
    This kludge allows ^C to interrupt top_block.run and top_block.wait

    The problem that we are working around is that Python only services
    signals (e.g., KeyboardInterrupt) in its main thread. If the main
    thread is blocked in our C++ version of wait, even though Python's
    SIGINT handler fires, and even though there may be other python
    threads running, no one will know. Thus instead of directly waiting
    in the thread that calls wait (which is likely to be the Python main
    thread), we create a separate thread that does the blocking wait,
    and then use the thread that called wait to do a slow poll of an
    event queue. That thread, which is executing "wait" below is
    interruptible, and if it sees a KeyboardInterrupt, executes a stop
    on the top_block, then goes back to waiting for it to complete.
    This ensures that the unlocked wait that was in progress (in the
    _top_block_waiter thread) can complete, release its mutex and back
    out. If we don't do that, we are never able to clean up, and nasty
    things occur like leaving the USRP transmitter sending a carrier.

    See also top_block.wait (below), which uses this class to implement
    the interruptible wait.
    """
    def __init__(self, tb):
        threading.Thread.__init__(self)
        # BUGFIX/compat: Thread.setDaemon() is deprecated since Python
        # 3.10; assigning the 'daemon' attribute is the equivalent,
        # long-supported spelling. Daemonizing keeps this helper thread
        # from blocking interpreter exit.
        self.daemon = True
        self.tb = tb
        self.event = threading.Event()
        self.start()

    def run(self):
        # Blocks in C++ until the flowgraph finishes, then signals wait().
        top_block_wait_unlocked(self.tb)
        self.event.set()

    def wait(self, handle_sigint=True):
        """Poll the completion event so the calling (likely main) thread
        stays interruptible; on ^C stop the flowgraph and keep waiting."""
        try:
            while not self.event.wait(0.1):
                pass
        except KeyboardInterrupt:
            if not handle_sigint:
                raise
            self.tb.stop()
            self.wait()
#
# This makes a 'has-a' relationship to look like an 'is-a' one.
#
# It allows Python classes to subclass this one, while passing through
# method calls to the C++ class shared pointer from SWIG.
#
# It also allows us to intercept method calls if needed.
#
# This allows the 'run_locked' methods, which are defined in gr_top_block.i,
# to release the Python global interpreter lock before calling the actual
# method in gr_top_block
#
class top_block(hier_block2):
    """
    Top-level hierarchical block representing a flow-graph.

    This is a python wrapper around the C++ implementation to allow
    python subclassing.
    """
    def __init__(self, name="top_block", catch_exceptions=True):
        """
        Create a top block with a given name.
        """
        # not calling hier_block2.__init__, we set our own _impl
        self._impl = top_block_pb(name, catch_exceptions)
        # When True, wait() swallows KeyboardInterrupt and stops the
        # flowgraph instead of propagating it.
        self.handle_sigint = True

    def start(self, max_noutput_items=10000000):
        """
        Start the flowgraph with the given number of output items and return.
        """
        top_block_start_unlocked(self._impl, max_noutput_items)

    def stop(self):
        """
        Stop the flowgraph
        """
        top_block_stop_unlocked(self._impl)

    def run(self, max_noutput_items=10000000):
        """
        Start the flowgraph with the given number of output items and wait.
        """
        self.start(max_noutput_items)
        self.wait()

    def unlock(self):
        """
        Release lock and continue execution of flow-graph.
        """
        top_block_unlock_unlocked(self._impl)

    def wait(self):
        """
        Wait for the flowgraph to finish running
        """
        # Delegate to the waiter thread so ^C stays serviceable in the
        # calling thread (see _top_block_waiter above).
        _top_block_waiter(self._impl).wait(self.handle_sigint)

    # def dot_graph(self):
    #     """
    #     Return graph representation in dot language
    #     """
    #     return dot_graph_tb(self._impl)
| gpl-3.0 |
DR08/mxnet | example/reinforcement-learning/parallel_actor_critic/model.py | 24 | 5039 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from itertools import chain
import numpy as np
import scipy.signal
import mxnet as mx
class Agent(object):
    """Advantage actor-critic (A3C-style) agent.

    Builds a small shared MLP with a softmax policy head, a value head and
    an entropy regularizer, and trains it from batched environment rollouts
    via Generalized Advantage Estimation.
    """

    def __init__(self, input_size, act_space, config):
        """Construct and bind the MXNet module.

        input_size -- dimensionality of one observation vector
        act_space  -- number of discrete actions
        config     -- hyperparameter/config object (num_envs, ctx, t_max,
                      learning_rate, entropy_wt, vf_wt, gamma, lambda_, ...)
        """
        super(Agent, self).__init__()
        self.input_size = input_size
        self.num_envs = config.num_envs
        self.ctx = config.ctx
        self.act_space = act_space
        self.config = config

        # Shared network.
        net = mx.sym.Variable('data')
        net = mx.sym.FullyConnected(
            data=net, name='fc1', num_hidden=config.hidden_size, no_bias=True)
        net = mx.sym.Activation(data=net, name='relu1', act_type="relu")

        # Policy network.
        policy_fc = mx.sym.FullyConnected(
            data=net, name='policy_fc', num_hidden=act_space, no_bias=True)
        policy = mx.sym.SoftmaxActivation(data=policy_fc, name='policy')
        # Clip away exact 0/1 probabilities so log() below stays finite.
        policy = mx.sym.clip(data=policy, a_min=1e-5, a_max=1 - 1e-5)
        log_policy = mx.sym.log(data=policy, name='log_policy')
        out_policy = mx.sym.BlockGrad(data=policy, name='out_policy')

        # Negative entropy.
        neg_entropy = policy * log_policy
        neg_entropy = mx.sym.MakeLoss(
            data=neg_entropy, grad_scale=config.entropy_wt, name='neg_entropy')

        # Value network.
        value = mx.sym.FullyConnected(data=net, name='value', num_hidden=1)

        self.sym = mx.sym.Group([log_policy, value, neg_entropy, out_policy])
        self.model = mx.mod.Module(self.sym, data_names=('data',),
                                   label_names=None)

        self.paralell_num = config.num_envs * config.t_max
        self.model.bind(
            data_shapes=[('data', (self.paralell_num, input_size))],
            label_shapes=None,
            grad_req="write")

        self.model.init_params(config.init_func)

        optimizer_params = {'learning_rate': config.learning_rate,
                            'rescale_grad': 1.0}
        if config.grad_clip:
            optimizer_params['clip_gradient'] = config.clip_magnitude

        self.model.init_optimizer(
            kvstore='local', optimizer=config.update_rule,
            optimizer_params=optimizer_params)

    def act(self, ps):
        """Sample one action per row from the probability matrix *ps*."""
        # Inverse-CDF sampling: first index where the cumulative
        # probability exceeds a uniform draw.
        us = np.random.uniform(size=ps.shape[0])[:, np.newaxis]
        as_ = (np.cumsum(ps, axis=1) > us).argmax(axis=1)
        return as_

    def train_step(self, env_xs, env_as, env_rs, env_vs):
        """Run one gradient update from per-environment rollouts of
        observations (env_xs), actions, rewards and value estimates."""
        # NOTE(reed): Reshape to set the data shape.
        self.model.reshape([('data', (len(env_xs), self.input_size))])

        xs = mx.nd.array(env_xs, ctx=self.ctx)
        as_ = np.array(list(chain.from_iterable(env_as)))

        # Compute discounted rewards and advantages.
        advs = []
        gamma, lambda_ = self.config.gamma, self.config.lambda_
        # BUGFIX/compat: 'xrange' is Python-2-only; 'range' behaves the
        # same here and works on both major versions.
        for i in range(len(env_vs)):
            # Compute advantages using Generalized Advantage Estimation;
            # see eqn. (16) of [Schulman 2016].
            delta_t = (env_rs[i] + gamma*np.array(env_vs[i][1:]) -
                       np.array(env_vs[i][:-1]))
            advs.extend(self._discount(delta_t, gamma * lambda_))

        # Negative generalized advantage estimations.
        neg_advs_v = -np.asarray(advs)

        # NOTE(reed): Only keeping the grads for selected actions.
        neg_advs_np = np.zeros((len(advs), self.act_space), dtype=np.float32)
        neg_advs_np[np.arange(neg_advs_np.shape[0]), as_] = neg_advs_v
        neg_advs = mx.nd.array(neg_advs_np, ctx=self.ctx)

        # NOTE(reed): The grads of values is actually negative advantages.
        v_grads = mx.nd.array(self.config.vf_wt * neg_advs_v[:, np.newaxis],
                              ctx=self.ctx)
        data_batch = mx.io.DataBatch(data=[xs], label=None)
        self._forward_backward(data_batch=data_batch,
                               out_grads=[neg_advs, v_grads])
        self._update_params()

    def _discount(self, x, gamma):
        """Return the gamma-discounted suffix sums of *x* (y[i] = x[i] +
        gamma * y[i+1]), computed with a linear filter."""
        return scipy.signal.lfilter([1], [1, -gamma], x[::-1], axis=0)[::-1]

    def _forward_backward(self, data_batch, out_grads=None):
        self.model.forward(data_batch, is_train=True)
        self.model.backward(out_grads=out_grads)

    def _update_params(self):
        # Apply the optimizer step, then pull updated params off devices.
        self.model.update()
        self.model._sync_params_from_devices()
| apache-2.0 |
SauloAislan/ironic | ironic/drivers/modules/console_utils.py | 4 | 13399 | # coding=utf-8
# Copyright 2014 International Business Machines Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Ironic console utilities.
"""
import errno
import os
import signal
import subprocess
import time
from ironic_lib import utils as ironic_utils
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import fileutils
from oslo_utils import netutils
import psutil
from ironic.common import exception
from ironic.common.i18n import _
from ironic.conf import CONF
LOG = logging.getLogger(__name__)
def _get_console_pid_dir():
    """Return the directory for the pid file."""
    pid_dir = CONF.console.terminal_pid_dir
    if not pid_dir:
        pid_dir = CONF.tempdir
    return pid_dir
def _ensure_console_pid_dir_exists():
    """Ensure that the console PID directory exists

    Checks that the directory for the console PID file exists
    and if not, creates it.

    :raises: ConsoleError if the directory doesn't exist and cannot be created
    """
    # Renamed the local from ``dir`` -- it shadowed the builtin dir().
    pid_dir = _get_console_pid_dir()
    if not os.path.exists(pid_dir):
        try:
            os.makedirs(pid_dir)
        except OSError as exc:
            msg = (_("Cannot create directory '%(path)s' for console PID file."
                     " Reason: %(reason)s.") % {'path': pid_dir, 'reason': exc})
            LOG.error(msg)
            raise exception.ConsoleError(message=msg)
def _get_console_pid_file(node_uuid):
    """Generate the pid file name to hold the terminal process id."""
    return os.path.join(_get_console_pid_dir(), "%s.pid" % node_uuid)
def _get_console_pid(node_uuid):
    """Get the terminal process id from pid file."""
    pid_path = _get_console_pid_file(node_uuid)
    try:
        with open(pid_path, 'r') as pid_io:
            first_line = pid_io.readline()
        return int(first_line)
    except (IOError, ValueError):
        # Missing/unreadable file or non-numeric content both mean there
        # is no usable console PID.
        raise exception.NoConsolePid(pid_path=pid_path)
def _stop_console(node_uuid):
    """Close the serial console for a node

    Kills the console process and deletes the PID file.

    :param node_uuid: the UUID of the node
    :raises: NoConsolePid if no console PID was found
    :raises: ConsoleError if unable to stop the console process
    """
    try:
        console_pid = _get_console_pid(node_uuid)

        os.kill(console_pid, signal.SIGTERM)
    except OSError as exc:
        # ESRCH means the recorded PID no longer exists: the console died
        # on its own and only a stale PID file remains, which is not fatal.
        if exc.errno != errno.ESRCH:
            msg = (_("Could not stop the console for node '%(node)s'. "
                     "Reason: %(err)s.") % {'node': node_uuid, 'err': exc})
            raise exception.ConsoleError(message=msg)
        else:
            LOG.warning("Console process for node %s is not running "
                        "but pid file exists.", node_uuid)
    finally:
        # Always remove the PID file, even when the kill failed, so a
        # later start does not trip over a stale file.
        ironic_utils.unlink_without_raise(_get_console_pid_file(node_uuid))
def make_persistent_password_file(path, password):
    """Writes a file containing a password until deleted.

    :param path: path of the password file to create
    :param password: the password to store (encoded to bytes)
    :returns: the path written
    :raises: PasswordFileFailedToCreate on any failure; a partially
        written file is removed before raising.
    """
    try:
        fileutils.delete_if_exists(path)
        # Create the file with 0600 permissions from the start instead of
        # chmod-ing after open(): the old sequence left a window in which
        # the password file existed with default (umask-derived) modes.
        fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd, 'wb') as password_file:
            password_file.write(password.encode())
        return path
    except Exception as e:
        # Never leave a half-written password file behind.
        fileutils.delete_if_exists(path)
        raise exception.PasswordFileFailedToCreate(error=e)
def get_shellinabox_console_url(port):
    """Get a url to access the console via shellinaboxd.

    :param port: the terminal port for the node.
    """
    host = CONF.my_ip
    if netutils.is_valid_ipv6(host):
        host = '[%s]' % host

    # shellinaboxd serves HTTPS when a certificate directory is configured.
    if CONF.console.terminal_cert_dir:
        scheme = 'https'
    else:
        scheme = 'http'
    return '%s://%s:%s' % (scheme, host, port)
def start_shellinabox_console(node_uuid, port, console_cmd):
    """Open the serial console for a node.

    :param node_uuid: the uuid for the node.
    :param port: the terminal port for the node.
    :param console_cmd: the shell command that gets the console.
    :raises: ConsoleError if the directory for the PID file cannot be created
        or an old process cannot be stopped.
    :raises: ConsoleSubprocessFailed when invoking the subprocess failed.
    """
    # make sure that the old console for this node is stopped
    # and the files are cleared
    try:
        _stop_console(node_uuid)
    except exception.NoConsolePid:
        pass

    _ensure_console_pid_dir_exists()
    pid_file = _get_console_pid_file(node_uuid)

    # put together the command and arguments for invoking the console
    args = []
    args.append(CONF.console.terminal)
    if CONF.console.terminal_cert_dir:
        args.append("-c")
        args.append(CONF.console.terminal_cert_dir)
    else:
        args.append("-t")
    args.append("-p")
    args.append(str(port))
    args.append("--background=%s" % pid_file)
    args.append("-s")
    args.append(console_cmd)

    # run the command as a subprocess
    try:
        LOG.debug('Running subprocess: %s', ' '.join(args))
        # use pipe here to catch the error in case shellinaboxd
        # failed to start.
        obj = subprocess.Popen(args,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    except (OSError, ValueError) as e:
        error = _("%(exec_error)s\n"
                  "Command: %(command)s") % {'exec_error': str(e),
                                             'command': ' '.join(args)}
        LOG.warning(error)
        raise exception.ConsoleSubprocessFailed(error=error)

    # NOTE: the shared state dict was previously named ``locals``, which
    # shadowed the builtin locals(); renamed to wait_state for clarity and
    # for consistency with start_socat_console below.
    def _wait(node_uuid, popen_obj):
        wait_state['returncode'] = popen_obj.poll()

        # check if the console pid is created and the process is running.
        # if it is, then the shellinaboxd is invoked successfully as a daemon.
        # otherwise check the error.
        if wait_state['returncode'] is not None:
            if (wait_state['returncode'] == 0 and os.path.exists(pid_file) and
                    psutil.pid_exists(_get_console_pid(node_uuid))):
                raise loopingcall.LoopingCallDone()
            else:
                (stdout, stderr) = popen_obj.communicate()
                wait_state['errstr'] = _(
                    "Command: %(command)s.\n"
                    "Exit code: %(return_code)s.\n"
                    "Stdout: %(stdout)r\n"
                    "Stderr: %(stderr)r") % {
                        'command': ' '.join(args),
                        'return_code': wait_state['returncode'],
                        'stdout': stdout,
                        'stderr': stderr}
                LOG.warning(wait_state['errstr'])
                raise loopingcall.LoopingCallDone()

        if time.time() > expiration:
            # BUG FIX: the two fragments used to concatenate to
            # "...subprocessto start..."; a separating space was missing.
            wait_state['errstr'] = _("Timeout while waiting for console "
                                     "subprocess to start for node "
                                     "%s.") % node_uuid
            LOG.warning(wait_state['errstr'])
            raise loopingcall.LoopingCallDone()

    wait_state = {'returncode': None, 'errstr': ''}
    expiration = time.time() + CONF.console.subprocess_timeout
    timer = loopingcall.FixedIntervalLoopingCall(_wait, node_uuid, obj)
    timer.start(interval=CONF.console.subprocess_checking_interval).wait()

    if wait_state['errstr']:
        raise exception.ConsoleSubprocessFailed(error=wait_state['errstr'])
def stop_shellinabox_console(node_uuid):
    """Close the serial console for a node.

    :param node_uuid: the UUID of the node
    :raises: ConsoleError if unable to stop the console process
    """
    try:
        _stop_console(node_uuid)
    except exception.NoConsolePid:
        # No PID file simply means no console is running; during teardown
        # this is worth a warning but is not an error.
        LOG.warning("No console pid found for node %s while trying to "
                    "stop shellinabox console.", node_uuid)
def get_socat_console_url(port):
    """Get a URL to access the console via socat.

    :param port: the terminal port (integer) for the node
    :return: an access URL to the socat console of the node
    """
    host = CONF.console.socat_address
    if netutils.is_valid_ipv6(host):
        host = '[%s]' % host
    return 'tcp://%s:%s' % (host, port)
def start_socat_console(node_uuid, port, console_cmd):
    """Open the serial console for a node.

    :param node_uuid: the uuid of the node
    :param port: the terminal port for the node
    :param console_cmd: the shell command that will be executed by socat to
        establish console to the node
    :raises ConsoleError: if the directory for the PID file or the PID file
        cannot be created
    :raises ConsoleSubprocessFailed: when invoking the subprocess failed
    """
    # Make sure that the old console for this node is stopped.
    # If no console is running, we may get exception NoConsolePid.
    try:
        _stop_console(node_uuid)
    except exception.NoConsolePid:
        pass

    _ensure_console_pid_dir_exists()
    pid_file = _get_console_pid_file(node_uuid)

    # put together the command and arguments for invoking the console
    args = ['socat']
    # set timeout check for user's connection. If the timeout value
    # is not 0, after timeout seconds of inactivity on the client side,
    # the connection will be closed.
    if CONF.console.terminal_timeout > 0:
        args.append('-T%d' % CONF.console.terminal_timeout)
    # NOTE(review): -L points socat at pid_file; _get_console_pid() above
    # reads this file back, so it is presumably written with socat's PID --
    # confirm against the socat man page for the deployed version.
    args.append('-L%s' % pid_file)

    console_host = CONF.console.socat_address
    if netutils.is_valid_ipv6(console_host):
        arg = 'TCP6-LISTEN:%(port)s,bind=[%(host)s],reuseaddr'
    else:
        arg = 'TCP4-LISTEN:%(port)s,bind=%(host)s,reuseaddr'
    args.append(arg % {'host': console_host,
                       'port': port})

    args.append('EXEC:"%s",pty,stderr' % console_cmd)

    # run the command as a subprocess
    try:
        LOG.debug('Running subprocess: %s', ' '.join(args))
        # Use pipe here to catch the error in case socat
        # fails to start. Note that socat uses stdout as transferring
        # data, so we only capture stderr for checking if it fails.
        obj = subprocess.Popen(args, stderr=subprocess.PIPE)
    except (OSError, ValueError) as e:
        error = _("%(exec_error)s\n"
                  "Command: %(command)s") % {'exec_error': str(e),
                                             'command': ' '.join(args)}
        LOG.exception('Unable to start socat console')
        raise exception.ConsoleSubprocessFailed(error=error)

    # NOTE: we need to check if socat fails to start here.
    # If it starts successfully, it will run in non-daemon mode and
    # will not return until the console session is stopped.
    # _wait() mutates the enclosing wait_state dict; the periodic timer
    # below calls it until it raises LoopingCallDone.
    def _wait(node_uuid, popen_obj):
        wait_state['returncode'] = popen_obj.poll()

        # socat runs in non-daemon mode, so it should not return now
        if wait_state['returncode'] is None:
            # If the pid file is created and the process is running,
            # we stop checking it periodically.
            if (os.path.exists(pid_file) and
                    psutil.pid_exists(_get_console_pid(node_uuid))):
                raise loopingcall.LoopingCallDone()
        else:
            # socat returned, it failed to start.
            # We get the error (out should be None in this case).
            (_out, err) = popen_obj.communicate()
            wait_state['errstr'] = _(
                "Command: %(command)s.\n"
                "Exit code: %(return_code)s.\n"
                "Stderr: %(error)r") % {
                    'command': ' '.join(args),
                    'return_code': wait_state['returncode'],
                    'error': err}
            LOG.error(wait_state['errstr'])
            raise loopingcall.LoopingCallDone()

        if time.time() > expiration:
            wait_state['errstr'] = (_("Timeout while waiting for console "
                                      "subprocess to start for node %s.") %
                                    node_uuid)
            LOG.error(wait_state['errstr'])
            raise loopingcall.LoopingCallDone()

    wait_state = {'returncode': None, 'errstr': ''}
    expiration = time.time() + CONF.console.subprocess_timeout
    timer = loopingcall.FixedIntervalLoopingCall(_wait, node_uuid, obj)
    timer.start(interval=CONF.console.subprocess_checking_interval).wait()

    # Any recorded error (failed start or timeout) is surfaced here, after
    # the polling loop has finished.
    if wait_state['errstr']:
        raise exception.ConsoleSubprocessFailed(error=wait_state['errstr'])
def stop_socat_console(node_uuid):
    """Close the serial console for a node.

    :param node_uuid: the UUID of the node
    :raise ConsoleError: if unable to stop the console process
    """
    try:
        _stop_console(node_uuid)
    except exception.NoConsolePid:
        # No PID file simply means no console is running; not an error
        # when tearing down.
        LOG.warning("No console pid found for node %s while trying to "
                    "stop socat console.", node_uuid)
| apache-2.0 |
macbre/wbc.macbre.net | app/tests/models/test_document_model.py | 1 | 3237 | # -*- coding: utf-8 -*-
import unittest
from wbc import app
from wbc.models import DocumentModel
from ..helpers import get_sphinx_mock
class DocumentModelTest(unittest.TestCase):
    """Unit tests for wbc.models.DocumentModel: field access, URL helpers
    and content/intro extraction.
    """

    def setUp(self):
        # Real chapter title from the corpus; it is prepended to the raw
        # content, and test_get_content below expects _get_content() to
        # return the text with the chapter stripped off.
        chapter = u'POWSTANIE WIELKOPOLSKIE NA POCZTÓWKACH, PLAKATACH, DYPLOMACH I ULOTKACH (DO 1921 R.)'

        self.model = DocumentModel(**{
            'id': 6224,
            'issue_id': 123,
            'read_time': 116,
            'chapter': chapter,
            'content': chapter + 'foo\n\n\n\n<bar>'
        })

    def test_accessor(self):
        # Models support dict-style access to their fields.
        assert self.model['id'] == 6224
        assert self.model['chapter'] == \
            u'POWSTANIE WIELKOPOLSKIE NA POCZTÓWKACH, PLAKATACH, DYPLOMACH I ULOTKACH (DO 1921 R.)'

    def test_urls(self):
        # URL generation requires an active Flask request context.
        with app.test_request_context():
            assert self.model.get_full_url() == \
                '/document/6224/powstanie-wielkopolskie-na-pocztowkach-plakatach-dyplomach-i-ulotkach-do-1921-r.html'

            assert self.model.get_djvu_url() == \
                'http://www.wbc.poznan.pl/dlibra/doccontent?id=123'

            assert self.model.get_json_url() == \
                '/api/v1/documents/6224'

            assert self.model.get_txt_url() == \
                '/api/v1/documents/6224.txt'

    def test_long_urls(self):
        # Slugs built from very long chapter names are truncated to 100
        # characters in the generated URL.
        with app.test_request_context():
            model = DocumentModel(**{
                'id': 6224,
                'issue_id': 123,
                'chapter': u'foobar' * 1024,
            })

            assert model.get_full_url() == \
                '/document/6224/foobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoob.html'

    def test_get_read_time(self):
        assert self.model.get_read_time() == 116

    def test_get_content(self):
        # Chapter prefix is stripped; the raw remainder is returned as-is.
        assert self.model._get_content() == 'foo\n\n\n\n<bar>'

    def test_get_html_content(self):
        # Paragraph breaks become <p> tags; markup in the text is kept.
        assert self.model.get_html_content() == '<p>foo</p>\n\n<p><bar></p>'

    def test_get_intro(self):
        with app.test_request_context():
            model = DocumentModel(**{
                'id': 6224,
                'issue_id': 123,
                'chapter': u'DWIE BASZTY WEWNĘTRZNEGO MURU OBRONNEGO PRZY UL. MASZTALARSKIEJ',
                'content': u'Niewielu spacerujących ulicą Masztalarską wie, że po jej północnej stronie zachowały się do dzisiaj dwie baszty oraz odcinek muru zewnętrznego stanowiące pozostałości północno-zachodniego pierścienia średniowiecznych murów obronnych Poznania.',
            })

            # Intro is cut at the requested length and ellipsized.
            assert model.get_intro(75) == u'Niewielu spacerujących ulicą Masztalarską wie, że po jej północnej...'

    @staticmethod
    def test_new_from_id():
        with app.test_request_context():
            query = """
SELECT id, title AS issue_name, document_id AS issue_id, published_year, read_time, chapter, content FROM wbc WHERE id = 453
            """.strip()

            # A matching row in the Sphinx index yields a populated model.
            get_sphinx_mock(query, [{
                'id': 453,
                'issue_id': 123
            }])
            assert DocumentModel.new_from_id(453)['issue_id'] == 123

            # An empty result set yields None.
            get_sphinx_mock(query, [])
            assert DocumentModel.new_from_id(453) is None
| mit |
lithint/odoo | scs_ccleen_products/models/sale_order.py | 1 | 1258 | # -*- coding: utf-8 -*-
# See LICENSE file for full copyright and licensing details.
"""Sale Order Related models."""
from odoo import fields, models, api, _
class SaleOrder(models.Model):
    """Sale Order model.

    Extends sale.order with a dedicated quotation sequence number
    (``quote_number``) assigned on creation.
    """

    _inherit = 'sale.order'

    # Quotation number; defaults to the 'New' placeholder until the
    # sequence assigns a real value in create().
    quote_number = fields.Char(string="Quotation Number", copy=False,
                               default=lambda self: _('New'))
    client_order_ref = fields.Char(string="Customer Reference")

    @api.model
    def create(self, vals):
        """Override create method to update the name (sequence).

        Assigns the next 'quotation.order.sequence' number to
        ``quote_number`` unless the caller supplied an explicit value.
        """
        seq_obj = self.env['ir.sequence']
        quote_sequence = \
            self.env.ref('scs_ccleen_products.seq_gentec_quotation_order')
        sale_order = super(SaleOrder, self).create(vals)
        # BUG FIX: this previously checked vals.get('quote_sequence', ...),
        # a key that never exists (the field is 'quote_number'), so an
        # explicitly provided quote_number was always overwritten.
        if quote_sequence and \
                vals.get('quote_number', _('New')) == _('New'):
            if 'company_id' in vals:
                # Draw the sequence in the company of the new order.
                sale_order.quote_number = seq_obj.\
                    with_context(force_company=vals['company_id']).\
                    next_by_code('quotation.order.sequence') or _('New')
            else:
                sale_order.quote_number = seq_obj.\
                    next_by_code('quotation.order.sequence') or _('New')
        return sale_order
| agpl-3.0 |
zorilla/python-automation | python/config.py | 1 | 4696 | #!/usr/bin/env python
#
# Common configuration and logging
#
# mdeacon@zorillaeng.com
#
import argparse
import ConfigParser
from os import path, access, R_OK
import os
import sys
import time
import paramiko
import platform
import glob2
import shutil
import logging
import logging.handlers
from logging.handlers import RotatingFileHandler
def get_data_path():
    """Return the package 'data' path.

    :returns: absolute path of the ``data`` directory that sits next to
        this module.
    """
    # Renamed the local from ``path`` -- it shadowed the module-level
    # ``from os import path`` import used elsewhere in this file.
    module_dir = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(module_dir, 'data')
def get_log_path():
    """Return a path where logs go"""
    return os.path.join(os.path.expanduser("~"), '.zorilla', 'log')
def get_config_path():
    """Return a path where config files live"""
    return path.join(path.expanduser("~"), '.zorilla', 'config')
def init_config(in_defaults, section = 'Defaults'):
    """Common config initialization for all test utilities

    Seeds ~/.zorilla/config with the packaged default.cfg on first run,
    then two-phase-parses the command line: a minimal parser picks up
    -c/--config, the named config file supplies defaults, and a second
    parser (returned to the caller) layers the standard -l/-r options on
    top.

    :param in_defaults: NOTE(review): currently unused -- confirm whether
        callers expect these to be merged into the parser defaults.
    :param section: config-file section to read defaults from
    :returns: (argparse parser with defaults applied, remaining argv list)
    """
    config_filename = 'default.cfg'

    # Check for existence of config directory and create if not found
    config_path = get_config_path()
    data_path = get_data_path()
    if not os.path.exists(config_path):
        print 'Creating config path % s' % config_path
        os.makedirs(config_path)
        # Seed the new config dir with the packaged default.cfg.
        shutil.copyfile(os.path.join(data_path, config_filename),
                        os.path.join(config_path, config_filename))

    # Parse any conf_file specification
    # We make this parser with add_help=False so that
    # it doesn't parse -h and print help.
    conf_parser = argparse.ArgumentParser(
        description=__doc__, # printed with -h/--help
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Turn off help, so we print all options in response to -h
        add_help=False
        )
    # Or the user may specify another
    conf_parser.add_argument("-c", "--config", help="config file",
                             metavar="FILE", default=config_filename)
    args, remaining_argv = conf_parser.parse_known_args()

    # args.config is now either the default or the user specified config
    # Add the config path to the name
    args.config = os.path.join(get_config_path(), args.config)

    config = ConfigParser.SafeConfigParser()
    # Remove leading space from config pathname
    args.config = args.config.strip()
    if path.isfile(args.config) and access(args.config, R_OK):
        # The file exists and can be read so grab defaults from it
        config.read([args.config])
        defaults = dict(config.items(section))
    else:
        # The file doesn't exist or can't be read so use the defaults
        # NOTE(review): under Python 2 this print statement receives a
        # tuple, so it emits the tuple repr rather than an interpolated
        # message -- likely meant "... %s" % args.config. Left as-is.
        print('Config file %s is missing or is not readable', args.config)
        sys.exit(0)

    # Parse rest of arguments
    # The user can override any argument configured by either the default or
    # config file with a command line argument
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser])
    parser.set_defaults(**defaults)
    # All tests have these parameters
    parser.add_argument("-l", "--loglevel", help="set log level",
                        choices=['DEBUG','INFO','WARNING','ERROR','CRITICAL'])
    parser.add_argument("-r", "--report", help="report filename")
    # Caller can now add custom arguments
    return parser, remaining_argv
def init_logging(argv, args, logger):
    """Set up logging

    Attaches a rotating file handler (and a stdout handler) to ``logger``,
    logs the invoking command line for reference, and routes paramiko's
    logging to its own file under the log directory.

    :param argv: full command line (argv[0] names the report file when
        args.report is empty)
    :param args: parsed options; reads/writes args.report, reads
        args.loglevel
    :param logger: logger instance to configure
    :returns: the report filename actually used (args.report)
    """
    ch = logging.StreamHandler(sys.stdout)

    # Create the report directory if it doesn't exist
    log_path = get_log_path()
    if not os.path.exists(log_path):
        os.makedirs(log_path)

    if (not args.report or args.report.isspace()):
        # Create a file based on the executable name
        filename = os.path.splitext(os.path.basename(argv[0]))[0]
        dt = '-' + time.strftime("%d-%m-%Y-%H-%M-%S")
        args.report = filename + dt + '.txt'

    report = os.path.join(log_path, args.report)
    # Rotate at 4 MiB, keeping five old report files.
    fh = RotatingFileHandler(report, maxBytes=4194304, backupCount=5)
    if args.loglevel == 'DEBUG':
        # Verbose format (function name and line number) only at DEBUG.
        formatter = logging.Formatter('%(asctime)s:%(funcName)s:%(lineno)s:'
                                      '%(message)s')
        fh.setFormatter(formatter)
    logger.addHandler(fh)

    # When logging to a file, emit the command line for reference
    # (logged at ERROR so it appears regardless of the chosen level).
    logger.error('Command line:')
    s = ' '.join(argv)
    logger.error(s)

    logger.addHandler(ch)
    logger.setLevel(args.loglevel)

    # Assign log for paramiko events
    paramiko.util.log_to_file(os.path.join(log_path, 'paramiko.log'))
    logging.getLogger("paramiko").setLevel(args.loglevel)

    return args.report
| gpl-3.0 |
alexcuellar/odoo | addons/product_email_template/models/invoice.py | 321 | 1969 | # -*- coding: utf-8 -*-
from openerp.osv import osv
class account_invoice(osv.Model):
    """Extends account.invoice (old API) to send product-specific email
    templates when a customer invoice is validated.
    """
    _inherit = 'account.invoice'

    def invoice_validate_send_email(self, cr, uid, ids, context=None):
        """For each customer invoice, send the email template attached to
        each invoice line's product (one mail per matching line).

        :returns: True
        """
        Composer = self.pool['mail.compose.message']
        for invoice in self.browse(cr, uid, ids, context=context):
            # send template only on customer invoice
            if invoice.type != 'out_invoice':
                continue
            # subscribe the partner to the invoice
            if invoice.partner_id not in invoice.message_follower_ids:
                self.message_subscribe(cr, uid, [invoice.id], [invoice.partner_id.id], context=context)
            for line in invoice.invoice_line:
                if line.product_id.email_template_id:
                    # CLEANME: should define and use a clean API: message_post with a template
                    composer_id = Composer.create(cr, uid, {
                        'model': 'account.invoice',
                        'res_id': invoice.id,
                        'template_id': line.product_id.email_template_id.id,
                        'composition_mode': 'comment',
                    }, context=context)
                    # Populate the composer from the template as the UI
                    # onchange would, then re-link its attachments.
                    template_values = Composer.onchange_template_id(
                        cr, uid, composer_id, line.product_id.email_template_id.id, 'comment', 'account.invoice', invoice.id
                    )['value']
                    # NOTE: loop variable ``id`` shadows the builtin id().
                    template_values['attachment_ids'] = [(4, id) for id in template_values.get('attachment_ids', [])]
                    Composer.write(cr, uid, [composer_id], template_values, context=context)
                    Composer.send_mail(cr, uid, [composer_id], context=context)
        return True

    def invoice_validate(self, cr, uid, ids, context=None):
        # Hook into validation: send the product emails after the
        # standard validation succeeds.
        res = super(account_invoice, self).invoice_validate(cr, uid, ids, context=context)
        self.invoice_validate_send_email(cr, uid, ids, context=context)
        return res
| agpl-3.0 |
jessicalucci/NovaOrc | nova/compute/claims.py | 8 | 7169 | # Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Claim objects for use with resource tracking.
"""
from nova.openstack.common import jsonutils
from nova.openstack.common import lockutils
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"
class NopClaim(object):
    """For use with compute drivers that do not support resource tracking."""

    def __init__(self, migration=None):
        self.migration = migration

    @property
    def memory_mb(self):
        # No-op claim holds no resources.
        return 0

    @property
    def disk_gb(self):
        return 0

    @property
    def vcpus(self):
        return 0

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Abort only when the with-block raised an exception.
        if exc_type is None:
            return
        self.abort()

    def abort(self):
        pass

    def __str__(self):
        summary = (self.memory_mb, self.disk_gb, self.vcpus)
        return "[Claim: %d MB memory, %d GB disk, %d VCPUS]" % summary
class Claim(NopClaim):
    """A declaration that a compute host operation will require free resources.
    Claims serve as marker objects that resources are being held until the
    update_available_resource audit process runs to do a full reconciliation
    of resource usage.

    This information will be used to help keep the local compute hosts's
    ComputeNode model in sync to aid the scheduler in making efficient / more
    correct decisions with respect to host selection.
    """

    def __init__(self, instance, tracker):
        super(Claim, self).__init__()
        # Keep a primitive (JSON-safe) copy so the claim is detached from
        # any live DB object.
        self.instance = jsonutils.to_primitive(instance)
        self.tracker = tracker

    @property
    def disk_gb(self):
        # Root plus ephemeral disk requested by the instance, in GB.
        return self.instance['root_gb'] + self.instance['ephemeral_gb']

    @property
    def memory_mb(self):
        return self.instance['memory_mb']

    @property
    def vcpus(self):
        return self.instance['vcpus']

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE, 'nova-')
    def abort(self):
        """Compute operation requiring claimed resources has failed or
        been aborted.
        """
        LOG.debug(_("Aborting claim: %s") % self, instance=self.instance)
        self.tracker.abort_instance_claim(self.instance)

    def test(self, resources, limits=None):
        """Test if this claim can be satisfied given available resources and
        optional oversubscription limits

        This should be called before the compute node actually consumes the
        resources required to execute the claim.

        :param resources: available local compute node resources
        :param limits: optional per-resource oversubscription limits
        :returns: Return true if resources are available to claim.
        """
        if not limits:
            limits = {}

        # If an individual limit is None, the resource will be considered
        # unlimited:
        memory_mb_limit = limits.get('memory_mb')
        disk_gb_limit = limits.get('disk_gb')
        vcpu_limit = limits.get('vcpu')

        msg = _("Attempting claim: memory %(memory_mb)d MB, disk %(disk_gb)d "
                "GB, VCPUs %(vcpus)d")
        params = {'memory_mb': self.memory_mb, 'disk_gb': self.disk_gb,
                  'vcpus': self.vcpus}
        LOG.audit(msg % params, instance=self.instance)

        # Test for resources:
        can_claim = (self._test_memory(resources, memory_mb_limit) and
                     self._test_disk(resources, disk_gb_limit) and
                     self._test_cpu(resources, vcpu_limit))

        if can_claim:
            LOG.audit(_("Claim successful"), instance=self.instance)
        else:
            LOG.audit(_("Claim failed"), instance=self.instance)

        return can_claim

    def _test_memory(self, resources, limit):
        type_ = _("Memory")
        unit = "MB"
        total = resources['memory_mb']
        used = resources['memory_mb_used']
        requested = self.memory_mb

        return self._test(type_, unit, total, used, requested, limit)

    def _test_disk(self, resources, limit):
        type_ = _("Disk")
        unit = "GB"
        total = resources['local_gb']
        used = resources['local_gb_used']
        requested = self.disk_gb

        return self._test(type_, unit, total, used, requested, limit)

    def _test_cpu(self, resources, limit):
        type_ = _("CPU")
        unit = "VCPUs"
        total = resources['vcpus']
        used = resources['vcpus_used']
        requested = self.vcpus

        return self._test(type_, unit, total, used, requested, limit)

    def _test(self, type_, unit, total, used, requested, limit):
        """Test if the given type of resource needed for a claim can be safely
        allocated.

        :param limit: maximum usable amount of the resource, or None for
            unlimited.
        """
        # NOTE: interpolation previously used ``% locals()``, which breaks
        # silently if any local is renamed; explicit mappings are used now,
        # matching the style of test() above. Message text is unchanged.
        msg = _("Total %(type_)s: %(total)d %(unit)s, used: %(used)d %(unit)s")
        LOG.audit(msg % {'type_': type_, 'total': total, 'unit': unit,
                         'used': used}, instance=self.instance)

        if limit is None:
            # treat resource as unlimited:
            LOG.audit(_("%(type_)s limit not specified, defaulting to "
                        "unlimited") % {'type_': type_},
                      instance=self.instance)
            return True

        free = limit - used

        # Oversubscribed resource policy info:
        msg = _("%(type_)s limit: %(limit)d %(unit)s, free: %(free)d "
                "%(unit)s") % {'type_': type_, 'limit': limit, 'unit': unit,
                               'free': free}
        LOG.audit(msg, instance=self.instance)

        can_claim = requested <= free

        if not can_claim:
            msg = _("Unable to claim resources. Free %(type_)s %(free)d "
                    "%(unit)s < requested %(requested)d %(unit)s") % {
                        'type_': type_, 'free': free, 'unit': unit,
                        'requested': requested}
            LOG.info(msg, instance=self.instance)

        return can_claim
class ResizeClaim(Claim):
    """Claim used for holding resources for an incoming resize/migration
    operation.
    """
    def __init__(self, instance, instance_type, tracker):
        super(ResizeClaim, self).__init__(instance, tracker)
        # Target flavor of the resize; the size properties below read from
        # it rather than from the instance's current flavor.
        self.instance_type = instance_type
        self.migration = None

    @property
    def disk_gb(self):
        # Root plus ephemeral disk of the *target* flavor, in GB.
        return (self.instance_type['root_gb'] +
                self.instance_type['ephemeral_gb'])

    @property
    def memory_mb(self):
        return self.instance_type['memory_mb']

    @property
    def vcpus(self):
        return self.instance_type['vcpus']

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE, 'nova-')
    def abort(self):
        """Compute operation requiring claimed resources has failed or
        been aborted.
        """
        LOG.debug(_("Aborting claim: %s") % self, instance=self.instance)
        self.tracker.abort_resize_claim(self.instance['uuid'],
                                        self.instance_type)
| apache-2.0 |
asimshankar/tensorflow | tensorflow/contrib/layers/python/layers/regularizers_test.py | 14 | 6634 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for regularizers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.layers.python.layers import regularizers
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class RegularizerTest(test.TestCase):
def test_l1(self):
with self.assertRaises(ValueError):
regularizers.l1_regularizer(-1.)
with self.assertRaises(ValueError):
regularizers.l1_regularizer(0)
self.assertIsNone(regularizers.l1_regularizer(0.)(None))
values = np.array([1., -1., 4., 2.])
weights = constant_op.constant(values)
with session.Session() as sess:
result = sess.run(regularizers.l1_regularizer(.5)(weights))
self.assertAllClose(np.abs(values).sum() * .5, result)
def test_l2(self):
with self.assertRaises(ValueError):
regularizers.l2_regularizer(-1.)
with self.assertRaises(ValueError):
regularizers.l2_regularizer(0)
self.assertIsNone(regularizers.l2_regularizer(0.)(None))
values = np.array([1., -1., 4., 2.])
weights = constant_op.constant(values)
with session.Session() as sess:
result = sess.run(regularizers.l2_regularizer(.42)(weights))
self.assertAllClose(np.power(values, 2).sum() / 2.0 * .42, result)
def test_l1_l2(self):
with self.assertRaises(ValueError):
regularizers.l1_l2_regularizer(-1., 0.5)
with self.assertRaises(ValueError):
regularizers.l1_l2_regularizer(0.5, -1.)
with self.assertRaises(ValueError):
regularizers.l1_l2_regularizer(0, 0.5)
with self.assertRaises(ValueError):
regularizers.l1_l2_regularizer(0.5, 0)
with self.cached_session():
shape = [5, 5, 5]
num_elem = 5 * 5 * 5
tensor = constant_op.constant(1.0, shape=shape)
loss = regularizers.l1_l2_regularizer(1.0, 1.0)(tensor)
self.assertEquals(loss.op.name, 'l1_l2_regularizer')
self.assertAlmostEqual(loss.eval(), num_elem + num_elem / 2, 5)
def test_l1_l2_scale_l1Zero(self):
shape = [5, 5, 5]
num_elem = 5 * 5 * 5
tensor = constant_op.constant(1.0, shape=shape)
loss = regularizers.l1_l2_regularizer(0.0, 1.0)(tensor)
with self.cached_session():
self.assertEquals(loss.op.name, 'l1_l2_regularizer')
self.assertAlmostEqual(loss.eval(), num_elem / 2, 5)
def test_l1_l2_scale_l2Zero(self):
shape = [5, 5, 5]
num_elem = 5 * 5 * 5
tensor = constant_op.constant(1.0, shape=shape)
loss = regularizers.l1_l2_regularizer(1.0, 0.0)(tensor)
with self.cached_session():
self.assertEquals(loss.op.name, 'l1_l2_regularizer')
self.assertAlmostEqual(loss.eval(), num_elem, 5)
def test_l1_l2_scales_Zero(self):
shape = [5, 5, 5]
tensor = constant_op.constant(1.0, shape=shape)
loss = regularizers.l1_l2_regularizer(0.0, 0.0)(tensor)
self.assertEquals(loss, None)
def testL1L2RegularizerWithScope(self):
with self.cached_session():
shape = [5, 5, 5]
num_elem = 5 * 5 * 5
tensor = constant_op.constant(1.0, shape=shape)
with ops.name_scope('foo'):
loss = regularizers.l1_l2_regularizer(1.0, 1.0, scope='l1_l2')(tensor)
self.assertEquals(loss.op.name, 'foo/l1_l2')
self.assertAlmostEqual(loss.eval(), num_elem + num_elem / 2, 5)
def test_sum_regularizer(self):
l1_function = regularizers.l1_regularizer(.1)
l2_function = regularizers.l2_regularizer(.2)
self.assertIsNone(regularizers.sum_regularizer([]))
self.assertIsNone(regularizers.sum_regularizer([None]))
values = np.array([-3.])
weights = constant_op.constant(values)
with session.Session() as sess:
l1_reg1 = regularizers.sum_regularizer([l1_function])
l1_result1 = sess.run(l1_reg1(weights))
l1_reg2 = regularizers.sum_regularizer([l1_function, None])
l1_result2 = sess.run(l1_reg2(weights))
l1_l2_reg = regularizers.sum_regularizer([l1_function, l2_function])
l1_l2_result = sess.run(l1_l2_reg(weights))
self.assertAllClose(.1 * np.abs(values).sum(), l1_result1)
self.assertAllClose(.1 * np.abs(values).sum(), l1_result2)
self.assertAllClose(
.1 * np.abs(values).sum() + .2 * np.power(values, 2).sum() / 2.0,
l1_l2_result)
def test_apply_regularization(self):
dummy_regularizer = lambda x: math_ops.reduce_sum(2 * x)
array_weights_list = [[1.5], [2, 3, 4.2], [10, 42, 666.6]]
tensor_weights_list = [constant_op.constant(x) for x in array_weights_list]
expected = sum(2 * x for l in array_weights_list for x in l)
with self.cached_session():
result = regularizers.apply_regularization(dummy_regularizer,
tensor_weights_list)
self.assertAllClose(expected, result.eval())
def test_apply_zero_regularization(self):
regularizer = regularizers.l2_regularizer(0.0)
array_weights_list = [[1.5], [2, 3, 4.2], [10, 42, 666.6]]
tensor_weights_list = [constant_op.constant(x) for x in array_weights_list]
with self.cached_session():
result = regularizers.apply_regularization(regularizer,
tensor_weights_list)
self.assertAllClose(0.0, result.eval())
def test_apply_regularization_invalid_regularizer(self):
non_scalar_regularizer = lambda x: array_ops.tile(x, [2])
tensor_weights_list = [
constant_op.constant(x) for x in [[1.5], [2, 3, 4.2], [10, 42, 666.6]]
]
with self.cached_session():
with self.assertRaises(ValueError):
regularizers.apply_regularization(non_scalar_regularizer,
tensor_weights_list)
# Run this module's tests via TensorFlow's test runner.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
pavel-lazar/obsi | openbox/errors.py | 3 | 3423 | #!/usr/bin/env python
#
# Copyright (c) 2015 Pavel Lazar pavel.lazar (at) gmail.com
#
# The Software is provided WITHOUT ANY WARRANTY, EXPRESS OR IMPLIED.
#####################################################################
import traceback
from cStringIO import StringIO
from manager_exceptions import (ManagerError, EngineNotRunningError, ProcessingGraphNotSetError)
from configuration_builder.configuration_builder_exceptions import (ClickBlockConfigurationError,
ClickElementConfigurationError,
ConfigurationError,
ConnectionConfigurationError,
EngineConfigurationError,
EngineElementConfigurationError,
OpenBoxBlockConfigurationError,
OpenBoxConfigurationError)
class ErrorType:
    # Top-level error categories attached to error messages sent back to
    # the OpenBox controller; refined by ErrorSubType below.
    BAD_REQUEST = 'BAD_REQUEST'
    FORBIDDEN = 'FORBIDDEN'
    UNSUPPORTED = 'UNSUPPORTED'
    INTERNAL_ERROR = 'INTERNAL_ERROR'
class ErrorSubType:
    # Fine-grained error codes, grouped by the ErrorType they refine.
    # BAD_REQUEST
    BAD_VERSION = 'BAD_VERSION'
    BAD_TYPE = 'BAD_TYPE'
    BAD_GRAPH = 'BAD_GRAPH'
    BAD_BLOCK = 'BAD_BLOCK'
    BAD_CONNECTOR = 'BAD_CONNECTOR'
    BAD_HEADER_MATCH = 'BAD_HEADER_MATCH'
    BAD_PAYLOAD_MATCH = 'BAD_PAYLOAD_MATCH'
    BAD_FILE = 'BAD_FILE'
    ILLEGAL_ARGUMENT = 'ILLEGAL_ARGUMENT'
    ILLEGAL_STATE = 'ILLEGAL_STATE'

    # FORBIDDEN
    NOT_PERMITTED = 'NOT_PERMITTED'
    NO_ACCESS = 'NO_ACCESS'

    # UNSUPPORTED
    UNSUPPORTED_VERSION = 'UNSUPPORTED_VERSION'
    UNSUPPORTED_BLOCK = 'UNSUPPORTED_BLOCK'
    UNSUPPORTED_MESSAGE = 'UNSUPPORTED_MESSAGE'
    UNSUPPORTED_OTHER = 'UNSUPPORTED_OTHER'

    # INTERNAL
    ADD_MODULE_FAILED = 'ADD_MODULE_FAILED'
    INTERNAL_ERROR = 'INTERNAL_ERROR'
def _traceback_string(exc_tb):
tb_file = StringIO()
traceback.print_tb(exc_tb, file=tb_file)
return tb_file.getvalue()
def exception_to_error_args(exc_type, exc_value, exc_tb):
    """Map a sys.exc_info() triple to OpenBox error-report arguments.

    Returns (error_type, error_subtype, message, extended_message);
    unrecognised exception types fall through as INTERNAL_ERROR with the
    exception's own message, and extended_message always carries the
    formatted traceback.
    """
    error_type = ErrorType.INTERNAL_ERROR
    error_subtype = ErrorSubType.INTERNAL_ERROR
    # NOTE(review): .message is a Python 2 idiom — verify before porting to 3.
    exception_message = exc_value.message or "General internal error"
    extended_message = _traceback_string(exc_tb)
    if exc_type == EngineNotRunningError:
        exception_message = "Engine is not running"
    elif exc_type == ProcessingGraphNotSetError:
        error_type = ErrorType.BAD_REQUEST
        error_subtype = ErrorSubType.ILLEGAL_STATE
        exception_message = "Processing graph is not set"
    # Block-level configuration problems map onto BAD_BLOCK...
    elif exc_type in (EngineElementConfigurationError, ClickElementConfigurationError, ClickBlockConfigurationError,
                      OpenBoxBlockConfigurationError):
        error_type = ErrorType.BAD_REQUEST
        error_subtype = ErrorSubType.BAD_BLOCK
    # ...connection problems onto BAD_CONNECTOR...
    elif exc_type == ConnectionConfigurationError:
        error_type = ErrorType.BAD_REQUEST
        error_subtype = ErrorSubType.BAD_CONNECTOR
    # ...and whole-graph configuration problems onto BAD_GRAPH.
    elif exc_type in (OpenBoxConfigurationError, EngineConfigurationError, ConfigurationError):
        error_type = ErrorType.BAD_REQUEST
        error_subtype = ErrorSubType.BAD_GRAPH
    return error_type, error_subtype, exception_message, extended_message
| apache-2.0 |
MarkHedleyJones/Electrode_Interface_Model | lib/interface.py | 1 | 15374 | import math
import mpmath
import lib.solutions.pbs.modelParameters
class SpiceComponent(object):
    """A primitive spice component: a type letter (e.g. 'R') plus a value.

    A complex componentValue marks the component as a generic impedance,
    which Model.as_spiceComponent expands via Model.impedeor.
    """
    # Class-level defaults so both attributes always exist.
    componentType = None
    componentValue = None

    def __init__(self, componentType, componentValue):
        self.componentType = componentType
        self.componentValue = componentValue
class Model(object):
    """Builds ngspice netlist fragments modelling an electrode/electrolyte
    interface: a Faradaic branch (anti-parallel diode pair plus optional
    memristor), a constant-phase ("displacement") element, a series
    resistance and a resistor-ladder bulk model.
    """

    # Class-level defaults; real values are assigned per instance.
    parameterSet = None
    includes_diode = True
    includes_memristor = True
    autoCircuits = None
    impedeors = None

    def __init__(self, parameterSet):
        """Record the parameter set and decide which branches it can supply.

        The memristor is dropped when cm/rm are unavailable and the whole
        Faradaic (diode) branch is dropped when i0/n are unavailable.
        """
        self.includes_diode = True
        self.includes_memristor = True
        self.parameterSet = parameterSet
        if parameterSet.faradaic_CM() is None:
            self.includes_memristor = False
        if parameterSet.faradaic_RM() is None:
            self.includes_memristor = False
        if parameterSet.faradaic_i0() is None:
            self.includes_diode = False
        if parameterSet.faradaic_n() is None:
            self.includes_diode = False
        self.autoCircuits = []
        self.impedeors = {}

    def subcircuit_impedance(self, real, imaginish, name='impedeor'):
        """Generate a generic impedance subcircuit from an R plus either an
        L (positive imaginary part) or a C (negative imaginary part).

        Returns the subcircuit as a list of netlist lines.
        """
        out = []
        out.append("*************************Impedeor subckt")
        out.append(".SUBCKT " + name + " a b")
        out.append("R1 a mid " + str(real))
        if imaginish > 0.0:
            # Inductive reactance; high-value R2 gives ngspice a DC path.
            out.append("L1 mid b " + str(imaginish))
            out.append("R2 mid b 1e9")
        elif imaginish < 0.0:
            # Capacitive reactance (magnitude of the imaginary part).
            out.append("C1 mid b " + str(-imaginish))
            out.append("R2 mid b 1e9")
        else:
            # Purely resistive: short the second leg.
            out.append('R2 mid b 0')
        out.append(".ENDS " + name)
        return out

    def impedeor(self, name, nodeA, nodeB, value):
        """Return an instantiation line for a generic impedance, creating
        and caching the backing subcircuit on first use of each value."""
        if value not in self.impedeors:
            subCktName = 'impedeor' + str(len(self.impedeors))
            self.autoCircuits.append(self.subcircuit_impedance(value.real,
                                                               value.imag,
                                                               subCktName))
            self.impedeors[value] = subCktName
        return ('X_' + str(name) + ' ' +
                str(nodeA) + ' ' +
                str(nodeB) + ' ' +
                str(self.impedeors[value]))

    def as_spiceComponent(self, item, nodeA, nodeB, elementNumber):
        """Render *item* (a SpiceComponent or a subcircuit name) as a single
        netlist line between nodeA and nodeB.
        """
        if isinstance(item, SpiceComponent):
            if isinstance(item.componentValue, complex):
                # Complex values become cached generic-impedance subcircuits.
                return self.impedeor(item.componentType + str(elementNumber),
                                     nodeA,
                                     nodeB,
                                     item.componentValue)
            return (item.componentType + str(elementNumber) + ' ' +
                    str(nodeA) + ' ' +
                    str(nodeB) + ' ' +
                    str(item.componentValue))
        # Plain string: instantiate the named subcircuit.
        return ('X_' + str(elementNumber) + ' ' +
                str(nodeA) + ' ' +
                str(nodeB) + ' ' +
                item)

    def subcircuit_ladder(self, electrodes=8, depth=5, padding=3, name='ladder'):
        """Generate the bulk-solution resistor-ladder subcircuit.

        The ladder has `depth` radial columns per row; rows alternate
        radial and longitudinal resistors, with electrode rows interleaved
        with insulator rows and `padding` extra rows at each end.
        Returns the subcircuit as a list of netlist lines.
        """
        Rv_commence = self.parameterSet.ladder_Resistor_LongitudinalCommence()
        Rr_insulator = self.parameterSet.ladder_Resistor_RadialInsulator()
        Rr_electrode = self.parameterSet.ladder_Resistor_RadialElectrode()
        # Longitudinal resistance drops by a factor of 4 per column.
        Rv = []
        for i in range(depth):
            Rv.append(Rv_commence / pow(4, i))
        # Map internal node numbers onto electrode terminals (e1..eN).
        nodes = {}
        out = []
        out.append("****************************************")
        out.append("* Resistor ladder start *")
        out.append("****************************************")
        tmp = ".SUBCKT " + name
        for electrode in range(electrodes):
            tmp += " e" + str((electrode + 1))
        out.append(tmp)
        # First pass: work out which first-column nodes are electrodes.
        # (// replaces the old Python-2 integer '/': identical for ints,
        # correct under Python 3 as well.)
        for row in range(((electrodes + electrodes - 1) * 2 - 1) + 4 * padding):
            for col in range(depth):
                if col == 0 and row % 2 == 0:
                    actRow = row // 2
                    if actRow < (padding + (electrodes * 2) - 1):
                        segment = (actRow - padding)
                        if segment % 2 == 0 and segment >= 0:
                            nodes[col + ((row // 2) * 5) + 1] = (segment // 2
                                                                 + 1)
        # Second pass: emit every resistor, renaming electrode nodes.
        for row in range(((electrodes + electrodes - 1) * 2 - 1) + 4 * padding):
            for col in range(depth):
                fromNode = col + ((row // 2) * 5) + 1
                if (row % 2) == 0:
                    # Radial resistor: insulator rows alternate with
                    # electrode rows inside the electrode band.
                    if (row // 2 >= padding and
                            row // 2 < (padding + (electrodes * 2) - 1) and
                            (row // 2 - padding) % 2 != 0):
                        value = Rr_insulator
                    else:
                        value = Rr_electrode
                    component = "RRAD_" + str(row + 1) + "_" + str(col + 1)
                    if col == (len(Rv) - 1):
                        # Deepest column ties to the common node 1000.
                        toNode = 1000
                    else:
                        toNode = col + ((row // 2) * 5) + 2
                else:
                    # Longitudinal resistor down to the next row.
                    value = Rv[col]
                    toNode = col + ((row // 2 + 1) * 5) + 1
                    component = "RVERT_" + str(row + 1) + "_" + str(col + 1)
                if fromNode in nodes:
                    fromNode = 'e' + str(nodes[fromNode])
                if toNode in nodes:
                    toNode = 'e' + str(nodes[toNode])
                if isinstance(value, complex):
                    out.append(self.impedeor(str(component),
                                             str(fromNode),
                                             str(toNode),
                                             value))
                else:
                    out.append(str(component) + ' ' +
                               str(fromNode) + ' ' +
                               str(toNode) + ' ' +
                               str(value))
        out.append(".ENDS " + name)
        return out

    def subcircuit_faradaic(self, memristor=True, name='faradaic'):
        """Generate the Faradaic subcircuit: an anti-parallel exponential
        diode pair, optionally modulated by a memristive state node 'mset'.

        Returns the subcircuit as a list of netlist lines.
        """
        i0 = self.parameterSet.faradaic_i0()
        n = self.parameterSet.faradaic_n()
        k = 1.38e-23
        T = self.parameterSet.temperature() + 273.15
        # NOTE(review): the configured temperature is overridden with a
        # fixed 300 K here — looks intentional but confirm.
        T = 300
        q = 1.60e-19
        Vt = (k * T) / q
        out = ["****************************************",
               "* Faradaic branch start *",
               "****************************************"]
        if memristor:
            cm = self.parameterSet.faradaic_CM()
            rm = self.parameterSet.faradaic_RM()
            out += [".SUBCKT " + name + " n1 n2",
                    ".PARAM Vt=" + str(Vt),
                    ".PARAM i0=" + str(i0),
                    ".PARAM n=" + str(n),
                    ".PARAM CM=" + str(cm),
                    ".PARAM RM=" + str(rm),
                    ".PARAM nVt=n*Vt",
                    "Bdm1 n1 n2 I=i0*(1-v(mset))*exp(v(n1,n2)/nVt)",
                    "Bdm2 n2 n1 I=i0*(1+v(mset))*exp(v(n2,n1)/nVt)",
                    "Bdm1cpy 0 mset I=i0*(1-v(mset))*exp(v(n1,n2)/nVt)",
                    "Bdm2cpy mset 0 I=i0*(1+v(mset))*exp(v(n2,n1)/nVt)",
                    # BUGFIX: a missing comma used to fuse this line with
                    # the C_M line into one invalid netlist entry.
                    "R_b n1 n2 1e10",
                    "C_M mset 0 cm",
                    "R_M mset 0 rm",
                    ".ENDS " + name]
        else:
            out += [".SUBCKT " + name + " n1 n2",
                    ".PARAM Vt=" + str(Vt),
                    ".PARAM i0=" + str(i0),
                    ".PARAM n=" + str(n),
                    ".PARAM nVt=n*Vt",
                    "Bdm1 n1 n2 I=i0*exp(v(n1,n2)/nVt)",
                    "Bdm2 n2 n1 I=i0*exp(v(n2,n1)/nVt)",
                    "R_b n1 n2 1e10",
                    ".ENDS " + name]
        return out

    def subcircuit_displacement(self,
                                fmin=1e-6,
                                fmax=1e6,
                                elementsPerDecade=3,
                                name='displacement',
                                bypassRes=1e10):
        """Generate the constant-phase-element (fracpole) subcircuit as a
        ladder of parallel RC branches logarithmically spaced in frequency.

        Returns the subcircuit as a list of netlist lines.
        """
        # Extend the range three decades each way so edge effects stay
        # outside the band of interest. (Stray debug prints removed.)
        fmin /= 1000.00
        fmax *= 1000.00
        cpe_mag = self.parameterSet.displacement_mag()
        cpe_slope = self.parameterSet.displacement_slope()
        m = self.parameterSet.displacement_m()
        # Number of RC branches across the (extended) frequency range.
        numPts = (math.log10(fmax) - math.log10(fmin)) * elementsPerDecade
        # Frequency scaling factor between adjacent branches.
        k_f = math.exp((math.log(fmax) - math.log(fmin)) / numPts)
        pts = []
        for i in range(int(numPts) + 1):
            pts.append(fmin * math.pow(k_f, i))
        # Float literals guard against integer truncation if m is an int.
        k = math.pow(k_f, 1.0 / m)
        y_theta = ((math.pi / (m * math.log(k))) *
                   mpmath.sec(0.5 * math.pi * (1 - (2.0 / m))))
        out = []
        out.append("****************************************")
        out.append("* Fracpole/CPE start *")
        out.append("****************************************")
        fracpoleElements = []
        for point in pts:
            omega = 2 * math.pi * point
            Z = cpe_mag * math.pow(point, cpe_slope)
            R = Z
            C = math.pow((R / (y_theta * Z)), m) / (omega * R)
            fracpoleElements.append({'frequency': point, 'R': R, 'C': C})
        out.append(".SUBCKT " + name + " a b")
        for num, facpoleElement in enumerate(fracpoleElements):
            out.append("R" + str(num) + " a " + str(num + 1)
                       + " " + str(facpoleElement['R']))
            out.append("C" + str(num) + " " + str(num + 1)
                       + " b " + str(facpoleElement['C']))
        # DC bypass across the whole CPE ('num' is the last loop index).
        out.append("R" + str(num + 1) + " a b " + str(bypassRes))
        out.append(".ENDS " + name)
        return out

    def combine_subcircuits(self, input_elements, output_subcircuitName):
        """Wire subcircuit names / SpiceComponents in series; a nested list
        denotes a parallel group.

        E.g. ['sub1', ['sub2', 'sub3'], 'sub4'] puts sub2 and sub3 in
        parallel between sub1 and sub4. Returns the combined subcircuit
        as a list of netlist lines.
        """
        out = []
        out.append("****************************************")
        out.append("* Combine subcircuits *")
        out.append("****************************************")
        out.append(".SUBCKT " + output_subcircuitName + " a b")
        nodeCount = 0
        elementCount = 1
        seriesLength = len(input_elements)
        for index, element in enumerate(input_elements):
            incCount = False
            # The first element starts at terminal 'a'...
            if index == 0:
                thisNode = 'a'
            else:
                thisNode = 'n' + str(nodeCount)
                incCount = True
            # ...and the last ends at terminal 'b'.
            if index == (seriesLength - 1):
                nextNode = 'b'
            else:
                nextNode = 'n' + str(nodeCount + 1)
                incCount = True
            if isinstance(element, list):
                # Parallel group: every member spans the same node pair.
                for parallelElement in element:
                    out.append(self.as_spiceComponent(parallelElement,
                                                      thisNode,
                                                      nextNode,
                                                      elementCount))
                    elementCount += 1
            else:
                out.append(self.as_spiceComponent(element,
                                                  thisNode,
                                                  nextNode,
                                                  elementCount))
                elementCount += 1
            if incCount:
                nodeCount += 1
        out.append(".ENDS " + output_subcircuitName)
        return out

    def get_spiceModel(self):
        """Assemble the complete netlist: all subcircuits, the combined
        per-electrode 'interface', the ladder and eight interface
        instantiations. Returns a list of netlist lines.
        """
        out = []
        out.append('electrodeModel')
        if self.includes_diode:
            out += self.subcircuit_faradaic(memristor=self.includes_memristor)
        out += self.subcircuit_ladder()
        out += self.subcircuit_displacement()
        r_series = SpiceComponent('R', self.parameterSet.seriesResistance())
        if self.includes_diode:
            # Faradaic branch in parallel with the CPE, then the series R.
            model_layout = [['faradaic', 'displacement'], r_series]
        else:
            model_layout = ['displacement', r_series]
        out += self.combine_subcircuits(model_layout, 'interface')
        # Any impedance subcircuits created on demand along the way.
        for autoCircuit in self.autoCircuits:
            out += autoCircuit
        out.append("****************************************")
        out.append("* Circuit description *")
        out.append("****************************************")
        out.append('X_ladder w1 w2 w3 w4 w5 w6 w7 w8 ladder')
        for i in range(1, 9):
            out.append('X_interface' + str(i) + ' ' +
                       'e' + str(i) + ' ' +
                       'w' + str(i) + ' ' +
                       'interface')
        return out
| mit |
RobinQuetin/CAIRIS-web | cairis/cairis/ModelExport.py | 1 | 8404 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from Borg import Borg
from kaosxdot import KaosXDotParser
from componentxdot import ComponentXDotParser
import cairo
import pangocairo
from ConceptMapModel import ConceptMapModel
from ComponentModel import ComponentModel
from AssetModel import AssetModel
from KaosModel import KaosModel
import os
import re
def listToString(l):
  """Join a list of strings with commas; return 'None' for an empty list.

  Used to render scenario/use-case/backlog columns in the Redmine
  requirement tables.
  """
  if not l:
    return 'None'
  # str.join replaces the old manual index-tracking concatenation loop.
  return ','.join(l)
def drawGraph(graph,graphName):
  """Render *graph* to PDF and SVG in the temp dir, then shell out to
  pdftoppm/ppmtojpeg to produce a JPEG named *graphName* in OUTPUT_DIR.
  """
  b = Borg()
  tmpDir = b.tmpDir
  outputDir = os.environ['OUTPUT_DIR']
  tmpFile = tmpDir + '/' + graphName + '.pdf'
  #Make the surface a bit bigger to account for graphviz positioning the image too far left
  s = cairo.PDFSurface(tmpFile,graph.width + 5,graph.height + 5)
  c1 = cairo.Context(s)
  c2 = pangocairo.CairoContext(c1)
  c2.set_line_cap(cairo.LINE_CAP_BUTT)
  c2.set_line_join(cairo.LINE_JOIN_MITER)
  graph.zoom_ratio = 1
  #Reposition the co-ordinates to start a bit more to the right
  c2.translate(3,3)
  graph.draw(c2)
  s.finish()
  # Second pass: SVG at natural size (no translate offset applied here).
  svgFile = tmpDir + '/' + graphName + '.svg'
  s = cairo.SVGSurface(svgFile,graph.width,graph.height)
  c1 = cairo.Context(s)
  c2 = pangocairo.CairoContext(c1)
  c2.set_line_cap(cairo.LINE_CAP_BUTT)
  c2.set_line_join(cairo.LINE_JOIN_MITER)
  graph.zoom_ratio = 1
  graph.draw(c2)
  s.finish()
  # Convert PDF -> PPM -> JPEG; note the JPEG file gets no extension.
  ppmFile = tmpDir + '/' + graphName + '.ppm'
  jpgFile = outputDir + '/' + graphName
  cmd1 = 'pdftoppm ' + tmpFile + ' > ' + ppmFile
  cmd2 = 'ppmtojpeg ' + ppmFile + ' > ' + jpgFile
  os.system(cmd1)
  os.system(cmd2)
def buildConceptMap(p,envName,graphName):
  """Render the concept map for environment *envName* to *graphName*.

  Returns False when the environment has no concept map, True otherwise.
  """
  conceptMap = ConceptMapModel(p.conceptMapModel(envName).values(),envName,'',True)
  if conceptMap.size() == 0:
    return False
  xdotParser = KaosXDotParser('conceptmap',conceptMap.graph())
  xdotParser.cfSet = True
  drawGraph(xdotParser.parse(),graphName)
  return True
def exportRedmineScenarios(outFile):
  """Write every Redmine-formatted scenario to *outFile*.

  Returns a human-readable count summary.
  """
  b = Borg()
  rmScenarios = b.dbProxy.redmineScenarios()
  chunks = []
  scenarioCount = 0
  for sName,sEnv,sTxt in rmScenarios:
    chunks.append(sTxt + '\n')
    scenarioCount += 1
  with open(outFile,'w') as sFile:
    sFile.write(''.join(chunks))
  return 'Exported ' + str(scenarioCount) + ' scenarios.'
def exportRedmineUseCases(outFile):
  """Write every Redmine-formatted use case to *outFile*, grouped and
  ordered by functional category (the short code minus its digits).

  Returns a human-readable count summary.
  """
  b = Borg()
  rmUseCases = b.dbProxy.redmineUseCases()
  buf = ''
  noUseCases = 0
  # Known functional categories; unknown ones are added on the fly.
  ucDict = {'ID':[],'DA':[],'NM':[],'PS':[],'NC':[],'LC':[],'CAP':[],'TMS':[]}
  for ucName,ucShortCode,ucAuthor,ucTxt in rmUseCases:
    ucCat = re.sub('[0-9]','',ucShortCode)
    # setdefault tolerates a category outside the predefined set, which
    # previously raised KeyError.
    ucDict.setdefault(ucCat, []).append((ucName,ucShortCode,ucAuthor,ucTxt))
  # sorted() works on both Python 2 and 3; dict.keys().sort() broke on 3.
  for fnlCat in sorted(ucDict):
    for ucName,ucShortCode,ucAuthor,ucTxt in ucDict[fnlCat]:
      buf += ucTxt + '\n'
      noUseCases += 1
  ucFile = open(outFile,'w')
  ucFile.write(buf)
  ucFile.close()
  return 'Exported ' + str(noUseCases) + ' use cases.'
def exportRedmineRequirements(outFileName):
  """Export per-environment requirement tables in Redmine textile markup.

  Writes one <envCode>-requirements.txt per environment into OUTPUT_DIR,
  renders each environment's concept map, and writes the concatenation of
  all environments to *outFileName*. Returns a status string.
  """
  b = Borg()
  reqs = b.dbProxy.getRedmineRequirements()
  # sorted() works on both Python 2 and 3; dict.keys().sort() broke on 3.
  envNames = sorted(reqs.keys())
  outputDir = os.environ['OUTPUT_DIR']
  outputBuf = ''
  for envName in envNames:
    envReqs = reqs[envName]
    # Every row of an environment carries the same short code at index 5.
    envCode = envReqs[0][5]
    buf = 'h1. ' + envName + ' requirements\n\n'
    cmFile = envCode + '_conceptMap'
    buildConceptMap(b.dbProxy,envName,cmFile)
    buf +='!' + cmFile + '!\n\n'
    buf += '|*Short Name*|*Comments*|*Scenarios*|*Use Cases*|*Backlog*|\n'
    for envReq in envReqs:
      reqName = envReq[0]
      reqOrig = envReq[1]
      reqPri = envReq[2]
      reqComments = envReq[3]
      reqDesc = envReq[4]
      reqScs = envReq[6]
      reqUcs = envReq[7]
      reqBis = envReq[8]
      buf += '|/2.*' + reqName + '*\n' + reqPri + ', ' + reqOrig + '|\\4.' + reqDesc + '|\n|' + reqComments + '|' + listToString(reqScs) + '|' + listToString(reqUcs) + '|' + listToString(reqBis) + '|\n'
    # BUGFIX: the mode string was 'w,' — rejected as invalid by Python 3's
    # open() (ValueError) and only accepted by accident under Python 2.
    envFile = open(outputDir + '/' + envCode + '-requirements.txt','w')
    envFile.write(buf)
    envFile.close()
    outputBuf += buf + '\n'
  outputFile = open(outFileName,'w')
  outputFile.write(outputBuf)
  outputFile.close()
  return 'Exported requirements'
def exportGRL(outFileName,personaNames,taskNames,envName):
  """Export a GRL model for the given personas and tasks in *envName*.

  Returns a human-readable summary of what was exported.
  """
  b = Borg()
  pStr = ', '.join(personaNames)
  tStr = ', '.join(taskNames)
  grlTxt = b.dbProxy.pcToGrl(pStr,tStr,envName)
  with open(outFileName,'w') as rFile:
    rFile.write(grlTxt)
  return ('Exported GRL for ' + pStr + ' in tasks ' + tStr +
          ' situated in environment ' + envName)
def buildComponentModel(p,apName,graphName):
  """Draw the component model for architectural pattern *apName*."""
  interfaces,connectors = p.componentView(apName)
  xdotParser = ComponentXDotParser(ComponentModel(interfaces,connectors).graph())
  drawGraph(xdotParser.parse(),graphName)
  return True
def buildComponentAssetModel(p,cName,graphName):
  """Draw the asset model for component *cName* to *graphName*."""
  assetAssocs = p.componentAssetModel(cName)
  assetParser = KaosXDotParser('class',AssetModel(assetAssocs.values(),'').graph())
  drawGraph(assetParser.parse(),graphName)
  return True
def buildComponentGoalModel(p,cName,graphName):
  """Draw the template-goal model for component *cName* to *graphName*."""
  goalAssocs = p.componentGoalModel(cName)
  goalParser = KaosXDotParser('goal',KaosModel(goalAssocs.values(),'','template_goal').graph())
  drawGraph(goalParser.parse(),graphName)
  return True
def buildRiskObstacleModel(p,apName,envName,graphName):
  """Draw the obstacle model for attack pattern *apName* in *envName*."""
  obsAssocs = p.riskObstacleModel(apName,envName)
  obsParser = KaosXDotParser('obstacle',KaosModel(obsAssocs.values(),envName,'obstacle',apName).graph())
  drawGraph(obsParser.parse(),graphName)
  return True
def exportArchitecture(outFile):
  """Export architectural patterns and components (Redmine markup),
  drawing the supporting asset/goal/component diagrams along the way.

  Also writes a '<prefix>-summary.<suffix>' companion file. Returns a
  human-readable count summary.
  """
  b = Borg()
  rmArchitecture = b.dbProxy.redmineArchitecture()
  parts = []
  patternCount = 0
  for aName,aType,sTxt in rmArchitecture:
    parts.append(sTxt + '\n')
    patternCount += 1
    baseName = aName.replace(' ','_')
    if aType == 'component':
      buildComponentAssetModel(b.dbProxy,aName,baseName + 'AssetModel.jpg')
      buildComponentGoalModel(b.dbProxy,aName,baseName + 'GoalModel.jpg')
    elif aType == 'architectural_pattern':
      buildComponentModel(b.dbProxy,aName,baseName + 'ComponentModel.jpg')
  with open(outFile,'w') as aFile:
    aFile.write(''.join(parts))
  outFilePrefix,outFilePostfix = outFile.split('.')
  summaryFile = outFilePrefix + '-summary.' + outFilePostfix
  summaryParts = []
  for aName,sTxt in b.dbProxy.redmineArchitectureSummary('Complete'):
    summaryParts.append(sTxt + '\n')
  with open(summaryFile,'w') as sFile:
    sFile.write(''.join(summaryParts))
  return 'Exported ' + str(patternCount) + ' architectural patterns.'
def exportAttackPatterns(outFile):
  """Export contextualised attack patterns (Redmine markup) plus a
  '-summary' companion file; obstacle models are drawn per pattern.

  Returns a human-readable count summary.
  """
  b = Borg()
  rmAttackPatterns = b.dbProxy.redmineAttackPatterns()
  buf = 'h1. Contextualised Attack Patterns\n\nThis section was automatically generated based on the contents of the webinos WP 2 git repository at http://dev.webinos.org/git/wp2.git.\n\nh2. Obstacle probability: colour codes\n\n!{width:200px}ObsColour.jpg!\n\n'
  # NOTE(review): appendix text is accumulated but never written — verify.
  apdxBuf = ''
  noAPs = 0
  for apName,envName,cType,apTxt in rmAttackPatterns:
    if cType == 'body':
      buf += apTxt + '\n'
      gmName = apName.replace(' ','_') + 'ObstacleModel.jpg'
      buildRiskObstacleModel(b.dbProxy,apName,envName,gmName)
    else:
      apdxBuf += apTxt + '\n'
    noAPs += 1
  with open(outFile,'w') as aFile:
    aFile.write(buf)
  fileName,filePostfix = outFile.split('.')
  summaryFile = fileName + '-summary.txt'
  with open(summaryFile,'w') as sFile:
    sFile.write(b.dbProxy.redmineAttackPatternsSummary('Complete'))
  return 'Exported ' + str(noAPs) + ' attack patterns.'
| apache-2.0 |
Evervolv/android_external_chromium_org | third_party/protobuf/python/google/protobuf/message_factory.py | 228 | 4178 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides a factory class for generating dynamic messages."""
__author__ = 'matthewtoia@google.com (Matt Toia)'
from google.protobuf import descriptor_database
from google.protobuf import descriptor_pool
from google.protobuf import message
from google.protobuf import reflection
class MessageFactory(object):
  """Factory for creating Proto2 messages from descriptors in a pool."""

  def __init__(self):
    """Initializes a new factory with an empty class cache."""
    self._classes = {}

  def GetPrototype(self, descriptor):
    """Builds a proto2 message class based on the passed in descriptor.

    Passing a descriptor with a fully qualified name matching a previous
    invocation will cause the same class to be returned.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    try:
      return self._classes[descriptor.full_name]
    except KeyError:
      pass
    message_class = reflection.GeneratedProtocolMessageType(
        descriptor.name.encode('ascii', 'ignore'),
        (message.Message,),
        {'DESCRIPTOR': descriptor})
    self._classes[descriptor.full_name] = message_class
    # Recursively build classes for any message-typed fields as well.
    for field in descriptor.fields:
      if field.message_type:
        self.GetPrototype(field.message_type)
    return self._classes[descriptor.full_name]
# Module-level singletons shared by GetMessages()/_GetAllDescriptors():
# a database of file descriptor protos, a pool that resolves them into
# descriptors, and the factory that turns descriptors into classes.
_DB = descriptor_database.DescriptorDatabase()
_POOL = descriptor_pool.DescriptorPool(_DB)
_FACTORY = MessageFactory()
def GetMessages(file_protos):
  """Builds a dictionary of all the messages available in a set of files.

  Args:
    file_protos: A sequence of file protos to build messages out of.

  Returns:
    A dictionary containing all the message types in the files mapping the
    fully qualified name to a Message subclass for the descriptor.
  """
  # Register every file first so cross-file references resolve.
  for file_proto in file_protos:
    _DB.Add(file_proto)
  return {desc.full_name: _FACTORY.GetPrototype(desc)
          for file_proto in file_protos
          for desc in _GetAllDescriptors(file_proto.message_type,
                                         file_proto.package)}
def _GetAllDescriptors(desc_protos, package):
  """Gets all levels of nested message types as a flattened generator.

  Args:
    desc_protos: The descriptor protos to process.
    package: The package where the protos are defined.

  Yields:
    Each message descriptor for each nested type.
  """
  for desc_proto in desc_protos:
    full_name = '.'.join((package, desc_proto.name))
    yield _POOL.FindMessageTypeByName(full_name)
    # Recurse into nested message definitions, scoped under this name.
    for nested in _GetAllDescriptors(desc_proto.nested_type, full_name):
      yield nested
| bsd-3-clause |
hellofreedom/ansible-modules-core | cloud/openstack/os_nova_flavor.py | 93 | 6844 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
# shade is an optional dependency; record availability so main() can fail
# with a clear message instead of an ImportError at module import time.
try:
    import shade
    HAS_SHADE = True
except ImportError:
    HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_nova_flavor
short_description: Manage OpenStack compute flavors
extends_documentation_fragment: openstack
version_added: "2.0"
author: "David Shrewsbury (@Shrews)"
description:
- Add or remove flavors from OpenStack.
options:
state:
description:
- Indicate desired state of the resource. When I(state) is 'present',
then I(ram), I(vcpus), and I(disk) are all required. There are no
default values for those parameters.
choices: ['present', 'absent']
required: false
default: present
name:
description:
- Flavor name.
required: true
ram:
description:
- Amount of memory, in MB.
required: false
default: null
vcpus:
description:
- Number of virtual CPUs.
required: false
default: null
disk:
description:
- Size of local disk, in GB.
required: false
default: null
ephemeral:
description:
- Ephemeral space size, in GB.
required: false
default: 0
swap:
description:
- Swap space size, in MB.
required: false
default: 0
rxtx_factor:
description:
- RX/TX factor.
required: false
default: 1.0
is_public:
description:
- Make flavor accessible to the public.
required: false
default: true
flavorid:
description:
- ID for the flavor. This is optional as a unique UUID will be
assigned if a value is not specified.
required: false
default: "auto"
requirements: ["shade"]
'''
EXAMPLES = '''
# Create 'tiny' flavor with 1024MB of RAM, 1 virtual CPU, and 10GB of
# local disk, and 10GB of ephemeral.
- os_nova_flavor:
cloud=mycloud
state=present
name=tiny
ram=1024
vcpus=1
disk=10
ephemeral=10
# Delete 'tiny' flavor
- os_nova_flavor:
cloud=mycloud
state=absent
name=tiny
'''
RETURN = '''
flavor:
description: Dictionary describing the flavor.
returned: On success when I(state) is 'present'
type: dictionary
contains:
id:
description: Flavor ID.
returned: success
type: string
sample: "515256b8-7027-4d73-aa54-4e30a4a4a339"
name:
description: Flavor name.
returned: success
type: string
sample: "tiny"
disk:
description: Size of local disk, in GB.
returned: success
type: int
sample: 10
ephemeral:
description: Ephemeral space size, in GB.
returned: success
type: int
sample: 10
ram:
description: Amount of memory, in MB.
returned: success
type: int
sample: 1024
swap:
description: Swap space size, in MB.
returned: success
type: int
sample: 100
vcpus:
description: Number of virtual CPUs.
returned: success
type: int
sample: 2
is_public:
description: Make flavor accessible to the public.
returned: success
type: bool
sample: true
'''
def _system_state_change(module, flavor):
state = module.params['state']
if state == 'present' and not flavor:
return True
if state == 'absent' and flavor:
return True
return False
def main():
    """Ansible entry point: converge a Nova flavor to the requested state."""
    argument_spec = openstack_full_argument_spec(
        state = dict(required=False, default='present',
                     choices=['absent', 'present']),
        name = dict(required=False),
        # required when state is 'present'
        ram = dict(required=False, type='int'),
        vcpus = dict(required=False, type='int'),
        disk = dict(required=False, type='int'),
        ephemeral = dict(required=False, default=0, type='int'),
        swap = dict(required=False, default=0, type='int'),
        rxtx_factor = dict(required=False, default=1.0, type='float'),
        is_public = dict(required=False, default=True, type='bool'),
        flavorid = dict(required=False, default="auto"),
    )

    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True,
        # ram/vcpus/disk are only mandatory when creating a flavor.
        required_if=[
            ('state', 'present', ['ram', 'vcpus', 'disk'])
        ],
        **module_kwargs)

    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')

    state = module.params['state']
    name = module.params['name']

    try:
        cloud = shade.operator_cloud(**module.params)
        flavor = cloud.get_flavor(name)

        # In check mode, only report whether anything would change.
        if module.check_mode:
            module.exit_json(changed=_system_state_change(module, flavor))

        if state == 'present':
            if not flavor:
                flavor = cloud.create_flavor(
                    name=name,
                    ram=module.params['ram'],
                    vcpus=module.params['vcpus'],
                    disk=module.params['disk'],
                    flavorid=module.params['flavorid'],
                    ephemeral=module.params['ephemeral'],
                    swap=module.params['swap'],
                    rxtx_factor=module.params['rxtx_factor'],
                    is_public=module.params['is_public']
                )
                module.exit_json(changed=True, flavor=flavor)
            # Flavor already exists: nothing to do.
            module.exit_json(changed=False, flavor=flavor)

        elif state == 'absent':
            if flavor:
                cloud.delete_flavor(name)
                module.exit_json(changed=True)
            module.exit_json(changed=False)

    except shade.OpenStackCloudException as e:
        module.fail_json(msg=e.message)
# this is magic, see lib/ansible/module_common.py
# (the star imports pull in AnsibleModule and the OpenStack argument
# helpers used by main() above)
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
CiscoSystems/nova | nova/virt/hyperv/vhdutils.py | 13 | 7850 | # Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility class for VHD related operations.
Official VHD format specs can be retrieved at:
http://technet.microsoft.com/en-us/library/bb676673.aspx
See "Download the Specifications Without Registering"
Official VHDX format specs can be retrieved at:
http://www.microsoft.com/en-us/download/details.aspx?id=34750
"""
import struct
import sys
if sys.platform == 'win32':
import wmi
from nova.openstack.common.gettextutils import _
from nova.virt.hyperv import constants
from nova.virt.hyperv import vmutils
from xml.etree import ElementTree
# On-disk layout constants for the VHD format (see the class docstring and
# get_internal_vhd_size_by_file_size below for how they combine).
VHD_HEADER_SIZE_FIX = 512
VHD_BAT_ENTRY_SIZE = 4
VHD_DYNAMIC_DISK_HEADER_SIZE = 1024
VHD_HEADER_SIZE_DYNAMIC = 512
VHD_FOOTER_SIZE_DYNAMIC = 512
VHD_BLK_SIZE_OFFSET = 544  # file offset of the block size field in the dynamic disk header
VHD_SIGNATURE = 'conectix'  # VHD footer cookie
VHDX_SIGNATURE = 'vhdxfile'  # VHDX header signature
class VHDUtils(object):
    """Helper for VHD operations via the Hyper-V WMI virtualization API.

    Most operations delegate to the Msvm_ImageManagementService WMI class;
    a few parse the on-disk VHD format directly.
    """

    def __init__(self):
        self._vmutils = vmutils.VMUtils()
        if sys.platform == 'win32':
            # The virtualization WMI namespace only exists on Windows hosts.
            self._conn = wmi.WMI(moniker='//./root/virtualization')

    def validate_vhd(self, vhd_path):
        """Ask Hyper-V to validate the VHD at `vhd_path`; raises on failure."""
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        (job_path, ret_val) = image_man_svc.ValidateVirtualHardDisk(
            Path=vhd_path)
        self._vmutils.check_ret_val(ret_val, job_path)

    def create_dynamic_vhd(self, path, max_internal_size, format):
        """Create a dynamically expanding VHD.

        :param path: destination path of the new VHD file.
        :param max_internal_size: maximum guest-visible disk size in bytes.
        :param format: must be constants.DISK_FORMAT_VHD; this (v1) WMI
            namespace cannot create VHDX files.
        :raises HyperVException: if `format` is not VHD or the job fails.
        """
        if format != constants.DISK_FORMAT_VHD:
            raise vmutils.HyperVException(_("Unsupported disk format: %s") %
                                          format)
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        (job_path, ret_val) = image_man_svc.CreateDynamicVirtualHardDisk(
            Path=path, MaxInternalSize=max_internal_size)
        self._vmutils.check_ret_val(ret_val, job_path)

    def create_differencing_vhd(self, path, parent_path):
        """Create a differencing (child) VHD backed by `parent_path`."""
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        (job_path, ret_val) = image_man_svc.CreateDifferencingVirtualHardDisk(
            Path=path, ParentPath=parent_path)
        self._vmutils.check_ret_val(ret_val, job_path)

    def reconnect_parent_vhd(self, child_vhd_path, parent_vhd_path):
        """Repoint a differencing VHD at a (possibly relocated) parent."""
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        (job_path, ret_val) = image_man_svc.ReconnectParentVirtualHardDisk(
            ChildPath=child_vhd_path,
            ParentPath=parent_vhd_path,
            Force=True)
        self._vmutils.check_ret_val(ret_val, job_path)

    def merge_vhd(self, src_vhd_path, dest_vhd_path):
        """Merge the changes stored in `src_vhd_path` into `dest_vhd_path`."""
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        (job_path, ret_val) = image_man_svc.MergeVirtualHardDisk(
            SourcePath=src_vhd_path,
            DestinationPath=dest_vhd_path)
        self._vmutils.check_ret_val(ret_val, job_path)

    def _get_resize_method(self):
        # Isolated so alternative WMI namespaces/subclasses can override it.
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        return image_man_svc.ExpandVirtualHardDisk

    def resize_vhd(self, vhd_path, new_max_size, is_file_max_size=True):
        """Expand a VHD.

        :param new_max_size: target size in bytes. When `is_file_max_size`
            is True it is treated as the maximum *file* size and the internal
            (guest-visible) size is derived from it; otherwise it is used as
            the internal size directly.
        """
        if is_file_max_size:
            new_internal_max_size = self.get_internal_vhd_size_by_file_size(
                vhd_path, new_max_size)
        else:
            new_internal_max_size = new_max_size
        resize = self._get_resize_method()
        (job_path, ret_val) = resize(
            Path=vhd_path, MaxInternalSize=new_internal_max_size)
        self._vmutils.check_ret_val(ret_val, job_path)

    def get_internal_vhd_size_by_file_size(self, vhd_path, new_vhd_file_size):
        """Fixed VHD size = Data Block size + 512 bytes
        Dynamic_VHD_size = Dynamic Disk Header
                         + Copy of hard disk footer
                         + Hard Disk Footer
                         + Data Block
                         + BAT
        Dynamic Disk header fields
            Copy of hard disk footer (512 bytes)
            Dynamic Disk Header (1024 bytes)
            BAT (Block Allocation table)
            Data Block 1
            Data Block 2
            Data Block n
            Hard Disk Footer (512 bytes)
        Default block size is 2M
        BAT entry size is 4byte
        """
        base_vhd_info = self.get_vhd_info(vhd_path)
        vhd_type = base_vhd_info['Type']
        if vhd_type == constants.VHD_TYPE_FIXED:
            # Fixed VHD: the file is the data plus a 512 byte footer.
            vhd_header_size = VHD_HEADER_SIZE_FIX
            return new_vhd_file_size - vhd_header_size
        elif vhd_type == constants.VHD_TYPE_DYNAMIC:
            bs = self._get_vhd_dynamic_blk_size(vhd_path)
            bes = VHD_BAT_ENTRY_SIZE
            ddhs = VHD_DYNAMIC_DISK_HEADER_SIZE
            hs = VHD_HEADER_SIZE_DYNAMIC
            fs = VHD_FOOTER_SIZE_DYNAMIC
            # Each data block of `bs` bytes also costs one BAT entry, hence
            # the bs / (bes + bs) scaling of the space left after the fixed
            # headers and footers.
            max_internal_size = (new_vhd_file_size -
                                 (hs + ddhs + fs)) * bs / (bes + bs)
            return max_internal_size
        else:
            raise vmutils.HyperVException(_("The %(vhd_type)s type VHD "
                                            "is not supported") %
                                          {"vhd_type": vhd_type})

    def _get_vhd_dynamic_blk_size(self, vhd_path):
        """Read the data block size field from a dynamic VHD's disk header."""
        blk_size_offset = VHD_BLK_SIZE_OFFSET
        try:
            with open(vhd_path, "rb") as f:
                f.seek(blk_size_offset)
                version = f.read(4)
        except IOError:
            raise vmutils.HyperVException(_("Unable to obtain block size from"
                                            " VHD %(vhd_path)s") %
                                          {"vhd_path": vhd_path})
        # The field is a 4 byte big-endian integer.
        return struct.unpack('>i', version)[0]

    def get_vhd_parent_path(self, vhd_path):
        """Return the parent path of a differencing VHD, or None."""
        return self.get_vhd_info(vhd_path).get("ParentPath")

    def get_vhd_info(self, vhd_path):
        """Return a dict of VHD properties parsed from the WMI XML reply."""
        image_man_svc = self._conn.Msvm_ImageManagementService()[0]
        (vhd_info,
         job_path,
         ret_val) = image_man_svc.GetVirtualHardDiskInfo(vhd_path)
        self._vmutils.check_ret_val(ret_val, job_path)
        vhd_info_dict = {}
        et = ElementTree.fromstring(vhd_info)
        for item in et.findall("PROPERTY"):
            name = item.attrib["NAME"]
            value_text = item.find("VALUE").text
            if name == "ParentPath":
                vhd_info_dict[name] = value_text
            elif name in ["FileSize", "MaxInternalSize"]:
                vhd_info_dict[name] = long(value_text)
            elif name in ["InSavedState", "InUse"]:
                # bool(value_text) would be True for ANY non-empty string,
                # including "false"/"FALSE", so compare the text explicitly
                # (case-insensitively, to cope with either casing).
                vhd_info_dict[name] = (value_text or '').lower() == 'true'
            elif name == "Type":
                vhd_info_dict[name] = int(value_text)
        return vhd_info_dict

    def get_vhd_format(self, path):
        """Detect VHD vs VHDX by on-disk signature; raise if neither."""
        with open(path, 'rb') as f:
            # VHDX stores its signature in the first 8 bytes of the file.
            if f.read(8) == VHDX_SIGNATURE:
                return constants.DISK_FORMAT_VHDX
            # VHD stores its cookie in the 512 byte footer at the end.
            f.seek(0, 2)
            file_size = f.tell()
            if file_size >= 512:
                f.seek(-512, 2)
                if f.read(8) == VHD_SIGNATURE:
                    return constants.DISK_FORMAT_VHD
        raise vmutils.HyperVException(_('Unsupported virtual disk format'))

    def get_best_supported_vhd_format(self):
        """This (v1) WMI namespace supports only the plain VHD format."""
        return constants.DISK_FORMAT_VHD
| apache-2.0 |
stasiek/robotframework | src/robot/result/__init__.py | 26 | 1259 | # Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements parsing execution results from XML output files.
The public API of this package is the :func:`~.ExecutionResult` factory
method, which returns :class:`~.Result` objects, and :class:`~.ResultVisitor`
abstract class to ease further processing the results. It is highly
recommended to use the public API via the :mod:`robot.api` package like in
the example below.
This package is considered stable.
Example
-------
.. literalinclude:: /../../doc/api/code_examples/check_test_times.py
"""
from .executionresult import Result
from .resultbuilder import ExecutionResult
from .testsuite import TestSuite
from .visitor import ResultVisitor
| apache-2.0 |
aleksandr-bakanov/astropy | astropy/io/votable/__init__.py | 12 | 1218 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package reads and writes data formats used by the Virtual
Observatory (VO) initiative, particularly the VOTable XML format.
"""
from .table import (
parse, parse_single_table, validate, from_table, is_votable, writeto)
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
from astropy import config as _config
# Public API of astropy.io.votable, re-exported from the submodules above.
__all__ = [
    'Conf', 'conf', 'parse', 'parse_single_table', 'validate',
    'from_table', 'is_votable', 'writeto', 'VOWarning',
    'VOTableChangeWarning', 'VOTableSpecWarning',
    'UnimplementedWarning', 'IOWarning', 'VOTableSpecError']
class Conf(_config.ConfigNamespace):
    """
    Configuration parameters for `astropy.io.votable`.
    """
    # How strictly VOTable spec violations are treated; the aliases keep the
    # historical "pedantic" option names working.
    verify = _config.ConfigItem(
        'ignore',
        "Can be 'exception' (treat fixable violations of the VOTable spec as "
        "exceptions), 'warn' (show warnings for VOTable spec violations), or "
        "'ignore' (silently ignore VOTable spec violations)",
        aliases=['astropy.io.votable.table.pedantic',
                 'astropy.io.votable.pedantic'])


# Module-level singleton through which the configuration is accessed.
conf = Conf()
tcwicklund/django | tests/migrations/test_base.py | 292 | 4620 | import os
import shutil
import tempfile
from contextlib import contextmanager
from importlib import import_module
from django.apps import apps
from django.db import connection
from django.db.migrations.recorder import MigrationRecorder
from django.test import TransactionTestCase
from django.test.utils import extend_sys_path
from django.utils.module_loading import module_dir
class MigrationTestBase(TransactionTestCase):
    """
    Contains an extended set of asserts for testing migrations and schema operations.
    """
    available_apps = ["migrations"]

    def tearDown(self):
        # Reset applied-migrations state.
        recorder = MigrationRecorder(connection)
        recorder.migration_qs.filter(app='migrations').delete()

    def get_table_description(self, table):
        # Introspect the live database schema for `table`.
        with connection.cursor() as cursor:
            return connection.introspection.get_table_description(cursor, table)

    def assertTableExists(self, table):
        with connection.cursor() as cursor:
            self.assertIn(table, connection.introspection.table_names(cursor))

    def assertTableNotExists(self, table):
        with connection.cursor() as cursor:
            self.assertNotIn(table, connection.introspection.table_names(cursor))

    def assertColumnExists(self, table, column):
        self.assertIn(column, [c.name for c in self.get_table_description(table)])

    def assertColumnNotExists(self, table, column):
        self.assertNotIn(column, [c.name for c in self.get_table_description(table)])

    def assertColumnNull(self, table, column):
        self.assertEqual([c.null_ok for c in self.get_table_description(table) if c.name == column][0], True)

    def assertColumnNotNull(self, table, column):
        self.assertEqual([c.null_ok for c in self.get_table_description(table) if c.name == column][0], False)

    def assertIndexExists(self, table, columns, value=True):
        # Passes when the presence of an index on exactly `columns` matches
        # `value` (True: must exist, False: must not).
        with connection.cursor() as cursor:
            self.assertEqual(
                value,
                any(
                    c["index"]
                    for c in connection.introspection.get_constraints(cursor, table).values()
                    if c['columns'] == list(columns)
                ),
            )

    def assertIndexNotExists(self, table, columns):
        return self.assertIndexExists(table, columns, False)

    def assertFKExists(self, table, columns, to, value=True):
        with connection.cursor() as cursor:
            self.assertEqual(
                value,
                any(
                    c["foreign_key"] == to
                    for c in connection.introspection.get_constraints(cursor, table).values()
                    if c['columns'] == list(columns)
                ),
            )

    def assertFKNotExists(self, table, columns, to, value=True):
        # NOTE(review): the `value` parameter is accepted but ignored;
        # False is always forwarded to assertFKExists.
        return self.assertFKExists(table, columns, to, False)

    @contextmanager
    def temporary_migration_module(self, app_label='migrations', module=None):
        """
        Allows testing management commands in a temporary migrations module.
        Wrap all invocations to makemigrations and squashmigrations with this
        context manager in order to avoid creating migration files in your
        source tree inadvertently.
        Takes the application label that will be passed to makemigrations or
        squashmigrations and the Python path to a migrations module.
        The migrations module is used as a template for creating the temporary
        migrations module. If it isn't provided, the application's migrations
        module is used, if it exists.
        Returns the filesystem path to the temporary migrations module.
        """
        temp_dir = tempfile.mkdtemp()
        try:
            target_dir = tempfile.mkdtemp(dir=temp_dir)
            # The package needs an __init__.py so it is importable.
            with open(os.path.join(target_dir, '__init__.py'), 'w'):
                pass
            target_migrations_dir = os.path.join(target_dir, 'migrations')
            if module is None:
                module = apps.get_app_config(app_label).name + '.migrations'
            try:
                source_migrations_dir = module_dir(import_module(module))
            except (ImportError, ValueError):
                # No template module available; start with an empty one.
                pass
            else:
                shutil.copytree(source_migrations_dir, target_migrations_dir)
            with extend_sys_path(temp_dir):
                new_module = os.path.basename(target_dir) + '.migrations'
                with self.settings(MIGRATION_MODULES={app_label: new_module}):
                    yield target_migrations_dir
        finally:
            shutil.rmtree(temp_dir)
| bsd-3-clause |
snasoft/QtCreatorPluginsPack | Bin/3rdParty/vera/bin/lib/unittest/signals.py | 162 | 1658 | import signal
import weakref
from functools import wraps
__unittest = True
class _InterruptHandler(object):
    """SIGINT handler installed by installHandler().

    On the first interrupt, every registered result is asked to stop; on a
    second interrupt (or when some other handler has since been installed)
    the original default handler is invoked instead.
    """
    def __init__(self, default_handler):
        self.called = False  # becomes True after the first SIGINT we handle
        self.default_handler = default_handler  # handler active before install
    def __call__(self, signum, frame):
        installed_handler = signal.getsignal(signal.SIGINT)
        if installed_handler is not self:
            # if we aren't the installed handler, then delegate immediately
            # to the default handler
            self.default_handler(signum, frame)
        if self.called:
            # Second interrupt: give up on graceful shutdown.
            self.default_handler(signum, frame)
        self.called = True
        for result in _results.keys():
            result.stop()
# Registry of active result objects, held weakly so registration does not
# keep a result alive after its run finishes.
_results = weakref.WeakKeyDictionary()


def registerResult(result):
    """Track *result* so a SIGINT can ask it to stop."""
    _results[result] = 1


def removeResult(result):
    """Stop tracking *result*; return True if it was being tracked."""
    previously_registered = _results.pop(result, None)
    return previously_registered is not None
# Module-level singleton; stays None until installHandler() is first called.
_interrupt_handler = None
def installHandler():
    """Install the SIGINT handler that stops registered results (idempotent)."""
    global _interrupt_handler
    if _interrupt_handler is None:
        # Remember the previously active handler so removeHandler() can
        # restore it later.
        default_handler = signal.getsignal(signal.SIGINT)
        _interrupt_handler = _InterruptHandler(default_handler)
        signal.signal(signal.SIGINT, _interrupt_handler)
def removeHandler(method=None):
    """Uninstall the SIGINT handler installed by installHandler().

    Called bare, restores the handler that was active before installation.
    Used as a decorator, it restores the pre-call handler only for the
    duration of the wrapped call.
    """
    if method is not None:
        @wraps(method)
        def inner(*args, **kwargs):
            initial = signal.getsignal(signal.SIGINT)
            removeHandler()
            try:
                return method(*args, **kwargs)
            finally:
                # Reinstate whatever handler was active before the call.
                signal.signal(signal.SIGINT, initial)
        return inner
    global _interrupt_handler
    if _interrupt_handler is not None:
        signal.signal(signal.SIGINT, _interrupt_handler.default_handler)
| lgpl-3.0 |
CMSS-BCRDB/RDS | trove/db/models.py | 4 | 4752 | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from trove.db import get_db_api
from trove.db import db_query
from trove.common import exception
from trove.common import models
from trove.common import pagination
from trove.common import utils
from trove.openstack.common import log as logging
from trove.common.i18n import _
LOG = logging.getLogger(__name__)
class DatabaseModelBase(models.ModelBase):
    """Base class for persisted trove models: common CRUD/query helpers."""
    # Attributes filled in automatically by create().
    _auto_generated_attrs = ['id']
    @classmethod
    def create(cls, **values):
        """Build, validate and persist a new instance (id/created auto-set)."""
        init_vals = {
            'id': utils.generate_uuid(),
            'created': utils.utcnow(),
        }
        if hasattr(cls, 'deleted'):
            # Models supporting soft-delete start out as not deleted.
            init_vals['deleted'] = False
        init_vals.update(values)
        instance = cls(**init_vals)
        if not instance.is_valid():
            raise exception.InvalidModelError(errors=instance.errors)
        return instance.save()
    @property
    def db_api(self):
        return get_db_api()
    @property
    def preserve_on_delete(self):
        # Soft-delete is used when the model tracks both flags.
        return hasattr(self, 'deleted') and hasattr(self, 'deleted_at')
    @classmethod
    def query(cls):
        return get_db_api()._base_query(cls)
    def save(self):
        """Validate and persist; refreshes the `updated` timestamp."""
        if not self.is_valid():
            raise exception.InvalidModelError(errors=self.errors)
        self['updated'] = utils.utcnow()
        LOG.debug("Saving %(name)s: %(dict)s" %
                  {'name': self.__class__.__name__, 'dict': self.__dict__})
        return self.db_api.save(self)
    def delete(self):
        """Delete the record (soft-delete when the model supports it)."""
        self['updated'] = utils.utcnow()
        LOG.debug("Deleting %(name)s: %(dict)s" %
                  {'name': self.__class__.__name__, 'dict': self.__dict__})
        if self.preserve_on_delete:
            # Soft delete: keep the row, mark it as deleted.
            self['deleted_at'] = utils.utcnow()
            self['deleted'] = True
            return self.db_api.save(self)
        else:
            return self.db_api.delete(self)
    def update(self, **values):
        """Set the given attributes (only ones already defined) and persist."""
        for key in values:
            if hasattr(self, key):
                setattr(self, key, values[key])
        self['updated'] = utils.utcnow()
        return self.db_api.save(self)
    def __init__(self, **kwargs):
        self.merge_attributes(kwargs)
        if not self.is_valid():
            raise exception.InvalidModelError(errors=self.errors)
    def merge_attributes(self, values):
        """dict.update() behaviour."""
        for k, v in values.iteritems():
            self[k] = v
    @classmethod
    def find_by(cls, context=None, **conditions):
        """Find one record; enforce tenant ownership when a context is given.

        Raises ModelNotFoundError when no record matches, or when a
        non-admin context does not own the matched record (deliberately the
        same error, so existence is not leaked across tenants).
        """
        model = cls.get_by(**conditions)
        if model is None:
            raise exception.ModelNotFoundError(_("%(s_name)s Not Found") %
                                               {"s_name": cls.__name__})
        if ((context and not context.is_admin and hasattr(model, 'tenant_id')
             and model.tenant_id != context.tenant)):
            msg = _("Tenant %(s_tenant)s tried to access "
                    "%(s_name)s, owned by %(s_owner)s.")
            LOG.error(msg % (
                {"s_tenant": context.tenant, "s_name": cls.__name__,
                 "s_owner": model.tenant_id}))
            raise exception.ModelNotFoundError(
                _("Tenant %(s_tenant)s cannot access %(s_name)s") % (
                    {"s_tenant": context.tenant, "s_name": cls.__name__}))
        return model
    @classmethod
    def get_by(cls, **kwargs):
        return get_db_api().find_by(cls, **cls._process_conditions(kwargs))
    @classmethod
    def find_all(cls, **kwargs):
        return db_query.find_all(cls, **cls._process_conditions(kwargs))
    @classmethod
    def _process_conditions(cls, raw_conditions):
        """Override in inheritors to format/modify any conditions."""
        return raw_conditions
    @classmethod
    def find_by_pagination(cls, collection_type, collection_query,
                           paginated_url, **kwargs):
        # Returns one page of results plus the marker for the next page.
        elements, next_marker = collection_query.paginated_collection(**kwargs)
        return pagination.PaginatedDataView(collection_type,
                                            elements,
                                            paginated_url,
                                            next_marker)
| apache-2.0 |
cancan101/tensorflow | tensorflow/contrib/learn/python/learn/estimators/dnn.py | 9 | 31036 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Deep Neural Network estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib import layers
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn import metric_spec
from tensorflow.contrib.learn.python.learn.estimators import dnn_linear_combined
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import model_fn
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.python.ops import nn
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.summary import summary
_CENTERED_BIAS_WEIGHT = "centered_bias_weight"
# The default learning rate of 0.05 is a historical artifact of the initial
# implementation, but seems a reasonable choice.
_LEARNING_RATE = 0.05
def _get_feature_dict(features):
if isinstance(features, dict):
return features
return {"": features}
def _get_optimizer(optimizer):
if callable(optimizer):
return optimizer()
else:
return optimizer
def _add_hidden_layer_summary(value, tag):
  """Emit TensorBoard summaries for a layer's activations.

  Records the fraction of zero activations (useful for spotting dead ReLU
  units) as a scalar and the activation distribution as a histogram, both
  namespaced under `tag`.
  """
  summary.scalar("%s_fraction_of_zero_values" % tag, nn.zero_fraction(value))
  summary.histogram("%s_activation" % tag, value)
def _dnn_model_fn(features, labels, mode, params, config=None):
  """Deep Neural Net model_fn.
  Args:
    features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
      dtype `int32` or `int64` in the range `[0, n_classes)`.
    mode: Defines whether this is training, evaluation or prediction.
      See `ModeKeys`.
    params: A dict of hyperparameters.
      The following hyperparameters are expected:
      * head: A `_Head` instance.
      * hidden_units: List of hidden units per layer.
      * feature_columns: An iterable containing all the feature columns used by
          the model.
      * optimizer: string, `Optimizer` object, or callable that defines the
          optimizer to use for training. If `None`, will use the Adagrad
          optimizer with a default learning rate of 0.05.
      * activation_fn: Activation function applied to each layer. If `None`,
          will use `tf.nn.relu`.
      * dropout: When not `None`, the probability we will drop out a given
          coordinate.
      * gradient_clip_norm: A float > 0. If provided, gradients are
          clipped to their global norm with this clipping ratio.
      * embedding_lr_multipliers: Optional. A dictionary from
          `EmbeddingColumn` to a `float` multiplier. Multiplier will be used to
          multiply with learning rate for the embedding variables.
      * input_layer_min_slice_size: Optional. The min slice size of input layer
          partitions. If not provided, will use the default of 64M.
    config: `RunConfig` object to configure the runtime settings.
  Returns:
    predictions: A dict of `Tensor` objects.
    loss: A scalar containing the loss of the step.
    train_op: The op for training.
  """
  head = params["head"]
  hidden_units = params["hidden_units"]
  feature_columns = params["feature_columns"]
  optimizer = params.get("optimizer") or "Adagrad"
  activation_fn = params.get("activation_fn")
  dropout = params.get("dropout")
  gradient_clip_norm = params.get("gradient_clip_norm")
  input_layer_min_slice_size = (
      params.get("input_layer_min_slice_size") or 64 << 20)
  num_ps_replicas = config.num_ps_replicas if config else 0
  embedding_lr_multipliers = params.get("embedding_lr_multipliers", {})
  features = _get_feature_dict(features)
  parent_scope = "dnn"
  # Shard variables across parameter servers when running distributed.
  partitioner = partitioned_variables.min_max_variable_partitioner(
      max_partitions=num_ps_replicas)
  with variable_scope.variable_scope(
      parent_scope,
      values=tuple(six.itervalues(features)),
      partitioner=partitioner):
    # The input layer gets its own partitioner with a configurable minimum
    # slice size, since embedding variables can be very large.
    input_layer_partitioner = (
        partitioned_variables.min_max_variable_partitioner(
            max_partitions=num_ps_replicas,
            min_slice_size=input_layer_min_slice_size))
    with variable_scope.variable_scope(
        "input_from_feature_columns",
        values=tuple(six.itervalues(features)),
        partitioner=input_layer_partitioner) as input_layer_scope:
      net = layers.input_from_feature_columns(
          columns_to_tensors=features,
          feature_columns=feature_columns,
          weight_collections=[parent_scope],
          scope=input_layer_scope)
    # Stack the fully connected hidden layers.
    for layer_id, num_hidden_units in enumerate(hidden_units):
      with variable_scope.variable_scope(
          "hiddenlayer_%d" % layer_id,
          values=(net,)) as hidden_layer_scope:
        net = layers.fully_connected(
            net,
            num_hidden_units,
            activation_fn=activation_fn,
            variables_collections=[parent_scope],
            scope=hidden_layer_scope)
        if dropout is not None and mode == model_fn.ModeKeys.TRAIN:
          # Dropout is applied only while training.
          net = layers.dropout(net, keep_prob=(1.0 - dropout))
      _add_hidden_layer_summary(net, hidden_layer_scope.name)
    # Final linear projection to the head's logits dimension.
    with variable_scope.variable_scope(
        "logits",
        values=(net,)) as logits_scope:
      logits = layers.fully_connected(
          net,
          head.logits_dimension,
          activation_fn=None,
          variables_collections=[parent_scope],
          scope=logits_scope)
    _add_hidden_layer_summary(logits, logits_scope.name)
    def _train_op_fn(loss):
      """Returns the op to optimize the loss."""
      return optimizers.optimize_loss(
          loss=loss,
          global_step=contrib_variables.get_global_step(),
          learning_rate=_LEARNING_RATE,
          optimizer=_get_optimizer(optimizer),
          gradient_multipliers=(
              dnn_linear_combined._extract_embedding_lr_multipliers(  # pylint: disable=protected-access
                  embedding_lr_multipliers, parent_scope,
                  input_layer_scope.name)),
          clip_gradients=gradient_clip_norm,
          name=parent_scope,
          # Empty summaries to prevent optimizers from logging training_loss.
          summaries=[])
    return head.create_model_fn_ops(
        features=features,
        mode=mode,
        labels=labels,
        train_op_fn=_train_op_fn,
        logits=logits)
class DNNClassifier(estimator.Estimator):
"""A classifier for TensorFlow DNN models.
Example:
```python
sparse_feature_a = sparse_column_with_hash_bucket(...)
sparse_feature_b = sparse_column_with_hash_bucket(...)
sparse_feature_a_emb = embedding_column(sparse_id_column=sparse_feature_a,
...)
sparse_feature_b_emb = embedding_column(sparse_id_column=sparse_feature_b,
...)
estimator = DNNClassifier(
feature_columns=[sparse_feature_a_emb, sparse_feature_b_emb],
hidden_units=[1024, 512, 256])
# Or estimator using the ProximalAdagradOptimizer optimizer with
# regularization.
estimator = DNNClassifier(
feature_columns=[sparse_feature_a_emb, sparse_feature_b_emb],
hidden_units=[1024, 512, 256],
optimizer=tf.train.ProximalAdagradOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001
))
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
estimator.fit(input_fn=input_fn_train)
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(x=x) # returns predicted labels (i.e. label's class index).
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
* for each `column` in `feature_columns`:
- if `column` is a `SparseColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `WeightedSparseColumn`, two features: the first with
`key` the id column name, the second with `key` the weight column name.
Both features' `value` must be a `SparseTensor`.
- if `column` is a `RealValuedColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
"""
  def __init__(self,
               hidden_units,
               feature_columns,
               model_dir=None,
               n_classes=2,
               weight_column_name=None,
               optimizer=None,
               activation_fn=nn.relu,
               dropout=None,
               gradient_clip_norm=None,
               enable_centered_bias=False,
               config=None,
               feature_engineering_fn=None,
               embedding_lr_multipliers=None,
               input_layer_min_slice_size=None):
    """Initializes a DNNClassifier instance.
    Args:
      hidden_units: List of hidden units per layer. All layers are fully
        connected. Ex. `[64, 32]` means first layer has 64 nodes and second one
        has 32.
      feature_columns: An iterable containing all the feature columns used by
        the model. All items in the set should be instances of classes derived
        from `FeatureColumn`.
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model.
      n_classes: number of label classes. Default is binary classification.
        It must be greater than 1. Note: Class labels are integers representing
        the class index (i.e. values from 0 to n_classes-1). For arbitrary
        label values (e.g. string labels), convert to class indices first.
      weight_column_name: A string defining feature column name representing
        weights. It is used to down weight or boost examples during training. It
        will be multiplied by the loss of the example.
      optimizer: An instance of `tf.Optimizer` used to train the model. If
        `None`, will use an Adagrad optimizer.
      activation_fn: Activation function applied to each layer. If `None`, will
        use `tf.nn.relu`.
      dropout: When not `None`, the probability we will drop out a given
        coordinate.
      gradient_clip_norm: A float > 0. If provided, gradients are
        clipped to their global norm with this clipping ratio. See
        `tf.clip_by_global_norm` for more details.
      enable_centered_bias: A bool. If True, estimator will learn a centered
        bias variable for each class. Rest of the model structure learns the
        residual after centered bias.
      config: `RunConfig` object to configure the runtime settings.
      feature_engineering_fn: Feature engineering function. Takes features and
                        labels which are the output of `input_fn` and
                        returns features and labels which will be fed
                        into the model.
      embedding_lr_multipliers: Optional. A dictionary from `EmbeddingColumn` to
          a `float` multiplier. Multiplier will be used to multiply with
          learning rate for the embedding variables.
      input_layer_min_slice_size: Optional. The min slice size of input layer
          partitions. If not provided, will use the default of 64M.
    Returns:
      A `DNNClassifier` estimator.
    Raises:
      ValueError: If `n_classes` < 2.
    """
    self._hidden_units = hidden_units
    # Materialize as a tuple so later mutation of the caller's iterable has
    # no effect; None is normalized to an empty tuple.
    self._feature_columns = tuple(feature_columns or [])
    self._enable_centered_bias = enable_centered_bias
    # All hyperparameters are forwarded to _dnn_model_fn through `params`.
    super(DNNClassifier, self).__init__(
        model_fn=_dnn_model_fn,
        model_dir=model_dir,
        config=config,
        params={
            "head":
                head_lib._multi_class_head(  # pylint: disable=protected-access
                    n_classes,
                    weight_column_name=weight_column_name,
                    enable_centered_bias=enable_centered_bias),
            "hidden_units": hidden_units,
            "feature_columns": self._feature_columns,
            "optimizer": optimizer,
            "activation_fn": activation_fn,
            "dropout": dropout,
            "gradient_clip_norm": gradient_clip_norm,
            "embedding_lr_multipliers": embedding_lr_multipliers,
            "input_layer_min_slice_size": input_layer_min_slice_size,
        },
        feature_engineering_fn=feature_engineering_fn)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  @deprecated_arg_values(
      "2017-03-01",
      "Please switch to predict_classes, or set `outputs` argument.",
      outputs=None)
  def predict(self, x=None, input_fn=None, batch_size=None, outputs=None,
              as_iterable=True):
    """Returns predictions for given features.
    By default, returns predicted classes. But this default will be dropped
    soon. Users should either pass `outputs`, or call `predict_classes` method.
    Args:
      x: features.
      input_fn: Input function. If set, x must be None.
      batch_size: Override default batch size.
      outputs: list of `str`, name of the output to predict.
        If `None`, returns classes.
      as_iterable: If True, return an iterable which keeps yielding predictions
        for each example until inputs are exhausted. Note: The inputs must
        terminate if you want the iterable to terminate (e.g. be sure to pass
        num_epochs=1 if you are using something like read_batch_features).
    Returns:
      Numpy array of predicted classes with shape [batch_size] (or an iterable
      of predicted classes if as_iterable is True). Each predicted class is
      represented by its class index (i.e. integer from 0 to n_classes-1).
      If `outputs` is set, returns a dict of predictions.
    """
    if not outputs:
      # Deprecated default: no outputs requested means "predict classes".
      return self.predict_classes(
          x=x,
          input_fn=input_fn,
          batch_size=batch_size,
          as_iterable=as_iterable)
    # Explicit outputs requested: delegate to the generic Estimator.predict.
    return super(DNNClassifier, self).predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=outputs,
        as_iterable=as_iterable)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict_classes(self, x=None, input_fn=None, batch_size=None,
                      as_iterable=True):
    """Returns predicted classes for given features.
    Args:
      x: features.
      input_fn: Input function. If set, x must be None.
      batch_size: Override default batch size.
      as_iterable: If True, return an iterable which keeps yielding predictions
        for each example until inputs are exhausted. Note: The inputs must
        terminate if you want the iterable to terminate (e.g. be sure to pass
        num_epochs=1 if you are using something like read_batch_features).
    Returns:
      Numpy array of predicted classes with shape [batch_size] (or an iterable
      of predicted classes if as_iterable is True). Each predicted class is
      represented by its class index (i.e. integer from 0 to n_classes-1).
    """
    key = prediction_key.PredictionKey.CLASSES
    preds = super(DNNClassifier, self).predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      # Lazily unwrap the per-example prediction dicts into bare class ids.
      return (pred[key] for pred in preds)
    return preds[key].reshape(-1)
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE,
estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict_proba(self,
x=None,
input_fn=None,
batch_size=None,
as_iterable=True):
"""Returns predicted probabilities for given features.
Args:
x: features.
input_fn: Input function. If set, x and y must be None.
batch_size: Override default batch size.
as_iterable: If True, return an iterable which keeps yielding predictions
for each example until inputs are exhausted. Note: The inputs must
terminate if you want the iterable to terminate (e.g. be sure to pass
num_epochs=1 if you are using something like read_batch_features).
Returns:
Numpy array of predicted probabilities with shape [batch_size, n_classes]
(or an iterable of predicted probabilities if as_iterable is True).
"""
key = prediction_key.PredictionKey.PROBABILITIES
preds = super(DNNClassifier, self).predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=[key],
as_iterable=as_iterable)
if as_iterable:
return (pred[key] for pred in preds)
return preds[key]
def export(self,
export_dir,
input_fn=None,
input_feature_key=None,
use_deprecated_input_fn=True,
signature_fn=None,
default_batch_size=1,
exports_to_keep=None):
"""See BaseEstimator.export."""
def default_input_fn(unused_estimator, examples):
return layers.parse_feature_columns_from_examples(examples,
self._feature_columns)
return super(DNNClassifier, self).export(
export_dir=export_dir,
input_fn=input_fn or default_input_fn,
input_feature_key=input_feature_key,
use_deprecated_input_fn=use_deprecated_input_fn,
signature_fn=(signature_fn or
export.classification_signature_fn_with_prob),
prediction_key=prediction_key.PredictionKey.PROBABILITIES,
default_batch_size=default_batch_size,
exports_to_keep=exports_to_keep)
@property
@deprecated("2016-10-30",
"This method will be removed after the deprecation date. "
"To inspect variables, use get_variable_names() and "
"get_variable_value().")
def weights_(self):
hiddenlayer_weights = [
self.get_variable_value("dnn/hiddenlayer_%d/weights" % i)
for i, _ in enumerate(self._hidden_units)
]
logits_weights = [self.get_variable_value("dnn/logits/weights")]
return hiddenlayer_weights + logits_weights
@property
@deprecated("2016-10-30",
"This method will be removed after the deprecation date. "
"To inspect variables, use get_variable_names() and "
"get_variable_value().")
def bias_(self):
hiddenlayer_bias = [
self.get_variable_value("dnn/hiddenlayer_%d/biases" % i)
for i, _ in enumerate(self._hidden_units)
]
logits_bias = [self.get_variable_value("dnn/logits/biases")]
if self._enable_centered_bias:
centered_bias = [self.get_variable_value(_CENTERED_BIAS_WEIGHT)]
else:
centered_bias = []
return hiddenlayer_bias + logits_bias + centered_bias
class DNNRegressor(estimator.Estimator):
  """A regressor for TensorFlow DNN models.
  Example:
  ```python
  sparse_feature_a = sparse_column_with_hash_bucket(...)
  sparse_feature_b = sparse_column_with_hash_bucket(...)
  sparse_feature_a_emb = embedding_column(sparse_id_column=sparse_feature_a,
                                          ...)
  sparse_feature_b_emb = embedding_column(sparse_id_column=sparse_feature_b,
                                          ...)
  estimator = DNNRegressor(
      feature_columns=[sparse_feature_a, sparse_feature_b],
      hidden_units=[1024, 512, 256])
  # Or estimator using the ProximalAdagradOptimizer optimizer with
  # regularization.
  estimator = DNNRegressor(
      feature_columns=[sparse_feature_a, sparse_feature_b],
      hidden_units=[1024, 512, 256],
      optimizer=tf.train.ProximalAdagradOptimizer(
        learning_rate=0.1,
        l1_regularization_strength=0.001
      ))
  # Input builders
  def input_fn_train():  # returns x, y
    pass
  estimator.fit(input_fn=input_fn_train)
  def input_fn_eval():  # returns x, y
    pass
  estimator.evaluate(input_fn=input_fn_eval)
  estimator.predict(x=x)
  ```
  Input of `fit` and `evaluate` should have following features,
  otherwise there will be a `KeyError`:
  * if `weight_column_name` is not `None`, a feature with
    `key=weight_column_name` whose value is a `Tensor`.
  * for each `column` in `feature_columns`:
    - if `column` is a `SparseColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `WeightedSparseColumn`, two features: the first with
      `key` the id column name, the second with `key` the weight column name.
      Both features' `value` must be a `SparseTensor`.
    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
  """
  def __init__(self,
               hidden_units,
               feature_columns,
               model_dir=None,
               weight_column_name=None,
               optimizer=None,
               activation_fn=nn.relu,
               dropout=None,
               gradient_clip_norm=None,
               enable_centered_bias=False,
               config=None,
               feature_engineering_fn=None,
               label_dimension=1,
               embedding_lr_multipliers=None,
               input_layer_min_slice_size=None):
    """Initializes a `DNNRegressor` instance.
    Args:
      hidden_units: List of hidden units per layer. All layers are fully
        connected. Ex. `[64, 32]` means first layer has 64 nodes and second one
        has 32.
      feature_columns: An iterable containing all the feature columns used by
        the model. All items in the set should be instances of classes derived
        from `FeatureColumn`.
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model.
      weight_column_name: A string defining feature column name representing
        weights. It is used to down weight or boost examples during training. It
        will be multiplied by the loss of the example.
      optimizer: An instance of `tf.Optimizer` used to train the model. If
        `None`, will use an Adagrad optimizer.
      activation_fn: Activation function applied to each layer. If `None`, will
        use `tf.nn.relu`.
      dropout: When not `None`, the probability we will drop out a given
        coordinate.
      gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
        to their global norm with this clipping ratio. See
        `tf.clip_by_global_norm` for more details.
      enable_centered_bias: A bool. If True, estimator will learn a centered
        bias variable for each class. Rest of the model structure learns the
        residual after centered bias.
      config: `RunConfig` object to configure the runtime settings.
      feature_engineering_fn: Feature engineering function. Takes features and
                        labels which are the output of `input_fn` and
                        returns features and labels which will be fed
                        into the model.
      label_dimension: Number of regression targets per example. This is the
        size of the last dimension of the labels and logits `Tensor` objects
        (typically, these have shape `[batch_size, label_dimension]`).
      embedding_lr_multipliers: Optional. A dictionary from `EmbeddingColumn` to
          a `float` multiplier. Multiplier will be used to multiply with
          learning rate for the embedding variables.
      input_layer_min_slice_size: Optional. The min slice size of input layer
          partitions. If not provided, will use the default of 64M.
    Returns:
      A `DNNRegressor` estimator.
    """
    self._feature_columns = tuple(feature_columns or [])
    # All model configuration is forwarded to the shared _dnn_model_fn via
    # `params`; this class only adds the regression head and sugar methods.
    super(DNNRegressor, self).__init__(
        model_fn=_dnn_model_fn,
        model_dir=model_dir,
        config=config,
        params={
            "head":
                head_lib._regression_head(  # pylint: disable=protected-access
                    label_dimension=label_dimension,
                    weight_column_name=weight_column_name,
                    enable_centered_bias=enable_centered_bias),
            "hidden_units": hidden_units,
            "feature_columns": self._feature_columns,
            "optimizer": optimizer,
            "activation_fn": activation_fn,
            "dropout": dropout,
            "gradient_clip_norm": gradient_clip_norm,
            "embedding_lr_multipliers": embedding_lr_multipliers,
            "input_layer_min_slice_size": input_layer_min_slice_size,
        },
        feature_engineering_fn=feature_engineering_fn)
  def evaluate(self,
               x=None,
               y=None,
               input_fn=None,
               feed_fn=None,
               batch_size=None,
               steps=None,
               metrics=None,
               name=None,
               checkpoint_path=None,
               hooks=None):
    """See evaluable.Evaluable."""
    # TODO(zakaria): remove once deprecation is finished (b/31229024)
    # Metrics given as plain callables (neither a MetricSpec nor already keyed
    # by a (name, prediction_key) tuple) are re-keyed onto the SCORES output so
    # legacy callers keep working.
    custom_metrics = {}
    if metrics:
      for key, metric in six.iteritems(metrics):
        if (not isinstance(metric, metric_spec.MetricSpec) and
            not isinstance(key, tuple)):
          custom_metrics[(key, prediction_key.PredictionKey.SCORES)] = metric
        else:
          custom_metrics[key] = metric
    return super(DNNRegressor, self).evaluate(
        x=x,
        y=y,
        input_fn=input_fn,
        feed_fn=feed_fn,
        batch_size=batch_size,
        steps=steps,
        metrics=custom_metrics,
        name=name,
        checkpoint_path=checkpoint_path,
        hooks=hooks)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  @deprecated_arg_values(
      "2017-03-01",
      "Please switch to predict_scores, or set `outputs` argument.",
      outputs=None)
  def predict(self, x=None, input_fn=None, batch_size=None, outputs=None,
              as_iterable=True):
    """Returns predictions for given features.
    By default, returns predicted scores. But this default will be dropped
    soon. Users should either pass `outputs`, or call `predict_scores` method.
    Args:
      x: features.
      input_fn: Input function. If set, x must be None.
      batch_size: Override default batch size.
      outputs: list of `str`, name of the output to predict.
        If `None`, returns scores.
      as_iterable: If True, return an iterable which keeps yielding predictions
        for each example until inputs are exhausted. Note: The inputs must
        terminate if you want the iterable to terminate (e.g. be sure to pass
        num_epochs=1 if you are using something like read_batch_features).
    Returns:
      Numpy array of predicted scores (or an iterable of predicted scores if
      as_iterable is True). If `label_dimension == 1`, the shape of the output
      is `[batch_size]`, otherwise the shape is `[batch_size, label_dimension]`.
      If `outputs` is set, returns a dict of predictions.
    """
    # No explicit outputs requested: keep the deprecated default of
    # returning raw scores.
    if not outputs:
      return self.predict_scores(
          x=x,
          input_fn=input_fn,
          batch_size=batch_size,
          as_iterable=as_iterable)
    return super(DNNRegressor, self).predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=outputs,
        as_iterable=as_iterable)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict_scores(self, x=None, input_fn=None, batch_size=None,
                     as_iterable=True):
    """Returns predicted scores for given features.
    Args:
      x: features.
      input_fn: Input function. If set, x must be None.
      batch_size: Override default batch size.
      as_iterable: If True, return an iterable which keeps yielding predictions
        for each example until inputs are exhausted. Note: The inputs must
        terminate if you want the iterable to terminate (e.g. be sure to pass
        num_epochs=1 if you are using something like read_batch_features).
    Returns:
      Numpy array of predicted scores (or an iterable of predicted scores if
      as_iterable is True). If `label_dimension == 1`, the shape of the output
      is `[batch_size]`, otherwise the shape is `[batch_size, label_dimension]`.
    """
    key = prediction_key.PredictionKey.SCORES
    preds = super(DNNRegressor, self).predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      # Streaming mode: unwrap the per-example prediction dicts lazily.
      return (pred[key] for pred in preds)
    return preds[key]
  def export(self,
             export_dir,
             input_fn=None,
             input_feature_key=None,
             use_deprecated_input_fn=True,
             signature_fn=None,
             default_batch_size=1,
             exports_to_keep=None):
    """See BaseEstimator.export."""
    def default_input_fn(unused_estimator, examples):
      # Parse serialized tf.Examples according to this model's feature columns.
      return layers.parse_feature_columns_from_examples(examples,
                                                        self._feature_columns)
    return super(DNNRegressor, self).export(
        export_dir=export_dir,
        input_fn=input_fn or default_input_fn,
        input_feature_key=input_feature_key,
        use_deprecated_input_fn=use_deprecated_input_fn,
        signature_fn=signature_fn or export.regression_signature_fn,
        prediction_key=prediction_key.PredictionKey.SCORES,
        default_batch_size=default_batch_size,
        exports_to_keep=exports_to_keep)
| apache-2.0 |
alrusdi/python-social-auth | social/tests/backends/base.py | 80 | 6160 | import unittest2 as unittest
import requests
from httpretty import HTTPretty
from social.utils import module_member, parse_qs
from social.backends.utils import user_backends_data, load_backends
from social.tests.strategy import TestStrategy
from social.tests.models import User, TestUserSocialAuth, TestNonce, \
TestAssociation, TestCode, TestStorage
class BaseBackendTest(unittest.TestCase):
    """Shared harness for social-auth backend tests.

    Subclasses set ``backend_path`` (dotted path of the backend class),
    implement ``do_start()`` and provide ``expected_username``
    (referenced by ``do_login``/``do_partial_pipeline`` but defined in
    subclasses). HTTP traffic is intercepted with HTTPretty.
    """
    backend = None            # backend instance, built in setUp()
    backend_path = None       # dotted import path, set by subclasses
    name = None               # upper-cased backend name, derived in setUp()
    complete_url = ''
    raw_complete_url = '/complete/{0}'
    def setUp(self):
        # Intercept all outgoing HTTP requests for the duration of the test.
        HTTPretty.enable()
        Backend = module_member(self.backend_path)
        self.strategy = TestStrategy(TestStorage)
        self.backend = Backend(self.strategy, redirect_uri=self.complete_url)
        # e.g. 'google-oauth2' -> 'GOOGLE_OAUTH2', used for settings lookups.
        self.name = self.backend.name.upper().replace('-', '_')
        self.complete_url = self.strategy.build_absolute_uri(
            self.raw_complete_url.format(self.backend.name)
        )
        backends = (self.backend_path,
                    'social.tests.backends.test_broken.BrokenBackendAuth')
        self.strategy.set_settings({
            'SOCIAL_AUTH_AUTHENTICATION_BACKENDS': backends
        })
        self.strategy.set_settings(self.extra_settings())
        # Force backends loading to trash PSA cache
        load_backends(backends, force_load=True)
        User.reset_cache()
        TestUserSocialAuth.reset_cache()
        TestNonce.reset_cache()
        TestAssociation.reset_cache()
        TestCode.reset_cache()
    def tearDown(self):
        # Undo the HTTP interception and drop all per-test state/caches.
        HTTPretty.disable()
        self.backend = None
        self.strategy = None
        self.name = None
        self.complete_url = None
        User.reset_cache()
        TestUserSocialAuth.reset_cache()
        TestNonce.reset_cache()
        TestAssociation.reset_cache()
        TestCode.reset_cache()
    def extra_settings(self):
        """Hook for subclasses to add strategy settings; default is none."""
        return {}
    def do_start(self):
        """Run the auth flow and return the resulting user; must be overridden."""
        raise NotImplementedError('Implement in subclass')
    def do_login(self):
        """Run a full login and verify user, session and backend bookkeeping."""
        user = self.do_start()
        username = self.expected_username
        self.assertEqual(user.username, username)
        self.assertEqual(self.strategy.session_get('username'), username)
        self.assertEqual(self.strategy.get_user(user.id), user)
        self.assertEqual(self.backend.get_user(user.id), user)
        # Two backends are configured in setUp: exactly one should now be
        # associated with the user and one not.
        user_backends = user_backends_data(
            user,
            self.strategy.get_setting('SOCIAL_AUTH_AUTHENTICATION_BACKENDS'),
            self.strategy.storage
        )
        self.assertEqual(len(list(user_backends.keys())), 3)
        self.assertEqual('associated' in user_backends, True)
        self.assertEqual('not_associated' in user_backends, True)
        self.assertEqual('backends' in user_backends, True)
        self.assertEqual(len(user_backends['associated']), 1)
        self.assertEqual(len(user_backends['not_associated']), 1)
        self.assertEqual(len(user_backends['backends']), 2)
        return user
    def pipeline_settings(self):
        """Install a pipeline with two partial stops (password, then slug)."""
        self.strategy.set_settings({
            'SOCIAL_AUTH_PIPELINE': (
                'social.pipeline.social_auth.social_details',
                'social.pipeline.social_auth.social_uid',
                'social.pipeline.social_auth.auth_allowed',
                'social.pipeline.partial.save_status_to_session',
                'social.tests.pipeline.ask_for_password',
                'social.tests.pipeline.ask_for_slug',
                'social.pipeline.social_auth.social_user',
                'social.pipeline.user.get_username',
                'social.pipeline.social_auth.associate_by_email',
                'social.pipeline.user.create_user',
                'social.pipeline.social_auth.associate_user',
                'social.pipeline.social_auth.load_extra_data',
                'social.tests.pipeline.set_password',
                'social.tests.pipeline.set_slug',
                'social.pipeline.user.user_details'
            )
        })
    def pipeline_handlers(self, url):
        # Stub out the form page (GET) and its submission endpoint (POST).
        HTTPretty.register_uri(HTTPretty.GET, url, status=200, body='foobar')
        HTTPretty.register_uri(HTTPretty.POST, url, status=200)
    def pipeline_password_handling(self, url):
        """Simulate the user submitting a password form; stash it in session."""
        password = 'foobar'
        requests.get(url)
        requests.post(url, data={'password': password})
        # HTTPretty records the POST; verify the submitted body round-trips.
        data = parse_qs(HTTPretty.last_request.body)
        self.assertEqual(data['password'], password)
        self.strategy.session_set('password', data['password'])
        return password
    def pipeline_slug_handling(self, url):
        """Simulate the user submitting a slug form; stash it in session."""
        slug = 'foo-bar'
        requests.get(url)
        requests.post(url, data={'slug': slug})
        data = parse_qs(HTTPretty.last_request.body)
        self.assertEqual(data['slug'], slug)
        self.strategy.session_set('slug', data['slug'])
        return slug
    def do_partial_pipeline(self):
        """Drive the pipeline through both partial stops and verify the user."""
        url = self.strategy.build_absolute_uri('/password')
        self.pipeline_settings()
        # First stop: pipeline redirects to the password form.
        redirect = self.do_start()
        self.assertEqual(redirect.url, url)
        self.pipeline_handlers(url)
        password = self.pipeline_password_handling(url)
        data = self.strategy.session_pop('partial_pipeline')
        idx, backend, xargs, xkwargs = self.strategy.partial_from_session(data)
        self.assertEqual(backend, self.backend.name)
        redirect = self.backend.continue_pipeline(pipeline_index=idx,
                                                  *xargs, **xkwargs)
        # Second stop: pipeline redirects to the slug form.
        url = self.strategy.build_absolute_uri('/slug')
        self.assertEqual(redirect.url, url)
        self.pipeline_handlers(url)
        slug = self.pipeline_slug_handling(url)
        data = self.strategy.session_pop('partial_pipeline')
        idx, backend, xargs, xkwargs = self.strategy.partial_from_session(data)
        self.assertEqual(backend, self.backend.name)
        user = self.backend.continue_pipeline(pipeline_index=idx,
                                              *xargs, **xkwargs)
        self.assertEqual(user.username, self.expected_username)
        self.assertEqual(user.slug, slug)
        self.assertEqual(user.password, password)
        return user
| bsd-3-clause |
x111ong/django | tests/urlpatterns_reverse/tests.py | 154 | 50058 | # -*- coding: utf-8 -*-
"""
Unit tests for reverse URL lookups.
"""
from __future__ import unicode_literals
import sys
import unittest
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.conf.urls import include, url
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.core.urlresolvers import (
NoReverseMatch, RegexURLPattern, RegexURLResolver, Resolver404,
ResolverMatch, get_callable, get_resolver, resolve, reverse, reverse_lazy,
)
from django.http import (
HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.shortcuts import redirect
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, override_settings,
)
from django.test.utils import override_script_prefix
from django.utils import six
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango110Warning,
)
from . import middleware, urlconf_outer, views
from .views import empty_view
resolve_test_data = (
    # These entries are in the format: (path, url_name, app_name, namespace, view_name, func, args, kwargs)
    # Simple case
    ('/normal/42/37/', 'normal-view', '', '', 'normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/view_class/42/37/', 'view-class', '', '', 'view-class', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/included/normal/42/37/', 'inc-normal-view', '', '', 'inc-normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/included/view_class/42/37/', 'inc-view-class', '', '', 'inc-view-class', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
    # Unnamed args are dropped if you have *any* kwargs in a pattern
    ('/mixed_args/42/37/', 'mixed-args', '', '', 'mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
    ('/included/mixed_args/42/37/', 'inc-mixed-args', '', '', 'inc-mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
    ('/included/12/mixed_args/42/37/', 'inc-mixed-args', '', '', 'inc-mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
    # Unnamed views should have None as the url_name. Regression data for #21157.
    ('/unnamed/normal/42/37/', None, '', '', 'urlpatterns_reverse.views.empty_view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/unnamed/view_class/42/37/', None, '', '', 'urlpatterns_reverse.views.ViewClass', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
    # If you have no kwargs, you get an args list.
    ('/no_kwargs/42/37/', 'no-kwargs', '', '', 'no-kwargs', views.empty_view, ('42', '37'), {}),
    ('/included/no_kwargs/42/37/', 'inc-no-kwargs', '', '', 'inc-no-kwargs', views.empty_view, ('42', '37'), {}),
    ('/included/12/no_kwargs/42/37/', 'inc-no-kwargs', '', '', 'inc-no-kwargs', views.empty_view, ('12', '42', '37'), {}),
    # Namespaces
    ('/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'test-ns1:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/ns-included1/normal/42/37/', 'inc-normal-view', '', 'inc-ns1', 'inc-ns1:inc-normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'testapp:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'other-ns2:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'other-ns1:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    # Nested namespaces
    ('/ns-included1/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:test-ns3', 'inc-ns1:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:inc-ns4:inc-ns2:test-ns3', 'inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/app-included/test3/inner/42/37/', 'urlobject-view', 'inc-app:testapp', 'inc-app:test-ns3', 'inc-app:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/app-included/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'inc-app:testapp', 'inc-app:inc-ns4:inc-ns2:test-ns3', 'inc-app:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    # Namespaces capturing variables
    ('/inc70/', 'inner-nothing', '', 'inc-ns5', 'inc-ns5:inner-nothing', views.empty_view, tuple(), {'outer': '70'}),
    ('/inc78/extra/foobar/', 'inner-extra', '', 'inc-ns5', 'inc-ns5:inner-extra', views.empty_view, tuple(), {'outer': '78', 'extra': 'foobar'}),
)
# Entries are (url_name, expected, args, kwargs), where `expected` is either
# the reversed URL string or the NoReverseMatch class when reversal must fail.
test_data = (
    ('places', '/places/3/', [3], {}),
    ('places', '/places/3/', ['3'], {}),
    ('places', NoReverseMatch, ['a'], {}),
    ('places', NoReverseMatch, [], {}),
    ('places?', '/place/', [], {}),
    ('places+', '/places/', [], {}),
    ('places*', '/place/', [], {}),
    ('places2?', '/', [], {}),
    ('places2+', '/places/', [], {}),
    ('places2*', '/', [], {}),
    ('places3', '/places/4/', [4], {}),
    ('places3', '/places/harlem/', ['harlem'], {}),
    ('places3', NoReverseMatch, ['harlem64'], {}),
    ('places4', '/places/3/', [], {'id': 3}),
    ('people', NoReverseMatch, [], {}),
    ('people', '/people/adrian/', ['adrian'], {}),
    ('people', '/people/adrian/', [], {'name': 'adrian'}),
    ('people', NoReverseMatch, ['name with spaces'], {}),
    ('people', NoReverseMatch, [], {'name': 'name with spaces'}),
    ('people2', '/people/name/', [], {}),
    ('people2a', '/people/name/fred/', ['fred'], {}),
    ('people_backref', '/people/nate-nate/', ['nate'], {}),
    ('people_backref', '/people/nate-nate/', [], {'name': 'nate'}),
    ('optional', '/optional/fred/', [], {'name': 'fred'}),
    ('optional', '/optional/fred/', ['fred'], {}),
    ('named_optional', '/optional/1/', [1], {}),
    ('named_optional', '/optional/1/', [], {'arg1': 1}),
    ('named_optional', '/optional/1/2/', [1, 2], {}),
    ('named_optional', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
    ('named_optional_terminated', '/optional/1/2/', [1, 2], {}),
    ('named_optional_terminated', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
    ('hardcoded', '/hardcoded/', [], {}),
    ('hardcoded2', '/hardcoded/doc.pdf', [], {}),
    ('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
    ('people3', NoReverseMatch, [], {'state': 'il'}),
    ('people3', NoReverseMatch, [], {'name': 'adrian'}),
    ('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
    ('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}),
    ('people6', '/people//adrian/', ['adrian'], {}),
    ('range', '/character_set/a/', [], {}),
    ('range2', '/character_set/x/', [], {}),
    ('price', '/price/$10/', ['10'], {}),
    ('price2', '/price/$10/', ['10'], {}),
    ('price3', '/price/$10/', ['10'], {}),
    ('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}),
    ('headlines', '/headlines/2007.5.21/', [], dict(year=2007, month=5, day=21)),
    ('windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [], dict(drive_name='C', path=r'Documents and Settings\spam')),
    ('special', r'/special_chars/~@+%5C$*%7C/', [r'~@+\$*|'], {}),
    ('special', r'/special_chars/some%20resource/', [r'some resource'], {}),
    ('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}),
    ('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}),
    ('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}),
    ('special', NoReverseMatch, [''], {}),
    ('mixed', '/john/0/', [], {'name': 'john'}),
    ('repeats', '/repeats/a/', [], {}),
    ('repeats2', '/repeats/aa/', [], {}),
    ('repeats3', '/repeats/aa/', [], {}),
    ('insensitive', '/CaseInsensitive/fred', ['fred'], {}),
    ('test', '/test/1', [], {}),
    ('test2', '/test/2', [], {}),
    ('inner-nothing', '/outer/42/', [], {'outer': '42'}),
    ('inner-nothing', '/outer/42/', ['42'], {}),
    ('inner-nothing', NoReverseMatch, ['foo'], {}),
    ('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}),
    ('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}),
    ('inner-extra', NoReverseMatch, ['fred', 'inner'], {}),
    ('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}),
    ('disjunction', NoReverseMatch, ['foo'], {}),
    ('inner-disjunction', NoReverseMatch, ['10', '11'], {}),
    ('extra-places', '/e-places/10/', ['10'], {}),
    ('extra-people', '/e-people/fred/', ['fred'], {}),
    ('extra-people', '/e-people/fred/', [], {'name': 'fred'}),
    ('part', '/part/one/', [], {'value': 'one'}),
    ('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}),
    ('part2', '/part2/one/', [], {'value': 'one'}),
    ('part2', '/part2/', [], {}),
    ('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}),
    ('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}),
    # Tests for nested groups. Nested capturing groups will only work if you
    # *only* supply the correct outer group.
    ('nested-noncapture', '/nested/noncapture/opt', [], {'p': 'opt'}),
    ('nested-capture', '/nested/capture/opt/', ['opt/'], {}),
    ('nested-capture', NoReverseMatch, [], {'p': 'opt'}),
    ('nested-mixedcapture', '/nested/capture/mixed/opt', ['opt'], {}),
    ('nested-mixedcapture', NoReverseMatch, [], {'p': 'opt'}),
    ('nested-namedcapture', '/nested/capture/named/opt/', [], {'outer': 'opt/'}),
    ('nested-namedcapture', NoReverseMatch, [], {'outer': 'opt/', 'inner': 'opt'}),
    ('nested-namedcapture', NoReverseMatch, [], {'inner': 'opt'}),
    # Regression for #9038
    # These views are resolved by method name. Each method is deployed twice -
    # once with an explicit argument, and once using the default value on
    # the method. This is potentially ambiguous, as you have to pick the
    # correct view for the arguments provided.
    ('urlpatterns_reverse.views.absolute_kwargs_view', '/absolute_arg_view/', [], {}),
    ('urlpatterns_reverse.views.absolute_kwargs_view', '/absolute_arg_view/10/', [], {'arg1': 10}),
    ('non_path_include', '/includes/non_path_include/', [], {}),
    # Tests for #13154
    ('defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}),
    ('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}),
    ('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}),
    ('defaults', NoReverseMatch, [], {'arg2': 1}),
    # Security tests
    ('security', '/%2Fexample.com/security/', ['/example.com'], {}),
)
class URLObject(object):
    """Small stand-in for an app's `urls` module, used with include().

    Exposes the same `(urlpatterns, app_name, namespace)` /
    `(urlpatterns, app_name)` tuples that include() accepts.
    """
    urlpatterns = [
        url(r'^inner/$', views.empty_view, name='urlobject-view'),
        url(r'^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', views.empty_view, name='urlobject-view'),
        url(r'^inner/\+\\\$\*/$', views.empty_view, name='urlobject-special-view'),
    ]
    def __init__(self, app_name, namespace=None):
        self.app_name = app_name
        self.namespace = namespace
    @property
    def urls(self):
        # 3-tuple form: (patterns, app namespace, instance namespace).
        return self.urlpatterns, self.app_name, self.namespace
    @property
    def app_urls(self):
        # 2-tuple form: (patterns, app namespace) only.
        return self.urlpatterns, self.app_name
@override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls')
class NoURLPatternsTests(SimpleTestCase):
    """Behavior of the resolver when the urlconf module defines no patterns."""
    def test_no_urls_exception(self):
        """
        RegexURLResolver should raise an exception when no urlpatterns exist.
        """
        resolver = RegexURLResolver(r'^$', settings.ROOT_URLCONF)
        # url_patterns is a lazy property; accessing it via getattr is what
        # triggers the ImproperlyConfigured error.
        self.assertRaisesMessage(
            ImproperlyConfigured,
            "The included urlconf 'urlpatterns_reverse.no_urls' does not "
            "appear to have any patterns in it. If you see valid patterns in "
            "the file then the issue is probably caused by a circular import.",
            getattr, resolver, 'url_patterns'
        )
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class URLPatternReverse(SimpleTestCase):
    """Tests for reverse() driven by the module-level `test_data` table."""
    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_urlpattern_reverse(self):
        # Each entry either reverses to `expected` or `expected` is the
        # NoReverseMatch class itself (see `test_data` format).
        for name, expected, args, kwargs in test_data:
            try:
                got = reverse(name, args=args, kwargs=kwargs)
            except NoReverseMatch:
                self.assertEqual(expected, NoReverseMatch)
            else:
                self.assertEqual(got, expected)
    def test_reverse_none(self):
        # Reversing None should raise an error, not return the last un-named view.
        self.assertRaises(NoReverseMatch, reverse, None)
    @override_script_prefix('/{{invalid}}/')
    def test_prefix_braces(self):
        # Braces in the script prefix must be percent-encoded in the result.
        self.assertEqual(
            '/%7B%7Binvalid%7D%7D/includes/non_path_include/',
            reverse('non_path_include')
        )
    def test_prefix_parenthesis(self):
        # Parentheses are allowed and should not cause errors or be escaped
        with override_script_prefix('/bogus)/'):
            self.assertEqual(
                '/bogus)/includes/non_path_include/',
                reverse('non_path_include')
            )
        with override_script_prefix('/(bogus)/'):
            self.assertEqual(
                '/(bogus)/includes/non_path_include/',
                reverse('non_path_include')
            )
    @override_script_prefix('/bump%20map/')
    def test_prefix_format_char(self):
        # A literal '%' in the prefix must itself be escaped ('%25').
        self.assertEqual(
            '/bump%2520map/includes/non_path_include/',
            reverse('non_path_include')
        )
    @override_script_prefix('/%7Eme/')
    def test_non_urlsafe_prefix_with_args(self):
        # Regression for #20022, adjusted for #24013 because ~ is an unreserved
        # character. Tests whether % is escaped.
        self.assertEqual('/%257Eme/places/1/', reverse('places', args=[1]))
    def test_patterns_reported(self):
        # Regression for #17076
        try:
            # this url exists, but requires an argument
            reverse("people", args=[])
        except NoReverseMatch as e:
            pattern_description = r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"
            self.assertIn(pattern_description, str(e))
        else:
            # we can't use .assertRaises, since we want to inspect the
            # exception
            self.fail("Expected a NoReverseMatch, but none occurred.")
    @override_script_prefix('/script:name/')
    def test_script_name_escaping(self):
        # ':' is a legal path character and must survive both in the prefix
        # and in reversed arguments.
        self.assertEqual(
            reverse('optional', args=['foo:bar']),
            '/script:name/optional/foo:bar/'
        )
    def test_reverse_returns_unicode(self):
        # reverse() must return text (unicode on Python 2), never bytes.
        name, expected, args, kwargs = test_data[0]
        self.assertIsInstance(
            reverse(name, args=args, kwargs=kwargs),
            six.text_type
        )
class ResolverTests(unittest.TestCase):
    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_resolver_repr(self):
        """
        Test repr of RegexURLResolver, especially when urlconf_name is a list
        (#17892).
        """
        # Pick a resolver from a namespaced urlconf
        resolver = get_resolver('urlpatterns_reverse.namespace_urls')
        # namespace_dict maps namespace -> (prefix, sub-resolver); index 1 is
        # the sub-resolver whose repr is under test.
        sub_resolver = resolver.namespace_dict['test-ns1'][1]
        self.assertIn('<RegexURLPattern list>', repr(sub_resolver))
def test_reverse_lazy_object_coercion_by_resolve(self):
"""
Verifies lazy object returned by reverse_lazy is coerced to
text by resolve(). Previous to #21043, this would raise a TypeError.
"""
urls = 'urlpatterns_reverse.named_urls'
proxy_url = reverse_lazy('named-url1', urlconf=urls)
resolver = get_resolver(urls)
try:
resolver.resolve(proxy_url)
except TypeError:
self.fail('Failed to coerce lazy object to text')
def test_non_regex(self):
"""
Verifies that we raise a Resolver404 if what we are resolving doesn't
meet the basic requirements of a path to match - i.e., at the very
least, it matches the root pattern '^/'. We must never return None
from resolve, or we will get a TypeError further down the line.
Regression for #10834.
"""
self.assertRaises(Resolver404, resolve, '')
self.assertRaises(Resolver404, resolve, 'a')
self.assertRaises(Resolver404, resolve, '\\')
self.assertRaises(Resolver404, resolve, '.')
def test_404_tried_urls_have_names(self):
"""
Verifies that the list of URLs that come back from a Resolver404
exception contains a list in the right format for printing out in
the DEBUG 404 page with both the patterns and URL names, if available.
"""
urls = 'urlpatterns_reverse.named_urls'
# this list matches the expected URL types and names returned when
# you try to resolve a non-existent URL in the first level of included
# URLs in named_urls.py (e.g., '/included/non-existent-url')
url_types_names = [
[{'type': RegexURLPattern, 'name': 'named-url1'}],
[{'type': RegexURLPattern, 'name': 'named-url2'}],
[{'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url3'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url4'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLResolver}],
]
try:
resolve('/included/non-existent-url', urlconf=urls)
self.fail('resolve did not raise a 404')
except Resolver404 as e:
# make sure we at least matched the root ('/') url resolver:
self.assertIn('tried', e.args[0])
tried = e.args[0]['tried']
self.assertEqual(len(e.args[0]['tried']), len(url_types_names), 'Wrong number of tried URLs returned. Expected %s, got %s.' % (len(url_types_names), len(e.args[0]['tried'])))
for tried, expected in zip(e.args[0]['tried'], url_types_names):
for t, e in zip(tried, expected):
self.assertIsInstance(t, e['type']), str('%s is not an instance of %s') % (t, e['type'])
if 'name' in e:
if not e['name']:
self.assertIsNone(t.name, 'Expected no URL name but found %s.' % t.name)
else:
self.assertEqual(t.name, e['name'], 'Wrong URL name. Expected "%s", got "%s".' % (e['name'], t.name))
@override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls')
class ReverseLazyTest(TestCase):
    """Tests for reverse_lazy() in redirects and string interpolation."""

    def test_redirect_with_lazy_reverse(self):
        # The redirect target in the urlconf is declared via reverse_lazy;
        # it must resolve by the time the response is produced.
        response = self.client.get('/redirect/')
        self.assertRedirects(response, "/redirected_to/", status_code=302)

    def test_user_permission_with_lazy_reverse(self):
        # Lazy LOGIN_URL: anonymous users get redirected to the login page,
        # authenticated users reach the protected view.
        User.objects.create_user('alfred', 'alfred@example.com', password='testpw')
        response = self.client.get('/login_required_view/')
        self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302)
        self.client.login(username='alfred', password='testpw')
        response = self.client.get('/login_required_view/')
        self.assertEqual(response.status_code, 200)

    def test_inserting_reverse_lazy_into_string(self):
        # Lazy URL objects must coerce to text under %-formatting.
        self.assertEqual(
            'Some URL: %s' % reverse_lazy('some-login-page'),
            'Some URL: /login/'
        )
        if six.PY2:
            # On Python 2, bytes %-formatting must coerce the lazy object too.
            self.assertEqual(
                b'Some URL: %s' % reverse_lazy('some-login-page'),
                'Some URL: /login/'
            )
class ReverseLazySettingsTest(AdminScriptTestCase):
    """
    Test that reverse_lazy can be used in settings without causing a circular
    import error.
    """

    def setUp(self):
        # Write a settings module whose LOGIN_URL is computed lazily; an
        # eager reverse() at import time would trigger a circular import.
        self.write_settings('settings.py', extra="""
from django.core.urlresolvers import reverse_lazy
LOGIN_URL = reverse_lazy('login')""")

    def tearDown(self):
        self.remove_settings('settings.py')

    def test_lazy_in_settings(self):
        # `manage.py check` must succeed with the lazy LOGIN_URL in place.
        out, err = self.run_manage(['check'])
        self.assertNoOutput(err)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseShortcutTests(SimpleTestCase):
    """Tests for the redirect() shortcut and dotted-path reversing."""

    def test_redirect_to_object(self):
        # We don't really need a model; just something with a get_absolute_url
        class FakeObj(object):
            def get_absolute_url(self):
                return "/hi-there/"

        res = redirect(FakeObj())
        self.assertIsInstance(res, HttpResponseRedirect)
        self.assertEqual(res.url, '/hi-there/')

        # permanent=True yields a 301 response class instead of a 302
        res = redirect(FakeObj(), permanent=True)
        self.assertIsInstance(res, HttpResponsePermanentRedirect)
        self.assertEqual(res.url, '/hi-there/')

    def test_redirect_to_view_name(self):
        # redirect() accepts URL pattern names with positional or keyword args
        res = redirect('hardcoded2')
        self.assertEqual(res.url, '/hardcoded/doc.pdf')
        res = redirect('places', 1)
        self.assertEqual(res.url, '/places/1/')
        res = redirect('headlines', year='2008', month='02', day='17')
        self.assertEqual(res.url, '/headlines/2008.02.17/')
        self.assertRaises(NoReverseMatch, redirect, 'not-a-view')

    def test_redirect_to_url(self):
        # Plain relative or absolute URLs pass through (with percent-encoding)
        res = redirect('/foo/')
        self.assertEqual(res.url, '/foo/')
        res = redirect('http://example.com/')
        self.assertEqual(res.url, 'http://example.com/')
        # Assert that we can redirect using UTF-8 strings
        res = redirect('/æøå/abc/')
        self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5/abc/')
        # Assert that no imports are attempted when dealing with a relative path
        # (previously, the below would resolve in a UnicodeEncodeError from __import__ )
        res = redirect('/æøå.abc/')
        self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5.abc/')
        res = redirect('os.path')
        self.assertEqual(res.url, 'os.path')

    def test_no_illegal_imports(self):
        # modules that are not listed in urlpatterns should not be importable
        redirect("urlpatterns_reverse.nonimported_module.view")
        self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)

    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_reverse_by_path_nested(self):
        # Views that are added to urlpatterns using include() should be
        # reversible by dotted path.
        self.assertEqual(reverse('urlpatterns_reverse.views.nested_view'), '/includes/nested_path/')

    def test_redirect_view_object(self):
        # redirect() also accepts the view callable itself
        from .views import absolute_kwargs_view
        res = redirect(absolute_kwargs_view)
        self.assertEqual(res.url, '/absolute_arg_view/')
        self.assertRaises(NoReverseMatch, redirect, absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
@ignore_warnings(category=RemovedInDjango20Warning)
class NamespaceTests(SimpleTestCase):
    """Reversing URL names through instance and application namespaces."""

    def test_ambiguous_object(self):
        "Names deployed via dynamic URL objects that require namespaces can't be resolved"
        self.assertRaises(NoReverseMatch, reverse, 'urlobject-view')
        self.assertRaises(NoReverseMatch, reverse, 'urlobject-view', args=[37, 42])
        self.assertRaises(NoReverseMatch, reverse, 'urlobject-view', kwargs={'arg1': 42, 'arg2': 37})

    def test_ambiguous_urlpattern(self):
        "Names deployed via dynamic URL objects that require namespaces can't be resolved"
        self.assertRaises(NoReverseMatch, reverse, 'inner-nothing')
        self.assertRaises(NoReverseMatch, reverse, 'inner-nothing', args=[37, 42])
        self.assertRaises(NoReverseMatch, reverse, 'inner-nothing', kwargs={'arg1': 42, 'arg2': 37})

    def test_non_existent_namespace(self):
        "Non-existent namespaces raise errors"
        self.assertRaises(NoReverseMatch, reverse, 'blahblah:urlobject-view')
        self.assertRaises(NoReverseMatch, reverse, 'test-ns1:blahblah:urlobject-view')

    def test_normal_name(self):
        "Normal lookups work as expected"
        self.assertEqual('/normal/', reverse('normal-view'))
        self.assertEqual('/normal/37/42/', reverse('normal-view', args=[37, 42]))
        self.assertEqual('/normal/42/37/', reverse('normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/+%5C$*/', reverse('special-view'))

    def test_simple_included_name(self):
        "Normal lookups work on names included from other patterns"
        self.assertEqual('/included/normal/', reverse('inc-normal-view'))
        self.assertEqual('/included/normal/37/42/', reverse('inc-normal-view', args=[37, 42]))
        self.assertEqual('/included/normal/42/37/', reverse('inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/included/+%5C$*/', reverse('inc-special-view'))

    def test_namespace_object(self):
        "Dynamic URL objects can be found using a namespace"
        self.assertEqual('/test1/inner/', reverse('test-ns1:urlobject-view'))
        self.assertEqual('/test1/inner/37/42/', reverse('test-ns1:urlobject-view', args=[37, 42]))
        self.assertEqual('/test1/inner/42/37/', reverse('test-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/test1/inner/+%5C$*/', reverse('test-ns1:urlobject-special-view'))

    def test_app_object(self):
        "Dynamic URL objects can return a (pattern, app_name) 2-tuple, and include() can set the namespace"
        self.assertEqual('/newapp1/inner/', reverse('new-ns1:urlobject-view'))
        self.assertEqual('/newapp1/inner/37/42/', reverse('new-ns1:urlobject-view', args=[37, 42]))
        self.assertEqual('/newapp1/inner/42/37/', reverse('new-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/newapp1/inner/+%5C$*/', reverse('new-ns1:urlobject-special-view'))

    def test_app_object_default_namespace(self):
        "Namespace defaults to app_name when including a (pattern, app_name) 2-tuple"
        self.assertEqual('/new-default/inner/', reverse('newapp:urlobject-view'))
        self.assertEqual('/new-default/inner/37/42/', reverse('newapp:urlobject-view', args=[37, 42]))
        self.assertEqual('/new-default/inner/42/37/', reverse('newapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/new-default/inner/+%5C$*/', reverse('newapp:urlobject-special-view'))

    def test_embedded_namespace_object(self):
        "Namespaces can be installed anywhere in the URL pattern tree"
        self.assertEqual('/included/test3/inner/', reverse('test-ns3:urlobject-view'))
        self.assertEqual('/included/test3/inner/37/42/', reverse('test-ns3:urlobject-view', args=[37, 42]))
        self.assertEqual('/included/test3/inner/42/37/', reverse('test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/included/test3/inner/+%5C$*/', reverse('test-ns3:urlobject-special-view'))

    def test_namespace_pattern(self):
        "Namespaces can be applied to include()'d urlpatterns"
        self.assertEqual('/ns-included1/normal/', reverse('inc-ns1:inc-normal-view'))
        self.assertEqual('/ns-included1/normal/37/42/', reverse('inc-ns1:inc-normal-view', args=[37, 42]))
        self.assertEqual('/ns-included1/normal/42/37/', reverse('inc-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/ns-included1/+%5C$*/', reverse('inc-ns1:inc-special-view'))

    def test_app_name_pattern(self):
        "Namespaces can be applied to include()'d urlpatterns that set an app_name attribute"
        self.assertEqual('/app-included1/normal/', reverse('app-ns1:inc-normal-view'))
        self.assertEqual('/app-included1/normal/37/42/', reverse('app-ns1:inc-normal-view', args=[37, 42]))
        self.assertEqual('/app-included1/normal/42/37/', reverse('app-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/app-included1/+%5C$*/', reverse('app-ns1:inc-special-view'))

    def test_namespace_pattern_with_variable_prefix(self):
        "When using an include with namespaces when there is a regex variable in front of it"
        self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42}))
        self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', args=[42]))
        self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42, 'arg1': 37, 'arg2': 4}))
        self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', args=[42, 37, 4]))
        self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', kwargs={'outer': 42}))
        self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', args=[42]))

    def test_multiple_namespace_pattern(self):
        "Namespaces can be embedded"
        self.assertEqual('/ns-included1/test3/inner/', reverse('inc-ns1:test-ns3:urlobject-view'))
        self.assertEqual('/ns-included1/test3/inner/37/42/', reverse('inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
        self.assertEqual('/ns-included1/test3/inner/42/37/', reverse('inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:test-ns3:urlobject-special-view'))

    def test_nested_namespace_pattern(self):
        "Namespaces can be nested"
        self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view'))
        self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/37/42/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
        self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/42/37/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view'))

    def test_app_lookup_object(self):
        "A default application namespace can be used for lookup"
        self.assertEqual('/default/inner/', reverse('testapp:urlobject-view'))
        self.assertEqual('/default/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42]))
        self.assertEqual('/default/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/default/inner/+%5C$*/', reverse('testapp:urlobject-special-view'))

    def test_app_lookup_object_with_default(self):
        "A default application namespace is sensitive to the 'current' app can be used for lookup"
        self.assertEqual('/included/test3/inner/', reverse('testapp:urlobject-view', current_app='test-ns3'))
        self.assertEqual('/included/test3/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42], current_app='test-ns3'))
        self.assertEqual('/included/test3/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='test-ns3'))
        self.assertEqual('/included/test3/inner/+%5C$*/', reverse('testapp:urlobject-special-view', current_app='test-ns3'))

    def test_app_lookup_object_without_default(self):
        "An application namespace without a default is sensitive to the 'current' app can be used for lookup"
        self.assertEqual('/other2/inner/', reverse('nodefault:urlobject-view'))
        self.assertEqual('/other2/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42]))
        self.assertEqual('/other2/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/other2/inner/+%5C$*/', reverse('nodefault:urlobject-special-view'))

        self.assertEqual('/other1/inner/', reverse('nodefault:urlobject-view', current_app='other-ns1'))
        self.assertEqual('/other1/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42], current_app='other-ns1'))
        self.assertEqual('/other1/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='other-ns1'))
        self.assertEqual('/other1/inner/+%5C$*/', reverse('nodefault:urlobject-special-view', current_app='other-ns1'))

    def test_special_chars_namespace(self):
        # Namespaces whose prefixes contain characters needing URL-escaping
        self.assertEqual('/+%5C$*/included/normal/', reverse('special:inc-normal-view'))
        self.assertEqual('/+%5C$*/included/normal/37/42/', reverse('special:inc-normal-view', args=[37, 42]))
        self.assertEqual('/+%5C$*/included/normal/42/37/', reverse('special:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/+%5C$*/included/+%5C$*/', reverse('special:inc-special-view'))

    def test_namespaces_with_variables(self):
        "Namespace prefixes can capture variables: see #15900"
        self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', kwargs={'outer': '70'}))
        self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', kwargs={'outer': '78', 'extra': 'foobar'}))
        self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', args=['70']))
        self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', args=['78', 'foobar']))

    def test_nested_app_lookup(self):
        "A nested current_app should be split in individual namespaces (#24904)"
        self.assertEqual('/ns-included1/test4/inner/', reverse('inc-ns1:testapp:urlobject-view'))
        self.assertEqual('/ns-included1/test4/inner/37/42/', reverse('inc-ns1:testapp:urlobject-view', args=[37, 42]))
        self.assertEqual(
            '/ns-included1/test4/inner/42/37/',
            reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
        )
        self.assertEqual('/ns-included1/test4/inner/+%5C$*/', reverse('inc-ns1:testapp:urlobject-special-view'))

        self.assertEqual(
            '/ns-included1/test3/inner/',
            reverse('inc-ns1:testapp:urlobject-view', current_app='inc-ns1:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test3/inner/37/42/',
            reverse('inc-ns1:testapp:urlobject-view', args=[37, 42], current_app='inc-ns1:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test3/inner/42/37/',
            reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='inc-ns1:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test3/inner/+%5C$*/',
            reverse('inc-ns1:testapp:urlobject-special-view', current_app='inc-ns1:test-ns3')
        )

    def test_current_app_no_partial_match(self):
        "current_app should either match the whole path or shouldn't be used"
        self.assertEqual(
            '/ns-included1/test4/inner/',
            reverse('inc-ns1:testapp:urlobject-view', current_app='non-existant:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test4/inner/37/42/',
            reverse('inc-ns1:testapp:urlobject-view', args=[37, 42], current_app='non-existant:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test4/inner/42/37/',
            reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37},
                    current_app='non-existant:test-ns3')
        )
        self.assertEqual(
            '/ns-included1/test4/inner/+%5C$*/',
            reverse('inc-ns1:testapp:urlobject-special-view', current_app='non-existant:test-ns3')
        )
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(SimpleTestCase):
    """Per-request urlconf overrides set by middleware (request.urlconf)."""

    def test_urlconf(self):
        # Without any middleware override, the outer urlconf is in effect.
        response = self.client.get('/test/me/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
        response = self.client.get('/inner_urlconf/second_test/')
        self.assertEqual(response.status_code, 200)
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 404)

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
        ]
    )
    def test_urlconf_overridden(self):
        # Middleware swaps in the inner urlconf: outer URLs 404, inner resolve.
        response = self.client.get('/test/me/')
        self.assertEqual(response.status_code, 404)
        response = self.client.get('/inner_urlconf/second_test/')
        self.assertEqual(response.status_code, 404)
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'outer:,inner:/second_test/')

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.NullChangeURLconfMiddleware' % middleware.__name__,
        ]
    )
    def test_urlconf_overridden_with_null(self):
        """
        Overriding request.urlconf with None will fall back to the default
        URLconf.
        """
        response = self.client.get('/test/me/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
        response = self.client.get('/inner_urlconf/second_test/')
        self.assertEqual(response.status_code, 200)
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 404)

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseInnerInResponseMiddleware' % middleware.__name__,
        ]
    )
    def test_reverse_inner_in_response_middleware(self):
        """
        Test reversing an URL from the *overridden* URLconf from inside
        a response middleware.
        """
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'/second_test/')

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseOuterInResponseMiddleware' % middleware.__name__,
        ]
    )
    def test_reverse_outer_in_response_middleware(self):
        """
        Test reversing an URL from the *default* URLconf from inside
        a response middleware.
        """
        message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
        with self.assertRaisesMessage(NoReverseMatch, message):
            self.client.get('/second_test/')

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseInnerInStreaming' % middleware.__name__,
        ]
    )
    def test_reverse_inner_in_streaming(self):
        """
        Test reversing an URL from the *overridden* URLconf from inside
        a streaming response.
        """
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        # Consuming the stream triggers the reverse() inside the generator
        self.assertEqual(b''.join(response), b'/second_test/')

    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseOuterInStreaming' % middleware.__name__,
        ]
    )
    def test_reverse_outer_in_streaming(self):
        """
        Test reversing an URL from the *default* URLconf from inside
        a streaming response.
        """
        message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
        with self.assertRaisesMessage(NoReverseMatch, message):
            self.client.get('/second_test/')
            # The error only surfaces once the streaming body is consumed
            b''.join(self.client.get('/second_test/'))
class ErrorHandlerResolutionTests(SimpleTestCase):
    """Tests for handler400, handler404 and handler500"""

    def setUp(self):
        urlconf = 'urlpatterns_reverse.urls_error_handlers'
        urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables'
        self.resolver = RegexURLResolver(r'^$', urlconf)
        self.callable_resolver = RegexURLResolver(r'^$', urlconf_callables)

    def test_named_handlers(self):
        # Handlers given as dotted paths all resolve to the empty view.
        expected = (empty_view, {})
        for status_code in (400, 404, 500):
            self.assertEqual(self.resolver.resolve_error_handler(status_code), expected)

    def test_callable_handers(self):
        # Handlers given directly as callables resolve identically.
        expected = (empty_view, {})
        for status_code in (400, 404, 500):
            self.assertEqual(self.callable_resolver.resolve_error_handler(status_code), expected)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_full_import')
class DefaultErrorHandlerTests(SimpleTestCase):
    """Fallback to the default error handlers when urls.py defines none."""

    def test_default_handler(self):
        "If the urls.py doesn't specify handlers, the defaults are used"
        # Missing handler404 must not surface as an AttributeError
        try:
            response = self.client.get('/test/')
            self.assertEqual(response.status_code, 404)
        except AttributeError:
            self.fail("Shouldn't get an AttributeError due to undefined 404 handler")
        # Missing handler500: the view's ValueError propagates instead
        try:
            self.assertRaises(ValueError, self.client.get, '/bad_view/')
        except AttributeError:
            self.fail("Shouldn't get an AttributeError due to undefined 500 handler")
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(SimpleTestCase):
    """Tests for handler404 and handler500 if urlconf is None"""

    def test_no_handler_exception(self):
        # With no root urlconf configured, any request must raise
        # ImproperlyConfigured rather than failing on handler lookup.
        with self.assertRaises(ImproperlyConfigured):
            self.client.get('/test/me/')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class ResolverMatchTests(SimpleTestCase):
    """Attributes and tuple-unpacking behaviour of ResolverMatch objects."""

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_urlpattern_resolve(self):
        for path, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data:
            # Test legacy support for extracting "function, args, kwargs"
            match_func, match_args, match_kwargs = resolve(path)
            self.assertEqual(match_func, func)
            self.assertEqual(match_args, args)
            self.assertEqual(match_kwargs, kwargs)

            # Test ResolverMatch capabilities.
            match = resolve(path)
            self.assertEqual(match.__class__, ResolverMatch)
            self.assertEqual(match.url_name, url_name)
            self.assertEqual(match.app_name, app_name)
            self.assertEqual(match.namespace, namespace)
            self.assertEqual(match.view_name, view_name)
            self.assertEqual(match.func, func)
            self.assertEqual(match.args, args)
            self.assertEqual(match.kwargs, kwargs)

            # ... and for legacy purposes:
            self.assertEqual(match[0], func)
            self.assertEqual(match[1], args)
            self.assertEqual(match[2], kwargs)

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_resolver_match_on_request(self):
        # The test client attaches resolver_match to the response
        response = self.client.get('/resolver_match/')
        resolver_match = response.resolver_match
        self.assertEqual(resolver_match.url_name, 'test-resolver-match')

    def test_resolver_match_on_request_before_resolution(self):
        # Before URL resolution happens, resolver_match is None
        request = HttpRequest()
        self.assertIsNone(request.resolver_match)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls')
class ErroneousViewTests(SimpleTestCase):
    """Resolution and reversing when the urlconf references broken views."""

    def test_erroneous_resolve(self):
        # Views whose import genuinely fails propagate ImportError; views
        # that merely don't exist raise ViewDoesNotExist instead.
        cases = [
            (ImportError, '/erroneous_inner/'),
            (ImportError, '/erroneous_outer/'),
            (ViewDoesNotExist, '/missing_inner/'),
            (ViewDoesNotExist, '/missing_outer/'),
            (ViewDoesNotExist, '/uncallable-dotted/'),
            (ViewDoesNotExist, '/uncallable-object/'),
            # Regression test for #21157
            (ImportError, '/erroneous_unqualified/'),
        ]
        for exc_class, path in cases:
            self.assertRaises(exc_class, self.client.get, path)

    def test_erroneous_reverse(self):
        """
        Ensure that a useful exception is raised when a regex is invalid in the
        URLConf (#6170).
        """
        # The regex error will be hit before NoReverseMatch can be raised
        self.assertRaises(ImproperlyConfigured, reverse, 'whatever blah blah')
class ViewLoadingTests(SimpleTestCase):
    """Tests for get_callable() resolving dotted view paths."""

    def test_view_loading(self):
        # A dotted path resolves to the actual view function
        self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'), empty_view)
        # passing a callable should return the callable
        self.assertEqual(get_callable(empty_view), empty_view)

    def test_exceptions(self):
        # A missing view (identified by an AttributeError) should raise
        # ViewDoesNotExist, ...
        with six.assertRaisesRegex(self, ViewDoesNotExist, ".*View does not exist in.*"):
            get_callable('urlpatterns_reverse.views.i_should_not_exist')
        # ... but if the AttributeError is caused by something else don't
        # swallow it.
        with self.assertRaises(AttributeError):
            get_callable('urlpatterns_reverse.views_broken.i_am_broken')
class IncludeTests(SimpleTestCase):
    """Return values and deprecation behaviour of include()."""

    # A plain list of url() patterns, as a caller might pass to include()
    url_patterns = [
        url(r'^inner/$', views.empty_view, name='urlobject-view'),
        url(r'^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', views.empty_view, name='urlobject-view'),
        url(r'^inner/\+\\\$\*/$', views.empty_view, name='urlobject-special-view'),
    ]
    # A URL object carrying its own app_name ('inc-app')
    app_urls = URLObject('inc-app')

    def test_include_app_name_but_no_namespace(self):
        msg = "Must specify a namespace if specifying app_name."
        with self.assertRaisesMessage(ValueError, msg):
            include(self.url_patterns, app_name='bar')

    def test_include_urls(self):
        # Bare patterns: no app_name, no namespace
        self.assertEqual(include(self.url_patterns), (self.url_patterns, None, None))

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_include_namespace(self):
        # no app_name -> deprecated
        self.assertEqual(include(self.url_patterns, 'namespace'), (self.url_patterns, None, 'namespace'))

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_include_namespace_app_name(self):
        # app_name argument to include -> deprecated
        self.assertEqual(
            include(self.url_patterns, 'namespace', 'app_name'),
            (self.url_patterns, 'app_name', 'namespace')
        )

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_include_3_tuple(self):
        # 3-tuple -> deprecated
        self.assertEqual(
            include((self.url_patterns, 'app_name', 'namespace')),
            (self.url_patterns, 'app_name', 'namespace')
        )

    def test_include_2_tuple(self):
        # (patterns, app_name): namespace defaults to the app_name
        self.assertEqual(
            include((self.url_patterns, 'app_name')),
            (self.url_patterns, 'app_name', 'app_name')
        )

    def test_include_2_tuple_namespace(self):
        # Explicit namespace keyword overrides the default
        self.assertEqual(
            include((self.url_patterns, 'app_name'), namespace='namespace'),
            (self.url_patterns, 'app_name', 'namespace')
        )

    def test_include_app_name(self):
        # URL object with app_name: namespace defaults to the app_name
        self.assertEqual(
            include(self.app_urls),
            (self.app_urls, 'inc-app', 'inc-app')
        )

    def test_include_app_name_namespace(self):
        # URL object with app_name plus an explicit namespace
        self.assertEqual(
            include(self.app_urls, 'namespace'),
            (self.app_urls, 'inc-app', 'namespace')
        )
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class LookaheadTests(SimpleTestCase):
    """Resolving and reversing patterns with lookahead/lookbehind assertions."""

    def test_valid_resolve(self):
        # All four assertion flavours must match 'a-city' and capture it.
        for test_url in ('/lookahead-/a-city/',
                         '/lookbehind-/a-city/',
                         '/lookahead+/a-city/',
                         '/lookbehind+/a-city/'):
            self.assertEqual(resolve(test_url).kwargs, {'city': 'a-city'})

    def test_invalid_resolve(self):
        # URLs rejected by the assertion must raise Resolver404.
        for test_url in ('/lookahead-/not-a-city/',
                         '/lookbehind-/not-a-city/',
                         '/lookahead+/other-city/',
                         '/lookbehind+/other-city/'):
            with self.assertRaises(Resolver404):
                resolve(test_url)

    def test_valid_reverse(self):
        # Reversing drops the (non-capturing) assertion from the result.
        cases = [
            ('lookahead-positive', '/lookahead+/a-city/'),
            ('lookahead-negative', '/lookahead-/a-city/'),
            ('lookbehind-positive', '/lookbehind+/a-city/'),
            ('lookbehind-negative', '/lookbehind-/a-city/'),
        ]
        for name, expected in cases:
            self.assertEqual(reverse(name, kwargs={'city': 'a-city'}), expected)

    def test_invalid_reverse(self):
        # Values that violate the assertion must not reverse.
        cases = [
            ('lookahead-positive', 'other-city'),
            ('lookahead-negative', 'not-a-city'),
            ('lookbehind-positive', 'other-city'),
            ('lookbehind-negative', 'not-a-city'),
        ]
        for name, city in cases:
            with self.assertRaises(NoReverseMatch):
                reverse(name, kwargs={'city': city})
| bsd-3-clause |
anurag-ks/eden | modules/s3menus.py | 1 | 81417 | # -*- coding: utf-8 -*-
""" Sahana Eden Menu Structure and Layout
@copyright: 2011-2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# Public API of this module: the default main menu plus the per-controller
# options menus (S3OptionsMenu is defined later in this file).
__all__ = ("S3MainMenu",
           "S3OptionsMenu",
           )
import re
from gluon import *
from gluon.storage import Storage
from s3 import *
from s3layouts import *
# =============================================================================
class S3MainMenu(object):
""" The default configurations for the main application menu """
# -------------------------------------------------------------------------
@classmethod
def menu(cls):
    """Compose the standard application main menu from its submenus."""

    # Left-hand side: the module navigation
    entries = [cls.menu_modules()]

    # Right-hand side service menus
    # Note: always define right-hand items in reverse order!
    entries.extend((cls.menu_help(right=True),
                    cls.menu_lang(right=True),
                    cls.menu_gis(right=True),
                    cls.menu_auth(right=True),
                    cls.menu_admin(right=True),
                    ))

    return MM()(*entries)
# -------------------------------------------------------------------------
@classmethod
def menu_modules(cls):
    """
    Build the module navigation entries for the main menu.

    Home always comes first; then all modules of module_type 1-9 at the
    top level (in type order); modules of module_type 10 go under a
    "more" submenu. Modules hidden by the permission system are skipped.

    @return: a list of MM menu items
    """

    menu_modules = []
    all_modules = current.deployment_settings.modules

    # Home always 1st
    module = all_modules["default"]
    menu_modules.append(MM(module.name_nice, c="default", f="index"))

    # Modules to hide due to insufficient permissions
    hidden_modules = current.auth.permission.hidden_modules()

    def menu_item(name, _module):
        """Build one menu entry, restricted to the module's access groups."""
        if not _module.access:
            return MM(_module.name_nice, c=name, f="index")
        # access is a |-delimited group list like "|1|2|"; use a raw
        # string for the pattern ("\|" is an invalid escape sequence in
        # a non-raw string literal)
        groups = re.split(r"\|", _module.access)[1:-1]
        return MM(_module.name_nice,
                  c=name,
                  f="index",
                  restrict=groups)

    # The Modules to display at the top level (in order)
    for module_type in [1, 2, 3, 4, 5, 6, 7, 8, 9]:
        for module in all_modules:
            if module in hidden_modules:
                continue
            _module = all_modules[module]
            if _module.module_type == module_type:
                menu_modules.append(menu_item(module, _module))

    # Modules to display off the 'more' menu
    modules_submenu = []
    for module in all_modules:
        if module in hidden_modules:
            continue
        _module = all_modules[module]
        if _module.module_type == 10:
            modules_submenu.append(menu_item(module, _module))

    if modules_submenu:
        # Only show the 'more' menu if there are entries in the list
        menu_modules.append(MM("more", link=False)(modules_submenu))

    return menu_modules
# -------------------------------------------------------------------------
@classmethod
def menu_lang(cls, **attr):
""" Language menu """
settings = current.deployment_settings
if not settings.get_L10n_display_toolbar():
return None
languages = current.response.s3.l10n_languages
request = current.request
menu_lang = MM("Language", **attr)
for language in languages:
menu_lang.append(MM(languages[language], r=request,
translate=False,
selectable=False,
vars={"_language":language},
ltr=True
))
return menu_lang
# -------------------------------------------------------------------------
@classmethod
def menu_help(cls, **attr):
""" Help Menu """
menu_help = MM("Help", c="default", f="help", **attr)(
MM("Contact us", f="contact"),
MM("About", f="about")
)
# -------------------------------------------------------------------
# Now add the available guided tours to the help menu
# check that a guided_tour is enabled
if current.deployment_settings.get_base_guided_tour():
# load the guided tour configuration from the database
table = current.s3db.tour_config
logged_in = current.auth.is_logged_in()
if logged_in:
query = (table.deleted == False) &\
(table.role != "")
else:
query = (table.deleted == False) &\
(table.role == "")
tours = current.db(query).select(table.id,
table.name,
table.controller,
table.function,
table.role,
)
if len(tours) > 0:
menu_help.append(SEP())
for row in tours:
menu_help.append(MM(row.name,
c=row.controller,
f=row.function,
vars={"tour":row.id},
restrict=row.role
)
)
return menu_help
# -------------------------------------------------------------------------
@classmethod
def menu_auth(cls, **attr):
""" Auth Menu """
auth = current.auth
logged_in = auth.is_logged_in()
if not logged_in:
request = current.request
login_next = URL(args=request.args, vars=request.vars)
if request.controller == "default" and \
request.function == "user" and \
"_next" in request.get_vars:
login_next = request.get_vars["_next"]
self_registration = current.deployment_settings.get_security_registration_visible()
if self_registration == "index":
register = MM("Register", c="default", f="index", m="register",
vars=dict(_next=login_next),
check=self_registration)
else:
register = MM("Register", m="register",
vars=dict(_next=login_next),
check=self_registration)
menu_auth = MM("Login", c="default", f="user", m="login",
_id="auth_menu_login",
vars=dict(_next=login_next), **attr)(
MM("Login", m="login",
vars=dict(_next=login_next)),
register,
MM("Lost Password", m="retrieve_password")
)
else:
# Logged-in
menu_auth = MM(auth.user.email, c="default", f="user",
translate=False, link=False, _id="auth_menu_email",
**attr)(
MM("Logout", m="logout", _id="auth_menu_logout"),
MM("User Profile", m="profile"),
MM("Personal Data", c="default", f="person", m="update"),
MM("Contact Details", c="pr", f="person",
args="contact",
vars={"person.pe_id" : auth.user.pe_id}),
#MM("Subscriptions", c="pr", f="person",
# args="pe_subscription",
# vars={"person.pe_id" : auth.user.pe_id}),
MM("Change Password", m="change_password"),
SEP(),
MM({"name": current.T("Rapid Data Entry"),
"id": "rapid_toggle",
"value": current.session.s3.rapid_data_entry is True},
f="rapid"),
)
return menu_auth
# -------------------------------------------------------------------------
@classmethod
def menu_admin(cls, **attr):
""" Administrator Menu """
s3_has_role = current.auth.s3_has_role
settings = current.deployment_settings
name_nice = settings.modules["admin"].name_nice
if s3_has_role("ADMIN"):
translate = settings.has_module("translate")
menu_admin = MM(name_nice, c="admin", **attr)(
MM("Settings", f="setting"),
MM("Users", f="user"),
MM("Person Registry", c="pr"),
MM("Database", c="appadmin", f="index"),
MM("Error Tickets", f="errors"),
MM("Synchronization", c="sync", f="index"),
MM("Translation", c="admin", f="translate",
check=translate),
MM("Test Results", f="result"),
)
elif s3_has_role("ORG_ADMIN"):
menu_admin = MM(name_nice, c="admin", f="user", **attr)()
else:
menu_admin = None
return menu_admin
# -------------------------------------------------------------------------
@classmethod
def menu_gis(cls, **attr):
""" GIS Config Menu """
settings = current.deployment_settings
if not settings.get_gis_menu():
return None
T = current.T
db = current.db
auth = current.auth
s3db = current.s3db
request = current.request
s3 = current.session.s3
_config = s3.gis_config_id
# See if we need to switch config before we decide which
# config item to mark as active:
if "_config" in request.get_vars:
# The user has just selected a config from the GIS menu
try:
config = int(request.get_vars._config)
except ValueError:
# Manually-crafted URL?
pass
else:
if _config is None or _config != config:
# Set this as the current config
s3.gis_config_id = config
cfg = current.gis.get_config()
s3.location_filter = cfg.region_location_id
if settings.has_module("event"):
# See if this config is associated with an Incident
table = s3db.event_config
query = (table.config_id == config)
incident = db(query).select(table.incident_id,
limitby=(0, 1)).first()
if incident:
s3.incident = incident.incident_id
else:
s3.incident = None
# Don't use the outdated cache for this call
cache = None
else:
cache = s3db.cache
# Check if there are multiple GIS Configs for the user to switch between
table = s3db.gis_menu
ctable = s3db.gis_config
query = (table.pe_id == None)
if auth.is_logged_in():
# @ToDo: Search for OUs too (API call)
query |= (table.pe_id == auth.user.pe_id)
query &= (table.config_id == ctable.id)
configs = db(query).select(ctable.id, ctable.name, cache=cache)
gis_menu = MM(settings.get_gis_menu(),
c=request.controller,
f=request.function,
**attr)
args = request.args
if len(configs):
# Use short names for the site and personal configs else they'll wrap.
# Provide checkboxes to select between pages
gis_menu(
MM({"name": T("Default"),
"id": "gis_menu_id_0",
# @ToDo: Show when default item is selected without having
# to do a DB query to read the value
#"value": _config is 0,
"request_type": "load"
}, args=args, vars={"_config": 0}
)
)
for config in configs:
gis_menu(
MM({"name": config.name,
"id": "gis_menu_id_%s" % config.id,
"value": _config == config.id,
"request_type": "load"
}, args=args, vars={"_config": config.id}
)
)
return gis_menu
# =============================================================================
class S3OptionsMenu(object):
"""
The default configurations for options menus
Define one function per controller with the controller prefix as
function name and with "self" as its only argument (must be an
instance method!), and let it return the controller menu
definition as an instance of the layout (=an S3NavigationItem
subclass, standard: M).
In the standard layout, the main item in a controller menu does
not have a label. If you want to re-use a menu for multiple
controllers, do *not* define a controller setting (c="xxx") in
the main item.
"""
def __init__(self, name):
""" Constructor """
try:
self.menu = getattr(self, name)()
except:
self.menu = None
# -------------------------------------------------------------------------
    def admin(self):
        """ ADMIN menu (restricted to the ADMIN system role) """
        ADMIN = current.session.s3.system_roles.ADMIN
        # Messaging sub-menu built by a helper method (defined elsewhere
        # in this class, not shown in this chunk)
        settings_messaging = self.settings_messaging()
        # Translation entries only shown if the translate module is enabled
        translate = current.deployment_settings.has_module("translate")
        # NB: Do not specify a controller for the main menu to allow
        #     re-use of this menu by other controllers
        return M(restrict=[ADMIN])(
                    M("Settings", c="admin", f="setting")(
                        settings_messaging,
                    ),
                    M("User Management", c="admin", f="user")(
                        M("Create User", m="create"),
                        M("List All Users"),
                        M("Import Users", m="import"),
                        M("List All Roles", f="role"),
                        M("List All Organization Approvers & Whitelists", f="organisation"),
                        #M("Roles", f="group"),
                        #M("Membership", f="membership"),
                    ),
                    M("Database", c="appadmin", f="index")(
                        M("Raw Database access", c="appadmin", f="index")
                    ),
                    M("Error Tickets", c="admin", f="errors"),
                    M("Synchronization", c="sync", f="index")(
                        M("Settings", f="config", args=[1], m="update"),
                        M("Repositories", f="repository"),
                        M("Log", f="log"),
                    ),
                    #M("Edit Application", a="admin", c="default", f="design",
                      #args=[request.application]),
                    M("Translation", c="admin", f="translate", check=translate)(
                       M("Select Modules for translation", c="admin", f="translate",
                         m="create", vars=dict(opt="1")),
                       M("Upload translated files", c="admin", f="translate",
                         m="create", vars=dict(opt="2")),
                       M("View Translation Percentage", c="admin", f="translate",
                         m="create", vars=dict(opt="3")),
                       M("Add strings manually", c="admin", f="translate",
                         m="create", vars=dict(opt="4"))
                    ),
                    M("View Test Result Reports", c="admin", f="result"),
                    M("Portable App", c="admin", f="portable")
                )
# -------------------------------------------------------------------------
@staticmethod
def assess():
""" ASSESS Menu """
#ADMIN = current.session.s3.system_roles.ADMIN
return M(c="assess")(
M("Building Assessments", f="building")(
M("Create", m="create"),
M("Map", m="map"),
),
M("Canvassing", f="canvass")(
M("Create", m="create"),
M("Map", m="map"),
),
#M("Rapid Assessments", f="rat")(
# M("Create", m="create"),
#),
#M("Impact Assessments", f="assess")(
# #M("Create", m="create"),
# M("Create", f="basic_assess", p="create"),
# #M("Search"),
# M("Mobile", f="mobile_basic_assess"),
#),
##M("Baseline Data")(
# #M("Population", f="population"),
##),
#M("Edit Options", restrict=ADMIN)(
# M("List / Add Baseline Types", f="baseline_type"),
# M("List / Add Impact Types", f="impact_type"),
#)
)
# -------------------------------------------------------------------------
    @staticmethod
    def asset():
        """ ASSET Controller """
        ADMIN = current.session.s3.system_roles.ADMIN
        # Telephones menu is optional (deployment setting); wrapped in a
        # lambda so the setting is only read when the item is rendered
        telephones = lambda i: current.deployment_settings.get_asset_telephones()
        return M(c="asset")(
                    M("Assets", f="asset", m="summary")(
                        M("Create", m="create"),
                        #M("Map", m="map"),
                        M("Import", m="import", p="create"),
                    ),
                    M("Telephones", f="telephone", m="summary",
                      check=telephones)(
                        M("Create", m="create"),
                        #M("Map", m="map"),
                        M("Import", m="import", p="create"),
                    ),
                    #M("Brands", f="brand",
                    #  restrict=[ADMIN])(
                    #    M("Create", m="create"),
                    #),
                    M("Items", f="item", m="summary")(
                        M("Create", m="create"),
                        M("Import", f="catalog_item", m="import", p="create"),
                    ),
                    M("Item Categories", f="item_category",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M("Catalogs", f="catalog",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M("Suppliers", f="supplier")(
                        M("Create", m="create"),
                        M("Import", m="import", p="create"),
                    ),
                )
# -------------------------------------------------------------------------
@staticmethod
def budget():
""" BUDGET Controller """
return M(c="budget")(
M("Budgets", f="budget")(
M("Create", m="create"),
),
M("Staff Types", f="staff")(
M("Create", m="create"),
),
M("Projects", f="project")(
M("Create", m="create"),
),
M("Locations", f="location")(
M("Create", m="create"),
),
M("Bundles", f="bundle")(
M("Create", m="create"),
),
M("Kits", f="kit")(
M("Create", m="create"),
),
M("Items", f="item")(
M("Create", m="create"),
),
M("Parameters", f="parameter"),
)
# -------------------------------------------------------------------------
@staticmethod
def building():
""" BUILDING Controller """
return M(c="building")(
M("NZSEE Level 1", f="nzseel1")(
M("Submit New (triage)", m="create",
vars={"triage":1}),
M("Submit New (full form)", m="create"),
),
M("NZSEE Level 2", f="nzseel2")(
M("Submit New", m="create"),
),
M("Report", f="index")(
M("Snapshot", f="report"),
M("Assessment timeline", f="timeline"),
M("Assessment admin level", f="adminLevel"),
),
)
# -------------------------------------------------------------------------
@staticmethod
def cap():
""" CAP menu """
return M(c="cap")(
M("Alerts", f="alert")(
M("Create", m="create"),
M("Import from CSV", m="import", p="create"),
M("Import from Feed URL", m="import_feed", p="create"),
),
M("Templates", f="template")(
M("Create", m="create"),
),
#M("CAP Profile", f="profile")(
# M("Edit profile", f="profile")
#)
)
# -------------------------------------------------------------------------
@staticmethod
def cr():
""" CR / Shelter Registry """
ADMIN = current.session.s3.system_roles.ADMIN
if current.deployment_settings.get_ui_label_camp():
shelter = "Camps"
types = "Camp Settings"
else:
shelter = "Shelters"
types = "Shelter Settings"
return M(c="cr")(
M(shelter, f="shelter")(
M("Create", m="create"),
M("Map", m="map"),
M("Report", m="report"),
M("Import", m="import", p="create"),
),
M(types, restrict=[ADMIN])(
M("Types", f="shelter_type"),
M("Services", f="shelter_service"),
)
)
# -------------------------------------------------------------------------
@staticmethod
def cms():
""" CMS / Content Management System """
return M(c="cms")(
M("Series", f="series")(
M("Create", m="create"),
M("View as Pages", f="blog"),
),
M("Posts", f="post")(
M("Create", m="create"),
M("View as Pages", f="page"),
),
)
# -------------------------------------------------------------------------
@staticmethod
def dc():
""" Data Collection Tool """
ADMIN = current.session.s3.system_roles.ADMIN
return M(c="dc")(
M("Templates", f="template")(
M("Create", m="create"),
),
M("Questions", f="question")(
M("Create", m="create"),
),
M("Data Collections", f="collection")(
M("Create", m="create"),
),
)
# -------------------------------------------------------------------------
@staticmethod
def delphi():
""" DELPHI / Delphi Decision Maker """
ADMIN = current.session.s3.system_roles.ADMIN
return M(c="delphi")(
M("Active Problems", f="problem")(
M("Create", m="create"),
),
M("Groups", f="group")(
M("Create", m="create"),
),
#M("Solutions", f="solution"),
#M("Administration", restrict=[ADMIN])(
#M("Groups", f="group"),
#M("Group Memberships", f="membership"),
#M("Problems", f="problem"),
#)
)
# -------------------------------------------------------------------------
    @staticmethod
    def deploy():
        """ Deployments """
        # NB: No controller prefix on the root item, so this menu can be
        #     shared by several controllers
        return M()(M("Missions",
                     c="deploy", f="mission", m="summary")(
                        M("Create", m="create"),
                        # Pre-filtered summary: status 2 = active
                        # (TODO confirm against deploy_mission status options)
                        M("Active Missions", m="summary",
                          vars={"~.status__belongs": "2"}),
                   ),
                   M("Alerts",
                     c="deploy", f="alert")(
                        M("Create", m="create"),
                        M("InBox",
                          c="deploy", f="email_inbox",
                        ),
                        M("Settings",
                          c="deploy", f="email_channel",
                          p="update", t="msg_email_channel",
                          ),
                   ),
                   M("Assignments",
                     c="deploy", f="assignment", m="summary"
                   ),
                   M("Job Titles",
                     c="deploy", f="job_title"
                   ),
                   M("Human Resources",
                     c="deploy", f="human_resource", m="summary")(
                        M("Add Deployables",
                          c="deploy", f="application", m="select",
                          p="create", t="deploy_application",
                          ),
                        M("Import Human Resources",
                          c="deploy", f="person", m="import"),
                   ),
                )
# -------------------------------------------------------------------------
    @staticmethod
    def disease():
        """ Disease Case Tracking and Contact Tracing """
        return M(c="disease")(
                    M("Cases",
                      c="disease", f="case", m="summary")(
                        M("Create", m="create"),
                        # Pre-filtered summary of monitored cases
                        M("Watch List", m="summary",
                          vars={"~.monitoring_level__belongs": "OBSERVATION,DIAGNOSTICS"}),
                    ),
                    M("Contact Tracing",
                      c="disease", f="tracing")(
                        M("Create", m="create"),
                    ),
                    M("Statistics Data",
                      c="disease", f="stats_data", args="summary")(
                        M("Create", m="create"),
                        M("Time Plot", m="timeplot"),
                        M("Import", m="import"),
                    ),
                    M("Statistics",
                      c="disease", f="statistic")(
                        M("Create", m="create"),
                    ),
                    M("Diseases",
                      c="disease", f="disease")(
                        M("Create", m="create"),
                    ),
                )
# -------------------------------------------------------------------------
@staticmethod
def doc():
""" DOC Menu """
return M(c="doc")(
M("Documents", f="document")(
M("Create", m="create"),
),
M("Photos", f="image")(
M("Create", m="create"),
#M("Bulk Uploader", f="bulk_upload"),
)
)
# -------------------------------------------------------------------------
    @staticmethod
    def dvi():
        """ DVI / Disaster Victim Identification """
        return M(c="dvi")(
                    #M("Home", f="index"),
                    M("Recovery Requests", f="recreq")(
                        M("New Request", m="create"),
                        # Status values 1-3 = still open
                        # (TODO confirm against dvi_recreq status options)
                        M("List Current",
                          vars={"recreq.status":"1,2,3"}),
                    ),
                    M("Dead Bodies", f="body")(
                        M("Add", m="create"),
                        M("List unidentified",
                          vars={"identification.status": "None"}),
                        M("Report by Age/Gender", m="report",
                          vars=dict(rows="age_group",
                                    cols="gender",
                                    fact="count(pe_label)",
                                    ),
                          ),
                    ),
                    M("Missing Persons", f="person")(
                        M("List all"),
                    ),
                    M("Morgues", f="morgue")(
                        M("Create", m="create"),
                    ),
                    M("Dashboard", f="index"),
                )
# -------------------------------------------------------------------------
@staticmethod
def dvr():
""" DVR Menu """
return M(c="dvr")(
M("Cases", f="case")(
M("Create", m="create"),
M("Report", m="report"),
),
M("Needs", f="need")(
M("Create", m="create"),
),
)
# -------------------------------------------------------------------------
    @staticmethod
    def event():
        """ EVENT / Event Module """
        # NB: No controller prefix on the root item - this menu is also
        #     re-used by the scenario controller (see scenario() below)
        return M()(
                    M("Scenarios", c="scenario", f="scenario")(
                        M("Create", m="create"),
                        M("Import", m="import", p="create"),
                    ),
                    M("Events", c="event", f="event")(
                        M("Create", m="create"),
                    ),
                    M("Event Types", c="event", f="event_type")(
                        M("Create", m="create"),
                        M("Import", m="import", p="create"),
                    ),
                    M("Incidents", c="event", f="incident")(
                        M("Create", m="create"),
                    ),
                    M("Incident Reports", c="event", f="incident_report", m="summary")(
                        M("Create", m="create"),
                    ),
                    M("Incident Types", c="event", f="incident_type")(
                        M("Create", m="create"),
                        M("Import", m="import", p="create"),
                    ),
                )
# -------------------------------------------------------------------------
    @staticmethod
    def fire():
        """ FIRE """
        return M(c="fire")(
                    M("Fire Stations", f="station")(
                        M("Create", m="create"),
                        M("Map", m="map"),
                        M("Import Stations", m="import"),
                        M("Import Vehicles", f="station_vehicle", m="import"),
                    ),
                    M("Fire Zones", f="zone")(
                        M("Create", m="create"),
                        #M("Map", m="map"),
                        #M("Import", m="import"),
                    ),
                    M("Zone Types", f="zone_type")(
                        M("Create", m="create"),
                        #M("Map", m="map"),
                        #M("Import", m="import"),
                    ),
                    M("Water Sources", f="water_source")(
                        M("Create", m="create"),
                        M("Map", m="map"),
                        M("Import", m="import"),
                    ),
                    M("Hazard Points", f="hazard_point")(
                        M("Create", m="create"),
                        M("Import", m="import"),
                    )
                )
# -------------------------------------------------------------------------
@staticmethod
def gis():
""" GIS / GIS Controllers """
MAP_ADMIN = current.session.s3.system_roles.MAP_ADMIN
settings = current.deployment_settings
gis_menu = settings.get_gis_menu()
def pois(i):
poi_resources = settings.get_gis_poi_create_resources()
if not poi_resources:
return False
for res in poi_resources:
if res["table"] == "gis_poi":
return True
return False
def config_menu(i):
auth = current.auth
if not auth.is_logged_in():
# Anonymous users can never cofnigure the Map
return False
s3db = current.s3db
if auth.s3_has_permission("create",
s3db.gis_config):
# If users can create configs then they can see the menu item
return True
# Look for this user's config
table = s3db.gis_config
query = (table.pe_id == auth.user.pe_id)
config = current.db(query).select(table.id,
limitby=(0, 1),
cache=s3db.cache).first()
if config:
return True
def config_args():
auth = current.auth
if not auth.user:
# Won't show anyway due to check
return []
if auth.s3_has_role(MAP_ADMIN):
# Full List
return []
# Look for this user's config
s3db = current.s3db
table = s3db.gis_config
query = (table.pe_id == auth.user.pe_id)
config = current.db(query).select(table.id,
limitby=(0, 1),
cache=s3db.cache).first()
if config:
# Link direct to the User's config
return [config.id, "layer_entity"]
# Link to the Create form
return ["create"]
return M(c="gis")(
M("Fullscreen Map", c="gis", f="map_viewing_client"),
# Currently not got geocoding support
#M("Bulk Uploader", c="doc", f="bulk_upload"),
M("Locations", c="gis", f="location")(
M("Create", m="create"),
#M("Create Location Group", m="create", vars={"group": 1}),
M("Import from CSV", m="import", restrict=[MAP_ADMIN]),
M("Import from OpenStreetMap", m="import_poi",
restrict=[MAP_ADMIN]),
#M("Geocode", f="geocode_manual"),
),
M("PoIs", c="gis", f="poi", check=pois)(),
#M("Population Report", f="location", m="report",
# vars=dict(rows="name",
# fact="sum(population)",
# ),
# ),
M("Configuration", c="gis", f="config", args=config_args(),
_id="gis_menu_config",
check=config_menu),
M("Admin", c="gis", restrict=[MAP_ADMIN])(
M("Hierarchy", f="hierarchy"),
M("Layers", f="catalog"),
M("Markers", f="marker"),
M("Menu", f="menu",
check=[gis_menu]),
M("PoI Types", f="poi_type",
check=[pois]),
M("Projections", f="projection"),
M("Styles", f="style"),
)
)
# -------------------------------------------------------------------------
@staticmethod
def hms():
""" HMS / Hospital Status Assessment and Request Management """
#s3 = current.response.s3
return M(c="hms")(
M("Hospitals", f="hospital")(
M("Create", m="create"),
M("Map", m="map"),
M("Report", m="report"),
M("Import", m="import", p="create"),
#SEP(),
#M("Show Map", c="gis", f="map_viewing_client",
#vars={"kml_feed" : "%s/hms/hospital.kml" %
#s3.base_url, "kml_name" : "Hospitals_"})
)
)
# -------------------------------------------------------------------------
    @staticmethod
    def hrm():
        """ HRM / Human Resources Management """
        s3 = current.session.s3
        ADMIN = s3.system_roles.ADMIN
        # Custom conditions for the check-hook, as lambdas in order
        # to have them checked only immediately before rendering:
        manager_mode = lambda i: s3.hrm.mode is None
        personal_mode = lambda i: s3.hrm.mode is not None
        # NB: `settings` is only assigned below - OK because these
        #     lambdas are not called until render time
        skills = lambda i: settings.get_hrm_use_skills()
        # NOTE(review): `certificates` is defined but never used below
        # (cf. vol(), where the Certificate Catalog is checked against
        # it) - confirm whether a check was intended here
        certificates = lambda i: settings.get_hrm_use_certificates()
        is_org_admin = lambda i: s3.hrm.orgs and True or \
                                 ADMIN in s3.roles
        settings = current.deployment_settings
        teams = settings.get_hrm_teams()
        use_teams = lambda i: teams
        vol_enabled = lambda i: settings.has_module("vol")
        return M(c="hrm")(
                    M(settings.get_hrm_staff_label(), f="staff", m="summary",
                      check=manager_mode)(
                        M("Create", m="create"),
                        M("Search by Skills", f="competency", check=skills),
                        M("Import", f="person", m="import",
                          vars={"group":"staff"}, p="create"),
                    ),
                    M("Staff & Volunteers (Combined)",
                      c="hrm", f="human_resource", m="summary",
                      check=(manager_mode, vol_enabled)),
                    M(teams, f="group",
                      check=(manager_mode, use_teams))(
                        M("Create", m="create"),
                        M("Search Members", f="group_membership"),
                        M("Import", f="group_membership", m="import"),
                    ),
                    M("Department Catalog", f="department",
                      check=manager_mode)(
                        M("Create", m="create"),
                    ),
                    M("Job Title Catalog", f="job_title",
                      check=manager_mode)(
                        M("Create", m="create"),
                    ),
                    M("Skill Catalog", f="skill",
                      check=(manager_mode, skills))(
                        M("Create", m="create"),
                        #M("Skill Provisions", f="skill_provision"),
                    ),
                    M("Training Events", f="training_event",
                      check=manager_mode)(
                        M("Create", m="create"),
                        M("Search Training Participants", f="training"),
                        M("Import Participant List", f="training", m="import"),
                    ),
                    M("Training Course Catalog", f="course",
                      check=manager_mode)(
                        M("Create", m="create"),
                        #M("Course Certificates", f="course_certificate"),
                    ),
                    M("Certificate Catalog", f="certificate",
                      check=manager_mode)(
                        M("Create", m="create"),
                        #M("Skill Equivalence", f="certificate_skill"),
                    ),
                    M("Reports", f="staff", m="report",
                      check=manager_mode)(
                        M("Staff Report", m="report"),
                        M("Expiring Staff Contracts Report",
                          vars=dict(expiring=1)),
                        M("Training Report", f="training", m="report"),
                    ),
                    M("Personal Profile", f="person",
                      check=personal_mode, vars=dict(access="personal")),
                    # This provides the link to switch to the manager mode:
                    M("Staff Management", f="index",
                      check=[personal_mode, is_org_admin]),
                    # This provides the link to switch to the personal mode:
                    M("Personal Profile", f="person",
                      check=manager_mode, vars=dict(access="personal"))
                )
# -------------------------------------------------------------------------
    @staticmethod
    def vol():
        """ Volunteer Management """
        s3 = current.session.s3
        ADMIN = s3.system_roles.ADMIN
        # Custom conditions for the check-hook, as lambdas in order
        # to have them checked only immediately before rendering:
        manager_mode = lambda i: s3.hrm.mode is None
        personal_mode = lambda i: s3.hrm.mode is not None
        is_org_admin = lambda i: s3.hrm.orgs and True or \
                                 ADMIN in s3.roles
        settings = current.deployment_settings
        show_programmes = lambda i: settings.get_hrm_vol_experience() == "programme"
        show_tasks = lambda i: settings.has_module("project") and \
                               settings.get_project_mode_task()
        skills = lambda i: settings.get_hrm_use_skills()
        certificates = lambda i: settings.get_hrm_use_certificates()
        teams = settings.get_hrm_teams()
        use_teams = lambda i: teams
        show_staff = lambda i: settings.get_hrm_show_staff()
        return M(c="vol")(
                    M("Volunteers", f="volunteer", m="summary",
                      check=(manager_mode))(
                        M("Create", m="create"),
                        M("Search by skills", f="competency", check=skills),
                        M("Import", f="person", m="import",
                          vars={"group":"volunteer"}, p="create"),
                    ),
                    M("Staff & Volunteers (Combined)",
                      c="vol", f="human_resource", m="summary",
                      check=(manager_mode, show_staff)),
                    M(teams, f="group",
                      check=(manager_mode, use_teams))(
                        M("Create", m="create"),
                        M("Search Members", f="group_membership"),
                        M("Import", f="group_membership", m="import"),
                    ),
                    M("Department Catalog", f="department",
                      check=manager_mode)(
                        M("Create", m="create"),
                    ),
                    M("Volunteer Role Catalog", f="job_title",
                      check=manager_mode)(
                        M("Create", m="create"),
                    ),
                    M("Skill Catalog", f="skill",
                      check=manager_mode)(
                        M("Create", m="create"),
                        #M("Skill Provisions", f="skill_provision"),
                    ),
                    M("Training Events", f="training_event",
                      check=manager_mode)(
                        M("Create", m="create"),
                        M("Search Training Participants", f="training"),
                        M("Import Participant List", f="training", m="import"),
                    ),
                    M("Training Course Catalog", f="course",
                      check=manager_mode)(
                        M("Create", m="create"),
                        #M("Course Certificates", f="course_certificate"),
                    ),
                    M("Certificate Catalog", f="certificate",
                      check=(manager_mode, certificates))(
                        M("Create", m="create"),
                        #M("Skill Equivalence", f="certificate_skill"),
                    ),
                    M("Programs", f="programme",
                      check=[manager_mode, show_programmes])(
                        M("Create", m="create"),
                        M("Import Hours", f="programme_hours", m="import"),
                    ),
                    M("Reports", f="volunteer", m="report",
                      check=manager_mode)(
                        M("Volunteer Report", m="report"),
                        M("Hours by Role Report", f="programme_hours", m="report",
                          vars=Storage(rows="job_title_id",
                                       cols="month",
                                       fact="sum(hours)"),
                          check=show_programmes),
                        M("Hours by Program Report", f="programme_hours", m="report",
                          vars=Storage(rows="programme_id",
                                       cols="month",
                                       fact="sum(hours)"),
                          check=show_programmes),
                        M("Training Report", f="training", m="report"),
                    ),
                    M("My Profile", f="person",
                      check=personal_mode, vars=dict(access="personal")),
                    M("My Tasks", f="task",
                      check=[personal_mode, show_tasks],
                      vars=dict(access="personal",
                                mine=1)),
                    # This provides the link to switch to the manager mode:
                    M("Volunteer Management", f="index",
                      check=[personal_mode, is_org_admin]),
                    # This provides the link to switch to the personal mode:
                    M("Personal Profile", f="person",
                      check=manager_mode, vars=dict(access="personal"))
                )
# -------------------------------------------------------------------------
    @staticmethod
    def inv():
        """ INV / Inventory """
        ADMIN = current.session.s3.system_roles.ADMIN
        # Side effect: installs the CRUD strings for inv_recv so the
        # localized list-title can be used as the menu label below
        current.s3db.inv_recv_crud_strings()
        inv_recv_list = current.response.s3.crud_strings.inv_recv.title_list
        settings = current.deployment_settings
        # Stock adjustments only shown when direct stock edits are disabled
        use_adjust = lambda i: not settings.get_inv_direct_stock_edits()
        use_commit = lambda i: settings.get_req_use_commit()
        return M()(
                    #M("Home", f="index"),
                    M("Warehouses", c="inv", f="warehouse")(
                        M("Create", m="create"),
                        M("Import", m="import", p="create"),
                    ),
                    M("Warehouse Stock", c="inv", f="inv_item")(
                        M("Adjust Stock Levels", f="adj", check=use_adjust),
                        M("Kitting", f="kitting"),
                        M("Import", f="inv_item", m="import", p="create"),
                    ),
                    M("Reports", c="inv", f="inv_item")(
                        M("Warehouse Stock", f="inv_item", m="report"),
                        M("Expiration Report", c="inv", f="track_item",
                          vars=dict(report="exp")),
                        M("Monetization Report", c="inv", f="inv_item",
                          vars=dict(report="mon")),
                        M("Utilization Report", c="inv", f="track_item",
                          vars=dict(report="util")),
                        M("Summary of Incoming Supplies", c="inv", f="track_item",
                          vars=dict(report="inc")),
                        M("Summary of Releases", c="inv", f="track_item",
                          vars=dict(report="rel")),
                    ),
                    M(inv_recv_list, c="inv", f="recv", translate=False)( # Already T()
                        M("Create", m="create"),
                        M("Timeline", args="timeline"),
                    ),
                    M("Sent Shipments", c="inv", f="send")(
                        M("Create", m="create"),
                        M("Search Shipped Items", f="track_item"),
                        M("Timeline", args="timeline"),
                    ),
                    M("Items", c="supply", f="item", m="summary")(
                        M("Create", m="create"),
                        M("Import", f="catalog_item", m="import", p="create"),
                    ),
                    # Catalog Items moved to be next to the Item Categories
                    #M("Catalog Items", c="supply", f="catalog_item")(
                    #M("Create", m="create"),
                    #),
                    #M("Brands", c="supply", f="brand",
                    #  restrict=[ADMIN])(
                    #    M("Create", m="create"),
                    #),
                    M("Catalogs", c="supply", f="catalog")(
                        M("Create", m="create"),
                    ),
                    M("Item Categories", c="supply", f="item_category",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M("Suppliers", c="inv", f="supplier")(
                        M("Create", m="create"),
                        M("Import", m="import", p="create"),
                    ),
                    M("Facilities", c="inv", f="facility")(
                        M("Create", m="create", t="org_facility"),
                    ),
                    M("Facility Types", c="inv", f="facility_type",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M("Warehouse Types", c="inv", f="warehouse_type",
                      restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M("Requests", c="req", f="req")(
                        M("Create", m="create"),
                        M("Requested Items", f="req_item"),
                    ),
                    M("Commitments", c="req", f="commit", check=use_commit)(
                    ),
                )
# -------------------------------------------------------------------------
    @staticmethod
    def irs():
        """ IRS / Incident Report System """
        ADMIN = current.session.s3.system_roles.ADMIN
        return M(c="irs")(
                    M("Incident Reports", f="ireport")(
                        M("Create Incident Report", m="create"),
                        # Pre-filtered list of open incidents
                        M("Open Incidents", vars={"open":1}),
                        M("Map", m="map"),
                        M("Timeline", args="timeline"),
                        M("Import", m="import"),
                        M("Report", m="report")
                    ),
                    M("Incident Categories", f="icategory", restrict=[ADMIN])(
                        M("Create", m="create"),
                    ),
                    M("Ushahidi Import", f="ireport", restrict=[ADMIN],
                      args="ushahidi")
                )
# -------------------------------------------------------------------------
@staticmethod
def security():
    """
    Security Management System

    @return: the options menu (M tree) for the security module
    """
    ADMIN = current.session.s3.system_roles.ADMIN
    return M(c="security")(
        M("Incident Reports", c="event", f="incident_report", m="summary")(
            M("Create", m="create"),
            M("Import", m="import"),
        ),
        M("Security Levels", f="level")(
            # Fix: was mislabelled "level" - every other submenu uses
            # the standard "Create" label for the create-link
            M("Create", m="create"),
        ),
        M("Security Zones", f="zone")(
            M("Create", m="create"),
        ),
        M("Facilities", c="org", f="facility", m="summary")(
            M("Create", m="create"),
            M("Import", m="import"),
        ),
        M("Personnel", f="staff")(
            M("Create", m="create"),
            M("List All Security-related Staff"),
            M("List All Essential Staff", f="essential"),
        ),
        M("Incident Categories", c="event", f="incident_type",
          restrict=[ADMIN])(
            M("Create", m="create"),
        ),
        M("Facility Types", c="org", f="facility_type",
          restrict=[ADMIN])(
            M("Create", m="create"),
        ),
        M("Zone Types", f="zone_type", restrict=[ADMIN])(
            M("Create", m="create"),
        ),
        M("Security Staff Types", f="staff_type", restrict=[ADMIN])(
            M("Create", m="create"),
        ),
        #M("Ushahidi Import", c="irs", f="ireport", restrict=[ADMIN],
        # args="ushahidi")
    )
# -------------------------------------------------------------------------
def scenario(self):
    """
    SCENARIO

    Scenarios share the Event Management options menu.
    """
    # Use EVENT menu
    return self.event()
# -------------------------------------------------------------------------
def supply(self):
    """
    SUPPLY

    The Supply module shares the Inventory options menu.
    """
    # Use INV menu
    return self.inv()
# -------------------------------------------------------------------------
@staticmethod
def survey():
    """
    SURVEY / Survey

    If the request refers to a particular series (directly in args,
    or via a "viewing" variable), then the completed-assessment
    import link is pre-filtered to that series.
    """
    ADMIN = current.session.s3.system_roles.ADMIN
    # Do we have a series_id?
    series_id = False
    get_vars = Storage()
    try:
        series_id = int(current.request.args[0])
    except Exception:
        # No usable series id in args => try the "viewing" variable.
        # Narrowed from a bare except, which would also have swallowed
        # SystemExit/KeyboardInterrupt; failure just means "no series".
        try:
            (dummy, series_id) = current.request.get_vars["viewing"].split(".")
            series_id = int(series_id)
        except Exception:
            pass
    if series_id:
        get_vars.viewing = "survey_complete.%s" % series_id
    return M(c="survey")(
        M("Assessment Templates", f="template")(
            M("Create", m="create"),
        ),
        #M("Section", f="section")(
        #    M("Create", args="create"),
        #),
        M("Disaster Assessments", f="series")(
            M("Create", m="create"),
        ),
        M("Administration", f="admin", restrict=[ADMIN])(
            M("Import Templates", f="question_list",
              m="import", p="create"),
            M("Import Template Layout", f="formatter",
              m="import", p="create"),
            M("Import Completed Assessment Forms", f="complete",
              m="import", p="create", vars=get_vars, check=series_id),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def member():
    """
    Membership Management

    @return: the options menu (M tree) for the member module
    """
    return M(c="member")(
        M("Members", f="membership", m="summary")(
            M("Create", m="create"),
            #M("Report", m="report"),
            # member import runs through the person importer
            M("Import", f="person", m="import"),
        ),
        M("Membership Types", f="membership_type")(
            M("Create", m="create"),
            #M("Import", m="import"),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def mpr():
    """
    MPR / Missing Person Registry

    @return: the options menu (M tree) for the mpr module
    """
    return M(c="mpr")(
        M("Missing Persons", f="person")(
            M("Create", m="create"),
        ),
    )
# -------------------------------------------------------------------------
def msg(self):
    """
    MSG / Messaging

    Channel-configuration pages are administered through the Admin
    menu rather than the Messaging menu.
    """
    ADMIN = current.session.s3.system_roles.ADMIN
    # Channel-settings controllers use the Admin menu:
    if current.request.function in ("sms_outbound_gateway",
                                    "email_channel",
                                    "facebook_channel",
                                    "sms_modem_channel",
                                    "sms_smtp_channel",
                                    "sms_webapi_channel",
                                    "tropo_channel",
                                    "twitter_channel"):
        return self.admin()
    # Shared channel-settings items (fresh instances per menu)
    settings_messaging = self.settings_messaging()
    return M(c="msg")(
        M("Compose", f="compose"),
        M("InBox", f="inbox")(
            M("Email", f="email_inbox"),
            #M("Facebook", f="facebook_inbox"),
            M("RSS", f="rss"),
            M("SMS", f="sms_inbox"),
            M("Twitter", f="twitter_inbox"),
        ),
        M("Outbox", f="outbox")(
            M("Email", f="email_outbox"),
            M("Facebook", f="facebook_outbox"),
            M("SMS", f="sms_outbox"),
            M("Twitter", f="twitter_outbox"),
        ),
        M("Message Log", f="message"),
        M("Distribution groups", f="group")(
            M("Group Memberships", f="group_membership"),
        ),
        M("Twitter Search", f="twitter_result")(
            M("Search Queries", f="twitter_search"),
            M("Results", f="twitter_result"),
            # @ToDo KeyGraph Results
        ),
        M("Administration", restrict=[ADMIN])(settings_messaging)
    )
# -------------------------------------------------------------------------
@staticmethod
def org():
    """
    ORG / Organization Registry

    @return: the options menu (M tree) for the org module
    """
    ADMIN = current.session.s3.system_roles.ADMIN
    # Deployment-configurable terminology: "Clusters" vs "Sectors"
    SECTORS = "Clusters" if current.deployment_settings.get_ui_label_cluster() \
                         else "Sectors"
    return M(c="org")(
        M("Organizations", f="organisation")(
            M("Create", m="create"),
            M("Import", m="import")
        ),
        M("Offices", f="office")(
            M("Create", m="create"),
            M("Map", m="map"),
            M("Import", m="import")
        ),
        M("Facilities", f="facility")(
            M("Create", m="create"),
            M("Import", m="import"),
        ),
        # NOTE(review): both of the following point at f="resource"
        # (plain list vs. summary view) - looks intentional, but
        # confirm that the duplicate entry is wanted
        M("Resource Inventory", f="resource")(
            M("Create", m="create"),
            M("Import", m="import")
        ),
        M("Resources", f="resource", m="summary")(
            M("Create", m="create"),
            M("Import", m="import")
        ),
        M("Organization Types", f="organisation_type",
          restrict=[ADMIN])(
            M("Create", m="create"),
        ),
        M("Office Types", f="office_type",
          restrict=[ADMIN])(
            M("Create", m="create"),
        ),
        M("Facility Types", f="facility_type",
          restrict=[ADMIN])(
            M("Create", m="create"),
        ),
        M(SECTORS, f="sector", restrict=[ADMIN])(
            M("Create", m="create"),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def patient():
    """
    PATIENT / Patient Tracking

    @return: the options menu (M tree) for the patient module
    """
    return M(c="patient")(
        M("Patients", f="patient")(
            M("Create", m="create"),
        )
    )
# -------------------------------------------------------------------------
@staticmethod
def po():
    """
    PO / Population Outreach

    The follow-ups menu item shows the number of currently due
    follow-ups in its label, when there are any.
    """
    due = current.s3db.po_due_followups()
    DUE_FOLLOWUPS = current.T("Due Follow-ups")
    follow_up_label = "%s (%s)" % (DUE_FOLLOWUPS, due) if due \
                      else DUE_FOLLOWUPS
    households = M("Households", f="household", m="summary")(
        M("Create", m="create"),
        M("Import", m="import"),
    )
    followups = M(follow_up_label, f="due_followups",
                  translate=False,
                  )
    areas = M("Areas", f="area")(
        M("Create", m="create"),
    )
    agencies = M("Referral Agencies", f="organisation")(
        M("Create", m="create"),
    )
    return M(c="po")(
        M("Overview", f="index"),
        households,
        followups,
        areas,
        agencies,
    )
# -------------------------------------------------------------------------
@staticmethod
def pr():
    """
    PR / Person Registry

    The standalone person registry is restricted to administrators.
    """
    ADMIN = current.session.s3.system_roles.ADMIN
    # NOTE(review): restrict=ADMIN (scalar) whereas other menus pass
    # restrict=[ADMIN] - confirm that M() accepts both forms
    return M(c="pr", restrict=ADMIN)(
        M("Persons", f="person")(
            M("Create", m="create"),
        ),
        M("Groups", f="group")(
            M("Create", m="create"),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def proc():
    """
    PROC / Procurement

    @return: the options menu (M tree) for the proc module
    """
    return M(c="proc")(
        M("Procurement Plans", f="plan")(
            M("Create", m="create"),
        ),
        M("Suppliers", f="supplier")(
            M("Create", m="create"),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def project():
    """
    PROJECT / Project Tracking & Management

    Builds a different menu depending on the deployment's project
    mode: 3W (who-what-where reporting), task tracking, or a
    minimal default.
    """
    settings = current.deployment_settings
    #activities = lambda i: settings.get_project_activities()
    # Lazy checks: each lambda is re-evaluated when the menu is
    # rendered, so menu items follow the current deployment settings
    activity_types = lambda i: settings.get_project_activity_types()
    community = settings.get_project_community()
    if community:
        IMPORT = "Import Project Communities"
    else:
        IMPORT = "Import Project Locations"
    community_volunteers = lambda i: settings.get_project_community_volunteers()
    hazards = lambda i: settings.get_project_hazards()
    #indicators = lambda i: settings.get_project_indicators()
    programmes = lambda i: settings.get_project_programmes()
    sectors = lambda i: settings.get_project_sectors()
    stats = lambda i: settings.has_module("stats")
    themes = lambda i: settings.get_project_themes()
    menu = M(c="project")
    if settings.get_project_mode_3w():
        if community:
            menu(
                 M("Programs", f="programme",
                   check=programmes)(
                    M("Create", m="create"),
                 ),
                 M("Projects", f="project")(
                    M("Create", m="create"),
                 ),
                 M("Communities", f="location")(
                    # Better created from tab (otherwise Activity Type filter won't work)
                    #M("Create", m="create"),
                    M("Map", m="map"),
                    M("Community Contacts", f="location_contact"),
                    M("Community Volunteers", f="volunteer",
                      check=community_volunteers),
                 ),
                )
        else:
            menu(
                 M("Programs", f="programme",
                   check=programmes)(
                    M("Create", m="create"),
                 ),
                 M("Projects", f="project")(
                    M("Create", m="create"),
                    M("Map", f="location", m="map"),
                 )
                )
        # Common 3W items (reports, imports, lookup tables):
        menu(
             M("Reports", f="location", m="report")(
                M("3W", f="location", m="report"),
                M("Beneficiaries", f="beneficiary", m="report",
                  check=stats,
                  ),
                #M("Indicators", f="indicator", m="report",
                #  check=indicators,
                #  ),
                #M("Indicators over Time", f="indicator", m="timeplot",
                #  check=indicators,
                #  ),
                M("Funding", f="organisation", m="report"),
             ),
             M("Import", f="project", m="import", p="create")(
                M("Import Projects", m="import", p="create"),
                M("Import Project Organizations", f="organisation",
                  m="import", p="create"),
                M(IMPORT, f="location",
                  m="import", p="create"),
             ),
             M("Partner Organizations", f="partners")(
                M("Create", m="create"),
                M("Import", m="import", p="create"),
             ),
             M("Activity Types", f="activity_type",
               check=activity_types)(
                M("Create", m="create"),
             ),
             M("Beneficiary Types", f="beneficiary_type",
               check=stats)(
                M("Create", m="create"),
             ),
             M("Demographics", f="demographic",
               check=stats)(
                M("Create", m="create"),
             ),
             M("Hazards", f="hazard",
               check=hazards)(
                M("Create", m="create"),
             ),
             #M("Indicators", f="indicator",
             #  check=indicators)(
             #   M("Create", m="create"),
             #),
             M("Sectors", f="sector",
               check=sectors)(
                M("Create", m="create"),
             ),
             M("Themes", f="theme",
               check=themes)(
                M("Create", m="create"),
             ),
            )
    elif settings.get_project_mode_task():
        menu(
             M("Projects", f="project")(
                M("Create", m="create"),
                M("Open Tasks for Project", vars={"tasks":1}),
             ),
             M("Tasks", f="task")(
                M("Create", m="create"),
             ),
            )
        # Extra items for staff members:
        if current.auth.s3_has_role("STAFF"):
            ADMIN = current.session.s3.system_roles.ADMIN
            menu(
                 M("Daily Work", f="time")(
                    M("My Logged Hours", vars={"mine":1}),
                    M("My Open Tasks", f="task", vars={"mine":1}),
                 ),
                 M("Admin", restrict=[ADMIN])(
                    M("Activity Types", f="activity_type"),
                    M("Import Tasks", f="task", m="import", p="create"),
                 ),
                 M("Reports", f="report")(
                    M("Activity Report", f="activity", m="report"),
                    M("Last Week's Work", f="time", m="report",
                      vars=Storage(rows="person_id",
                                   cols="day",
                                   fact="sum(hours)",
                                   week=1)),
                    M("Last Month's Work", f="time", m="report",
                      vars=Storage(rows="person_id",
                                   cols="week",
                                   fact="sum(hours)",
                                   month=1)),
                    M("Project Time Report", f="time", m="report"),
                 ),
                )
    else:
        menu(
             M("Projects", f="project")(
                M("Create", m="create"),
                M("Import", m="import", p="create"),
             ),
            )
    return menu
# -------------------------------------------------------------------------
@staticmethod
def req():
    """
    REQ / Request Management

    When the deployment uses a single request type, the create-link
    pre-selects that type.
    """
    ADMIN = current.session.s3.system_roles.ADMIN
    settings = current.deployment_settings
    types = settings.get_req_req_type()
    # Map request type names to the pre-selection vars
    type_vars = {"Stock": {"type": 1},
                 "People": {"type": 2},
                 }
    if len(types) == 1 and types[0] in type_vars:
        create_menu = M("Create", m="create", vars=type_vars[types[0]])
    else:
        create_menu = M("Create", m="create")
    # Lazy deployment-settings checks:
    recurring = lambda i: settings.get_req_recurring()
    use_commit = lambda i: settings.get_req_use_commit()
    req_items = lambda i: "Stock" in types
    req_skills = lambda i: "People" in types
    return M(c="req")(
        M("Requests", f="req")(
            create_menu,
            M("List Recurring Requests", f="req_template", check=recurring),
            M("Map", m="map"),
            M("Report", m="report"),
            M("Search All Requested Items", f="req_item",
              check=req_items),
            M("Search All Requested Skills", f="req_skill",
              check=req_skills),
        ),
        M("Commitments", f="commit", check=use_commit)(
        ),
        M("Items", c="supply", f="item")(
            M("Create", m="create"),
            M("Report", m="report"),
            M("Import", m="import", p="create"),
        ),
        # Catalog Items moved to be next to the Item Categories
        #M("Catalog Items", c="supply", f="catalog_item")(
        #M("Create", m="create"),
        #),
        M("Catalogs", c="supply", f="catalog")(
            M("Create", m="create"),
        ),
        M("Item Categories", c="supply", f="item_category",
          restrict=[ADMIN])(
            M("Create", m="create"),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def stats():
    """
    Statistics

    @return: the options menu (M tree) for the stats module
    """
    return M(c="stats")(
        M("Demographics", f="demographic")(
            M("Create", m="create"),
        ),
        M("Demographic Data", f="demographic_data", args="summary")(
            M("Create", m="create"),
            # Not usually dis-aggregated
            M("Time Plot", m="timeplot"),
            M("Import", m="import"),
        ),
    )
# -------------------------------------------------------------------------
def sync(self):
    """
    SYNC menu

    Synchronization shares the Admin options menu.
    """
    # Use admin menu
    return self.admin()
# -------------------------------------------------------------------------
@staticmethod
def tour():
    """
    Guided Tour

    All items are admin-only.
    """
    ADMIN = current.session.s3.system_roles.ADMIN
    return M(c="tour")(
        M("Configuration", f="config", restrict=[ADMIN])(
            M("Import", m="import", restrict=[ADMIN]),
        ),
        M("Detail", f="details", restrict=[ADMIN]),
        M("User", f="user", restrict=[ADMIN]),
    )
# -------------------------------------------------------------------------
@staticmethod
def transport():
    """
    TRANSPORT

    @return: the options menu (M tree) for the transport module
    """
    ADMIN = current.session.s3.system_roles.ADMIN
    return M(c="transport")(
        M("Airports", f="airport")(
            M("Create", m="create"),
            M("Map", m="map"),
            M("Import", m="import", restrict=[ADMIN]),
        ),
        M("Heliports", f="heliport")(
            M("Create", m="create"),
            M("Map", m="map"),
            M("Import", m="import", restrict=[ADMIN]),
        ),
        M("Seaports", f="seaport")(
            M("Create", m="create"),
            M("Map", m="map"),
            M("Import", m="import", restrict=[ADMIN]),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def vehicle():
    """
    VEHICLE / Vehicle Tracking

    @return: the options menu (M tree) for the vehicle module
    """
    return M(c="vehicle")(
        M("Vehicles", f="vehicle")(
            M("Create", m="create"),
            M("Import", m="import", p="create"),
            M("Map", m="map"),
        ),
        M("Vehicle Types", f="vehicle_type")(
            M("Create", m="create"),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def vulnerability():
    """
    Vulnerability

    @return: the options menu (M tree) for the vulnerability module
    """
    return M(c="vulnerability")(
        M("Indicators", f="indicator")(
            M("Create", m="create"),
        ),
        M("Data", f="data")(
            M("Create", m="create"),
            M("Import", m="import"),
        ),
    )
# -------------------------------------------------------------------------
@staticmethod
def water():
    """
    Water: Floods, etc

    @return: the options menu (M tree) for the water module
    """
    return M(c="water")(
        M("Gauges", f="gauge")(
            M("Create", m="create"),
            M("Map", m="map"),
            M("Import", m="import"),
        ),
        M("Rivers", f="river")(
            M("Create", m="create"),
            M("Map", m="map"),
            #M("Import", m="import"),
        ),
        M("Zones", f="zone")(
            M("Create", m="create"),
            M("Map", m="map"),
            #M("Import", m="import"),
        ),
        M("Zone Types", f="zone_type")(
            M("Create", m="create"),
            M("Map", m="map"),
            #M("Import", m="import"),
        ),
    )
# -------------------------------------------------------------------------
@classmethod
def settings_messaging(cls):
    """ Messaging settings menu items:
        These items are used in multiple menus, but each item instance can
        always only belong to one parent, so we need to re-instantiate
        with the same parameters, and therefore this is defined as a
        function here.

        @return: a list of M items (channel configuration pages)
    """
    return [
        M("Email Channels (Inbound)", c="msg", f="email_channel"),
        M("Facebook Channels", c="msg", f="facebook_channel"),
        M("RSS Channels", c="msg", f="rss_channel"),
        M("SMS Outbound Gateways", c="msg", f="sms_outbound_gateway")(
            M("SMS Modem Channels", c="msg", f="sms_modem_channel"),
            M("SMS SMTP Channels", c="msg", f="sms_smtp_channel"),
            M("SMS WebAPI Channels", c="msg", f="sms_webapi_channel"),
        ),
        M("Mobile Commons Channels", c="msg", f="mcommons_channel"),
        M("Twilio Channels", c="msg", f="twilio_channel"),
        M("Twitter Channels", c="msg", f="twitter_channel"),
        M("Parsers", c="msg", f="parser"),
    ]
# -------------------------------------------------------------------------
@classmethod
def breadcrumbs(cls):
    """
    Breadcrumbs from the current options menu

    @return: the breadcrumbs layout tree: homepage link, the current
             module's homepage, then the path within the options menu
    """
    # Configure the layout:
    layout = S3BreadcrumbsLayout
    request = current.request
    controller = request.controller
    function = request.function
    all_modules = current.deployment_settings.modules
    # Start with a link to the homepage - always:
    breadcrumbs = layout()(
        layout(all_modules["default"].name_nice)
    )
    # Append the current module's homepage - always:
    # @note: this may give a breadcrumb for which there's no menu item
    # and should therefore perhaps be replaced by a real path-check in
    # the main menu?
    if controller != "default":
        try:
            breadcrumbs(
                layout(all_modules[controller].name_nice, c=controller)
            )
        except (KeyError, AttributeError):
            # Module not defined in deployment settings (narrowed
            # from a bare except, which would also have swallowed
            # SystemExit/KeyboardInterrupt)
            pass
    # This checks the path in the options menu, omitting the top-level item
    # (because that's the menu itself which doesn't have a linked label):
    menu = current.menu.options
    if menu and function != "index":
        branch = menu.branch()
        if branch:
            path = branch.path()
            if len(path) > 1:
                for item in path[1:]:
                    breadcrumbs(
                        layout(item.label,
                               c=item.get("controller"),
                               f=item.get("function"),
                               args=item.args,
                               # Should we retain the request vars in case
                               # the item has no vars? Or shall we merge them
                               # in any case? Didn't see the use-case yet
                               # anywhere...
                               vars=item.vars))
    return breadcrumbs
# END =========================================================================
| mit |
pascalmolin/arb | doc/source/verify_taylor.py | 3 | 7858 | from gmpy import mpq, lcm, denom, numer, fac
def atan_coefficients(NN, bits):
    """
    Taylor coefficients for atan(x): ps[k]/qs[k] == 1/(2k+1), grouped
    so that consecutive terms share a common denominator qs[k] that
    still fits below 2**bits.

    Returns (ps, qs): two length-NN lists of plain Python ints.

    Implementation uses only the standard library (fractions) instead
    of gmpy; the resulting integers are identical.
    """
    from fractions import Fraction

    def _lcm(a, b):
        # lcm via Euclid's gcd (a, b > 0)
        x, y = a, b
        while y:
            x, y = y, x % y
        return a // x * b

    ps = []
    qs = []
    temp = []
    Q = 1
    for k in range(2*NN+50):
        q = 2*k+1
        m = _lcm(Q, q)
        if m < 2**bits:
            # q still fits into the current group's common denominator
            temp.append(Fraction(1, q))
            Q = m
        else:
            # flush the group, rescaled to the common denominator Q
            for a in temp:
                ps.append(int(a * Q))
                qs.append(int(Q))
            Q = q
            temp = [Fraction(1, q)]
    return ps[:NN], qs[:NN]
def exp_coefficients(M, bits):
    """
    Taylor coefficients for exp(x) with the factorial denominators
    collapsed into groups sharing a common denominator strictly below
    2**bits - 1 (see verify_exp for how Ps/Qs are consumed in the
    rectangular-splitting Horner scheme).

    Returns (Ps, Qs): two length-(2*M+50) lists of plain Python ints.

    Fixes/portability:
      * uses math.factorial instead of gmpy.fac (stdlib only);
      * the old ``map(int, Ps)[:N]`` return was Python-2-only (map()
        is not subscriptable on Python 3); the values are already
        plain ints here, so a list slice suffices.
    """
    from math import factorial
    N = 2*M+50
    Qs = [factorial(k) for k in range(N)]
    prevstop = 0
    for k in range(N):
        if Qs[k] >= 2**bits-1:
            # start a new group: divide the tail by the last word-size
            # value and freeze the previous group at that denominator
            q = Qs[k-1]
            for i in range(k, N):
                Qs[i] //= q
            for i in range(prevstop, k):
                Qs[i] = q
            prevstop = k
    Ps = Qs[:]
    fact = 1
    for k in range(1, N):
        assert Qs[k] < 2**bits-1
        # within a group the denominator is constant, so the running
        # factor accumulates the factorial of the offset into the group
        if Qs[k] == Qs[k-1]:
            fact *= k
        else:
            fact = k
        Ps[k] //= fact
    return Ps[:N], Qs[:N]
class FixedPointBound(object):
    """
    Magnitude/error bound for a fixed-point number with *bits*
    fractional bits.

    mid -- exact rational upper bound for the midpoint magnitude
    rad -- upper bound for the error radius, measured in ulp
           (1 ulp = 2**-bits)

    All arithmetic is exact, using the standard library's
    fractions.Fraction instead of gmpy.mpq; foreign rational inputs
    (e.g. gmpy.mpq) are converted on entry, so existing callers that
    pass mpq values keep working.
    """

    def __init__(self, bits, mid, rad):
        self.bits = bits
        self.mid = self._rational(mid)
        self.rad = self._rational(rad)  # rad is in ulp

    @staticmethod
    def _rational(x):
        # Exact conversion to Fraction; falls back to the "p/q" string
        # form for rational types that Fraction() does not accept
        # directly (e.g. old gmpy.mpq instances).
        from fractions import Fraction
        if isinstance(x, Fraction):
            return x
        try:
            return Fraction(x)
        except (TypeError, ValueError):
            return Fraction(str(x))

    def _ulp(self):
        # The unit in the last place, as an exact rational.
        return self._rational(1) / 2**self.bits

    def add(self, other):
        """Bound for self + other (other: bound, or exact nonnegative int)."""
        if isinstance(other, FixedPointBound):
            mid = self.mid + other.mid
            rad = self.rad + other.rad
        else:
            assert other == int(other) and other >= 0
            mid = self.mid + int(other)
            rad = self.rad
        return FixedPointBound(self.bits, mid, rad)

    def mul(self, other):
        """Bound for a rounded product (or an exact integer scaling)."""
        if isinstance(other, FixedPointBound):
            MAX_ULP = self._ulp()
            mid = self.mid * other.mid
            rad = 0
            rad += self.rad * other.mid            # ulp
            rad += self.mid * other.rad            # ulp
            rad += self.rad * other.rad * MAX_ULP  # ulp (second-order term)
            rad += 1                               # ulp (final rounding)
        else:
            assert other == int(other) and other >= 0
            mid = self.mid * int(other)
            rad = self.rad * int(other)
        return FixedPointBound(self.bits, mid, rad)

    def div(self, other):
        """Bound for a rounded division by a nonnegative integer."""
        assert other == int(other) and other >= 0
        mid = self.mid / int(other)
        rad = self.rad / int(other) + 1   # +1 ulp for the rounding
        return FixedPointBound(self.bits, mid, rad)

    def addmul(self, other, c):
        """Bound for self + other*c with an exact integer c >= 0."""
        assert c == int(c) and c >= 0
        c = abs(int(c))
        mid = self.mid + other.mid * c
        rad = self.rad + other.rad * c
        return FixedPointBound(self.bits, mid, rad)

    def check_overflow_0(self):
        # check that self fits 0 integral limbs
        MAX_ULP = self._ulp()
        assert self.mid + self.rad * MAX_ULP < 1 - MAX_ULP

    def check_overflow_1(self):
        # check that self fits 1 integral limb
        MAX_ULP = self._ulp()
        assert self.mid + self.rad * MAX_ULP < 2**self.bits - MAX_ULP

    def check_le_int(self, c):
        # check that |self| <= c
        MAX_ULP = self._ulp()
        assert self.mid + self.rad * MAX_ULP <= c
def verify_atan(N, PS, QS, bits):
    """
    Prove that evaluating the degree-N atan Taylor approximation at
    x = 1/16 in bits-bit fixed-point arithmetic (rectangular splitting,
    Horner over blocks of m terms) keeps the accumulated rounding-error
    radius within 2 ulp, and that no intermediate value overflows.
    """
    X = FixedPointBound(bits, mpq(1,16), 0)
    S = FixedPointBound(bits, 0, 0)
    # splitting parameter: smallest even m with m*m >= N
    m = 2
    while m * m < N:
        m += 2
    # T[k] bounds x^(2k): precomputed even powers for the splitting
    T = [None] * (m+1)
    T[1] = X.mul(X)
    T[2] = T[1].mul(T[1])
    for k in range(4, m + 1, 2):
        T[k-1] = T[k//2].mul(T[k//2-1])
        T[k] = T[k//2].mul(T[k//2])
    # Horner loop over the coefficients, highest index first
    for k in range(N-1, -1, -1):
        c, d, e = PS[k], QS[k], QS[k+1]
        if d != e and k < N-1:
            # group boundary: rescale S from denominator e to d
            # if alternating, adding e must give a nonnegative number
            S.check_le_int(e)
            # adding e must not overflow
            S.add(e).check_overflow_1()
            S = S.mul(d).div(e)
            # if alternating, adding d must not overflow
            S.add(d).check_overflow_1()
        if k % m == 0:
            # block boundary: absorb coefficient, then multiply by x^(2m)
            # if alternating, adding c must give a nonnegative number
            S.check_le_int(c)
            S = S.add(c)
            S.check_overflow_1()
            if k != 0:
                S = S.mul(T[m])
                S.check_overflow_1()
        else:
            S = S.addmul(T[k % m], c)
            S.check_overflow_1()
    # final division by the leading denominator and multiplication by x
    S = S.div(mpq(QS[0]))
    S = S.mul(X)
    S.check_overflow_0()
    print N, float(S.mid), float(S.rad)
    assert S.rad <= 2
def verify_exp(N, PS, QS, bits):
    """
    Prove that evaluating the degree-N exp Taylor series at x = 1/16
    in bits-bit fixed-point arithmetic (rectangular splitting, Horner
    over blocks of m terms) keeps the accumulated rounding-error
    radius within 2 ulp, and that no intermediate value overflows.
    """
    X = FixedPointBound(bits, mpq(1,16), 0)
    S = FixedPointBound(bits, 0, 0)
    # splitting parameter: smallest even m with m*m >= N
    m = 2
    while m * m < N:
        m += 2
    # T[k] bounds x^k: precomputed powers for the splitting
    T = [None] * (m+1)
    T[1] = X
    T[2] = T[1].mul(T[1])
    for k in range(4, m + 1, 2):
        T[k-1] = T[k//2].mul(T[k//2-1])
        T[k] = T[k//2].mul(T[k//2])
    # Horner loop over the coefficients, highest index first
    for k in range(N-1, -1, -1):
        c, d, e = PS[k], QS[k], QS[k+1]
        if d != e and k < N-1:
            # group boundary: rescale S by dividing out e
            # if alternating, adding e must give a nonnegative number
            S.check_le_int(e)
            # adding e must not overflow
            S.add(e).check_overflow_1()
            S = S.div(e)
            # if alternating, adding 1 must not overflow
            S.add(1).check_overflow_1()
        if k % m == 0:
            # block boundary: absorb coefficient, then multiply by x^m
            # if alternating, adding c must give a nonnegative number
            S.check_le_int(c)
            S = S.add(c)
            S.check_overflow_1()
            if k != 0:
                S = S.mul(T[m])
                S.check_overflow_1()
        else:
            S = S.addmul(T[k % m], c)
            S.check_overflow_1()
    # final division by the leading denominator
    S = S.div(mpq(QS[0]))
    S.check_overflow_1()
    print N, float(S.mid), float(S.rad)
    assert S.rad <= 2
def verify_sin_cos(N, PS, QS, bits):
    """
    Prove that evaluating the degree-N cos (cosorsin == 0) and sin
    (cosorsin == 1) Taylor series at x = 1/16 in bits-bit fixed-point
    arithmetic keeps the rounding-error radius within 2 ulp.

    PS/QS interleave the cos and sin coefficients, hence the 2*k
    indexing below.
    """
    X = FixedPointBound(bits, mpq(1,16), 0)
    # splitting parameter: smallest even m with m*m >= N
    m = 2
    while m * m < N:
        m += 2
    # T[k] bounds x^(2k): shared even powers for both series
    T = [None] * (m+1)
    T[1] = X.mul(X)
    T[2] = T[1].mul(T[1])
    for k in range(4, m + 1, 2):
        T[k-1] = T[k//2].mul(T[k//2-1])
        T[k] = T[k//2].mul(T[k//2])
    for cosorsin in range(2):
        S = FixedPointBound(bits, 0, 0)
        # Horner loop over the coefficients, highest index first
        for k in range(N-1, -1, -1):
            c, d, e = PS[2*k+cosorsin], QS[2*k+cosorsin], QS[2*k+cosorsin+2]
            if d != e and k < N-1:
                # group boundary: rescale S by dividing out e
                # if alternating, adding e must give a nonnegative number
                S.check_le_int(e)
                # adding e must not overflow
                S.add(e).check_overflow_1()
                S = S.div(e)
                # if alternating, adding 1 must not overflow
                S.add(1).check_overflow_1()
            if k % m == 0:
                # if alternating, adding c must give a nonnegative number
                S.check_le_int(c)
                S = S.add(c)
                S.check_overflow_1()
                if k != 0:
                    S = S.mul(T[m])
                    S.check_overflow_1()
            else:
                S = S.addmul(T[k % m], c)
                S.check_overflow_1()
        if cosorsin == 0:
            # cos: result may be close to 1, so one integral limb
            S = S.div(mpq(QS[0]))
            S.check_overflow_1()
            # note: top limb must actually be 0 or 1;
            # but this follows by S.rad <= 2
            print N, float(S.mid), float(S.rad)
            assert S.rad <= 2
        else:
            # sin: final multiplication by x keeps the result below 1
            S = S.div(mpq(QS[0]))
            S.check_overflow_1()
            S = S.mul(X)
            S.check_overflow_0()
            print N, float(S.mid), float(S.rad)
            assert S.rad <= 2
# Run every verification for both word sizes; each verify_* routine
# asserts that the final rounding-error radius stays within 2 ulp.
# (Same execution order as the three original loops.)
for make_coeffs, verify in ((exp_coefficients, verify_sin_cos),
                            (exp_coefficients, verify_exp),
                            (atan_coefficients, verify_atan)):
    for bits in (32, 64):
        PS, QS = make_coeffs(300, bits)
        for N in range(300):
            verify(N, PS, QS, bits)
| lgpl-2.1 |
mne-tools/mne-tools.github.io | 0.13/_downloads/plot_mne_dspm_source_localization.py | 4 | 5081 | """
.. _tut_inverse_mne_dspm:
Source localization with MNE/dSPM/sLORETA
=========================================
The aim of this tutorials is to teach you how to compute and apply a linear
inverse method such as MNE/dSPM/sLORETA on evoked/raw/epochs data.
"""
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne.datasets import sample
from mne.minimum_norm import (make_inverse_operator, apply_inverse,
write_inverse_operator)
###############################################################################
# Process MEG data
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
# NOTE(review): add_eeg_ref / set_eeg_reference usage matches the MNE
# version this tutorial was written for - confirm against the pinned
# MNE release before upgrading
raw = mne.io.read_raw_fif(raw_fname, add_eeg_ref=False)
raw.set_eeg_reference()  # set EEG average reference
events = mne.find_events(raw, stim_channel='STI 014')

event_id = dict(aud_r=1)  # event trigger and conditions
tmin = -0.2  # start of each epoch (200ms before the trigger)
tmax = 0.5  # end of each epoch (500ms after the trigger)
raw.info['bads'] = ['MEG 2443', 'EEG 053']
picks = mne.pick_types(raw.info, meg=True, eeg=False, eog=True,
                       exclude='bads')
baseline = (None, 0)  # means from the first instant to t = 0
# peak-to-peak rejection thresholds (T/m for grads, T for mags, V for EOG)
reject = dict(grad=4000e-13, mag=4e-12, eog=150e-6)

epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True, picks=picks,
                    baseline=baseline, reject=reject, add_eeg_ref=False)

###############################################################################
# Compute regularized noise covariance
# ------------------------------------
#
# For more details see :ref:`tut_compute_covariance`.

# best estimator of 'shrunk'/'empirical' is selected by cross-validation
noise_cov = mne.compute_covariance(
    epochs, tmax=0., method=['shrunk', 'empirical'])

fig_cov, fig_spectra = mne.viz.plot_cov(noise_cov, raw.info)

###############################################################################
# Compute the evoked response
# ---------------------------

evoked = epochs.average()
evoked.plot()
evoked.plot_topomap(times=np.linspace(0.05, 0.15, 5), ch_type='mag')

# Show whitening
evoked.plot_white(noise_cov)

###############################################################################
# Inverse modeling: MNE/dSPM on evoked and raw data
# -------------------------------------------------

# Read the forward solution and compute the inverse operator

fname_fwd = data_path + '/MEG/sample/sample_audvis-meg-oct-6-fwd.fif'
fwd = mne.read_forward_solution(fname_fwd, surf_ori=True)

# Restrict forward solution as necessary for MEG
fwd = mne.pick_types_forward(fwd, meg=True, eeg=False)

# make an MEG inverse operator
info = evoked.info
inverse_operator = make_inverse_operator(info, fwd, noise_cov,
                                         loose=0.2, depth=0.8)

write_inverse_operator('sample_audvis-meg-oct-6-inv.fif',
                       inverse_operator)

###############################################################################
# Compute inverse solution
# ------------------------

method = "dSPM"
snr = 3.
lambda2 = 1. / snr ** 2  # regularization: inverse square of assumed SNR
stc = apply_inverse(evoked, inverse_operator, lambda2,
                    method=method, pick_ori=None)

del fwd, inverse_operator, epochs  # to save memory

###############################################################################
# Visualization
# -------------
# View activation time-series

# plot every 100th source to keep the figure readable
plt.plot(1e3 * stc.times, stc.data[::100, :].T)
plt.xlabel('time (ms)')
plt.ylabel('%s value' % method)
plt.show()

###############################################################################
# Here we use peak getter to move visualization to the time point of the peak
# and draw a marker at the maximum peak vertex.

vertno_max, time_max = stc.get_peak(hemi='rh')

subjects_dir = data_path + '/subjects'
brain = stc.plot(surface='inflated', hemi='rh', subjects_dir=subjects_dir,
                 clim=dict(kind='value', lims=[8, 12, 15]),
                 initial_time=time_max, time_unit='s')
brain.add_foci(vertno_max, coords_as_verts=True, hemi='rh', color='blue',
               scale_factor=0.6)
brain.show_view('lateral')

###############################################################################
# Morph data to average brain
# ---------------------------

# fsaverage uses 10242 vertices per hemisphere (ico-5)
fs_vertices = [np.arange(10242)] * 2
morph_mat = mne.compute_morph_matrix('sample', 'fsaverage', stc.vertices,
                                     fs_vertices, smooth=None,
                                     subjects_dir=subjects_dir)
stc_fsaverage = stc.morph_precomputed('fsaverage', fs_vertices, morph_mat)
brain_fsaverage = stc_fsaverage.plot(surface='inflated', hemi='rh',
                                     subjects_dir=subjects_dir,
                                     clim=dict(kind='value', lims=[8, 12, 15]),
                                     initial_time=time_max, time_unit='s')
brain_fsaverage.show_view('lateral')
###############################################################################
# Exercise
# --------
# - By changing the method parameter to 'sloreta' recompute the source
# estimates using the sLORETA method.
| bsd-3-clause |
Cashiuus/metagoofil | pdfminer/layout.py | 25 | 19560 | #!/usr/bin/env python2
import sys
from utils import INF, Plane, get_bound, uniq, csort, fsplit
from utils import bbox2str, matrix2str, apply_matrix_pt
## LAParams
##
class LAParams(object):

    """Parameters that control the layout analysis.

    line_overlap    -- vertical overlap ratio for chars on one line
    char_margin     -- max horizontal gap (in char widths) within a line
    line_margin     -- max vertical gap (in line heights) within a box
    word_margin     -- gap (in char widths) that triggers a virtual space
    boxes_flow      -- weighting between horizontal and vertical order
    detect_vertical -- whether to look for vertical text
    all_texts       -- whether to analyze text inside figures as well
    """

    def __init__(self,
                 line_overlap=0.5,
                 char_margin=2.0,
                 line_margin=0.5,
                 word_margin=0.1,
                 boxes_flow=0.5,
                 detect_vertical=False,
                 all_texts=False):
        # Record every knob on the instance, unchanged.
        self.line_overlap = line_overlap
        self.char_margin = char_margin
        self.line_margin = line_margin
        self.word_margin = word_margin
        self.boxes_flow = boxes_flow
        self.detect_vertical = detect_vertical
        self.all_texts = all_texts

    def __repr__(self):
        template = ('<LAParams: char_margin=%.1f, line_margin=%.1f, '
                    'word_margin=%.1f all_texts=%r>')
        return template % (self.char_margin, self.line_margin,
                           self.word_margin, self.all_texts)
## LTItem
##
class LTItem(object):

    """A layout object with a bounding box (x0, y0, x1, y1).

    Coordinates follow PDF conventions: y grows upwards, so y0 is the
    bottom edge and y1 the top edge.
    """

    def __init__(self, bbox):
        self.set_bbox(bbox)
        return

    def __repr__(self):
        return ('<%s %s>' %
                (self.__class__.__name__, bbox2str(self.bbox)))

    def set_bbox(self, bbox):
        """Set the bounding box and the derived width/height.

        The old tuple-parameter signature ``(self, (x0,y0,x1,y1))`` was
        Python-2-only (removed by PEP 3113); explicit unpacking keeps
        the same call interface on both Python 2 and 3.
        """
        (x0, y0, x1, y1) = bbox
        self.x0 = x0
        self.y0 = y0
        self.x1 = x1
        self.y1 = y1
        self.width = x1-x0
        self.height = y1-y0
        self.bbox = (x0, y0, x1, y1)
        return

    def is_empty(self):
        """True if the box has no extent in at least one direction."""
        return self.width <= 0 or self.height <= 0

    def is_hoverlap(self, obj):
        """True if the horizontal extents overlap (or touch)."""
        assert isinstance(obj, LTItem)
        return obj.x0 <= self.x1 and self.x0 <= obj.x1

    def hdistance(self, obj):
        """Horizontal gap between the boxes (0 if they overlap)."""
        assert isinstance(obj, LTItem)
        if self.is_hoverlap(obj):
            return 0
        else:
            return min(abs(self.x0-obj.x1), abs(self.x1-obj.x0))

    def hoverlap(self, obj):
        """Size of the horizontal overlap (0 if disjoint)."""
        assert isinstance(obj, LTItem)
        if self.is_hoverlap(obj):
            return min(abs(self.x0-obj.x1), abs(self.x1-obj.x0))
        else:
            return 0

    def is_voverlap(self, obj):
        """True if the vertical extents overlap (or touch)."""
        assert isinstance(obj, LTItem)
        return obj.y0 <= self.y1 and self.y0 <= obj.y1

    def vdistance(self, obj):
        """Vertical gap between the boxes (0 if they overlap)."""
        assert isinstance(obj, LTItem)
        if self.is_voverlap(obj):
            return 0
        else:
            return min(abs(self.y0-obj.y1), abs(self.y1-obj.y0))

    def voverlap(self, obj):
        """Size of the vertical overlap (0 if disjoint)."""
        assert isinstance(obj, LTItem)
        if self.is_voverlap(obj):
            return min(abs(self.y0-obj.y1), abs(self.y1-obj.y0))
        else:
            return 0
## LTCurve
##
class LTCurve(LTItem):

    """A generic path (polyline/curve), bounded by its points."""

    def __init__(self, linewidth, pts):
        self.pts = pts
        self.linewidth = linewidth
        LTItem.__init__(self, get_bound(pts))
        return

    def get_pts(self):
        """Return the points as one comma-separated coordinate string."""
        rendered = ['%.3f,%.3f' % point for point in self.pts]
        return ','.join(rendered)
## LTLine
##
class LTLine(LTCurve):

    """A single straight line segment between two points p0 and p1."""

    def __init__(self, linewidth, p0, p1):
        LTCurve.__init__(self, linewidth, [p0, p1])
        return
## LTRect
##
class LTRect(LTCurve):

    """An axis-aligned rectangle, stored as its four corner points."""

    def __init__(self, linewidth, bbox):
        # The old tuple-parameter signature ``(self, linewidth,
        # (x0,y0,x1,y1))`` was Python-2-only (removed by PEP 3113);
        # callers still pass a single 4-tuple.
        (x0, y0, x1, y1) = bbox
        LTCurve.__init__(self, linewidth, [(x0,y0), (x1,y0), (x1,y1), (x0,y1)])
        return
## LTImage
##
class LTImage(LTItem):

    """An embedded image, backed by a PDF content stream."""

    def __init__(self, name, stream, bbox):
        LTItem.__init__(self, bbox)
        self.name = name
        self.stream = stream
        # source size in pixels, from the stream's /Width and /Height
        # (short and long key forms both accepted)
        self.srcsize = (stream.get_any(('W', 'Width')),
                        stream.get_any(('H', 'Height')))
        self.imagemask = stream.get_any(('IM', 'ImageMask'))
        # bits per component; PDF default for image masks is 1
        self.bits = stream.get_any(('BPC', 'BitsPerComponent'), 1)
        self.colorspace = stream.get_any(('CS', 'ColorSpace'))
        # normalize the colorspace to a list for uniform handling
        if not isinstance(self.colorspace, list):
            self.colorspace = [self.colorspace]
        return

    def __repr__(self):
        (w,h) = self.srcsize
        return ('<%s(%s) %s %dx%d>' %
                (self.__class__.__name__, self.name,
                 bbox2str(self.bbox), w, h))
## LTText
##
class LTText(object):

    """Mixin for layout objects that carry a text string."""

    def __init__(self, text):
        self.text = text
        return

    def __repr__(self):
        return '<%s %r>' % (type(self).__name__, self.text)
## LTAnon
##
class LTAnon(LTText):

    """Virtual text (inserted spaces/newlines) without a bounding box."""

    pass
## LTChar
##
class LTChar(LTItem, LTText):

    """A single glyph: its text plus a bounding box derived from the
    font metrics and the current text matrix."""

    debug = 0  # truthy => verbose __repr__

    def __init__(self, matrix, font, fontsize, scaling, rise, text, textwidth, textdisp):
        LTText.__init__(self, text)
        self.matrix = matrix
        self.fontname = font.fontname
        # advance width in (unrotated) text space
        self.adv = textwidth * fontsize * scaling
        # compute the boundary rectangle.
        if font.is_vertical():
            # vertical
            # NOTE(review): textdisp appears to be the glyph's vertical
            # displacement pair (vx, vy) in 1/1000 em units, with vx
            # defaulting to half the glyph width - confirm against the
            # font implementation
            width = font.get_width() * fontsize
            (vx,vy) = textdisp
            if vx is None:
                vx = width/2
            else:
                vx = vx * fontsize * .001
            vy = (1000 - vy) * fontsize * .001
            tx = -vx
            ty = vy + rise
            bll = (tx, ty+self.adv)
            bur = (tx+width, ty)
        else:
            # horizontal
            height = font.get_height() * fontsize
            descent = font.get_descent() * fontsize
            ty = descent + rise
            bll = (0, ty)
            bur = (self.adv, ty+height)
        (a,b,c,d,e,f) = self.matrix
        # upright: transform preserves orientation and has no shear flip
        self.upright = (0 < a*d*scaling and b*c <= 0)
        # map both corners into device space and re-normalize the box
        (x0,y0) = apply_matrix_pt(self.matrix, bll)
        (x1,y1) = apply_matrix_pt(self.matrix, bur)
        if x1 < x0:
            (x0,x1) = (x1,x0)
        if y1 < y0:
            (y0,y1) = (y1,y0)
        LTItem.__init__(self, (x0,y0,x1,y1))
        # glyph size: extent along the text-flow direction
        if font.is_vertical():
            self.size = self.width
        else:
            self.size = self.height
        return

    def __repr__(self):
        if self.debug:
            return ('<%s %s matrix=%s font=%r adv=%s text=%r>' %
                    (self.__class__.__name__, bbox2str(self.bbox),
                     matrix2str(self.matrix), self.fontname,
                     self.adv, self.text))
        else:
            return '<char %r>' % self.text

    def is_compatible(self, obj):
        """Returns True if two characters can coexist in the same line."""
        return True
## LTContainer
##
class LTContainer(LTItem):

    """A layout item that holds child layout objects."""

    def __init__(self, bbox):
        LTItem.__init__(self, bbox)
        self._objs = []
        return

    def __iter__(self):
        return iter(self._objs)

    def __len__(self):
        return len(self._objs)

    def add(self, obj):
        """Append a single child object."""
        self._objs.append(obj)
        return

    def extend(self, objs):
        """Append every object from an iterable, via add()."""
        for obj in objs:
            self.add(obj)
        return
## LTExpandableContainer
##
class LTExpandableContainer(LTContainer):

    """A container whose bounding box grows to enclose every added
    child; it starts from an inverted (empty) box so that the first
    add() defines it."""

    def __init__(self):
        LTContainer.__init__(self, (+INF,+INF,-INF,-INF))
        return

    def add(self, obj):
        LTContainer.add(self, obj)
        # expand own bbox to the union with the child's bbox
        self.set_bbox((min(self.x0, obj.x0), min(self.y0, obj.y0),
                       max(self.x1, obj.x1), max(self.y1, obj.y1)))
        return

    def analyze(self, laparams):
        """Perform the layout analysis."""
        return self
## LTTextLine
##
class LTTextLine(LTExpandableContainer, LTText):

    """A single line of text, assembled from LTChar/LTAnon objects."""

    def __init__(self, word_margin):
        LTExpandableContainer.__init__(self)
        self.word_margin = word_margin
        return

    def __repr__(self):
        return ('<%s %s %r>' %
                (self.__class__.__name__, bbox2str(self.bbox), self.text))

    def analyze(self, laparams):
        # terminate the line with a virtual newline and cache the
        # concatenated text of all textual children
        LTContainer.add(self, LTAnon('\n'))
        self.text = ''.join( obj.text for obj in self if isinstance(obj, LTText) )
        return LTExpandableContainer.analyze(self, laparams)

    def find_neighbors(self, plane, ratio):
        # direction-specific: implemented by the subclasses below
        raise NotImplementedError
class LTTextLineHorizontal(LTTextLine):
    """A left-to-right text line; inserts spaces at word gaps."""

    def __init__(self, word_margin):
        LTTextLine.__init__(self, word_margin)
        self._x1 = +INF

    def add(self, obj):
        # When the horizontal gap since the previous char exceeds the
        # word margin, emit a synthetic space before the new character.
        if isinstance(obj, LTChar) and self.word_margin:
            margin = self.word_margin * obj.width
            if self._x1 < obj.x0 - margin:
                LTContainer.add(self, LTAnon(' '))
        self._x1 = obj.x1
        LTTextLine.add(self, obj)

    def find_neighbors(self, plane, ratio):
        # Look in a band extended vertically by ratio*height for other
        # horizontal lines (candidates for the same text box).
        band = ratio * self.height
        found = plane.find((self.x0, self.y0 - band, self.x1, self.y1 + band))
        return [obj for obj in found
                if isinstance(obj, LTTextLineHorizontal)]
class LTTextLineVertical(LTTextLine):
    """A top-to-bottom text line; inserts spaces at word gaps."""

    def __init__(self, word_margin):
        LTTextLine.__init__(self, word_margin)
        self._y0 = -INF

    def add(self, obj):
        # When the vertical gap since the previous char exceeds the word
        # margin, emit a synthetic space before the new character.
        if isinstance(obj, LTChar) and self.word_margin:
            margin = self.word_margin * obj.height
            if obj.y1 + margin < self._y0:
                LTContainer.add(self, LTAnon(' '))
        self._y0 = obj.y0
        LTTextLine.add(self, obj)

    def find_neighbors(self, plane, ratio):
        # Look in a band extended horizontally by ratio*width for other
        # vertical lines (candidates for the same text box).
        band = ratio * self.width
        found = plane.find((self.x0 - band, self.y0, self.x1 + band, self.y1))
        return [obj for obj in found
                if isinstance(obj, LTTextLineVertical)]
## LTTextBox
##
## A set of text objects that are grouped within
## a certain rectangular area.
##
class LTTextBox(LTExpandableContainer):
    """A set of text lines grouped within one rectangular area of the page."""

    def __init__(self):
        LTExpandableContainer.__init__(self)
        # Reading-order index, assigned later by layout analysis.
        self.index = None

    def __repr__(self):
        return ('<%s(%s) %s %r...>' %
                (self.__class__.__name__, self.index,
                 bbox2str(self.bbox), self.text[:20]))

    def analyze(self, laparams):
        # Cache the concatenated text of all contained lines.
        self.text = ''.join(line.text for line in self
                            if isinstance(line, LTTextLine))
        return LTExpandableContainer.analyze(self, laparams)
class LTTextBoxHorizontal(LTTextBox):
    """A text box whose lines read top-to-bottom, left-to-right."""

    def analyze(self, laparams):
        # Order lines by descending top edge: topmost line first.
        self._objs = csort(self._objs, key=lambda line: -line.y1)
        return LTTextBox.analyze(self, laparams)

    def get_writing_mode(self):
        return 'lr-tb'
class LTTextBoxVertical(LTTextBox):
    """A text box whose lines read right-to-left, top-to-bottom."""

    def analyze(self, laparams):
        # Order lines by descending right edge: rightmost line first.
        self._objs = csort(self._objs, key=lambda line: -line.x1)
        return LTTextBox.analyze(self, laparams)

    def get_writing_mode(self):
        return 'tb-rl'
## LTTextGroup
##
class LTTextGroup(LTExpandableContainer):
    """A hierarchical grouping of text boxes (or nested groups)."""

    def __init__(self, objs):
        LTExpandableContainer.__init__(self)
        self.extend(objs)
class LTTextGroupLRTB(LTTextGroup):
    """A group read left-to-right, top-to-bottom."""

    def analyze(self, laparams):
        # Reorder the objects from top-left to bottom-right;
        # boxes_flow weights horizontal vs. vertical position in the key.
        flow = laparams.boxes_flow

        def order_key(obj):
            return (1 - flow) * obj.x0 - (1 + flow) * (obj.y0 + obj.y1)

        self._objs = csort(self._objs, key=order_key)
        return LTTextGroup.analyze(self, laparams)
class LTTextGroupTBRL(LTTextGroup):
    """A group read top-to-bottom, right-to-left (vertical writing)."""

    def analyze(self, laparams):
        # Reorder the objects from top-right to bottom-left;
        # boxes_flow weights horizontal vs. vertical position in the key.
        flow = laparams.boxes_flow

        def order_key(obj):
            return -(1 + flow) * (obj.x0 + obj.x1) - (1 - flow) * obj.y1

        self._objs = csort(self._objs, key=order_key)
        return LTTextGroup.analyze(self, laparams)
## LTLayoutContainer
##
class LTLayoutContainer(LTContainer):
    """A container that can run layout analysis over its children:
    groups raw LTChar objects into text lines, lines into text boxes,
    and boxes into a reading-order tree of groups.
    """

    def __init__(self, bbox):
        LTContainer.__init__(self, bbox)
        # Root of the textbox grouping tree; populated by analyze().
        self.layout = None
        return

    def analyze(self, laparams):
        """Run the full layout analysis and reorder self._objs so that
        textboxes come first, in reading order, followed by non-text
        objects and then empty lines."""
        # textobjs is a list of LTChar objects, i.e.
        # it has all the individual characters in the page.
        (textobjs, otherobjs) = fsplit(lambda obj: isinstance(obj, LTChar), self._objs)
        if not textobjs: return
        textlines = list(self.get_textlines(laparams, textobjs))
        assert len(textobjs) <= sum( len(line._objs) for line in textlines )
        (empties, textlines) = fsplit(lambda obj: obj.is_empty(), textlines)
        textboxes = list(self.get_textboxes(laparams, textlines))
        assert len(textlines) == sum( len(box._objs) for box in textboxes )
        top = self.group_textboxes(laparams, textboxes)
        def assign_index(obj, i):
            # Depth-first traversal assigning sequential reading-order
            # indices to every LTTextBox in the group tree.
            if isinstance(obj, LTTextBox):
                obj.index = i
                i += 1
            elif isinstance(obj, LTTextGroup):
                for x in obj:
                    i = assign_index(x, i)
            return i
        assign_index(top, 0)
        textboxes.sort(key=lambda box:box.index)
        self._objs = textboxes + otherobjs + empties
        self.layout = top
        return self

    def get_textlines(self, laparams, objs):
        """Yield LTTextLine objects built from runs of aligned characters.

        Walks consecutive character pairs; `k` encodes how the pair
        aligns (bit 0 = horizontally, bit 1 = vertically) and decides
        whether to extend the current line or start a new one.
        """
        obj0 = None
        line = None
        for obj1 in objs:
            if obj0 is not None:
                k = 0
                if (obj0.is_compatible(obj1) and obj0.is_voverlap(obj1) and
                    min(obj0.height, obj1.height) * laparams.line_overlap < obj0.voverlap(obj1) and
                    obj0.hdistance(obj1) < max(obj0.width, obj1.width) * laparams.char_margin):
                    # obj0 and obj1 is horizontally aligned:
                    #
                    #   +------+ - - -
                    #   | obj0 | - - +------+   -
                    #   |      |     | obj1 |   | (line_overlap)
                    #   +------+ - - |      |   -
                    #          - - - +------+
                    #
                    #          |<--->|
                    #        (char_margin)
                    k |= 1
                if (laparams.detect_vertical and
                    obj0.is_compatible(obj1) and obj0.is_hoverlap(obj1) and
                    min(obj0.width, obj1.width) * laparams.line_overlap < obj0.hoverlap(obj1) and
                    obj0.vdistance(obj1) < max(obj0.height, obj1.height) * laparams.char_margin):
                    # obj0 and obj1 is vertically aligned:
                    #
                    #   +------+
                    #   | obj0 |
                    #   |      |
                    #   +------+ - - -
                    #     |    |     | (char_margin)
                    #   +------+ - -
                    #   | obj1 |
                    #   |      |
                    #   +------+
                    #
                    #   |<-->|
                    # (line_overlap)
                    k |= 2
                if ( (k & 1 and isinstance(line, LTTextLineHorizontal)) or
                     (k & 2 and isinstance(line, LTTextLineVertical)) ):
                    # Alignment matches the current line's direction: extend it.
                    line.add(obj1)
                elif line is not None:
                    # Alignment broke: close out the current line.
                    yield line.analyze(laparams)
                    line = None
                else:
                    # No current line: start one in the detected direction,
                    # or emit obj0 as a one-char horizontal line if the pair
                    # doesn't align at all.
                    if k == 2:
                        line = LTTextLineVertical(laparams.word_margin)
                        line.add(obj0)
                        line.add(obj1)
                    elif k == 1:
                        line = LTTextLineHorizontal(laparams.word_margin)
                        line.add(obj0)
                        line.add(obj1)
                    else:
                        line = LTTextLineHorizontal(laparams.word_margin)
                        line.add(obj0)
                        yield line.analyze(laparams)
                        line = None
            obj0 = obj1
        # Flush the trailing character / open line.
        if line is None:
            line = LTTextLineHorizontal(laparams.word_margin)
            line.add(obj0)
        yield line.analyze(laparams)
        return

    def get_textboxes(self, laparams, lines):
        """Cluster neighboring lines of the same orientation into boxes.

        Each line pulls in its spatial neighbors (transitively, by merging
        any box a neighbor already belongs to), then boxes are yielded
        once each, in first-line order.
        """
        plane = Plane(lines)
        boxes = {}
        for line in lines:
            neighbors = line.find_neighbors(plane, laparams.line_margin)
            assert line in neighbors, line
            members = []
            for obj1 in neighbors:
                members.append(obj1)
                # Merge any box this neighbor was already assigned to.
                if obj1 in boxes:
                    members.extend(boxes.pop(obj1))
            if isinstance(line, LTTextLineHorizontal):
                box = LTTextBoxHorizontal()
            else:
                box = LTTextBoxVertical()
            for obj in uniq(members):
                box.add(obj)
                boxes[obj] = box
        done = set()
        for line in lines:
            box = boxes[line]
            if box in done: continue
            done.add(box)
            yield box.analyze(laparams)
        return

    def group_textboxes(self, laparams, boxes):
        """Repeatedly merge the closest pair of boxes/groups into an
        LTTextGroup until a single root remains; return that root."""
        def dist((x0,y0,x1,y1), obj1, obj2):
            """A distance function between two TextBoxes.
            Consider the bounding rectangle for obj1 and obj2.
            Return its area less the areas of obj1 and obj2,
            shown as 'www' below. This value may be negative.
                    +------+..........+ (x1,y1)
                    | obj1 |wwwwwwwwww:
                    +------+www+------+
                    :wwwwwwwwww| obj2 |
            (x0,y0) +..........+------+
            """
            return ((x1-x0)*(y1-y0) - obj1.width*obj1.height - obj2.width*obj2.height)
        boxes = boxes[:]
        # XXX this is very slow when there're many textboxes.
        while 2 <= len(boxes):
            mindist = (INF,0)
            minpair = None
            plane = Plane(boxes)
            boxes = csort(boxes, key=lambda obj: obj.width*obj.height)
            for i in xrange(len(boxes)):
                for j in xrange(i+1, len(boxes)):
                    (obj1, obj2) = (boxes[i], boxes[j])
                    b = (min(obj1.x0,obj2.x0), min(obj1.y0,obj2.y0),
                         max(obj1.x1,obj2.x1), max(obj1.y1,obj2.y1))
                    others = set(plane.find(b)).difference((obj1,obj2))
                    d = dist(b, obj1, obj2)
                    # disregard if there's any other object in between.
                    # (pairs with an intervening object are ranked after
                    # all unobstructed pairs via the (1, d) tuple.)
                    if 0 < d and others:
                        d = (1,d)
                    else:
                        d = (0,d)
                    if mindist <= d: continue
                    mindist = d
                    minpair = (obj1, obj2)
            assert minpair is not None, boxes
            (obj1, obj2) = minpair
            boxes.remove(obj1)
            boxes.remove(obj2)
            # Any vertical participant makes the merged group read
            # top-to-bottom/right-to-left; otherwise left-to-right.
            if (isinstance(obj1, LTTextBoxVertical) or
                isinstance(obj2, LTTextBoxVertical) or
                isinstance(obj1, LTTextGroupTBRL) or
                isinstance(obj2, LTTextGroupTBRL)):
                group = LTTextGroupTBRL([obj1, obj2])
            else:
                group = LTTextGroupLRTB([obj1, obj2])
            boxes.append(group.analyze(laparams))
        assert len(boxes) == 1
        return boxes.pop()
## LTFigure
##
class LTFigure(LTLayoutContainer):
    """A page region drawn by a PDF Form XObject, carrying its own
    coordinate transform matrix."""

    def __init__(self, name, bbox, matrix):
        self.name = name
        self.matrix = matrix
        (x, y, w, h) = bbox
        # Map all four corners through the matrix and take their bound:
        # a transformed rectangle is no longer axis-aligned.
        corners = ((x, y), (x + w, y), (x, y + h), (x + w, y + h))
        bbox = get_bound(apply_matrix_pt(matrix, (p, q))
                         for (p, q) in corners)
        LTLayoutContainer.__init__(self, bbox)

    def __repr__(self):
        return ('<%s(%s) %s matrix=%s>' %
                (self.__class__.__name__, self.name,
                 bbox2str(self.bbox), matrix2str(self.matrix)))

    def analyze(self, laparams):
        # Figure contents are analyzed only when explicitly requested.
        if not laparams.all_texts:
            return
        return LTLayoutContainer.analyze(self, laparams)
## LTPage
##
class LTPage(LTLayoutContainer):
    """The layout of a whole page, identified by page id and rotation."""

    def __init__(self, pageid, bbox, rotate=0):
        LTLayoutContainer.__init__(self, bbox)
        self.pageid = pageid
        self.rotate = rotate

    def __repr__(self):
        return ('<%s(%r) %s rotate=%r>' %
                (self.__class__.__name__, self.pageid,
                 bbox2str(self.bbox), self.rotate))
| gpl-2.0 |
ccn-2m/django | django/contrib/localflavor/gb/forms.py | 110 | 1992 | """
GB-specific Form helpers
"""
from __future__ import absolute_import, unicode_literals
import re
from django.contrib.localflavor.gb.gb_regions import GB_NATIONS_CHOICES, GB_REGION_CHOICES
from django.forms.fields import CharField, Select
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
class GBPostcodeField(CharField):
    """
    A form field that validates its input is a UK postcode.

    The validation pattern is sourced from the schema for British Standard
    BS7666 address types: http://www.govtalk.gov.uk/gdsc/schemas/bs7666-v2-0.xsd

    The cleaned value is uppercased, with a single space placed before the
    incode (the final three characters) if one was missing.
    """
    default_error_messages = {
        'invalid': _('Enter a valid postcode.'),
    }
    outcode_pattern = '[A-PR-UWYZ]([0-9]{1,2}|([A-HIK-Y][0-9](|[0-9]|[ABEHMNPRVWXY]))|[0-9][A-HJKSTUW])'
    incode_pattern = '[0-9][ABD-HJLNP-UW-Z]{2}'
    postcode_regex = re.compile(r'^(GIR 0AA|%s %s)$' % (outcode_pattern, incode_pattern))
    space_regex = re.compile(r' *(%s)$' % incode_pattern)

    def clean(self, value):
        value = super(GBPostcodeField, self).clean(value)
        if value == '':
            return value
        candidate = value.upper().strip()
        # Normalise spacing: exactly one space before the incode.
        candidate = self.space_regex.sub(r' \1', candidate)
        if not self.postcode_regex.search(candidate):
            raise ValidationError(self.error_messages['invalid'])
        return candidate
class GBCountySelect(Select):
    """
    A Select widget that uses a list of UK Counties/Regions as its choices.
    """
    def __init__(self, attrs=None):
        # Choices are fixed to GB_REGION_CHOICES; only HTML attrs vary.
        super(GBCountySelect, self).__init__(attrs, choices=GB_REGION_CHOICES)
class GBNationSelect(Select):
    """
    A Select widget that uses a list of UK Nations as its choices.
    """
    def __init__(self, attrs=None):
        # Choices are fixed to GB_NATIONS_CHOICES; only HTML attrs vary.
        super(GBNationSelect, self).__init__(attrs, choices=GB_NATIONS_CHOICES)
| bsd-3-clause |
pelya/commandergenius | project/jni/python/src/Lib/json/__init__.py | 57 | 12286 | r"""A simple, fast, extensible JSON encoder and decoder
JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
json exposes an API familiar to uses of the standard library
marshal and pickle modules.
Encoding basic Python object hierarchies::
>>> import json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing (using repr() because of extraneous whitespace in the output)::
>>> import json
>>> print repr(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
'{\n "4": 5, \n "6": 7\n}'
Decoding JSON::
>>> import json
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
[u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('"\\"foo\\bar"')
u'"foo\x08ar'
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)
[u'streaming API']
Specializing JSON object decoding::
>>> import json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> import decimal
>>> json.loads('1.1', parse_float=decimal.Decimal)
Decimal('1.1')
Extending JSONEncoder::
>>> import json
>>> class ComplexEncoder(json.JSONEncoder):
... def default(self, obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... return json.JSONEncoder.default(self, obj)
...
>>> dumps(2 + 1j, cls=ComplexEncoder)
'[2.0, 1.0]'
>>> ComplexEncoder().encode(2 + 1j)
'[2.0, 1.0]'
>>> list(ComplexEncoder().iterencode(2 + 1j))
['[', '2.0', ', ', '1.0', ']']
Using json.tool from the shell to validate and
pretty-print::
$ echo '{"json":"obj"}' | python -mjson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -mjson.tool
Expecting property name: line 1 column 2 (char 2)
Note that the JSON produced by this module's default settings
is a subset of YAML, so it may be used as a serializer for that as well.
"""
__version__ = '1.9'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from .decoder import JSONDecoder
from .encoder import JSONEncoder
# Shared encoder reused by dump()/dumps() whenever the caller passes only
# default options, avoiding a JSONEncoder construction per call.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    ``skipkeys`` drops non-basic dict keys instead of raising ``TypeError``;
    ``ensure_ascii=False`` may write ``unicode`` chunks to ``fp`` (which must
    then accept them, e.g. via ``codecs.getwriter()``);
    ``check_circular=False`` skips the circular-reference check;
    ``allow_nan=False`` rejects out-of-range floats (``nan``/``inf``) per the
    strict JSON spec; ``indent`` enables pretty-printing (0 = newlines only);
    ``separators`` overrides the default ``(', ', ': ')`` pair;
    ``encoding`` is the character encoding assumed for ``str`` instances;
    ``default(obj)`` supplies a fallback serializer for unknown types;
    a custom ``JSONEncoder`` subclass may be supplied via ``cls``.
    """
    # Fast path: all-default options can reuse the shared cached encoder.
    use_cached = (skipkeys is False and ensure_ascii is True and
                  check_circular is True and allow_nan is True and
                  cls is None and indent is None and separators is None and
                  encoding == 'utf-8' and default is None and not kw)
    if use_cached:
        iterable = _default_encoder.iterencode(obj)
    else:
        encoder_cls = JSONEncoder if cls is None else cls
        iterable = encoder_cls(
            skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, encoding=encoding,
            default=default, **kw).iterencode(obj)
    # could accelerate with writelines in some versions of Python, at
    # a debuggability cost
    for chunk in iterable:
        fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    Arguments mirror ``dump()``: ``skipkeys`` drops non-basic dict keys
    instead of raising ``TypeError``; ``ensure_ascii=False`` may return a
    ``unicode`` instance instead of an escaped ASCII ``str``;
    ``check_circular=False`` skips the circular-reference check;
    ``allow_nan=False`` rejects out-of-range floats (``nan``/``inf``);
    ``indent`` enables pretty-printing (0 = newlines only); ``separators``
    overrides the default ``(', ', ': ')`` pair; ``encoding`` is the
    character encoding assumed for ``str`` instances; ``default(obj)``
    supplies a fallback serializer; a ``JSONEncoder`` subclass may be
    supplied via ``cls``.
    """
    # Fast path: all-default options can reuse the shared cached encoder.
    use_cached = (skipkeys is False and ensure_ascii is True and
                  check_circular is True and allow_nan is True and
                  cls is None and indent is None and separators is None and
                  encoding == 'utf-8' and default is None and not kw)
    if use_cached:
        return _default_encoder.encode(obj)
    encoder_cls = JSONEncoder if cls is None else cls
    encoder = encoder_cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default, **kw)
    return encoder.encode(obj)
# Shared decoder reused by load()/loads() when no custom options are given.
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    ``encoding`` names the (ASCII-based) charset of the stream when it is
    not utf-8; non-ASCII-based encodings must be decoded by the caller
    first.  ``object_hook`` replaces each decoded ``dict`` with its return
    value (e.g. for JSON-RPC class hinting).  A custom ``JSONDecoder``
    subclass may be supplied via ``cls``.

    This is a thin wrapper: the whole stream is read eagerly and handed to
    ``loads()`` with every option forwarded unchanged.
    """
    document = fp.read()
    return loads(document,
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` JSON document) to a
    Python object.

    ``encoding`` names the (ASCII-based) charset of ``s`` when it is a
    non-utf-8 ``str``; non-ASCII-based encodings must be decoded to
    ``unicode`` first.  ``object_hook`` replaces each decoded ``dict`` with
    its return value.  ``parse_float``, ``parse_int`` and
    ``parse_constant`` override the default handling of JSON floats, ints
    and the constants ``-Infinity``/``Infinity``/``NaN``/``null``/``true``/
    ``false`` respectively.  A custom ``JSONDecoder`` subclass may be
    supplied via ``cls``.
    """
    # Fast path: all-default options can reuse the shared cached decoder.
    if (cls is None and encoding is None and object_hook is None and
        parse_int is None and parse_float is None and
        parse_constant is None and not kw):
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks that were actually supplied so the decoder
    # class keeps its own defaults for the rest.
    hooks = (('object_hook', object_hook),
             ('parse_float', parse_float),
             ('parse_int', parse_int),
             ('parse_constant', parse_constant))
    for name, value in hooks:
        if value is not None:
            kw[name] = value
    return cls(encoding=encoding, **kw).decode(s)
| lgpl-2.1 |
biddyweb/androguard | demos/disassembler_exceptions.py | 38 | 1301 | #!/usr/bin/env python
import sys, hashlib
PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)
from androguard.core.androgen import AndroguardS
from androguard.core.analysis import analysis
from androguard.core.bytecodes import dvm
# Demo: disassemble the bundled test DEX and, for every method that declares
# exception handlers, print its CFG basic blocks, instructions and exception
# ranges.  (Python 2 script: uses print statements.)
TEST = 'examples/android/TestsAndroguard/bin/classes.dex'
a = AndroguardS( TEST )
x = analysis.VMAnalysis( a.get_vm() )
# CFG
for method in a.get_methods() :
    g = x.get_method( method )
    # Display only methods with exceptions
    if method.get_code() == None :
        continue
    if method.get_code().tries_size <= 0 :
        continue
    # Method header: class, name, descriptor, code length, register count.
    print method.get_class_name(), method.get_name(), method.get_descriptor(), method.get_code().get_length(), method.get_code().registers_size
    idx = 0
    for i in g.basic_blocks.get() :
        # Block header: name, byte range, successor and predecessor blocks.
        print "\t %s %x %x" % (i.name, i.start, i.end), '[ NEXT = ', ', '.join( "%x-%x-%s" % (j[0], j[1], j[2].get_name()) for j in i.childs ), ']', '[ PREV = ', ', '.join( j[2].get_name() for j in i.fathers ), ']'
        # Instructions with their running byte offset within the method.
        for ins in i.get_instructions() :
            print "\t\t %x" % idx, ins.get_name(), ins.get_output()
            idx += ins.get_length()
        print ""
    # Exception handler ranges recorded for this method.
    for i in g.exceptions.gets() :
        print '%x %x %s' % (i.start, i.end, i.exceptions)
    print dvm.determineException(a.get_vm(), method)
| apache-2.0 |
ryfeus/lambda-packs | LightGBM_sklearn_scipy_numpy/source/sklearn/metrics/pairwise.py | 7 | 47000 | # -*- coding: utf-8 -*-
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Mathieu Blondel <mathieu@mblondel.org>
# Robert Layton <robertlayton@gmail.com>
# Andreas Mueller <amueller@ais.uni-bonn.de>
# Philippe Gervais <philippe.gervais@inria.fr>
# Lars Buitinck
# Joel Nothman <joel.nothman@gmail.com>
# License: BSD 3 clause
import itertools
from functools import partial
import warnings
import numpy as np
from scipy.spatial import distance
from scipy.sparse import csr_matrix
from scipy.sparse import issparse
from ..utils import check_array
from ..utils import gen_even_slices
from ..utils import gen_batches
from ..utils.extmath import row_norms, safe_sparse_dot
from ..preprocessing import normalize
from ..externals.joblib import Parallel
from ..externals.joblib import delayed
from ..externals.joblib import cpu_count
from .pairwise_fast import _chi2_kernel_fast, _sparse_manhattan
# Utility Functions
def _return_float_dtype(X, Y):
"""
1. If dtype of X and Y is float32, then dtype float32 is returned.
2. Else dtype float is returned.
"""
if not issparse(X) and not isinstance(X, np.ndarray):
X = np.asarray(X)
if Y is None:
Y_dtype = X.dtype
elif not issparse(Y) and not isinstance(Y, np.ndarray):
Y = np.asarray(Y)
Y_dtype = Y.dtype
else:
Y_dtype = Y.dtype
if X.dtype == Y_dtype == np.float32:
dtype = np.float32
else:
dtype = np.float
return X, Y, dtype
def check_pairwise_arrays(X, Y, precomputed=False, dtype=None):
    """Set X and Y appropriately and check inputs for pairwise distances.

    If ``Y`` is None it is set as a pointer to ``X`` (no copy).  Both
    inputs are validated as at-least-2D float arrays (of ``dtype`` when
    given, otherwise a float type chosen by ``_return_float_dtype``), and
    their second dimensions must match -- or, when ``precomputed`` is
    True, ``X`` must have shape ``(n_queries, n_indexed)`` against ``Y``.

    Parameters
    ----------
    X : {array-like, sparse matrix}, shape (n_samples_a, n_features)
    Y : {array-like, sparse matrix}, shape (n_samples_b, n_features)
    precomputed : bool
        True if X is to be treated as precomputed distances to the
        samples in Y.
    dtype : string, type, list of types or None (default=None)
        Data type required for X and Y.

    Returns
    -------
    safe_X, safe_Y : validated arrays; safe_Y points to safe_X when Y
        was None.
    """
    X, Y, float_dtype = _return_float_dtype(X, Y)

    # Only warn about dtype conversion when the caller pinned a dtype.
    warn_on_dtype = dtype is not None
    estimator = 'check_pairwise_arrays'
    if dtype is None:
        dtype = float_dtype

    if Y is X or Y is None:
        X = Y = check_array(X, accept_sparse='csr', dtype=dtype,
                            warn_on_dtype=warn_on_dtype, estimator=estimator)
    else:
        X = check_array(X, accept_sparse='csr', dtype=dtype,
                        warn_on_dtype=warn_on_dtype, estimator=estimator)
        Y = check_array(Y, accept_sparse='csr', dtype=dtype,
                        warn_on_dtype=warn_on_dtype, estimator=estimator)

    if precomputed:
        if X.shape[1] != Y.shape[0]:
            raise ValueError("Precomputed metric requires shape "
                             "(n_queries, n_indexed). Got (%d, %d) "
                             "for %d indexed." %
                             (X.shape[0], X.shape[1], Y.shape[0]))
    elif X.shape[1] != Y.shape[1]:
        raise ValueError("Incompatible dimension for X and Y matrices: "
                         "X.shape[1] == %d while Y.shape[1] == %d" % (
                             X.shape[1], Y.shape[1]))

    return X, Y
def check_paired_arrays(X, Y):
    """Set X and Y appropriately and check inputs for paired distances.

    Runs the same validation as ``check_pairwise_arrays`` and additionally
    requires the two arrays to have exactly the same shape (paired
    metrics compare row i of X with row i of Y).

    Parameters
    ----------
    X : {array-like, sparse matrix}, shape (n_samples_a, n_features)
    Y : {array-like, sparse matrix}, shape (n_samples_b, n_features)

    Returns
    -------
    safe_X, safe_Y : validated arrays; safe_Y points to safe_X when Y
        was None.
    """
    X, Y = check_pairwise_arrays(X, Y)
    if X.shape != Y.shape:
        raise ValueError("X and Y should be of same shape. They were "
                         "respectively %r and %r long." % (X.shape, Y.shape))
    return X, Y
# Pairwise distances
def euclidean_distances(X, Y=None, Y_norm_squared=None, squared=False,
                        X_norm_squared=None):
    """Compute the distance matrix between each pair of rows of X and Y.

    Uses the expansion ``dist(x, y)**2 = dot(x, x) - 2*dot(x, y) + dot(y, y)``,
    which is efficient for sparse data and lets callers pre-compute the
    squared row norms (``X_norm_squared`` / ``Y_norm_squared``).  Because
    of this formulation the result may not be exactly symmetric as
    required by e.g. ``scipy.spatial.distance`` functions.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : {array-like, sparse matrix}, shape (n_samples_1, n_features)
    Y : {array-like, sparse matrix}, shape (n_samples_2, n_features)
    Y_norm_squared : array-like, shape (n_samples_2, ), optional
        Pre-computed dot-products of vectors in Y
        (e.g. ``(Y**2).sum(axis=1)``).
    squared : boolean, optional
        Return squared Euclidean distances.
    X_norm_squared : array-like, shape = [n_samples_1], optional
        Pre-computed dot-products of vectors in X
        (e.g. ``(X**2).sum(axis=1)``).

    Returns
    -------
    distances : {array, sparse matrix}, shape (n_samples_1, n_samples_2)

    See also
    --------
    paired_distances : distances betweens pairs of elements of X and Y.
    """
    X, Y = check_pairwise_arrays(X, Y)

    # Squared norms of X's rows, as a column vector.
    if X_norm_squared is not None:
        x_norms = check_array(X_norm_squared)
        if x_norms.shape == (1, X.shape[0]):
            x_norms = x_norms.T
        elif x_norms.shape != (X.shape[0], 1):
            raise ValueError(
                "Incompatible dimensions for X and X_norm_squared")
    else:
        x_norms = row_norms(X, squared=True)[:, np.newaxis]

    # Squared norms of Y's rows, as a row vector.
    if X is Y:  # shortcut in the common case euclidean_distances(X, X)
        y_norms = x_norms.T
    elif Y_norm_squared is not None:
        y_norms = np.atleast_2d(Y_norm_squared)
        if y_norms.shape != (1, Y.shape[0]):
            raise ValueError(
                "Incompatible dimensions for Y and Y_norm_squared")
    else:
        y_norms = row_norms(Y, squared=True)[np.newaxis, :]

    # -2 * X.Y^T + ||x||^2 + ||y||^2, computed in place.
    distances = safe_sparse_dot(X, Y.T, dense_output=True)
    distances *= -2
    distances += x_norms
    distances += y_norms
    # Clip tiny negative values produced by floating point rounding.
    np.maximum(distances, 0, out=distances)

    if X is Y:
        # Ensure that distances between vectors and themselves are set to 0.0.
        # This may not be the case due to floating point rounding errors.
        distances.flat[::distances.shape[0] + 1] = 0.0

    return distances if squared else np.sqrt(distances, out=distances)
def pairwise_distances_argmin_min(X, Y, axis=1, metric="euclidean",
                                  batch_size=500, metric_kwargs=None):
    """Compute minimum distances between one point and a set of points.

    This function computes for each row in X, the index of the row of Y which
    is closest (according to the specified distance). The minimal distances are
    also returned.

    This is mostly equivalent to calling:

        (pairwise_distances(X, Y=Y, metric=metric).argmin(axis=axis),
         pairwise_distances(X, Y=Y, metric=metric).min(axis=axis))

    but uses much less memory, and is faster for large arrays.

    Parameters
    ----------
    X : {array-like, sparse matrix}, shape (n_samples1, n_features)
        Array containing points.

    Y : {array-like, sparse matrix}, shape (n_samples2, n_features)
        Arrays containing points.

    axis : int, optional, default 1
        Axis along which the argmin and distances are to be computed.

    metric : string or callable, default 'euclidean'
        metric to use for distance computation. Any metric from scikit-learn
        or scipy.spatial.distance can be used.

        If metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays as input and return one value indicating the
        distance between them. This works for Scipy's metrics, but is less
        efficient than passing the metric name as a string.

        Distance matrices are not supported.

        Valid values for metric are:

        - from scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2',
          'manhattan']

        - from scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev',
          'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski',
          'mahalanobis', 'matching', 'minkowski', 'rogerstanimoto',
          'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath',
          'sqeuclidean', 'yule']

        See the documentation for scipy.spatial.distance for details on these
        metrics.

    batch_size : integer
        To reduce memory consumption over the naive solution, data are
        processed in batches, comprising batch_size rows of X and
        batch_size rows of Y. The default value is quite conservative, but
        can be changed for fine-tuning. The larger the number, the larger the
        memory usage.

    metric_kwargs : dict, optional
        Keyword arguments to pass to specified metric function.

    Returns
    -------
    argmin : numpy.ndarray
        Y[argmin[i], :] is the row in Y that is closest to X[i, :].

    distances : numpy.ndarray
        distances[i] is the distance between the i-th row in X and the
        argmin[i]-th row in Y.

    See also
    --------
    sklearn.metrics.pairwise_distances
    sklearn.metrics.pairwise_distances_argmin
    """
    dist_func = None
    if metric in PAIRWISE_DISTANCE_FUNCTIONS:
        dist_func = PAIRWISE_DISTANCE_FUNCTIONS[metric]
    elif not callable(metric) and not isinstance(metric, str):
        raise ValueError("'metric' must be a string or a callable")

    X, Y = check_pairwise_arrays(X, Y)

    if metric_kwargs is None:
        metric_kwargs = {}

    if axis == 0:
        # argmin over X's rows instead of Y's: swap the roles of X and Y.
        X, Y = Y, X

    # Allocate output arrays.
    indices = np.empty(X.shape[0], dtype=np.intp)
    values = np.empty(X.shape[0])
    # np.inf rather than np.infty: the latter is a deprecated alias that was
    # removed in NumPy 2.0.
    values.fill(np.inf)

    for chunk_x in gen_batches(X.shape[0], batch_size):
        X_chunk = X[chunk_x, :]
        for chunk_y in gen_batches(Y.shape[0], batch_size):
            Y_chunk = Y[chunk_y, :]
            if dist_func is not None:
                if metric == 'euclidean':  # special case, for speed
                    # Expand ||x - y||^2 = ||x||^2 - 2 x.y + ||y||^2 and defer
                    # the final sqrt until after the minimum has been found.
                    d_chunk = safe_sparse_dot(X_chunk, Y_chunk.T,
                                              dense_output=True)
                    d_chunk *= -2
                    d_chunk += row_norms(X_chunk, squared=True)[:, np.newaxis]
                    d_chunk += row_norms(Y_chunk, squared=True)[np.newaxis, :]
                    # Clamp tiny negatives caused by floating point rounding.
                    np.maximum(d_chunk, 0, d_chunk)
                else:
                    d_chunk = dist_func(X_chunk, Y_chunk, **metric_kwargs)
            else:
                d_chunk = pairwise_distances(X_chunk, Y_chunk,
                                             metric=metric, **metric_kwargs)

            # Update indices and minimum values using chunk.
            min_indices = d_chunk.argmin(axis=1)
            min_values = d_chunk[np.arange(chunk_x.stop - chunk_x.start),
                                 min_indices]

            # chunk_x is a slice, so indices[chunk_x] / values[chunk_x] are
            # views and the masked assignments update the output in place.
            flags = values[chunk_x] > min_values
            indices[chunk_x][flags] = min_indices[flags] + chunk_y.start
            values[chunk_x][flags] = min_values[flags]

    if metric == "euclidean" and not metric_kwargs.get("squared", False):
        # Distances were kept squared inside the loop; take the root now.
        np.sqrt(values, values)
    return indices, values
def pairwise_distances_argmin(X, Y, axis=1, metric="euclidean",
                              batch_size=500, metric_kwargs=None):
    """For each row in X, find the index of the closest row of Y.

    Convenience wrapper around :func:`pairwise_distances_argmin_min` that
    discards the distances and keeps only the indices. It is mostly
    equivalent to ``pairwise_distances(X, Y=Y, metric=metric).argmin(axis)``
    but uses much less memory and is faster for large arrays. Works with
    dense 2D arrays only.

    Parameters
    ----------
    X : array-like
        Arrays containing points. Respective shapes (n_samples1, n_features)
        and (n_samples2, n_features)

    Y : array-like
        Arrays containing points. Respective shapes (n_samples1, n_features)
        and (n_samples2, n_features)

    axis : int, optional, default 1
        Axis along which the argmin and distances are to be computed.

    metric : string or callable
        Metric accepted by scikit-learn or scipy.spatial.distance; see
        :func:`pairwise_distances_argmin_min` for the full list of valid
        values. Distance matrices are not supported.

    batch_size : integer
        Number of rows of X and of Y processed per chunk. Larger values
        trade memory for speed.

    metric_kwargs : dict
        keyword arguments to pass to specified metric function.

    Returns
    -------
    argmin : numpy.ndarray
        Y[argmin[i], :] is the row in Y that is closest to X[i, :].

    See also
    --------
    sklearn.metrics.pairwise_distances
    sklearn.metrics.pairwise_distances_argmin_min
    """
    if metric_kwargs is None:
        metric_kwargs = {}
    # Delegate the chunked computation and drop the distances.
    indices, _ = pairwise_distances_argmin_min(X, Y, axis, metric,
                                               batch_size, metric_kwargs)
    return indices
def manhattan_distances(X, Y=None, sum_over_features=True,
                        size_threshold=None):
    """ Compute the L1 distances between the vectors in X and Y.

    With sum_over_features equal to False it returns the componentwise
    distances.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array_like
        An array with shape (n_samples_X, n_features).

    Y : array_like, optional
        An array with shape (n_samples_Y, n_features).

    sum_over_features : bool, default=True
        If True the function returns the pairwise distance matrix
        else it returns the componentwise L1 pairwise-distances.
        Not supported for sparse matrix inputs.

    size_threshold : int, default=5e8
        Unused parameter. Deprecated: passing any non-None value only
        triggers a DeprecationWarning.

    Returns
    -------
    D : array
        If sum_over_features is False shape is
        (n_samples_X * n_samples_Y, n_features) and D contains the
        componentwise L1 pairwise-distances (ie. absolute difference),
        else shape is (n_samples_X, n_samples_Y) and D contains
        the pairwise L1 distances.

    Examples
    --------
    >>> from sklearn.metrics.pairwise import manhattan_distances
    >>> manhattan_distances([[3]], [[3]])#doctest:+ELLIPSIS
    array([[ 0.]])
    >>> manhattan_distances([[3]], [[2]])#doctest:+ELLIPSIS
    array([[ 1.]])
    >>> manhattan_distances([[2]], [[3]])#doctest:+ELLIPSIS
    array([[ 1.]])
    >>> manhattan_distances([[1, 2], [3, 4]],\
         [[1, 2], [0, 3]])#doctest:+ELLIPSIS
    array([[ 0., 2.],
           [ 4., 4.]])
    >>> import numpy as np
    >>> X = np.ones((1, 2))
    >>> y = 2 * np.ones((2, 2))
    >>> manhattan_distances(X, y, sum_over_features=False)#doctest:+ELLIPSIS
    array([[ 1., 1.],
           [ 1., 1.]]...)
    """
    if size_threshold is not None:
        warnings.warn('Use of the "size_threshold" is deprecated '
                      'in 0.19 and it will be removed version '
                      '0.21 of scikit-learn', DeprecationWarning)
    X, Y = check_pairwise_arrays(X, Y)

    if issparse(X) or issparse(Y):
        # Sparse path: delegate to the Cython helper, which needs CSR layout.
        if not sum_over_features:
            raise TypeError("sum_over_features=%r not supported"
                            " for sparse matrices" % sum_over_features)

        X = csr_matrix(X, copy=False)
        Y = csr_matrix(Y, copy=False)
        # Pre-allocated output matrix filled in place by _sparse_manhattan.
        D = np.zeros((X.shape[0], Y.shape[0]))
        _sparse_manhattan(X.data, X.indices, X.indptr,
                          Y.data, Y.indices, Y.indptr,
                          X.shape[1], D)
        return D

    if sum_over_features:
        # Dense aggregated path: scipy's C implementation of cityblock.
        return distance.cdist(X, Y, 'cityblock')

    # Componentwise path: broadcast to (n_X, n_Y, n_features), take absolute
    # differences in place, then flatten the first two axes.
    D = X[:, np.newaxis, :] - Y[np.newaxis, :, :]
    D = np.abs(D, D)
    return D.reshape((-1, X.shape[1]))
def cosine_distances(X, Y=None):
    """Compute cosine distance between samples in X and Y.

    Cosine distance is defined as 1.0 minus the cosine similarity.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array_like, sparse matrix
        with shape (n_samples_X, n_features).

    Y : array_like, sparse matrix (optional)
        with shape (n_samples_Y, n_features).

    Returns
    -------
    distance matrix : array
        An array with shape (n_samples_X, n_samples_Y).

    See also
    --------
    sklearn.metrics.pairwise.cosine_similarity
    scipy.spatial.distance.cosine (dense matrices only)
    """
    dist = 1.0 - cosine_similarity(X, Y)
    # Keep values inside the theoretical [0, 2] range despite rounding.
    np.clip(dist, 0, 2, out=dist)
    if X is Y or Y is None:
        # Self-distances must be exactly zero; floating point rounding in the
        # dot products can leave tiny nonzero values on the diagonal.
        dist[np.diag_indices_from(dist)] = 0.0
    return dist
# Paired distances
def paired_euclidean_distances(X, Y):
    """Compute the row-wise (paired) euclidean distances between X and Y.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)

    Y : array-like, shape (n_samples, n_features)

    Returns
    -------
    distances : ndarray (n_samples, )
    """
    X, Y = check_paired_arrays(X, Y)
    # The L2 norm of each row of the difference is the paired distance.
    diff = X - Y
    return row_norms(diff)
def paired_manhattan_distances(X, Y):
    """Compute the row-wise (paired) L1 distances between X and Y.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)

    Y : array-like, shape (n_samples, n_features)

    Returns
    -------
    distances : ndarray (n_samples, )
    """
    X, Y = check_paired_arrays(X, Y)
    delta = X - Y
    if not issparse(delta):
        return np.abs(delta).sum(axis=-1)
    # Sparse difference: take absolute values of the stored entries only,
    # then reduce each row; np.squeeze flattens the resulting (n, 1) matrix.
    delta.data = np.abs(delta.data)
    return np.squeeze(np.array(delta.sum(axis=1)))
def paired_cosine_distances(X, Y):
    """Compute the row-wise (paired) cosine distances between X and Y.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)

    Y : array-like, shape (n_samples, n_features)

    Returns
    -------
    distances : ndarray, shape (n_samples, )

    Notes
    ------
    The cosine distance is equivalent to the half the squared
    euclidean distance if each sample is normalized to unit norm
    """
    X, Y = check_paired_arrays(X, Y)
    # After unit-normalizing each row, half the squared euclidean distance
    # equals 1 - cos(angle), i.e. the cosine distance.
    normalized_diff = normalize(X) - normalize(Y)
    return 0.5 * row_norms(normalized_diff, squared=True)
# Mapping from user-facing metric name to the paired-distance implementation
# used by paired_distances(). 'l2'/'euclidean' and the 'l1'/'manhattan'/
# 'cityblock' aliases resolve to the same functions.
PAIRED_DISTANCES = {
    'cosine': paired_cosine_distances,
    'euclidean': paired_euclidean_distances,
    'l2': paired_euclidean_distances,
    'l1': paired_manhattan_distances,
    'manhattan': paired_manhattan_distances,
    'cityblock': paired_manhattan_distances}
def paired_distances(X, Y, metric="euclidean", **kwds):
    """Computes the paired distances between X and Y.

    Computes the distances between (X[0], Y[0]), (X[1], Y[1]), etc...

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : ndarray (n_samples, n_features)
        Array 1 for distance computation.

    Y : ndarray (n_samples, n_features)
        Array 2 for distance computation.

    metric : string or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string, it must be one of the options
        specified in PAIRED_DISTANCES, including "euclidean",
        "manhattan", or "cosine".
        Alternatively, if metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays from X as input and return a value indicating
        the distance between them.

    Returns
    -------
    distances : ndarray (n_samples, )

    Examples
    --------
    >>> from sklearn.metrics.pairwise import paired_distances
    >>> X = [[0, 1], [1, 1]]
    >>> Y = [[0, 1], [2, 1]]
    >>> paired_distances(X, Y)
    array([ 0., 1.])

    See also
    --------
    pairwise_distances : pairwise distances.
    """
    if metric in PAIRED_DISTANCES:
        return PAIRED_DISTANCES[metric](X, Y)
    if callable(metric):
        # Check the matrix first (it is usually done by the metric).
        X, Y = check_paired_arrays(X, Y)
        # Fill a float output row by row with the user-supplied metric.
        out = np.empty(len(X))
        for idx in range(len(X)):
            out[idx] = metric(X[idx], Y[idx])
        return out
    raise ValueError('Unknown distance %s' % metric)
# Kernels
def linear_kernel(X, Y=None):
    """Compute the linear kernel (plain dot-product Gram matrix) of X and Y.

    Read more in the :ref:`User Guide <linear_kernel>`.

    Parameters
    ----------
    X : array of shape (n_samples_1, n_features)

    Y : array of shape (n_samples_2, n_features)

    Returns
    -------
    Gram matrix : array of shape (n_samples_1, n_samples_2)
    """
    X, Y = check_pairwise_arrays(X, Y)
    gram = safe_sparse_dot(X, Y.T, dense_output=True)
    return gram
def polynomial_kernel(X, Y=None, degree=3, gamma=None, coef0=1):
    """Compute the polynomial kernel between X and Y::

        K(X, Y) = (gamma <X, Y> + coef0)^degree

    Read more in the :ref:`User Guide <polynomial_kernel>`.

    Parameters
    ----------
    X : ndarray of shape (n_samples_1, n_features)

    Y : ndarray of shape (n_samples_2, n_features)

    degree : int, default 3

    gamma : float, default None
        if None, defaults to 1.0 / n_features

    coef0 : int, default 1

    Returns
    -------
    Gram matrix : array of shape (n_samples_1, n_samples_2)
    """
    X, Y = check_pairwise_arrays(X, Y)
    if gamma is None:
        gamma = 1.0 / X.shape[1]

    # Scale and shift the Gram matrix in place, then raise to `degree`.
    gram = safe_sparse_dot(X, Y.T, dense_output=True)
    gram *= gamma
    gram += coef0
    gram **= degree
    return gram
def sigmoid_kernel(X, Y=None, gamma=None, coef0=1):
    """Compute the sigmoid kernel between X and Y::

        K(X, Y) = tanh(gamma <X, Y> + coef0)

    Read more in the :ref:`User Guide <sigmoid_kernel>`.

    Parameters
    ----------
    X : ndarray of shape (n_samples_1, n_features)

    Y : ndarray of shape (n_samples_2, n_features)

    gamma : float, default None
        If None, defaults to 1.0 / n_features

    coef0 : int, default 1

    Returns
    -------
    Gram matrix : array of shape (n_samples_1, n_samples_2)
    """
    X, Y = check_pairwise_arrays(X, Y)
    if gamma is None:
        gamma = 1.0 / X.shape[1]

    grams = safe_sparse_dot(X, Y.T, dense_output=True)
    grams *= gamma
    grams += coef0
    # Apply tanh in place to avoid an extra allocation.
    np.tanh(grams, out=grams)
    return grams
def rbf_kernel(X, Y=None, gamma=None):
    """Compute the rbf (gaussian) kernel between X and Y::

        K(x, y) = exp(-gamma ||x-y||^2)

    for each pair of rows x in X and y in Y.

    Read more in the :ref:`User Guide <rbf_kernel>`.

    Parameters
    ----------
    X : array of shape (n_samples_X, n_features)

    Y : array of shape (n_samples_Y, n_features)

    gamma : float, default None
        If None, defaults to 1.0 / n_features

    Returns
    -------
    kernel_matrix : array of shape (n_samples_X, n_samples_Y)
    """
    X, Y = check_pairwise_arrays(X, Y)
    if gamma is None:
        gamma = 1.0 / X.shape[1]

    # Work on the squared distances in place: scale by -gamma, exponentiate.
    sq_dists = euclidean_distances(X, Y, squared=True)
    sq_dists *= -gamma
    return np.exp(sq_dists, out=sq_dists)
def laplacian_kernel(X, Y=None, gamma=None):
    """Compute the laplacian kernel between X and Y.

    The laplacian kernel is defined as::

        K(x, y) = exp(-gamma ||x-y||_1)

    for each pair of rows x in X and y in Y.

    Read more in the :ref:`User Guide <laplacian_kernel>`.

    .. versionadded:: 0.17

    Parameters
    ----------
    X : array of shape (n_samples_X, n_features)

    Y : array of shape (n_samples_Y, n_features)

    gamma : float, default None
        If None, defaults to 1.0 / n_features

    Returns
    -------
    kernel_matrix : array of shape (n_samples_X, n_samples_Y)
    """
    X, Y = check_pairwise_arrays(X, Y)
    if gamma is None:
        gamma = 1.0 / X.shape[1]

    # Scale the L1 distances by -gamma and exponentiate in place.
    kernel = manhattan_distances(X, Y)
    kernel *= -gamma
    np.exp(kernel, out=kernel)
    return kernel
def cosine_similarity(X, Y=None, dense_output=True):
    """Compute cosine similarity between samples in X and Y.

    Cosine similarity, or the cosine kernel, computes similarity as the
    normalized dot product of X and Y:

        K(X, Y) = <X, Y> / (||X||*||Y||)

    On L2-normalized data, this function is equivalent to linear_kernel.

    Read more in the :ref:`User Guide <cosine_similarity>`.

    Parameters
    ----------
    X : ndarray or sparse array, shape: (n_samples_X, n_features)
        Input data.

    Y : ndarray or sparse array, shape: (n_samples_Y, n_features)
        Input data. If ``None``, the output will be the pairwise
        similarities between all samples in ``X``.

    dense_output : boolean (optional), default True
        Whether to return dense output even when the input is sparse. If
        ``False``, the output is sparse if both input arrays are sparse.

        .. versionadded:: 0.17
           parameter ``dense_output`` for dense output.

    Returns
    -------
    kernel matrix : array
        An array with shape (n_samples_X, n_samples_Y).
    """
    X, Y = check_pairwise_arrays(X, Y)

    # Unit-normalize the rows, reusing the normalization when X and Y are
    # the same object (check_pairwise_arrays returns Y = X in that case).
    X_normalized = normalize(X, copy=True)
    Y_normalized = X_normalized if X is Y else normalize(Y, copy=True)

    return safe_sparse_dot(X_normalized, Y_normalized.T,
                           dense_output=dense_output)
def additive_chi2_kernel(X, Y=None):
    """Computes the additive chi-squared kernel between observations in X and Y

    The chi-squared kernel is computed between each pair of rows in X and Y. X
    and Y have to be non-negative. This kernel is most commonly applied to
    histograms.

    The chi-squared kernel is given by::

        k(x, y) = -Sum [(x - y)^2 / (x + y)]

    It can be interpreted as a weighted difference per entry.

    Read more in the :ref:`User Guide <chi2_kernel>`.

    Notes
    -----
    As the negative of a distance, this kernel is only conditionally positive
    definite.

    Parameters
    ----------
    X : array-like of shape (n_samples_X, n_features)

    Y : array of shape (n_samples_Y, n_features)

    Returns
    -------
    kernel_matrix : array of shape (n_samples_X, n_samples_Y)

    Raises
    ------
    ValueError
        If X or Y is sparse, or contains negative values.

    References
    ----------
    * Zhang, J. and Marszalek, M. and Lazebnik, S. and Schmid, C.
      Local features and kernels for classification of texture and object
      categories: A comprehensive study
      International Journal of Computer Vision 2007
      http://research.microsoft.com/en-us/um/people/manik/projects/trade-off/papers/ZhangIJCV06.pdf

    See also
    --------
    chi2_kernel : The exponentiated version of the kernel, which is usually
        preferable.

    sklearn.kernel_approximation.AdditiveChi2Sampler : A Fourier approximation
        to this kernel.
    """
    # Reject sparse input before validation: the Cython helper is dense-only.
    if issparse(X) or issparse(Y):
        raise ValueError("additive_chi2 does not support sparse matrices.")
    X, Y = check_pairwise_arrays(X, Y)
    if (X < 0).any():
        raise ValueError("X contains negative values.")
    if Y is not X and (Y < 0).any():
        raise ValueError("Y contains negative values.")

    # Pre-allocated output buffer, filled in place by the Cython helper.
    result = np.zeros((X.shape[0], Y.shape[0]), dtype=X.dtype)
    _chi2_kernel_fast(X, Y, result)
    return result
def chi2_kernel(X, Y=None, gamma=1.):
    """Computes the exponential chi-squared kernel X and Y.

    The chi-squared kernel is computed between each pair of rows in X and Y. X
    and Y have to be non-negative. This kernel is most commonly applied to
    histograms.

    The chi-squared kernel is given by::

        k(x, y) = exp(-gamma Sum [(x - y)^2 / (x + y)])

    It can be interpreted as a weighted difference per entry.

    Read more in the :ref:`User Guide <chi2_kernel>`.

    Parameters
    ----------
    X : array-like of shape (n_samples_X, n_features)

    Y : array of shape (n_samples_Y, n_features)

    gamma : float, default=1.
        Scaling parameter of the chi2 kernel.

    Returns
    -------
    kernel_matrix : array of shape (n_samples_X, n_samples_Y)

    References
    ----------
    * Zhang, J. and Marszalek, M. and Lazebnik, S. and Schmid, C.
      Local features and kernels for classification of texture and object
      categories: A comprehensive study
      International Journal of Computer Vision 2007
      http://research.microsoft.com/en-us/um/people/manik/projects/trade-off/papers/ZhangIJCV06.pdf

    See also
    --------
    additive_chi2_kernel : The additive version of this kernel

    sklearn.kernel_approximation.AdditiveChi2Sampler : A Fourier approximation
        to the additive version of this kernel.
    """
    # Exponentiate the (negative) additive chi2 kernel, scaled by gamma,
    # entirely in place.
    kernel = additive_chi2_kernel(X, Y)
    kernel *= gamma
    np.exp(kernel, out=kernel)
    return kernel
# Helper functions - distance
# Mapping from metric name to the scikit-learn distance implementation that
# pairwise_distances() dispatches to; metrics not listed here fall through to
# scipy.spatial.distance.
PAIRWISE_DISTANCE_FUNCTIONS = {
    # If updating this dictionary, update the doc in both distance_metrics()
    # and also in pairwise_distances()!
    'cityblock': manhattan_distances,
    'cosine': cosine_distances,
    'euclidean': euclidean_distances,
    'l2': euclidean_distances,
    'l1': manhattan_distances,
    'manhattan': manhattan_distances,
    'precomputed': None,  # HACK: precomputed is always allowed, never called
}
def distance_metrics():
    """Valid metrics for pairwise_distances.

    This function simply returns the valid pairwise distance metrics.
    It exists to allow for a description of the mapping for
    each of the valid strings.

    The valid distance metrics, and the function they map to, are:

    ============     ====================================
    metric           Function
    ============     ====================================
    'cityblock'      metrics.pairwise.manhattan_distances
    'cosine'         metrics.pairwise.cosine_distances
    'euclidean'      metrics.pairwise.euclidean_distances
    'l1'             metrics.pairwise.manhattan_distances
    'l2'             metrics.pairwise.euclidean_distances
    'manhattan'      metrics.pairwise.manhattan_distances
    ============     ====================================

    Read more in the :ref:`User Guide <metrics>`.

    Returns
    -------
    distance_metrics : dict
        Mapping from metric name to implementing function
        (the module-level PAIRWISE_DISTANCE_FUNCTIONS table).
    """
    return PAIRWISE_DISTANCE_FUNCTIONS
def _parallel_pairwise(X, Y, func, n_jobs, **kwds):
    """Break the pairwise matrix in n_jobs even slices
    and compute them in parallel.

    ``func`` is evaluated on ``(X, Y[s])`` for each slice ``s`` of Y's rows,
    and the per-slice column blocks are stacked back into the full matrix.
    """
    if n_jobs < 0:
        # Negative n_jobs means "all CPUs except (-n_jobs - 1)";
        # e.g. -1 uses all CPUs, -2 all but one.
        n_jobs = max(cpu_count() + 1 + n_jobs, 1)

    if Y is None:
        Y = X

    if n_jobs == 1:
        # Special case to avoid picklability checks in delayed
        return func(X, Y, **kwds)

    # TODO: in some cases, backend='threading' may be appropriate
    fd = delayed(func)
    ret = Parallel(n_jobs=n_jobs, verbose=0)(
        fd(X, Y[s], **kwds)
        for s in gen_even_slices(Y.shape[0], n_jobs))

    # Each worker produced the columns for its slice of Y; reassemble.
    return np.hstack(ret)
def _pairwise_callable(X, Y, metric, **kwds):
    """Handle the callable case for pairwise_{distances,kernels}.

    Evaluates the user-supplied ``metric`` on every pair of rows, exploiting
    symmetry when X and Y are the same object.
    """
    X, Y = check_pairwise_arrays(X, Y)

    if X is Y:
        # Only calculate metric for upper triangle
        out = np.zeros((X.shape[0], Y.shape[0]), dtype='float')
        iterator = itertools.combinations(range(X.shape[0]), 2)
        for i, j in iterator:
            out[i, j] = metric(X[i], Y[j], **kwds)

        # Make symmetric
        # NB: out += out.T will produce incorrect results (in-place addition
        # aliases the transpose view of the same buffer)
        out = out + out.T

        # Calculate diagonal
        # NB: nonzero diagonals are allowed for both metrics and kernels
        for i in range(X.shape[0]):
            x = X[i]
            out[i, i] = metric(x, x, **kwds)

    else:
        # Calculate all cells
        out = np.empty((X.shape[0], Y.shape[0]), dtype='float')
        iterator = itertools.product(range(X.shape[0]), range(Y.shape[0]))
        for i, j in iterator:
            out[i, j] = metric(X[i], Y[j], **kwds)

    return out
# Metric names accepted by pairwise_distances(): the scikit-learn
# implementations plus every metric understood by scipy.spatial.distance.
_VALID_METRICS = ['euclidean', 'l2', 'l1', 'manhattan', 'cityblock',
                  'braycurtis', 'canberra', 'chebyshev', 'correlation',
                  'cosine', 'dice', 'hamming', 'jaccard', 'kulsinski',
                  'mahalanobis', 'matching', 'minkowski', 'rogerstanimoto',
                  'russellrao', 'seuclidean', 'sokalmichener',
                  'sokalsneath', 'sqeuclidean', 'yule', "wminkowski"]
def pairwise_distances(X, Y=None, metric="euclidean", n_jobs=1, **kwds):
    """ Compute the distance matrix from a vector array X and optional Y.

    This method takes either a vector array or a distance matrix, and returns
    a distance matrix. If the input is a vector array, the distances are
    computed. If the input is a distances matrix, it is returned instead.

    This method provides a safe way to take a distance matrix as input, while
    preserving compatibility with many other algorithms that take a vector
    array.

    If Y is given (default is None), then the returned matrix is the pairwise
    distance between the arrays from both X and Y.

    Valid values for metric are:

    - From scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2',
      'manhattan']. These metrics support sparse matrix inputs.

    - From scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev',
      'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski', 'mahalanobis',
      'matching', 'minkowski', 'rogerstanimoto', 'russellrao', 'seuclidean',
      'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule']
      See the documentation for scipy.spatial.distance for details on these
      metrics. These metrics do not support sparse matrix inputs.

    Note that in the case of 'cityblock', 'cosine' and 'euclidean' (which are
    valid scipy.spatial.distance metrics), the scikit-learn implementation
    will be used, which is faster and has support for sparse matrices (except
    for 'cityblock'). For a verbose description of the metrics from
    scikit-learn, see the __doc__ of the sklearn.pairwise.distance_metrics
    function.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \
             [n_samples_a, n_features] otherwise
        Array of pairwise distances between samples, or a feature array.

    Y : array [n_samples_b, n_features], optional
        An optional second feature array. Only allowed if metric != "precomputed".

    metric : string, or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string, it must be one of the options
        allowed by scipy.spatial.distance.pdist for its metric parameter, or
        a metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS.
        If metric is "precomputed", X is assumed to be a distance matrix.
        Alternatively, if metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays from X as input and return a value indicating
        the distance between them.

    n_jobs : int
        The number of jobs to use for the computation. This works by breaking
        down the pairwise matrix into n_jobs even slices and computing them in
        parallel.

        If -1 all CPUs are used. If 1 is given, no parallel computing code is
        used at all, which is useful for debugging. For n_jobs below -1,
        (n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
        are used.

    **kwds : optional keyword parameters
        Any further parameters are passed directly to the distance function.
        If using a scipy.spatial.distance metric, the parameters are still
        metric dependent. See the scipy docs for usage examples.

    Returns
    -------
    D : array [n_samples_a, n_samples_a] or [n_samples_a, n_samples_b]
        A distance matrix D such that D_{i, j} is the distance between the
        ith and jth vectors of the given matrix X, if Y is None.
        If Y is not None, then D_{i, j} is the distance between the ith array
        from X and the jth array from Y.

    Raises
    ------
    ValueError
        If ``metric`` is not a known metric name, a callable, or
        "precomputed".
    TypeError
        If a scipy metric is requested on sparse input.
    """
    if (metric not in _VALID_METRICS and
            not callable(metric) and metric != "precomputed"):
        raise ValueError("Unknown metric %s. "
                         "Valid metrics are %s, or 'precomputed', or a "
                         "callable" % (metric, _VALID_METRICS))

    if metric == "precomputed":
        # X already is the distance matrix; validate and return it untouched.
        X, _ = check_pairwise_arrays(X, Y, precomputed=True)
        return X
    elif metric in PAIRWISE_DISTANCE_FUNCTIONS:
        func = PAIRWISE_DISTANCE_FUNCTIONS[metric]
    elif callable(metric):
        func = partial(_pairwise_callable, metric=metric, **kwds)
    else:
        # Fall through to scipy.spatial.distance (dense input only).
        if issparse(X) or issparse(Y):
            raise TypeError("scipy distance metrics do not"
                            " support sparse matrices.")

        # Boolean scipy metrics need boolean-typed input arrays.
        dtype = bool if metric in PAIRWISE_BOOLEAN_FUNCTIONS else None
        X, Y = check_pairwise_arrays(X, Y, dtype=dtype)

        if n_jobs == 1 and X is Y:
            # pdist computes only the condensed upper triangle - cheaper than
            # cdist for the symmetric X-vs-X case.
            return distance.squareform(distance.pdist(X, metric=metric,
                                                      **kwds))
        func = partial(distance.cdist, metric=metric, **kwds)

    return _parallel_pairwise(X, Y, func, n_jobs, **kwds)
# These distance metrics require boolean input arrays when dispatched to
# scipy.spatial.distance; pairwise_distances() converts X and Y to
# dtype=bool before calling scipy for any metric listed here.
PAIRWISE_BOOLEAN_FUNCTIONS = [
    'dice',
    'jaccard',
    'kulsinski',
    'matching',
    'rogerstanimoto',
    'russellrao',
    'sokalmichener',
    'sokalsneath',
    'yule',
]
# Helper functions - kernels
# Mapping from kernel name to the implementation used by pairwise_kernels().
PAIRWISE_KERNEL_FUNCTIONS = {
    # If updating this dictionary, update the doc in both kernel_metrics()
    # and also in pairwise_kernels()!
    'additive_chi2': additive_chi2_kernel,
    'chi2': chi2_kernel,
    'linear': linear_kernel,
    'polynomial': polynomial_kernel,
    'poly': polynomial_kernel,
    'rbf': rbf_kernel,
    'laplacian': laplacian_kernel,
    'sigmoid': sigmoid_kernel,
    'cosine': cosine_similarity, }
def kernel_metrics():
    """ Valid metrics for pairwise_kernels

    This function simply returns the valid pairwise distance metrics.
    It exists, however, to allow for a verbose description of the mapping for
    each of the valid strings.

    The valid distance metrics, and the function they map to, are:
      ===============   ========================================
      metric            Function
      ===============   ========================================
      'additive_chi2'   sklearn.pairwise.additive_chi2_kernel
      'chi2'            sklearn.pairwise.chi2_kernel
      'linear'          sklearn.pairwise.linear_kernel
      'poly'            sklearn.pairwise.polynomial_kernel
      'polynomial'      sklearn.pairwise.polynomial_kernel
      'rbf'             sklearn.pairwise.rbf_kernel
      'laplacian'       sklearn.pairwise.laplacian_kernel
      'sigmoid'         sklearn.pairwise.sigmoid_kernel
      'cosine'          sklearn.pairwise.cosine_similarity
      ===============   ========================================

    Read more in the :ref:`User Guide <metrics>`.

    Returns
    -------
    kernel_metrics : dict
        Mapping from kernel name to implementing function
        (the module-level PAIRWISE_KERNEL_FUNCTIONS table).
    """
    return PAIRWISE_KERNEL_FUNCTIONS
# For each named kernel, the keyword arguments that
# pairwise_kernels(..., filter_params=True) will forward to the kernel;
# any other kwarg is silently dropped.
KERNEL_PARAMS = {
    "additive_chi2": (),
    "chi2": frozenset(["gamma"]),
    "cosine": (),
    "linear": (),
    "poly": frozenset(["gamma", "degree", "coef0"]),
    "polynomial": frozenset(["gamma", "degree", "coef0"]),
    "rbf": frozenset(["gamma"]),
    "laplacian": frozenset(["gamma"]),
    "sigmoid": frozenset(["gamma", "coef0"]),
}
def pairwise_kernels(X, Y=None, metric="linear", filter_params=False,
                     n_jobs=1, **kwds):
    """Compute the kernel between arrays X and optional array Y.

    This method takes either a vector array or a kernel matrix, and returns
    a kernel matrix. If the input is a vector array, the kernels are
    computed. If the input is a kernel matrix, it is returned instead.

    This method provides a safe way to take a kernel matrix as input, while
    preserving compatibility with many other algorithms that take a vector
    array.

    If Y is given (default is None), then the returned matrix is the pairwise
    kernel between the arrays from both X and Y.

    Valid values for metric are::
        ['rbf', 'sigmoid', 'polynomial', 'poly', 'linear', 'cosine']

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \
             [n_samples_a, n_features] otherwise
        Array of pairwise kernels between samples, or a feature array.

    Y : array [n_samples_b, n_features]
        A second feature array only if X has shape [n_samples_a, n_features].

    metric : string, or callable
        The metric to use when calculating kernel between instances in a
        feature array. If metric is a string, it must be one of the metrics
        in pairwise.PAIRWISE_KERNEL_FUNCTIONS.
        If metric is "precomputed", X is assumed to be a kernel matrix.
        Alternatively, if metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays from X as input and return a value indicating
        the distance between them.

    filter_params : boolean
        Whether to filter invalid parameters or not.

    n_jobs : int
        The number of jobs to use for the computation. This works by breaking
        down the pairwise matrix into n_jobs even slices and computing them in
        parallel.

        If -1 all CPUs are used. If 1 is given, no parallel computing code is
        used at all, which is useful for debugging. For n_jobs below -1,
        (n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
        are used.

    **kwds : optional keyword parameters
        Any further parameters are passed directly to the kernel function.

    Returns
    -------
    K : array [n_samples_a, n_samples_a] or [n_samples_a, n_samples_b]
        A kernel matrix K such that K_{i, j} is the kernel between the
        ith and jth vectors of the given matrix X, if Y is None.
        If Y is not None, then K_{i, j} is the kernel between the ith array
        from X and the jth array from Y.

    Notes
    -----
    If metric is 'precomputed', Y is ignored and X is returned.
    """
    # import GPKernel locally to prevent circular imports
    from ..gaussian_process.kernels import Kernel as GPKernel

    if metric == "precomputed":
        # X already is the kernel matrix; validate and return it untouched.
        X, _ = check_pairwise_arrays(X, Y, precomputed=True)
        return X
    elif isinstance(metric, GPKernel):
        # Gaussian-process kernel objects are directly callable.
        func = metric.__call__
    elif metric in PAIRWISE_KERNEL_FUNCTIONS:
        if filter_params:
            # Keep only the kwargs this kernel accepts (see KERNEL_PARAMS).
            kwds = dict((k, kwds[k]) for k in kwds
                        if k in KERNEL_PARAMS[metric])
        func = PAIRWISE_KERNEL_FUNCTIONS[metric]
    elif callable(metric):
        func = partial(_pairwise_callable, metric=metric, **kwds)
    else:
        raise ValueError("Unknown kernel %r" % metric)

    return _parallel_pairwise(X, Y, func, n_jobs, **kwds)
| mit |
tumbl3w33d/ansible | lib/ansible/modules/network/junos/_junos_vlan.py | 21 | 7120 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: junos_vlan
version_added: "2.4"
author: "Ganesh Nalawade (@ganeshrn)"
short_description: Manage VLANs on Juniper JUNOS network devices
description:
- This module provides declarative management of VLANs
on Juniper JUNOS network devices.
deprecated:
removed_in: "2.13"
why: Updated modules released with more functionality
alternative: Use M(junos_vlans) instead.
options:
name:
description:
- Name of the VLAN.
required: true
vlan_id:
description:
- ID of the VLAN. Range 1-4094.
required: true
l3_interface:
description:
- Name of logical layer 3 interface.
version_added: "2.7"
filter_input:
description:
- The name of input filter.
version_added: "2.8"
filter_output:
description:
- The name of output filter.
version_added: "2.8"
description:
description:
- Text description of VLANs.
interfaces:
description:
- List of interfaces to check the VLAN has been
configured correctly.
aggregate:
description: List of VLANs definitions.
state:
description:
- State of the VLAN configuration.
default: present
choices: ['present', 'absent']
active:
description:
- Specifies whether or not the configuration is active or deactivated
default: True
type: bool
requirements:
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
- Tested against vSRX JUNOS version 15.1X49-D15.4, vqfx-10000 JUNOS Version 15.1X53-D60.4.
- Recommended connection is C(netconf). See L(the Junos OS Platform Options,../network/user_guide/platform_junos.html).
- This module also works with C(local) connections for legacy playbooks.
extends_documentation_fragment: junos
"""
EXAMPLES = """
- name: configure VLAN ID and name
junos_vlan:
name: test
vlan_id: 20
- name: Link to logical layer 3 interface
junos_vlan:
name: test
vlan_id: 20
l3-interface: vlan.20
- name: remove VLAN configuration
junos_vlan:
name: test
state: absent
- name: deactive VLAN configuration
junos_vlan:
name: test
state: present
active: False
- name: activate VLAN configuration
junos_vlan:
name: test
state: present
active: True
- name: Create vlan configuration using aggregate
junos_vlan:
aggregate:
- { vlan_id: 159, name: test_vlan_1, description: test vlan-1 }
- { vlan_id: 160, name: test_vlan_2, description: test vlan-2 }
- name: Delete vlan configuration using aggregate
junos_vlan:
aggregate:
- { vlan_id: 159, name: test_vlan_1 }
- { vlan_id: 160, name: test_vlan_2 }
state: absent
"""
RETURN = """
diff.prepared:
description: Configuration difference before and after applying change.
returned: when configuration is changed and diff option is enabled.
type: str
sample: >
[edit vlans]
+ test-vlan-1 {
+ vlan-id 60;
+ }
"""
import collections
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.junos.junos import junos_argument_spec, tostring
from ansible.module_utils.network.junos.junos import load_config, map_params_to_obj, map_obj_to_ele, to_param_list
from ansible.module_utils.network.junos.junos import commit_configuration, discard_changes, locked_config
USE_PERSISTENT_CONNECTION = True
def validate_vlan_id(value, module):
    """Fail the module run when vlan_id is present but outside 1-4094.

    :param value: the user-supplied vlan_id, or None when not provided.
    :param module: the AnsibleModule; fail_json() is called on bad input.

    Only a supplied value is range-checked; None means "not set" and is
    accepted. Note: the previous truthiness test (``if value and ...``)
    let an (invalid) vlan_id of 0 slip through unvalidated.
    """
    if value is not None and not 1 <= value <= 4094:
        module.fail_json(msg='vlan_id must be between 1 and 4094')
def validate_param_values(module, obj, param=None):
    """Run per-key validators over the supplied parameters.

    For every key in ``obj``, look for a module-level function named
    ``validate_<key>`` and, when one exists, invoke it with the parameter
    value and the module so it can fail_json() on invalid input.

    :param module: the AnsibleModule instance.
    :param obj: mapping whose keys name the parameters to validate.
    :param param: optional parameter dict; falls back to module.params.
    """
    values = param or module.params
    for key in obj:
        # Dynamic lookup: a validator is optional for any given key.
        validator = globals().get('validate_%s' % key)
        if callable(validator):
            validator(values.get(key), module)
def main():
    """ main entry point for module execution
    """
    # Per-VLAN argument schema; also reused (with tweaks) for aggregates.
    element_spec = dict(
        name=dict(),
        vlan_id=dict(type='int'),
        description=dict(),
        interfaces=dict(),
        l3_interface=dict(),
        filter_input=dict(),
        filter_output=dict(),
        state=dict(default='present', choices=['present', 'absent']),
        active=dict(default=True, type='bool')
    )

    # Aggregate entries must each carry a name; defaults are stripped so
    # per-item values can fall back to the top-level module arguments.
    aggregate_spec = deepcopy(element_spec)
    aggregate_spec['name'] = dict(required=True)

    # remove default in aggregate spec, to handle common arguments
    remove_default_spec(aggregate_spec)

    argument_spec = dict(
        aggregate=dict(type='list', elements='dict', options=aggregate_spec)
    )

    argument_spec.update(element_spec)
    argument_spec.update(junos_argument_spec)

    # 'name' and 'aggregate' are alternatives: exactly one must be given.
    required_one_of = [['aggregate', 'name']]
    mutually_exclusive = [['aggregate', 'name']]

    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=required_one_of,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)

    warnings = list()
    result = {'changed': False}

    # NOTE(review): 'warnings' is always empty here, so this branch never
    # fires; kept for parity with sibling junos modules.
    if warnings:
        result['warnings'] = warnings

    # XPath root for VLAN configuration in the Junos data model.
    top = 'vlans/vlan'

    # Maps module parameter names to the XPath (relative to 'top') that
    # carries the value; 'name' is the key element identifying the VLAN.
    param_to_xpath_map = collections.OrderedDict()
    param_to_xpath_map.update([
        ('name', {'xpath': 'name', 'is_key': True}),
        ('vlan_id', 'vlan-id'),
        ('l3_interface', 'l3-interface'),
        ('filter_input', 'forwarding-options/filter/input'),
        ('filter_output', 'forwarding-options/filter/output'),
        ('description', 'description')
    ])

    # Normalize single-item vs. aggregate invocation into a uniform list.
    params = to_param_list(module)

    requests = list()
    for param in params:
        # if key doesn't exist in the item, get it from module.params
        for key in param:
            if param.get(key) is None:
                param[key] = module.params[key]

        item = param.copy()

        validate_param_values(module, param_to_xpath_map, param=item)

        want = map_params_to_obj(module, param_to_xpath_map, param=item)
        requests.append(map_obj_to_ele(module, want, top, param=item))

    diff = None
    # Hold the configuration lock for the whole load/commit cycle.
    with locked_config(module):
        for req in requests:
            diff = load_config(module, tostring(req), warnings, action='merge')

        commit = not module.check_mode
        if diff:
            if commit:
                commit_configuration(module)
            else:
                # Check mode: report the diff but leave the device untouched.
                discard_changes(module)
            result['changed'] = True

            if module._diff:
                result['diff'] = {'prepared': diff}

    module.exit_json(**result)


if __name__ == "__main__":
    main()
| gpl-3.0 |
x303597316/hue | desktop/core/src/desktop/middleware.py | 4 | 23190 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import inspect
import json
import logging
import os.path
import re
import tempfile
import kerberos
from datetime import datetime
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME, BACKEND_SESSION_KEY, authenticate, load_backend, login
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.core import exceptions, urlresolvers
import django.db
from django.http import HttpResponseNotAllowed
from django.core.urlresolvers import resolve
from django.http import HttpResponseRedirect, HttpResponse
from django.utils.translation import ugettext as _
from django.utils.http import urlquote, is_safe_url
from django.utils.encoding import iri_to_uri
import django.views.static
import desktop.views
import desktop.conf
from desktop.context_processors import get_app_name
from desktop.lib import apputil, i18n
from desktop.lib.django_util import render, render_json, is_jframe_request
from desktop.lib.exceptions import StructuredException
from desktop.lib.exceptions_renderable import PopupException
from desktop.log.access import access_log, log_page_hit
from desktop import appmanager
from desktop import metrics
from hadoop import cluster
from desktop.log import get_audit_logger
LOG = logging.getLogger(__name__)
MIDDLEWARE_HEADER = "X-Hue-Middleware-Response"
# Views inside Django that don't require login
# (see LoginAndPermissionMiddleware)
DJANGO_VIEW_AUTH_WHITELIST = [
django.views.static.serve,
desktop.views.is_alive,
]
class AjaxMiddleware(object):
    """
    Flags each request as AJAX or not.

    After this middleware runs, ``request.ajax`` is True when either the
    XMLHttpRequest header is present (``is_ajax()``) or the caller passed
    ``?format=json``.
    """
    def process_request(self, request):
        wants_json = request.REQUEST.get("format", "") == "json"
        request.ajax = bool(request.is_ajax() or wants_json)
        return None
class ExceptionMiddleware(object):
    """
    If exceptions know how to render themselves, use that.

    PopupException renders itself; StructuredException is rendered as JSON
    for AJAX requests or via the generic error page otherwise. Any other
    exception falls through to Django's default handling.
    """
    def process_exception(self, request, exception):
        import traceback
        tb = traceback.format_exc()
        logging.info("Processing exception: %s: %s" % (i18n.smart_unicode(exception),
                                                       i18n.smart_unicode(tb)))

        if isinstance(exception, PopupException):
            # The exception carries its own rendering logic.
            return exception.response(request)

        if isinstance(exception, StructuredException):
            if request.ajax:
                response = render_json(exception.response_data)
                # Header lets the client JS distinguish error payloads.
                response[MIDDLEWARE_HEADER] = 'EXCEPTION'
                response.status_code = getattr(exception, 'error_code', 500)
                return response
            else:
                response = render("error.mako", request,
                                  dict(error=exception.response_data.get("message")))
                response.status_code = getattr(exception, 'error_code', 500)
                return response

        # Unknown exception type: let the framework handle it.
        return None
class ClusterMiddleware(object):
    """
    Manages setting request.fs and request.jt
    """
    def process_view(self, request, view_func, view_args, view_kwargs):
        """
        Sets request.fs and request.jt on every request to point to the
        configured filesystem.
        """
        # Cluster selection: query parameter wins, then URL kwarg, then
        # the cluster named "default".
        request.fs_ref = request.REQUEST.get('fs', view_kwargs.get('fs', 'default'))
        if "fs" in view_kwargs:
            # Consume the kwarg so the view function is not passed it.
            del view_kwargs["fs"]

        try:
            request.fs = cluster.get_hdfs(request.fs_ref)
        except KeyError:
            raise KeyError(_('Cannot find HDFS called "%(fs_ref)s".') % {'fs_ref': request.fs_ref})

        if request.user.is_authenticated():
            if request.fs is not None:
                # Perform filesystem operations as the logged-in user.
                request.fs.setuser(request.user.username)

            request.jt = cluster.get_default_mrcluster() # Deprecated, only there for MR1

            if request.jt is not None:
                request.jt.setuser(request.user.username)
        else:
            request.jt = None
class NotificationMiddleware(object):
    """
    Attaches user-notification helpers to the request.

    After this middleware runs, views can call ``request.info``,
    ``request.warn`` and ``request.error`` to enqueue user-visible
    messages through the Django messages framework.
    """
    def process_view(self, request, view_func, view_args, view_kwargs):
        def _format(title, detail=None):
            # No detail still yields a trailing space after the title,
            # matching what the UI has always rendered.
            suffix = '' if detail is None else '<br/>%s' % detail
            return '%s %s' % (title, suffix)

        request.info = lambda title, detail=None: messages.info(request, _format(title, detail))
        request.error = lambda title, detail=None: messages.error(request, _format(title, detail))
        request.warn = lambda title, detail=None: messages.warning(request, _format(title, detail))
class AppSpecificMiddleware(object):
    """
    Loads and dispatches per-application middleware declared in each Hue
    app's own MIDDLEWARE_CLASSES setting.

    At construction time every app's middleware classes are instantiated
    and bucketed by hook type ('view', 'response', 'exception'); at
    request time only the middlewares of the app owning the view run.
    """
    @classmethod
    def augment_request_with_app(cls, request, view_func):
        """ Stuff the app into the request for use in later-stage middleware """
        if not hasattr(request, "_desktop_app"):
            module = inspect.getmodule(view_func)
            request._desktop_app = apputil.get_app_for_module(module)
            if not request._desktop_app and not module.__name__.startswith('django.'):
                logging.debug("no app for view func: %s in %s" % (view_func, module))

    def __init__(self):
        # Maps app name -> {'view': [...], 'response': [...], 'exception': [...]}.
        self.middlewares_by_app = {}
        for app in appmanager.DESKTOP_APPS:
            self.middlewares_by_app[app.name] = self._load_app_middleware(app)

    def _get_middlewares(self, app, type):
        # 'type' is one of 'view', 'response' or 'exception'.
        return self.middlewares_by_app.get(app, {}).get(type, [])

    def process_view(self, request, view_func, view_args, view_kwargs):
        """View middleware"""
        self.augment_request_with_app(request, view_func)
        if not request._desktop_app:
            return None

        # Run the middlewares
        ret = None
        for middleware in self._get_middlewares(request._desktop_app, 'view'):
            ret = middleware(request, view_func, view_args, view_kwargs)
            if ret: return ret # short circuit
        return ret

    def process_response(self, request, response):
        """Response middleware"""
        # We have the app that we stuffed in there
        if not hasattr(request, '_desktop_app'):
            logging.debug("No desktop_app known for request.")
            return response

        # Response middlewares run in reverse registration order,
        # mirroring Django's own middleware semantics.
        for middleware in reversed(self._get_middlewares(request._desktop_app, 'response')):
            response = middleware(request, response)
        return response

    def process_exception(self, request, exception):
        """Exception middleware"""
        # We have the app that we stuffed in there
        if not hasattr(request, '_desktop_app'):
            logging.debug("No desktop_app known for exception.")
            return None

        # Run the middlewares
        ret = None
        for middleware in self._get_middlewares(request._desktop_app, 'exception'):
            ret = middleware(request, exception)
            if ret: return ret # short circuit
        return ret

    def _load_app_middleware(cls, app):
        # NOTE(review): despite the 'cls' name this is a plain instance
        # method (no @classmethod decorator) — confirm before relying on it.
        app_settings = app.settings
        if not app_settings:
            return
        mw_classes = app_settings.__dict__.get('MIDDLEWARE_CLASSES', [])

        # Buckets of bound hook methods, keyed by hook type.
        result = {'view': [], 'response': [], 'exception': []}
        for middleware_path in mw_classes:
            # This code brutally lifted from django.core.handlers
            try:
                dot = middleware_path.rindex('.')
            except ValueError:
                raise exceptions.ImproperlyConfigured, _('%(module)s isn\'t a middleware module.') % {'module': middleware_path}
            mw_module, mw_classname = middleware_path[:dot], middleware_path[dot+1:]
            try:
                mod = __import__(mw_module, {}, {}, [''])
            except ImportError, e:
                raise exceptions.ImproperlyConfigured, _('Error importing middleware %(module)s: "%(error)s".') % {'module': mw_module, 'error': e}
            try:
                mw_class = getattr(mod, mw_classname)
            except AttributeError:
                raise exceptions.ImproperlyConfigured, _('Middleware module "%(module)s" does not define a "%(class)s" class.') % {'module': mw_module, 'class':mw_classname}
            try:
                mw_instance = mw_class()
            except exceptions.MiddlewareNotUsed:
                # The middleware opted out (e.g. feature not configured).
                continue
            # End brutal code lift

            # We need to make sure we don't have a process_request function because we don't know what
            # application will handle the request at the point process_request is called
            if hasattr(mw_instance, 'process_request'):
                raise exceptions.ImproperlyConfigured, \
                      _('AppSpecificMiddleware module "%(module)s" has a process_request function' + \
                        ' which is impossible.') % {'module': middleware_path}
            if hasattr(mw_instance, 'process_view'):
                result['view'].append(mw_instance.process_view)
            if hasattr(mw_instance, 'process_response'):
                # insert(0): preserve reverse order for response/exception hooks.
                result['response'].insert(0, mw_instance.process_response)
            if hasattr(mw_instance, 'process_exception'):
                result['exception'].insert(0, mw_instance.process_exception)
        return result
class LoginAndPermissionMiddleware(object):
    """
    Middleware that forces all views (except those that opt out) through authentication.
    """

    def process_view(self, request, view_func, view_args, view_kwargs):
        """
        We also perform access logging in ``process_view()`` since we have the view function,
        which tells us the log level. The downside is that we don't have the status code,
        which isn't useful for status logging anyways.
        """
        access_log_level = getattr(view_func, 'access_log_level', None)

        # First, skip views not requiring login
        # If the view has "opted out" of login required, skip
        if hasattr(view_func, "login_notrequired"):
            log_page_hit(request, view_func, level=access_log_level or logging.DEBUG)
            return None

        # There are certain django views which are also opt-out, but
        # it would be evil to go add attributes to them
        if view_func in DJANGO_VIEW_AUTH_WHITELIST:
            log_page_hit(request, view_func, level=access_log_level or logging.DEBUG)
            return None

        # If user is logged in, check that he has permissions to access the
        # app.
        if request.user.is_active and request.user.is_authenticated():
            AppSpecificMiddleware.augment_request_with_app(request, view_func)

            # Until we get Django 1.3 and resolve returning the URL name, we just do a match of the name of the view
            try:
                access_view = 'access_view:%s:%s' % (request._desktop_app, resolve(request.path)[0].__name__)
            except Exception, e:
                access_log(request, 'error checking view perm: %s', e, level=access_log_level)
                access_view =''

            # Accessing an app can access an underlying other app.
            # e.g. impala or spark uses code from beeswax and so accessing impala shows up as beeswax here.
            # Here we trust the URL to be the real app we need to check the perms.
            app_accessed = request._desktop_app
            ui_app_accessed = get_app_name(request)
            if app_accessed != ui_app_accessed and ui_app_accessed not in ('logs', 'accounts', 'login'):
                app_accessed = ui_app_accessed

            # Permission granted either app-wide ("access") or for this
            # specific view ("access_view:<app>:<view>").
            if app_accessed and \
                app_accessed not in ("desktop", "home", "about") and \
                not (request.user.has_hue_permission(action="access", app=app_accessed) or
                     request.user.has_hue_permission(action=access_view, app=app_accessed)):
                access_log(request, 'permission denied', level=access_log_level)
                return PopupException(
                    _("You do not have permission to access the %(app_name)s application.") % {'app_name': app_accessed.capitalize()}, error_code=401).response(request)
            else:
                log_page_hit(request, view_func, level=access_log_level)
                return None

        # Not authenticated: redirect (or signal the JS layer) to login.
        logging.info("Redirecting to login page: %s", request.get_full_path())
        access_log(request, 'login redirection', level=access_log_level)
        if request.ajax:
            # Send back a magic header which causes Hue.Request to interpose itself
            # in the ajax request and make the user login before resubmitting the
            # request.
            response = HttpResponse("/* login required */", content_type="text/javascript")
            response[MIDDLEWARE_HEADER] = 'LOGIN_REQUIRED'
            return response
        else:
            return HttpResponseRedirect("%s?%s=%s" % (settings.LOGIN_URL, REDIRECT_FIELD_NAME, urlquote(request.get_full_path())))
class JsonMessage(object):
    """
    Lazily JSON-serialized message.

    ``str()`` of an instance yields the JSON encoding of the keyword
    arguments it was constructed with; serialization cost is only paid
    when (and if) the message is actually rendered.
    """
    def __init__(self, **kwargs):
        self.kwargs = dict(kwargs)

    def __str__(self):
        return json.dumps(self.kwargs)
class AuditLoggingMiddleware(object):
    """
    Writes one JSON audit record per response when an audit event log
    directory is configured; otherwise unloads itself.
    """

    def __init__(self):
        from desktop.conf import AUDIT_EVENT_LOG_DIR

        if not AUDIT_EVENT_LOG_DIR.get():
            LOG.info('Unloading AuditLoggingMiddleware')
            raise exceptions.MiddlewareNotUsed

    def process_response(self, request, response):
        # Audit logging must never break request handling, hence the
        # broad catch below.
        try:
            audit_logger = get_audit_logger()
            audit_logger.debug(JsonMessage(**{
                # Record key is the current UTC time as a Unix-epoch string.
                datetime.utcnow().strftime('%s'): {
                    'user': request.user.username if hasattr(request, 'user') else 'anonymous',
                    "status": response.status_code,
                    "impersonator": None,
                    "ip_address": request.META.get('REMOTE_ADDR'),
                    "authorization_failure": response.status_code == 401,
                    "service": get_app_name(request),
                    "url": request.path,
                }
            }))
            response['audited'] = True
        except Exception, e:
            LOG.error('Could not audit the request: %s' % e)
        return response
# Optional dependency: tidylib powers HtmlValidationMiddleware below; the
# middleware degrades to a no-op when the import fails.
try:
    import tidylib
    _has_tidylib = True
except Exception, ex:
    # The exception type is not ImportError. It's actually an OSError.
    logging.warn("Failed to import tidylib (for debugging). Is libtidy installed?")
    _has_tidylib = False
class HtmlValidationMiddleware(object):
    """
    If configured, validate output html for every response.

    Runs libtidy over every HTML response, filters out warnings considered
    noise, and writes the original/tidied/report files to a temp directory
    for offline debugging. Development aid only.
    """
    def __init__(self):
        self._logger = logging.getLogger('HtmlValidationMiddleware')

        if not _has_tidylib:
            logging.error("HtmlValidationMiddleware not activatived: "
                          "Failed to import tidylib.")
            return

        # Things that we don't care about
        self._to_ignore = (
            re.compile('- Warning: <.*> proprietary attribute "data-'),
            re.compile('- Warning: trimming empty'),
            re.compile('- Info:'),
        )

        # Find the directory to write tidy html output
        try:
            self._outdir = os.path.join(tempfile.gettempdir(), 'hue_html_validation')
            if not os.path.isdir(self._outdir):
                os.mkdir(self._outdir, 0755)
        except Exception, ex:
            # Fall back to a fresh private temp directory.
            self._logger.exception('Failed to get temp directory: %s', (ex,))
            self._outdir = tempfile.mkdtemp(prefix='hue_html_validation-')

        # Options to pass to libtidy. See
        # http://tidy.sourceforge.net/docs/quickref.html
        self._options = {
            'show-warnings': 1,
            'output-html': 0,
            'output-xhtml': 1,
            'char-encoding': 'utf8',
            'output-encoding': 'utf8',
            'indent': 1,
            'wrap': 0,
        }

    def process_response(self, request, response):
        if not _has_tidylib or not self._is_html(request, response):
            return response

        html, errors = tidylib.tidy_document(response.content,
                                             self._options,
                                             keep_doc=True)
        if not errors:
            return response

        # Filter out what we care about
        err_list = errors.rstrip().split('\n')
        err_list = self._filter_warnings(err_list)
        if not err_list:
            return response

        try:
            # Resolve the view function so the report file is named after it.
            fn = urlresolvers.resolve(request.path)[0]
            fn_name = '%s.%s' % (fn.__module__, fn.__name__)
        except:
            LOG.exception('failed to resolve url')
            fn_name = '<unresolved_url>'

        # Write the two versions of html out for offline debugging
        filename = os.path.join(self._outdir, fn_name)

        result = "HTML tidy result: %s [%s]:" \
                 "\n\t%s" \
                 "\nPlease see %s.orig %s.tidy\n-------" % \
                 (request.path, fn_name, '\n\t'.join(err_list), filename, filename)

        file(filename + '.orig', 'w').write(i18n.smart_str(response.content))
        file(filename + '.tidy', 'w').write(i18n.smart_str(html))
        file(filename + '.info', 'w').write(i18n.smart_str(result))

        self._logger.error(result)
        return response

    def _filter_warnings(self, err_list):
        """A hacky way to filter out things that we don't care about."""
        res = [ ]
        for err in err_list:
            # for/else: append only if no ignore pattern matched.
            for ignore in self._to_ignore:
                if ignore.search(err):
                    break
            else:
                res.append(err)
        return res

    def _is_html(self, request, response):
        # Only validate successful, non-AJAX, HTML responses.
        return not request.is_ajax() and \
               'html' in response['Content-Type'] and \
               200 <= response.status_code < 300
class SpnegoMiddleware(object):
    """
    SPNEGO/Kerberos "Negotiate" authentication middleware.

    Based on the WSGI SPNEGO middlware class posted here:
    http://code.activestate.com/recipes/576992/

    Unloads itself unless the SpnegoDjangoBackend auth backend is
    configured.
    """

    def __init__(self):
        if 'desktop.auth.backend.SpnegoDjangoBackend' not in desktop.conf.AUTH.BACKEND.get():
            LOG.info('Unloading SpnegoMiddleware')
            raise exceptions.MiddlewareNotUsed

    def process_response(self, request, response):
        # Relay the state stashed by process_request() into response headers.
        if 'GSS-String' in request.META:
            response['WWW-Authenticate'] = request.META['GSS-String']
        elif 'Return-401' in request.META:
            response = HttpResponse("401 Unauthorized", content_type="text/plain",
                status=401)
            response['WWW-Authenticate'] = 'Negotiate'
            response.status = 401
        return response

    def process_request(self, request):
        """
        The process_request() method needs to communicate some state to the
        process_response() method. The two options for this are to return an
        HttpResponse object or to modify the META headers in the request object. In
        order to ensure that all of the middleware is properly invoked, this code
        currently uses the later approach. The following headers are currently used:

        GSS-String:
          This means that GSS authentication was successful and that we need to pass
          this value for the WWW-Authenticate header in the response.

        Return-401:
          This means that the SPNEGO backend is in use, but we didn't get an
          AUTHORIZATION header from the client. The way that the protocol works
          (http://tools.ietf.org/html/rfc4559) is by having the first response to an
          un-authenticated request be a 401 with the WWW-Authenticate header set to
          Negotiate. This will cause the browser to re-try the request with the
          AUTHORIZATION header set.
        """
        # AuthenticationMiddleware is required so that request.user exists.
        if not hasattr(request, 'user'):
            # Bug fix: this previously raised a NameError because
            # ImproperlyConfigured was referenced unqualified; only the
            # 'exceptions' module is imported at the top of this file.
            raise exceptions.ImproperlyConfigured(
                "The Django remote user auth middleware requires the"
                " authentication middleware to be installed. Edit your"
                " MIDDLEWARE_CLASSES setting to insert"
                " 'django.contrib.auth.middleware.AuthenticationMiddleware'"
                " before the SpnegoUserMiddleware class.")

        if 'HTTP_AUTHORIZATION' in request.META:
            # Renamed from 'type' to avoid shadowing the builtin.
            auth_type, authstr = request.META['HTTP_AUTHORIZATION'].split(' ', 1)
            if auth_type == 'Negotiate':
                try:
                    result, context = kerberos.authGSSServerInit('HTTP')
                    if result != 1:
                        return

                    gssstring = ''
                    r = kerberos.authGSSServerStep(context, authstr)
                    if r == 1:
                        gssstring = kerberos.authGSSServerResponse(context)
                        request.META['GSS-String'] = 'Negotiate %s' % gssstring
                    else:
                        kerberos.authGSSServerClean(context)
                        return

                    username = kerberos.authGSSServerUserName(context)
                    kerberos.authGSSServerClean(context)

                    # Already logged in as this (cleaned) principal: nothing to do.
                    if request.user.is_authenticated():
                        if request.user.username == self.clean_username(username, request):
                            return

                    user = authenticate(username=username)
                    if user:
                        request.user = user
                        login(request, user)
                    return
                except:
                    LOG.exception('Unexpected error when authenticating against KDC')
                    return
            else:
                # Non-Negotiate scheme: challenge the client.
                request.META['Return-401'] = ''
                return
        else:
            if not request.user.is_authenticated():
                request.META['Return-401'] = ''
            return

    def clean_username(self, username, request):
        """
        Allows the backend to clean the username, if the backend defines a
        clean_username method.
        """
        backend_str = request.session[BACKEND_SESSION_KEY]
        backend = load_backend(backend_str)
        try:
            username = backend.clean_username(username)
        except AttributeError:
            pass
        return username
class HueRemoteUserMiddleware(RemoteUserMiddleware):
    """
    Delegates authentication to a fronting proxy server.

    The proxy sets an HTTP header (Remote-User by default) carrying the
    name of the authenticated user. This subclass of Django's
    RemoteUserMiddleware makes the header name configurable and unloads
    itself entirely when the RemoteUserDjangoBackend is not the active
    authentication backend.
    """
    def __init__(self):
        configured_backends = desktop.conf.AUTH.BACKEND.get()
        if 'desktop.auth.backend.RemoteUserDjangoBackend' not in configured_backends:
            LOG.info('Unloading HueRemoteUserMiddleware')
            raise exceptions.MiddlewareNotUsed
        self.header = desktop.conf.AUTH.REMOTE_USER_HEADER.get()
class EnsureSafeMethodMiddleware(object):
    """
    Middleware to white list configured HTTP request methods.

    Returns a 405 (advertising the allowed methods) for any request whose
    method is not in the HTTP_ALLOWED_METHODS configuration.
    """
    def process_request(self, request):
        # Read the configured list once per request instead of twice.
        allowed_methods = desktop.conf.HTTP_ALLOWED_METHODS.get()
        if request.method not in allowed_methods:
            return HttpResponseNotAllowed(allowed_methods)
class EnsureSafeRedirectURLMiddleware(object):
    """
    Middleware to white list configured redirect URLs.

    Redirect responses whose Location matches neither the configured
    REDIRECT_WHITELIST patterns nor Django's same-host safety check are
    replaced with a 403 error page.
    """
    # HTTP status codes that carry a redirect Location header.
    REDIRECT_CODES = (301, 302, 303, 305, 307, 308)

    def process_response(self, request, response):
        # Non-redirects (or redirects without a Location) pass through.
        if response.status_code not in self.REDIRECT_CODES or not response.get('Location'):
            return response

        location = response['Location']
        whitelist = desktop.conf.REDIRECT_WHITELIST.get()
        allowed = any(pattern.match(location) for pattern in whitelist) or \
            is_safe_url(location, request.get_host())
        if allowed:
            return response

        error_response = render("error.mako", request, dict(error=_('Redirect to %s is not allowed.') % location))
        error_response.status_code = 403
        return error_response
class MetricsMiddleware(object):
    """
    Middleware to track the number of active requests.

    Also times each request via the metrics response-time timer.
    """

    def process_request(self, request):
        # Start timing and bump the active-request gauge.
        # NOTE(review): the timer is stored on the shared middleware
        # instance, not the request — concurrent requests would clobber
        # each other's timer. Confirm the deployment model before relying
        # on per-request timings.
        self._response_timer = metrics.response_time.time()
        metrics.active_requests.inc()

    def process_exception(self, request, exception):
        self._response_timer.stop()
        metrics.request_exceptions.inc()

    def process_response(self, request, response):
        self._response_timer.stop()
        metrics.active_requests.dec()
        return response
| apache-2.0 |
nysan/yocto-autobuilder | lib/python2.6/site-packages/Twisted-11.0.0-py2.6-linux-x86_64.egg/twisted/mail/test/test_mail.py | 17 | 68818 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for large portions of L{twisted.mail}.
"""
import os
import errno
import shutil
import pickle
import StringIO
import rfc822
import tempfile
import signal
from zope.interface import Interface, implements
from twisted.trial import unittest
from twisted.mail import smtp
from twisted.mail import pop3
from twisted.names import dns
from twisted.internet import protocol
from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.internet import reactor
from twisted.internet import interfaces
from twisted.internet import task
from twisted.internet.error import DNSLookupError, CannotListenError
from twisted.internet.error import ProcessDone, ProcessTerminated
from twisted.internet import address
from twisted.python import failure
from twisted.python.filepath import FilePath
from twisted.python.hashlib import md5
from twisted import mail
import twisted.mail.mail
import twisted.mail.maildir
import twisted.mail.relay
import twisted.mail.relaymanager
import twisted.mail.protocols
import twisted.mail.alias
from twisted.names.error import DNSNameError
from twisted.names.dns import RRHeader, Record_CNAME, Record_MX
from twisted import cred
import twisted.cred.credentials
import twisted.cred.checkers
import twisted.cred.portal
from twisted.test.proto_helpers import LineSendingProtocol
class DomainWithDefaultsTestCase(unittest.TestCase):
    """
    Tests for L{mail.mail.DomainWithDefaultDict}, a dict-like object which
    returns a configured default value for missing keys.
    """
    def testMethods(self):
        # Underlying mapping: 0..9 -> 10..19, default value 'Default'.
        d = dict([(x, x + 10) for x in range(10)])
        d = mail.mail.DomainWithDefaultDict(d, 'Default')

        self.assertEquals(len(d), 10)
        self.assertEquals(list(iter(d)), range(10))
        self.assertEquals(list(d.iterkeys()), list(iter(d)))

        items = list(d.iteritems())
        items.sort()
        self.assertEquals(items, [(x, x + 10) for x in range(10)])

        values = list(d.itervalues())
        values.sort()
        self.assertEquals(values, range(10, 20))

        items = d.items()
        items.sort()
        self.assertEquals(items, [(x, x + 10) for x in range(10)])

        values = d.values()
        values.sort()
        self.assertEquals(values, range(10, 20))

        for x in range(10):
            self.assertEquals(d[x], x + 10)
            self.assertEquals(d.get(x), x + 10)
            self.failUnless(x in d)
            self.failUnless(d.has_key(x))

        # Deleted keys fall back to the default value instead of raising.
        del d[2], d[4], d[6]

        self.assertEquals(len(d), 7)
        self.assertEquals(d[2], 'Default')
        self.assertEquals(d[4], 'Default')
        self.assertEquals(d[6], 'Default')

        d.update({'a': None, 'b': (), 'c': '*'})
        self.assertEquals(len(d), 10)
        self.assertEquals(d['a'], None)
        self.assertEquals(d['b'], ())
        self.assertEquals(d['c'], '*')

        d.clear()
        self.assertEquals(len(d), 0)

        self.assertEquals(d.setdefault('key', 'value'), 'value')
        self.assertEquals(d['key'], 'value')

        self.assertEquals(d.popitem(), ('key', 'value'))
        self.assertEquals(len(d), 0)

        # copy() must duplicate both the mapping and the default.
        dcopy = d.copy()
        self.assertEquals(d.domains, dcopy.domains)
        self.assertEquals(d.default, dcopy.default)

    def _stringificationTest(self, stringifier):
        """
        Assert that the class name of a L{mail.mail.DomainWithDefaultDict}
        instance and the string-formatted underlying domain dictionary both
        appear in the string produced by the given string-returning function.

        @type stringifier: one-argument callable
        @param stringifier: either C{str} or C{repr}, to be used to get a
            string to make assertions against.
        """
        domain = mail.mail.DomainWithDefaultDict({}, 'Default')
        self.assertIn(domain.__class__.__name__, stringifier(domain))

        domain['key'] = 'value'
        self.assertIn(str({'key': 'value'}), stringifier(domain))

    def test_str(self):
        """
        L{DomainWithDefaultDict.__str__} should return a string including
        the class name and the domain mapping held by the instance.
        """
        self._stringificationTest(str)

    def test_repr(self):
        """
        L{DomainWithDefaultDict.__repr__} should return a string including
        the class name and the domain mapping held by the instance.
        """
        self._stringificationTest(repr)
class BounceTestCase(unittest.TestCase):
    """
    Tests for L{mail.mail.BounceDomain}, a domain which rejects all mail.
    """
    def setUp(self):
        self.domain = mail.mail.BounceDomain()

    def testExists(self):
        # Every address lookup must fail: the domain accepts nobody.
        self.assertRaises(smtp.AddressError, self.domain.exists, "any user")

    def testRelay(self):
        # A bounce domain never agrees to relay for anyone.
        self.assertEquals(
            False,
            self.domain.willRelay("random q emailer", "protocol"))

    def testMessage(self):
        # Message delivery is unsupported by design.
        self.assertRaises(NotImplementedError, self.domain.startMessage, "whomever")

    def testAddUser(self):
        # Even an explicitly added user is still bounced.
        self.domain.addUser("bob", "password")
        self.assertRaises(smtp.SMTPBadRcpt, self.domain.exists, "bob")
class FileMessageTestCase(unittest.TestCase):
    """
    Tests for L{mail.mail.FileMessage}, which spools an incoming message to
    a temporary file and renames it to its final name on completion.
    """
    def setUp(self):
        # Temporary spool file name and final destination name.
        self.name = "fileMessage.testFile"
        self.final = "final.fileMessage.testFile"
        self.f = file(self.name, 'w')
        self.fp = mail.mail.FileMessage(self.f, self.name, self.final)

    def tearDown(self):
        # Best-effort cleanup: which files exist depends on the test run.
        try:
            self.f.close()
        except:
            pass
        try:
            os.remove(self.name)
        except:
            pass
        try:
            os.remove(self.final)
        except:
            pass

    def testFinalName(self):
        # eomReceived fires its Deferred with the final path.
        return self.fp.eomReceived().addCallback(self._cbFinalName)

    def _cbFinalName(self, result):
        self.assertEquals(result, self.final)
        self.failUnless(self.f.closed)
        # The temporary spool file must be gone after completion.
        self.failIf(os.path.exists(self.name))

    def testContents(self):
        contents = "first line\nsecond line\nthird line\n"
        for line in contents.splitlines():
            self.fp.lineReceived(line)
        self.fp.eomReceived()
        self.assertEquals(file(self.final).read(), contents)

    def testInterrupted(self):
        # A dropped connection must discard both the temp and final files.
        contents = "first line\nsecond line\n"
        for line in contents.splitlines():
            self.fp.lineReceived(line)
        self.fp.connectionLost()
        self.failIf(os.path.exists(self.name))
        self.failIf(os.path.exists(self.final))
class MailServiceTestCase(unittest.TestCase):
    """
    Tests for L{mail.mail.MailService} factory creation and portal lookup.
    """
    def setUp(self):
        self.service = mail.mail.MailService()

    def testFactories(self):
        """
        The factory accessors return server factories which build protocols
        of the expected types.
        """
        # The original assertions passed the expected protocol class as the
        # second argument to failUnless, which is the failure *message*, so
        # they could never fail; assert isinstance explicitly instead.
        f = self.service.getPOP3Factory()
        self.failUnless(isinstance(f, protocol.ServerFactory))
        self.failUnless(
            isinstance(f.buildProtocol(('127.0.0.1', 12345)), pop3.POP3))
        f = self.service.getSMTPFactory()
        self.failUnless(isinstance(f, protocol.ServerFactory))
        self.failUnless(
            isinstance(f.buildProtocol(('127.0.0.1', 12345)), smtp.SMTP))
        f = self.service.getESMTPFactory()
        self.failUnless(isinstance(f, protocol.ServerFactory))
        self.failUnless(
            isinstance(f.buildProtocol(('127.0.0.1', 12345)), smtp.ESMTP))

    def testPortals(self):
        """
        L{MailService.lookupPortal} returns the portal registered for a
        domain; L{MailService.defaultPortal} returns the portal registered
        under the empty string.
        """
        o1 = object()
        o2 = object()
        self.service.portals['domain'] = o1
        self.service.portals[''] = o2
        self.failUnless(self.service.lookupPortal('domain') is o1)
        self.failUnless(self.service.defaultPortal() is o2)
class StringListMailboxTests(unittest.TestCase):
    """
    Tests for L{StringListMailbox}, an in-memory only implementation of
    L{pop3.IMailbox}.
    """
    def test_listOneMessage(self):
        """
        L{StringListMailbox.listMessages} returns the length of the message at
        the offset into the mailbox passed to it.
        """
        mailbox = mail.maildir.StringListMailbox(["abc", "ab", "a"])
        for offset, expectedLength in [(0, 3), (1, 2), (2, 1)]:
            self.assertEqual(mailbox.listMessages(offset), expectedLength)

    def test_listAllMessages(self):
        """
        L{StringListMailbox.listMessages} returns a list of the lengths of all
        messages if not passed an index.
        """
        mailbox = mail.maildir.StringListMailbox(["a", "abc", "ab"])
        self.assertEqual(mailbox.listMessages(), [1, 3, 2])

    def test_getMessage(self):
        """
        L{StringListMailbox.getMessage} returns a file-like object from which
        the contents of the message at the given offset into the mailbox can be
        read.
        """
        mailbox = mail.maildir.StringListMailbox(["foo", "real contents"])
        message = mailbox.getMessage(1)
        self.assertEqual(message.read(), "real contents")

    def test_getUidl(self):
        """
        L{StringListMailbox.getUidl} returns a unique identifier for the
        message at the given offset into the mailbox.
        """
        mailbox = mail.maildir.StringListMailbox(["foo", "bar"])
        first = mailbox.getUidl(0)
        second = mailbox.getUidl(1)
        self.assertNotEqual(first, second)

    def test_deleteMessage(self):
        """
        L{StringListMailbox.deleteMessage} marks a message for deletion causing
        further requests for its length to return 0.
        """
        mailbox = mail.maildir.StringListMailbox(["foo"])
        mailbox.deleteMessage(0)
        self.assertEqual(mailbox.listMessages(), [0])
        self.assertEqual(mailbox.listMessages(0), 0)

    def test_undeleteMessages(self):
        """
        L{StringListMailbox.undeleteMessages} causes any messages marked for
        deletion to be returned to their original state.
        """
        mailbox = mail.maildir.StringListMailbox(["foo"])
        mailbox.deleteMessage(0)
        mailbox.undeleteMessages()
        self.assertEqual(mailbox.listMessages(), [3])
        self.assertEqual(mailbox.listMessages(0), 3)

    def test_sync(self):
        """
        L{StringListMailbox.sync} causes any messages as marked for deletion to
        be permanently deleted.
        """
        mailbox = mail.maildir.StringListMailbox(["foo"])
        mailbox.deleteMessage(0)
        mailbox.sync()
        # Undeleting after sync cannot resurrect the message.
        mailbox.undeleteMessages()
        self.assertEqual(mailbox.listMessages(), [0])
        self.assertEqual(mailbox.listMessages(0), 0)
class FailingMaildirMailboxAppendMessageTask(mail.maildir._MaildirMailboxAppendMessageTask):
    """
    Append task whose os-level operations can be made to fail on demand.

    While a C{_*state} flag is C{True} the corresponding call is forwarded
    to the real C{os} function; setting it to C{False} makes the call raise
    a faked C{OSError} instead.
    """
    _openstate = True
    _writestate = True
    _renamestate = True

    def osopen(self, fn, attr, mode):
        if not self._openstate:
            raise OSError(errno.EPERM, "Faked Permission Problem")
        return os.open(fn, attr, mode)

    def oswrite(self, fh, data):
        if not self._writestate:
            raise OSError(errno.ENOSPC, "Faked Space problem")
        return os.write(fh, data)

    def osrename(self, oldname, newname):
        if not self._renamestate:
            raise OSError(errno.EPERM, "Faked Permission Problem")
        return os.rename(oldname, newname)
class _AppendTestMixin(object):
    """
    Mixin for L{MaildirMailbox.appendMessage} test cases which defines a helper
    for serially appending multiple messages to a mailbox.
    """
    def _appendMessages(self, mbox, messages):
        """
        Deliver the given messages one at a time.  Delivery is serialized to
        guarantee a predictable order in the mailbox (overlapped message
        delivery makes no guarantees about which message will appear first).
        """
        appendResults = []

        def deliverSerially():
            # Yielding each append Deferred makes task.cooperate wait for
            # one delivery to finish before starting the next.
            for message in messages:
                appendDeferred = mbox.appendMessage(message)
                appendDeferred.addCallback(appendResults.append)
                yield appendDeferred

        done = task.cooperate(deliverSerially()).whenDone()
        done.addCallback(lambda ignored: appendResults)
        return done
class MaildirAppendStringTestCase(unittest.TestCase, _AppendTestMixin):
    """
    Tests for L{MaildirMailbox.appendMessage} when invoked with a C{str}.
    """
    def setUp(self):
        # A fresh maildir (new/cur/tmp subdirectories) for each test.
        self.d = self.mktemp()
        mail.maildir.initializeMaildir(self.d)

    def _append(self, ignored, mbox):
        # Append a message and assert that the append fails; used after a
        # fault has been injected with _setState.
        d = mbox.appendMessage('TEST')
        return self.assertFailure(d, Exception)

    def _setState(self, ignored, mbox, rename=None, write=None, open=None):
        """
        Change the behavior of future C{rename}, C{write}, or C{open} calls made
        by the mailbox C{mbox}.

        @param rename: If not C{None}, a new value for the C{_renamestate}
            attribute of the mailbox's append factory. The original value will
            be restored at the end of the test.

        @param write: Like C{rename}, but for the C{_writestate} attribute.

        @param open: Like C{rename}, but for the C{_openstate} attribute.
        """
        if rename is not None:
            self.addCleanup(
                setattr, mbox.AppendFactory, '_renamestate',
                mbox.AppendFactory._renamestate)
            mbox.AppendFactory._renamestate = rename
        if write is not None:
            self.addCleanup(
                setattr, mbox.AppendFactory, '_writestate',
                mbox.AppendFactory._writestate)
            mbox.AppendFactory._writestate = write
        if open is not None:
            self.addCleanup(
                setattr, mbox.AppendFactory, '_openstate',
                mbox.AppendFactory._openstate)
            mbox.AppendFactory._openstate = open

    def test_append(self):
        """
        L{MaildirMailbox.appendMessage} returns a L{Deferred} which fires when
        the message has been added to the end of the mailbox.
        """
        mbox = mail.maildir.MaildirMailbox(self.d)
        # Use the fault-injecting append factory so _cbTestAppend can later
        # make open/write/rename fail; all fault flags start True (no fault).
        mbox.AppendFactory = FailingMaildirMailboxAppendMessageTask
        d = self._appendMessages(mbox, ["X" * i for i in range(1, 11)])
        d.addCallback(self.assertEquals, [None] * 10)
        d.addCallback(self._cbTestAppend, mbox)
        return d

    def _cbTestAppend(self, ignored, mbox):
        """
        Check that the mailbox has the expected number (ten) of messages in it,
        and that each has the expected contents, and that they are in the same
        order as that in which they were appended.
        """
        self.assertEquals(len(mbox.listMessages()), 10)
        self.assertEquals(
            [len(mbox.getMessage(i).read()) for i in range(10)],
            range(1, 11))
        # test in the right order: last to first error location.
        self._setState(None, mbox, rename=False)
        d = self._append(None, mbox)
        d.addCallback(self._setState, mbox, rename=True, write=False)
        d.addCallback(self._append, mbox)
        d.addCallback(self._setState, mbox, write=True, open=False)
        d.addCallback(self._append, mbox)
        d.addCallback(self._setState, mbox, open=True)
        return d
class MaildirAppendFileTestCase(unittest.TestCase, _AppendTestMixin):
    """
    Tests for L{MaildirMailbox.appendMessage} when invoked with a file-like
    object rather than a C{str}.
    """
    def setUp(self):
        # A fresh maildir (new/cur/tmp subdirectories) for each test.
        self.d = self.mktemp()
        mail.maildir.initializeMaildir(self.d)

    def test_append(self):
        """
        L{MaildirMailbox.appendMessage} returns a L{Deferred} which fires when
        the message has been added to the end of the mailbox.
        """
        mbox = mail.maildir.MaildirMailbox(self.d)
        messages = []
        # Ten temporary files with bodies of lengths 1 through 10, rewound
        # so appendMessage reads them from the beginning.
        for i in xrange(1, 11):
            temp = tempfile.TemporaryFile()
            temp.write("X" * i)
            temp.seek(0, 0)
            messages.append(temp)
            self.addCleanup(temp.close)
        d = self._appendMessages(mbox, messages)
        d.addCallback(self._cbTestAppend, mbox)
        return d

    def _cbTestAppend(self, result, mbox):
        """
        Check that the mailbox has the expected number (ten) of messages in it,
        and that each has the expected contents, and that they are in the same
        order as that in which they were appended.
        """
        self.assertEquals(len(mbox.listMessages()), 10)
        self.assertEquals(
            [len(mbox.getMessage(i).read()) for i in range(10)],
            range(1, 11))
class MaildirTestCase(unittest.TestCase):
    """
    Tests for maildir initialization, unique-name generation, and the
    L{MaildirMailbox} implementation of L{pop3.IMailbox}.
    """
    def setUp(self):
        self.d = self.mktemp()
        mail.maildir.initializeMaildir(self.d)

    def tearDown(self):
        shutil.rmtree(self.d)

    def testInitializer(self):
        # initializeMaildir must create the standard new/cur/tmp
        # subdirectories both in the maildir itself and in its .Trash.
        d = self.d
        trash = os.path.join(d, '.Trash')
        self.failUnless(os.path.exists(d) and os.path.isdir(d))
        self.failUnless(os.path.exists(os.path.join(d, 'new')))
        self.failUnless(os.path.exists(os.path.join(d, 'cur')))
        self.failUnless(os.path.exists(os.path.join(d, 'tmp')))
        self.failUnless(os.path.isdir(os.path.join(d, 'new')))
        self.failUnless(os.path.isdir(os.path.join(d, 'cur')))
        self.failUnless(os.path.isdir(os.path.join(d, 'tmp')))
        self.failUnless(os.path.exists(os.path.join(trash, 'new')))
        self.failUnless(os.path.exists(os.path.join(trash, 'cur')))
        self.failUnless(os.path.exists(os.path.join(trash, 'tmp')))
        self.failUnless(os.path.isdir(os.path.join(trash, 'new')))
        self.failUnless(os.path.isdir(os.path.join(trash, 'cur')))
        self.failUnless(os.path.isdir(os.path.join(trash, 'tmp')))

    def test_nameGenerator(self):
        """
        Each call to L{_MaildirNameGenerator.generate} returns a unique
        string suitable for use as the basename of a new message file. The
        names are ordered such that those generated earlier sort less than
        those generated later.
        """
        clock = task.Clock()
        clock.advance(0.05)
        generator = mail.maildir._MaildirNameGenerator(clock)
        firstName = generator.generate()
        clock.advance(0.05)
        secondName = generator.generate()
        self.assertTrue(firstName < secondName)

    def test_mailbox(self):
        """
        Exercise the methods of L{IMailbox} as implemented by
        L{MaildirMailbox}.
        """
        j = os.path.join
        n = mail.maildir._generateMaildirName
        # Ten message file names, five in 'cur' and five in 'new'.
        msgs = [j(b, n()) for b in ('cur', 'new') for x in range(5)]
        # Toss a few files into the mailbox
        # (bodies of lengths 1 through 10, in msgs order).
        i = 1
        for f in msgs:
            fObj = file(j(self.d, f), 'w')
            fObj.write('x' * i)
            fObj.close()
            i = i + 1
        mb = mail.maildir.MaildirMailbox(self.d)
        self.assertEquals(mb.listMessages(), range(1, 11))
        self.assertEquals(mb.listMessages(1), 2)
        self.assertEquals(mb.listMessages(5), 6)
        self.assertEquals(mb.getMessage(6).read(), 'x' * 7)
        self.assertEquals(mb.getMessage(1).read(), 'x' * 2)
        # UIDLs must be unique across all ten messages.
        d = {}
        for i in range(10):
            u = mb.getUidl(i)
            self.failIf(u in d)
            d[u] = None
        # Deleting moves the file into .Trash/cur; undeleting moves it back.
        p, f = os.path.split(msgs[5])
        mb.deleteMessage(5)
        self.assertEquals(mb.listMessages(5), 0)
        self.failUnless(os.path.exists(j(self.d, '.Trash', 'cur', f)))
        self.failIf(os.path.exists(j(self.d, msgs[5])))
        mb.undeleteMessages()
        self.assertEquals(mb.listMessages(5), 6)
        self.failIf(os.path.exists(j(self.d, '.Trash', 'cur', f)))
        self.failUnless(os.path.exists(j(self.d, msgs[5])))
class MaildirDirdbmDomainTestCase(unittest.TestCase):
    """
    Tests for L{mail.maildir.MaildirDirdbmDomain}: user management,
    credentials checkers, and avatar requests.
    """
    def setUp(self):
        self.P = self.mktemp()
        self.S = mail.mail.MailService()
        self.D = mail.maildir.MaildirDirdbmDomain(self.S, self.P)

    def tearDown(self):
        shutil.rmtree(self.P)

    def testAddUser(self):
        # Adding a user records the password in the dbm and creates a
        # per-user maildir directory.
        toAdd = (('user1', 'pwd1'), ('user2', 'pwd2'), ('user3', 'pwd3'))
        for (username, password) in toAdd:
            self.D.addUser(username, password)
        for (username, password) in toAdd:
            self.failUnless(username in self.D.dbm)
            self.assertEquals(self.D.dbm[username], password)
            self.failUnless(os.path.exists(os.path.join(self.P, username)))

    def testCredentials(self):
        creds = self.D.getCredentialsCheckers()
        self.assertEquals(len(creds), 1)
        checker = creds[0]
        self.failUnless(cred.checkers.ICredentialsChecker.providedBy(checker))
        self.failUnless(
            cred.credentials.IUsernamePassword in checker.credentialInterfaces)

    def testRequestAvatar(self):
        class ISomething(Interface):
            pass

        self.D.addUser('user', 'password')
        # Only pop3.IMailbox avatars are supported.
        self.assertRaises(
            NotImplementedError,
            self.D.requestAvatar, 'user', None, ISomething
        )
        t = self.D.requestAvatar('user', None, pop3.IMailbox)
        self.assertEquals(len(t), 3)
        self.failUnless(t[0] is pop3.IMailbox)
        self.failUnless(pop3.IMailbox.providedBy(t[1]))
        # The third element is the logout callable.
        t[2]()

    def testRequestAvatarId(self):
        self.D.addUser('user', 'password')
        database = self.D.getCredentialsCheckers()[0]
        badCreds = cred.credentials.UsernamePassword('user', 'wrong password')
        self.assertRaises(
            cred.error.UnauthorizedLogin,
            database.requestAvatarId, badCreds
        )
        goodCreds = cred.credentials.UsernamePassword('user', 'password')
        self.assertEquals(database.requestAvatarId(goodCreds), 'user')
class StubAliasableDomain(object):
    """
    Minimal testable implementation of IAliasableDomain.
    """
    implements(mail.mail.IAliasableDomain)

    def exists(self, user):
        """
        Unimplemented; no test invokes this on the stub.
        """
        raise NotImplementedError()

    def addUser(self, user, password):
        """
        Unimplemented; no test invokes this on the stub.
        """
        raise NotImplementedError()

    def getCredentialsCheckers(self):
        """
        Return an empty list so that a MailService can build a portal for
        this domain without real checkers.  Nothing is asserted about the
        resulting portal yet.
        """
        return []

    def setAliasGroup(self, aliases):
        """
        Record the alias group so the test can inspect it afterwards.
        """
        self.aliasGroup = aliases
class ServiceDomainTestCase(unittest.TestCase):
    """
    Tests for L{mail.protocols.DomainDeliveryBase} wired to a
    L{MailService} hosting a single maildir domain, 'test.domain', with one
    user 'user'.
    """
    def setUp(self):
        self.S = mail.mail.MailService()
        self.D = mail.protocols.DomainDeliveryBase(self.S, None)
        self.D.service = self.S
        self.D.protocolName = 'TEST'
        self.D.host = 'hostname'
        self.tmpdir = self.mktemp()
        domain = mail.maildir.MaildirDirdbmDomain(self.S, self.tmpdir)
        domain.addUser('user', 'password')
        self.S.addDomain('test.domain', domain)

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def testAddAliasableDomain(self):
        """
        Test that adding an IAliasableDomain to a mail service properly sets
        up alias group references and such.
        """
        aliases = object()
        domain = StubAliasableDomain()
        self.S.aliases = aliases
        self.S.addDomain('example.com', domain)
        self.assertIdentical(domain.aliasGroup, aliases)

    def testReceivedHeader(self):
        # The generated Received header must parse as exactly one RFC 822
        # header.
        hdr = self.D.receivedHeader(
            ('remotehost', '123.232.101.234'),
            smtp.Address('<someguy@somplace>'),
            ['user@host.name']
        )
        fp = StringIO.StringIO(hdr)
        m = rfc822.Message(fp)
        self.assertEquals(len(m.items()), 1)
        self.failUnless(m.has_key('Received'))

    def testValidateTo(self):
        # A known user in a hosted domain validates; the result is a
        # callable (a message-receiver factory).
        user = smtp.User('user@test.domain', 'helo', None, 'wherever@whatever')
        return defer.maybeDeferred(self.D.validateTo, user
            ).addCallback(self._cbValidateTo
            )

    def _cbValidateTo(self, result):
        self.failUnless(callable(result))

    def testValidateToBadUsername(self):
        # An unknown user in a hosted domain is rejected with SMTPBadRcpt.
        user = smtp.User('resu@test.domain', 'helo', None, 'wherever@whatever')
        return self.assertFailure(
            defer.maybeDeferred(self.D.validateTo, user),
            smtp.SMTPBadRcpt)

    def testValidateToBadDomain(self):
        # A domain not hosted by the service is rejected with SMTPBadRcpt.
        user = smtp.User('user@domain.test', 'helo', None, 'wherever@whatever')
        return self.assertFailure(
            defer.maybeDeferred(self.D.validateTo, user),
            smtp.SMTPBadRcpt)

    def testValidateFrom(self):
        # With a HELO present any origin (including the empty reverse-path)
        # is returned unchanged; without a HELO, SMTPBadSender is raised.
        helo = ('hostname', '127.0.0.1')
        origin = smtp.Address('<user@hostname>')
        self.failUnless(self.D.validateFrom(helo, origin) is origin)
        helo = ('hostname', '1.2.3.4')
        origin = smtp.Address('<user@hostname>')
        self.failUnless(self.D.validateFrom(helo, origin) is origin)
        helo = ('hostname', '1.2.3.4')
        origin = smtp.Address('<>')
        self.failUnless(self.D.validateFrom(helo, origin) is origin)
        self.assertRaises(
            smtp.SMTPBadSender,
            self.D.validateFrom, None, origin
        )
class VirtualPOP3TestCase(unittest.TestCase):
    """
    Tests for L{mail.protocols.VirtualPOP3} APOP and PASS authentication
    against a maildir domain with a single user 'user'/'password'.
    """
    def setUp(self):
        self.tmpdir = self.mktemp()
        self.S = mail.mail.MailService()
        self.D = mail.maildir.MaildirDirdbmDomain(self.S, self.tmpdir)
        self.D.addUser('user', 'password')
        self.S.addDomain('test.domain', self.D)
        # Register the domain's checkers with a portal, installed both as
        # the default portal ('') and for 'test.domain'.
        portal = cred.portal.Portal(self.D)
        map(portal.registerChecker, self.D.getCredentialsCheckers())
        self.S.portals[''] = self.S.portals['test.domain'] = portal
        self.P = mail.protocols.VirtualPOP3()
        self.P.service = self.S
        self.P.magic = '<unit test magic>'

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def testAuthenticateAPOP(self):
        # APOP digest is md5(magic + password); success yields the usual
        # (interface, avatar, logout) triple.
        resp = md5(self.P.magic + 'password').hexdigest()
        return self.P.authenticateUserAPOP('user', resp
            ).addCallback(self._cbAuthenticateAPOP
            )

    def _cbAuthenticateAPOP(self, result):
        self.assertEquals(len(result), 3)
        self.assertEquals(result[0], pop3.IMailbox)
        self.failUnless(pop3.IMailbox.providedBy(result[1]))
        # Invoke the logout callable.
        result[2]()

    def testAuthenticateIncorrectUserAPOP(self):
        # Correct digest, unknown user: login must be refused.
        resp = md5(self.P.magic + 'password').hexdigest()
        return self.assertFailure(
            self.P.authenticateUserAPOP('resu', resp),
            cred.error.UnauthorizedLogin)

    def testAuthenticateIncorrectResponseAPOP(self):
        # Known user, wrong digest: login must be refused.
        resp = md5('wrong digest').hexdigest()
        return self.assertFailure(
            self.P.authenticateUserAPOP('user', resp),
            cred.error.UnauthorizedLogin)

    def testAuthenticatePASS(self):
        return self.P.authenticateUserPASS('user', 'password'
            ).addCallback(self._cbAuthenticatePASS
            )

    def _cbAuthenticatePASS(self, result):
        self.assertEquals(len(result), 3)
        self.assertEquals(result[0], pop3.IMailbox)
        self.failUnless(pop3.IMailbox.providedBy(result[1]))
        # Invoke the logout callable.
        result[2]()

    def testAuthenticateBadUserPASS(self):
        return self.assertFailure(
            self.P.authenticateUserPASS('resu', 'password'),
            cred.error.UnauthorizedLogin)

    def testAuthenticateBadPasswordPASS(self):
        return self.assertFailure(
            self.P.authenticateUserPASS('user', 'wrong password'),
            cred.error.UnauthorizedLogin)
class empty(smtp.User):
    """
    Attribute-bag stub standing in for L{smtp.User} (and, in the relay
    tests below, for protocol and transport objects) without running
    C{User.__init__}; tests assign the attributes they need directly.
    """
    def __init__(self):
        pass
class RelayTestCase(unittest.TestCase):
    """
    Tests for L{mail.relay.DomainQueuer}'s relaying policy.
    """
    def testExists(self):
        # Relaying is permitted for local (UNIX socket / loopback) peers and
        # refused for everyone else.
        service = mail.mail.MailService()
        domain = mail.relay.DomainQueuer(service)

        def userFromPeer(peer, orig, dest):
            # Build a minimal smtp.User-alike whose transport reports the
            # given peer address.
            user = empty()
            user.orig = orig
            user.dest = dest
            user.protocol = empty()
            user.protocol.transport = empty()
            user.protocol.transport.getPeer = lambda: peer
            return user

        doRelay = [
            address.UNIXAddress('/var/run/mail-relay'),
            address.IPv4Address('TCP', '127.0.0.1', 12345),
        ]
        dontRelay = [
            address.IPv4Address('TCP', '192.168.2.1', 62),
            address.IPv4Address('TCP', '1.2.3.4', 1943),
        ]
        for peer in doRelay:
            user = userFromPeer(peer, 'user@host', 'tsoh@resu')
            # exists returns a callable message-receiver factory on success.
            self.failUnless(callable(domain.exists(user)))
        for peer in dontRelay:
            user = userFromPeer(peer, 'some@place', 'who@cares')
            self.assertRaises(smtp.SMTPBadRcpt, domain.exists, user)
class RelayerTestCase(unittest.TestCase):
    """
    Tests for L{mail.relay.RelayerMixin}'s iteration over a set of queued
    messages.
    """
    def setUp(self):
        """
        Create ten queued messages, each consisting of a pickled envelope
        file (suffix '-H', containing [from, to]) and a body file (suffix
        '-D', containing the message's base name), and load them into a
        RelayerMixin.
        """
        self.tmpdir = self.mktemp()
        os.mkdir(self.tmpdir)
        self.messageFiles = []
        for i in range(10):
            name = os.path.join(self.tmpdir, 'body-%d' % (i,))
            f = file(name + '-H', 'w')
            pickle.dump(['from-%d' % (i,), 'to-%d' % (i,)], f)
            f.close()
            f = file(name + '-D', 'w')
            f.write(name)
            # Close (rather than merely rewinding) the body file so its
            # contents are flushed to disk before loadMessages reopens it;
            # the original left this handle dangling open.
            f.close()
            self.messageFiles.append(name)
        self.R = mail.relay.RelayerMixin()
        self.R.loadMessages(self.messageFiles)

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def testMailFrom(self):
        # Each sentMail(250) advances to the next queued message; after the
        # last one, getMailFrom returns None.
        for i in range(10):
            self.assertEquals(self.R.getMailFrom(), 'from-%d' % (i,))
            self.R.sentMail(250, None, None, None, None)
        self.assertEquals(self.R.getMailFrom(), None)

    def testMailTo(self):
        for i in range(10):
            self.assertEquals(self.R.getMailTo(), ['to-%d' % (i,)])
            self.R.sentMail(250, None, None, None, None)
        self.assertEquals(self.R.getMailTo(), None)

    def testMailData(self):
        # Each body file contains its own base name.
        for i in range(10):
            name = os.path.join(self.tmpdir, 'body-%d' % (i,))
            self.assertEquals(self.R.getMailData().read(), name)
            self.R.sentMail(250, None, None, None, None)
        self.assertEquals(self.R.getMailData(), None)
class Manager:
    """
    Test double recording the notifications a managed relayer delivers to
    its manager, preserving both content and order for later assertions.
    """
    def __init__(self):
        self.success = []
        self.failure = []
        self.done = []

    def notifySuccess(self, factory, message):
        self.success.append((factory, message))

    def notifyFailure(self, factory, message):
        self.failure.append((factory, message))

    def notifyDone(self, factory):
        self.done.append(factory)
class ManagedRelayerTestCase(unittest.TestCase):
    """
    Tests for L{mail.relaymanager.ManagedRelayerMixin}'s notifications to
    its manager.
    """
    def setUp(self):
        self.manager = Manager()
        self.messages = range(0, 20, 2)
        self.factory = object()
        self.relay = mail.relaymanager.ManagedRelayerMixin(self.manager)
        self.relay.messages = self.messages[:]
        self.relay.names = self.messages[:]
        self.relay.factory = self.factory

    def testSuccessfulSentMail(self):
        # A 250 response reports each message as successfully relayed, in
        # order.
        for _ in self.messages:
            self.relay.sentMail(250, None, None, None, None)
        expected = [(self.factory, message) for message in self.messages]
        self.assertEquals(self.manager.success, expected)

    def testFailedSentMail(self):
        # A non-250 response reports each message as failed, in order.
        for _ in self.messages:
            self.relay.sentMail(550, None, None, None, None)
        expected = [(self.factory, message) for message in self.messages]
        self.assertEquals(self.manager.failure, expected)

    def testConnectionLost(self):
        # Losing the connection tells the manager this factory is done.
        self.relay.connectionLost(failure.Failure(Exception()))
        self.assertEquals(self.manager.done, [self.factory])
class DirectoryQueueTestCase(unittest.TestCase):
    """
    Tests for L{mail.relaymanager.Queue}, the on-disk relay queue, and its
    waiting/relaying state transitions.
    """
    def setUp(self):
        # This is almost a test case itself.
        # Populate the queue with 25 messages, each with a pickled envelope
        # ['header', m] and a one-line body, then rescan the directory.
        self.tmpdir = self.mktemp()
        os.mkdir(self.tmpdir)
        self.queue = mail.relaymanager.Queue(self.tmpdir)
        self.queue.noisy = False
        for m in range(25):
            hdrF, msgF = self.queue.createNewMessage()
            pickle.dump(['header', m], hdrF)
            hdrF.close()
            msgF.lineReceived('body: %d' % (m,))
            msgF.eomReceived()
        self.queue.readDirectory()

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def testWaiting(self):
        # All messages start in the waiting state; setRelaying/setWaiting
        # move a message between the two states.
        self.failUnless(self.queue.hasWaiting())
        self.assertEquals(len(self.queue.getWaiting()), 25)
        waiting = self.queue.getWaiting()
        self.queue.setRelaying(waiting[0])
        self.assertEquals(len(self.queue.getWaiting()), 24)
        self.queue.setWaiting(waiting[0])
        self.assertEquals(len(self.queue.getWaiting()), 25)

    def testRelaying(self):
        # Move every message to relaying, checking the invariant that
        # waiting + relayed always totals 25, then move one back.
        for m in self.queue.getWaiting():
            self.queue.setRelaying(m)
            self.assertEquals(
                len(self.queue.getRelayed()),
                25 - len(self.queue.getWaiting())
            )
        self.failIf(self.queue.hasWaiting())
        relayed = self.queue.getRelayed()
        self.queue.setWaiting(relayed[0])
        self.assertEquals(len(self.queue.getWaiting()), 1)
        self.assertEquals(len(self.queue.getRelayed()), 24)

    def testDone(self):
        # A completed message disappears from both states.
        msg = self.queue.getWaiting()[0]
        self.queue.setRelaying(msg)
        self.queue.done(msg)
        self.assertEquals(len(self.queue.getWaiting()), 24)
        self.assertEquals(len(self.queue.getRelayed()), 0)
        self.failIf(msg in self.queue.getWaiting())
        self.failIf(msg in self.queue.getRelayed())

    def testEnvelope(self):
        # Every envelope written in setUp can be read back intact.
        envelopes = []
        for msg in self.queue.getWaiting():
            envelopes.append(self.queue.getEnvelope(msg))
        envelopes.sort()
        for i in range(25):
            self.assertEquals(
                envelopes.pop(0),
                ['header', i]
            )
from twisted.names import server
from twisted.names import client
from twisted.names import common
class TestAuthority(common.ResolverBase):
    """
    In-memory DNS authority answering only MX queries, from the
    C{addresses} dict mapping a name to a list of exchange hostnames.
    """
    def __init__(self):
        common.ResolverBase.__init__(self)
        self.addresses = {}

    def _lookup(self, name, cls, type, timeout = None):
        if name not in self.addresses or type != dns.MX:
            # Anything we don't know about, or any non-MX query, fails.
            return defer.fail(failure.Failure(dns.DomainError(name)))
        # Every exchange is returned with preference 0 and a 60-second TTL.
        answers = [
            dns.RRHeader(name, dns.MX, dns.IN, 60, dns.Record_MX(0, exchange))
            for exchange in self.addresses[name]]
        return defer.succeed((answers, [], []))
def setUpDNS(self):
    """
    Create a local DNS server backed by a L{TestAuthority} and point a
    resolver at it.  Called with a test case instance as C{self}; installs
    C{auth}, C{port}, C{udpPort}, and C{resolver} attributes on it.
    """
    self.auth = TestAuthority()
    factory = server.DNSServerFactory([self.auth])
    protocol = dns.DNSDatagramProtocol(factory)
    # Let the kernel pick a free TCP port, then try to bind UDP to the same
    # port number; if UDP is taken, release the TCP port and retry until a
    # number free for both protocols is found.
    while 1:
        self.port = reactor.listenTCP(0, factory, interface='127.0.0.1')
        portNumber = self.port.getHost().port
        try:
            self.udpPort = reactor.listenUDP(portNumber, protocol, interface='127.0.0.1')
        except CannotListenError:
            self.port.stopListening()
        else:
            break
    self.resolver = client.Resolver(servers=[('127.0.0.1', portNumber)])
def tearDownDNS(self):
    """
    Shut down the listeners and resolver created by L{setUpDNS}.  Returns a
    DeferredList which fires once everything has stopped listening.
    """
    dl = []
    dl.append(defer.maybeDeferred(self.port.stopListening))
    dl.append(defer.maybeDeferred(self.udpPort.stopListening))
    if self.resolver.protocol.transport is not None:
        dl.append(defer.maybeDeferred(self.resolver.protocol.transport.stopListening))
    # The resolver may have no pending parse call, or it may already have
    # fired; either way ignore the error -- but don't use a bare except,
    # which would also swallow KeyboardInterrupt and SystemExit.
    try:
        self.resolver._parseCall.cancel()
    except Exception:
        pass
    return defer.DeferredList(dl)
class MXTestCase(unittest.TestCase):
"""
Tests for L{mail.relaymanager.MXCalculator}.
"""
    def setUp(self):
        # Local DNS server (see setUpDNS) plus a deterministic clock for
        # exercising the bad-exchange timeout logic.
        setUpDNS(self)
        self.clock = task.Clock()
        self.mx = mail.relaymanager.MXCalculator(self.resolver, self.clock)
    def tearDown(self):
        # Stop the DNS listeners started in setUp; trial waits on the
        # returned DeferredList.
        return tearDownDNS(self)
def test_defaultClock(self):
"""
L{MXCalculator}'s default clock is C{twisted.internet.reactor}.
"""
self.assertIdentical(
mail.relaymanager.MXCalculator(self.resolver).clock,
reactor)
def testSimpleSuccess(self):
self.auth.addresses['test.domain'] = ['the.email.test.domain']
return self.mx.getMX('test.domain').addCallback(self._cbSimpleSuccess)
    def _cbSimpleSuccess(self, mx):
        # The single record registered by testSimpleSuccess comes back; the
        # TestAuthority assigns preference 0 to every record.
        self.assertEquals(mx.preference, 0)
        self.assertEquals(str(mx.name), 'the.email.test.domain')
    def testSimpleFailure(self):
        # With the A-record fallback disabled, a lookup for a domain with no
        # MX records fails outright (surfacing as IOError).
        self.mx.fallbackToDomain = False
        return self.assertFailure(self.mx.getMX('test.domain'), IOError)
    def testSimpleFailureWithFallback(self):
        # With fallback enabled (the default), the failure surfaces as a
        # DNSLookupError -- presumably from the attempted A-record fallback.
        return self.assertFailure(self.mx.getMX('test.domain'), DNSLookupError)
    def _exchangeTest(self, domain, records, correctMailExchange):
        """
        Issue an MX request for the given domain and arrange for it to be
        responded to with the given records.  Verify that the resulting mail
        exchange is the indicated host.

        @type domain: C{str}
        @type records: C{list} of L{RRHeader}
        @type correctMailExchange: C{str}
        @rtype: L{Deferred}
        """
        class DummyResolver(object):
            # Answers MX lookups for exactly the one domain under test; any
            # other name fails with DNSNameError.
            def lookupMailExchange(self, name):
                if name == domain:
                    return defer.succeed((
                        records,
                        [],
                        []))
                return defer.fail(DNSNameError(domain))

        self.mx.resolver = DummyResolver()
        d = self.mx.getMX(domain)
        def gotMailExchange(record):
            self.assertEqual(str(record.name), correctMailExchange)
        d.addCallback(gotMailExchange)
        return d
def test_mailExchangePreference(self):
"""
The MX record with the lowest preference is returned by
L{MXCalculator.getMX}.
"""
domain = "example.com"
good = "good.example.com"
bad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, good)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(2, bad))]
return self._exchangeTest(domain, records, good)
def test_badExchangeExcluded(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
which is not also marked as bad.
"""
domain = "example.com"
good = "good.example.com"
bad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(bad)
return self._exchangeTest(domain, records, good)
def test_fallbackForAllBadExchanges(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if all the MX records in the response have been marked bad.
"""
domain = "example.com"
bad = "bad.example.com"
worse = "worse.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, worse))]
self.mx.markBad(bad)
self.mx.markBad(worse)
return self._exchangeTest(domain, records, bad)
def test_badExchangeExpires(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if it was last marked bad longer than L{MXCalculator.timeOutBadMX}
seconds ago.
"""
domain = "example.com"
good = "good.example.com"
previouslyBad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, previouslyBad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(previouslyBad)
self.clock.advance(self.mx.timeOutBadMX)
return self._exchangeTest(domain, records, previouslyBad)
def test_goodExchangeUsed(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if it was marked good after it was marked bad.
"""
domain = "example.com"
good = "good.example.com"
previouslyBad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, previouslyBad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(previouslyBad)
self.mx.markGood(previouslyBad)
self.clock.advance(self.mx.timeOutBadMX)
return self._exchangeTest(domain, records, previouslyBad)
    def test_successWithoutResults(self):
        """
        If an MX lookup succeeds but the result set is empty,
        L{MXCalculator.getMX} should try to look up an I{A} record for the
        requested name and call back its returned Deferred with that
        address.
        """
        ip = '1.2.3.4'
        domain = 'example.org'

        class DummyResolver(object):
            """
            Fake resolver which will respond to an MX lookup with an empty
            result set.

            @ivar mx: A dictionary mapping hostnames to three-tuples of
                results to be returned from I{MX} lookups.

            @ivar a: A dictionary mapping hostnames to addresses to be
                returned from I{A} lookups.
            """
            mx = {domain: ([], [], [])}
            a = {domain: ip}

            def lookupMailExchange(self, domain):
                return defer.succeed(self.mx[domain])

            def getHostByName(self, domain):
                return defer.succeed(self.a[domain])

        self.mx.resolver = DummyResolver()
        d = self.mx.getMX(domain)
        # The A-record address is handed back wrapped in a Record_MX.
        d.addCallback(self.assertEqual, Record_MX(name=ip))
        return d
    def test_failureWithSuccessfulFallback(self):
        """
        Test that if the MX record lookup fails, fallback is enabled, and an A
        record is available for the name, then the Deferred returned by
        L{MXCalculator.getMX} ultimately fires with a Record_MX instance which
        gives the address in the A record for the name.
        """
        class DummyResolver(object):
            """
            Fake resolver which will fail an MX lookup but then succeed a
            getHostByName call.
            """
            def lookupMailExchange(self, domain):
                return defer.fail(DNSNameError())

            def getHostByName(self, domain):
                return defer.succeed("1.2.3.4")

        self.mx.resolver = DummyResolver()
        d = self.mx.getMX("domain")
        # The fallback address is handed back wrapped in a Record_MX.
        d.addCallback(self.assertEqual, Record_MX(name="1.2.3.4"))
        return d
    def test_cnameWithoutGlueRecords(self):
        """
        If an MX lookup returns a single CNAME record as a result, MXCalculator
        will perform an MX lookup for the canonical name indicated and return
        the MX record which results.
        """
        alias = "alias.example.com"
        canonical = "canonical.example.com"
        exchange = "mail.example.com"

        class DummyResolver(object):
            """
            Fake resolver which will return a CNAME for an MX lookup of a name
            which is an alias and an MX for an MX lookup of the canonical name.
            """
            def lookupMailExchange(self, domain):
                if domain == alias:
                    return defer.succeed((
                        [RRHeader(name=domain,
                                  type=Record_CNAME.TYPE,
                                  payload=Record_CNAME(canonical))],
                        [], []))
                elif domain == canonical:
                    return defer.succeed((
                        [RRHeader(name=domain,
                                  type=Record_MX.TYPE,
                                  payload=Record_MX(0, exchange))],
                        [], []))
                else:
                    return defer.fail(DNSNameError(domain))

        self.mx.resolver = DummyResolver()
        d = self.mx.getMX(alias)
        # The calculator must chase the CNAME and return the canonical
        # name's exchange.
        d.addCallback(self.assertEqual, Record_MX(name=exchange))
        return d
    def test_cnameChain(self):
        """
        If L{MXCalculator.getMX} encounters a CNAME chain which is longer than
        the length specified, the returned L{Deferred} should errback with
        L{CanonicalNameChainTooLong}.
        """
        class DummyResolver(object):
            """
            Fake resolver which generates a CNAME chain of infinite length in
            response to MX lookups.
            """
            chainCounter = 0

            def lookupMailExchange(self, domain):
                # Each lookup answers with a CNAME pointing at yet another
                # fresh name, so the chain never terminates.
                self.chainCounter += 1
                name = 'x-%d.example.com' % (self.chainCounter,)
                return defer.succeed((
                    [RRHeader(name=domain,
                              type=Record_CNAME.TYPE,
                              payload=Record_CNAME(name))],
                    [], []))

        cnameLimit = 3
        self.mx.resolver = DummyResolver()
        d = self.mx.getMX("mail.example.com", cnameLimit)
        self.assertFailure(
            d, twisted.mail.relaymanager.CanonicalNameChainTooLong)
        # assertFailure converts the failure into the exception instance, so
        # this callback receives the CanonicalNameChainTooLong error itself.
        def cbChainTooLong(error):
            self.assertEqual(error.args[0], Record_CNAME("x-%d.example.com" % (cnameLimit + 1,)))
            self.assertEqual(self.mx.resolver.chainCounter, cnameLimit + 1)
        d.addCallback(cbChainTooLong)
        return d
    def test_cnameWithGlueRecords(self):
        """
        If an MX lookup returns a CNAME and the MX record for the CNAME, the
        L{Deferred} returned by L{MXCalculator.getMX} should be called back
        with the name from the MX record without further lookups being
        attempted.
        """
        lookedUp = []
        alias = "alias.example.com"
        canonical = "canonical.example.com"
        exchange = "mail.example.com"

        class DummyResolver(object):
            def lookupMailExchange(self, domain):
                if domain != alias or lookedUp:
                    # Don't give back any results for anything except the alias
                    # or on any request after the first.
                    # NOTE(review): this branch returns a plain tuple rather
                    # than a Deferred (and lookedUp is never appended to), so
                    # if it were ever reached the calculator would fail
                    # loudly -- confirm this trap is intentional.
                    return ([], [], [])
                return defer.succeed((
                    [RRHeader(name=alias,
                              type=Record_CNAME.TYPE,
                              payload=Record_CNAME(canonical)),
                     RRHeader(name=canonical,
                              type=Record_MX.TYPE,
                              payload=Record_MX(name=exchange))],
                    [], []))

        self.mx.resolver = DummyResolver()
        d = self.mx.getMX(alias)
        d.addCallback(self.assertEqual, Record_MX(name=exchange))
        return d
    def test_cnameLoopWithGlueRecords(self):
        """
        If an MX lookup returns two CNAME records which point to each other,
        the loop should be detected and the L{Deferred} returned by
        L{MXCalculator.getMX} should be errbacked with L{CanonicalNameLoop}.
        """
        firstAlias = "cname1.example.com"
        secondAlias = "cname2.example.com"
        class DummyResolver(object):
            # Every lookup returns the same two mutually-referencing CNAMEs,
            # forming a two-element cycle with no MX record anywhere.
            def lookupMailExchange(self, domain):
                return defer.succeed((
                    [RRHeader(name=firstAlias,
                              type=Record_CNAME.TYPE,
                              payload=Record_CNAME(secondAlias)),
                     RRHeader(name=secondAlias,
                              type=Record_CNAME.TYPE,
                              payload=Record_CNAME(firstAlias))],
                    [], []))
        self.mx.resolver = DummyResolver()
        d = self.mx.getMX(firstAlias)
        self.assertFailure(d, twisted.mail.relaymanager.CanonicalNameLoop)
        return d
    def testManyRecords(self):
        """
        Populate the fake authority with three MX records for a single domain
        and start the first lookup; the rest of the scenario runs in the
        chained C{_cbManyRecords*} callbacks below.
        """
        self.auth.addresses['test.domain'] = [
            'mx1.test.domain', 'mx2.test.domain', 'mx3.test.domain'
        ]
        return self.mx.getMX('test.domain'
            ).addCallback(self._cbManyRecordsSuccessfulLookup
            )
    def _cbManyRecordsSuccessfulLookup(self, mx):
        """
        The first lookup may return any of the three exchanges; mark that
        exchange bad and look the domain up again.
        """
        self.failUnless(str(mx.name).split('.', 1)[0] in ('mx1', 'mx2', 'mx3'))
        self.mx.markBad(str(mx.name))
        return self.mx.getMX('test.domain'
            ).addCallback(self._cbManyRecordsDifferentResult, mx
            )
    def _cbManyRecordsDifferentResult(self, nextMX, mx):
        """
        Having marked the first result bad, the second lookup must return a
        different exchange; mark that one bad too and look up again.
        """
        self.assertNotEqual(str(mx.name), str(nextMX.name))
        self.mx.markBad(str(nextMX.name))
        return self.mx.getMX('test.domain'
            ).addCallback(self._cbManyRecordsLastResult, mx, nextMX
            )
    def _cbManyRecordsLastResult(self, lastMX, mx, nextMX):
        """
        With two exchanges marked bad, only the third may be returned.  Then
        mark the third bad but restore the second to good standing, and look
        up once more.
        """
        self.assertNotEqual(str(mx.name), str(lastMX.name))
        self.assertNotEqual(str(nextMX.name), str(lastMX.name))
        self.mx.markBad(str(lastMX.name))
        self.mx.markGood(str(nextMX.name))
        return self.mx.getMX('test.domain'
            ).addCallback(self._cbManyRecordsRepeatSpecificResult, nextMX
            )
    def _cbManyRecordsRepeatSpecificResult(self, againMX, nextMX):
        """
        The only exchange still marked good must be the one returned.
        """
        self.assertEqual(str(againMX.name), str(nextMX.name))
class LiveFireExercise(unittest.TestCase):
    """
    End-to-end delivery tests which run real SMTP servers on the loopback
    interface and exercise local delivery and relaying between two services.
    Uses the fake DNS authority installed by C{setUpDNS}.
    """
    if interfaces.IReactorUDP(reactor, None) is None:
        skip = "UDP support is required to determining MX records"

    def setUp(self):
        setUpDNS(self)
        # Scratch directories created by the tests; removed in tearDown.
        self.tmpdirs = [
            'domainDir', 'insertionDomain', 'insertionQueue',
            'destinationDomain', 'destinationQueue'
        ]

    def tearDown(self):
        for d in self.tmpdirs:
            if os.path.exists(d):
                shutil.rmtree(d)
        return tearDownDNS(self)

    def testLocalDelivery(self):
        """
        Send a message over a real loopback SMTP connection and verify it
        lands in the local maildir mailbox.
        """
        service = mail.mail.MailService()
        service.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
        domain = mail.maildir.MaildirDirdbmDomain(service, 'domainDir')
        domain.addUser('user', 'password')
        service.addDomain('test.domain', domain)
        service.portals[''] = service.portals['test.domain']
        map(service.portals[''].registerChecker, domain.getCredentialsCheckers())
        service.setQueue(mail.relay.DomainQueuer(service))
        manager = mail.relaymanager.SmartHostSMTPRelayingManager(service.queue, None)
        helper = mail.relaymanager.RelayStateHelper(manager, 1)
        f = service.getSMTPFactory()
        # Port 0: let the OS pick a free port; the real port is read back
        # below via getHost().
        self.smtpServer = reactor.listenTCP(0, f, interface='127.0.0.1')
        client = LineSendingProtocol([
            'HELO meson',
            'MAIL FROM: <user@hostname>',
            'RCPT TO: <user@test.domain>',
            'DATA',
            'This is the message',
            '.',
            'QUIT'
        ])
        done = Deferred()
        f = protocol.ClientFactory()
        f.protocol = lambda: client
        # The SMTP session ends with QUIT, so connection loss signals that
        # the exchange is complete.
        f.clientConnectionLost = lambda *args: done.callback(None)
        reactor.connectTCP('127.0.0.1', self.smtpServer.getHost().port, f)
        def finished(ign):
            mbox = domain.requestAvatar('user', None, pop3.IMailbox)[1]
            msg = mbox.getMessage(0).read()
            self.failIfEqual(msg.find('This is the message'), -1)
            return self.smtpServer.stopListening()
        done.addCallback(finished)
        return done

    def testRelayDelivery(self):
        """
        Send a message to one SMTP service and verify it is relayed to a
        second service (the "destination") and delivered there.
        """
        # Here is the service we will connect to and send mail from
        insServ = mail.mail.MailService()
        insServ.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
        domain = mail.maildir.MaildirDirdbmDomain(insServ, 'insertionDomain')
        insServ.addDomain('insertion.domain', domain)
        os.mkdir('insertionQueue')
        insServ.setQueue(mail.relaymanager.Queue('insertionQueue'))
        insServ.domains.setDefaultDomain(mail.relay.DomainQueuer(insServ))
        manager = mail.relaymanager.SmartHostSMTPRelayingManager(insServ.queue)
        manager.fArgs += ('test.identity.hostname',)
        helper = mail.relaymanager.RelayStateHelper(manager, 1)
        # Yoink! Now the internet obeys OUR every whim!
        manager.mxcalc = mail.relaymanager.MXCalculator(self.resolver)
        # And this is our whim.
        self.auth.addresses['destination.domain'] = ['127.0.0.1']
        f = insServ.getSMTPFactory()
        self.insServer = reactor.listenTCP(0, f, interface='127.0.0.1')
        # Here is the service the previous one will connect to for final
        # delivery
        destServ = mail.mail.MailService()
        destServ.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
        domain = mail.maildir.MaildirDirdbmDomain(destServ, 'destinationDomain')
        domain.addUser('user', 'password')
        destServ.addDomain('destination.domain', domain)
        os.mkdir('destinationQueue')
        destServ.setQueue(mail.relaymanager.Queue('destinationQueue'))
        manager2 = mail.relaymanager.SmartHostSMTPRelayingManager(destServ.queue)
        # NOTE(review): this helper wraps `manager` (the insertion-side
        # relaying manager), not `manager2`, and `manager2` is never used
        # afterwards — looks like an oversight in the original; confirm.
        helper = mail.relaymanager.RelayStateHelper(manager, 1)
        helper.startService()
        f = destServ.getSMTPFactory()
        self.destServer = reactor.listenTCP(0, f, interface='127.0.0.1')
        # Update the port number the *first* relay will connect to, because we can't use
        # port 25
        manager.PORT = self.destServer.getHost().port
        client = LineSendingProtocol([
            'HELO meson',
            'MAIL FROM: <user@wherever>',
            'RCPT TO: <user@destination.domain>',
            'DATA',
            'This is the message',
            '.',
            'QUIT'
        ])
        done = Deferred()
        f = protocol.ClientFactory()
        f.protocol = lambda: client
        f.clientConnectionLost = lambda *args: done.callback(None)
        reactor.connectTCP('127.0.0.1', self.insServer.getHost().port, f)
        def finished(ign):
            # First part of the delivery is done.  Poke the queue manually now
            # so we don't have to wait for the queue to be flushed.
            delivery = manager.checkState()
            def delivered(ign):
                mbox = domain.requestAvatar('user', None, pop3.IMailbox)[1]
                msg = mbox.getMessage(0).read()
                self.failIfEqual(msg.find('This is the message'), -1)
                self.insServer.stopListening()
                self.destServer.stopListening()
                helper.stopService()
            delivery.addCallback(delivered)
            return delivery
        done.addCallback(finished)
        return done
aliasFile = StringIO.StringIO("""\
# Here's a comment
# woop another one
testuser: address1,address2, address3,
continuation@address, |/bin/process/this
usertwo:thisaddress,thataddress, lastaddress
lastuser: :/includable, /filename, |/program, address
""")
class LineBufferMessage:
    """
    Minimal message receiver which buffers delivered lines in memory and
    records whether end-of-message or connection loss was seen.  Used as a
    stand-in delivery target by the alias tests.
    """
    def __init__(self):
        self.lines = []      # every line delivered so far
        self.eom = False     # True once eomReceived has fired
        self.lost = False    # True if the connection was dropped mid-message

    def lineReceived(self, line):
        self.lines.append(line)

    def eomReceived(self):
        self.eom = True
        # Success result is arbitrary; callers only care about the firing.
        return defer.succeed('<Whatever>')

    def connectionLost(self):
        self.lost = True
class AliasTestCase(unittest.TestCase):
    """
    Tests for alias-file parsing (C{mail.alias.handle},
    C{mail.alias.loadAliasFile}) and for the simple delivery wrappers
    (C{MultiWrapper}, C{FileAlias}).
    """
    # Sample message body reused by several tests.
    lines = [
        'First line',
        'Next line',
        '',
        'After a blank line',
        'Last line'
    ]

    def setUp(self):
        # The module-level aliasFile fixture is shared; rewind it so each
        # test reads it from the beginning.
        aliasFile.seek(0)

    def testHandle(self):
        result = {}
        lines = [
            'user:  another@host\n',
            'nextuser:  |/bin/program\n',
            'user:  me@again\n',
            'moreusers: :/etc/include/filename\n',
            'multiuser: first@host, second@host,last@anotherhost',
        ]
        for l in lines:
            mail.alias.handle(result, l, 'TestCase', None)
        # Repeated keys accumulate; whitespace around targets is stripped.
        self.assertEquals(result['user'], ['another@host', 'me@again'])
        self.assertEquals(result['nextuser'], ['|/bin/program'])
        self.assertEquals(result['moreusers'], [':/etc/include/filename'])
        self.assertEquals(result['multiuser'], ['first@host', 'second@host', 'last@anotherhost'])

    def testFileLoader(self):
        domains = {'': object()}
        result = mail.alias.loadAliasFile(domains, fp=aliasFile)
        self.assertEquals(len(result), 3)
        group = result['testuser']
        s = str(group)
        for a in ('address1', 'address2', 'address3', 'continuation@address', '/bin/process/this'):
            self.failIfEqual(s.find(a), -1)
        self.assertEquals(len(group), 5)

        group = result['usertwo']
        s = str(group)
        for a in ('thisaddress', 'thataddress', 'lastaddress'):
            self.failIfEqual(s.find(a), -1)
        self.assertEquals(len(group), 3)

        group = result['lastuser']
        s = str(group)
        # ':/includable' include targets are dropped by the loader, so the
        # bare path must NOT appear in the group.
        self.failUnlessEqual(s.find('/includable'), -1)
        for a in ('/filename', 'program', 'address'):
            self.failIfEqual(s.find(a), -1, '%s not found' % a)
        self.assertEquals(len(group), 3)

    def testMultiWrapper(self):
        msgs = LineBufferMessage(), LineBufferMessage(), LineBufferMessage()
        msg = mail.alias.MultiWrapper(msgs)
        for L in self.lines:
            msg.lineReceived(L)
        return msg.eomReceived().addCallback(self._cbMultiWrapper, msgs)

    def _cbMultiWrapper(self, ignored, msgs):
        # Every wrapped receiver must have seen the full message and a clean
        # end-of-message, with no connection loss.
        for m in msgs:
            self.failUnless(m.eom)
            self.failIf(m.lost)
            self.assertEquals(self.lines, m.lines)

    def testFileAlias(self):
        tmpfile = self.mktemp()
        a = mail.alias.FileAlias(tmpfile, None, None)
        m = a.createMessageReceiver()
        for l in self.lines:
            m.lineReceived(l)
        return m.eomReceived().addCallback(self._cbTestFileAlias, tmpfile)

    def _cbTestFileAlias(self, ignored, tmpfile):
        # Strip the trailing newline each written line carries.
        lines = file(tmpfile).readlines()
        self.assertEquals([L[:-1] for L in lines], self.lines)
class DummyProcess(object):
    """
    Placeholder process object used only for building expected string
    representations in the alias-resolution tests; never actually run.
    """
    __slots__ = ['onEnd']
class MockProcessAlias(mail.alias.ProcessAlias):
    """
    An alias processor that doesn't actually launch processes.
    """
    def spawnProcess(self, proto, program, path):
        """
        Don't spawn a process.
        """
class MockAliasGroup(mail.alias.AliasGroup):
    """
    An alias group using C{MockProcessAlias} so that resolving '|program'
    entries never launches a real child process.
    """
    processAliasFactory = MockProcessAlias
class StubProcess(object):
    """
    Fake implementation of L{IProcessTransport} which merely records what is
    done to it.

    @ivar signals: A list of all the signals which have been sent to this fake
        process, in the order they were sent.
    """
    def __init__(self):
        # No signals delivered yet.
        self.signals = []

    def signalProcess(self, signal):
        """
        Record a signal sent to this process so a test can inspect it later.
        """
        self.signals.append(signal)

    def loseConnection(self):
        """
        Disconnection is a no-op for this stub.
        """
class ProcessAliasTestCase(unittest.TestCase):
    """
    Tests for alias resolution.
    """
    if interfaces.IReactorProcess(reactor, None) is None:
        skip = "IReactorProcess not supported"

    # Sample message body fed to the various message receivers.
    lines = [
        'First line',
        'Next line',
        '',
        'After a blank line',
        'Last line'
    ]

    def exitStatus(self, code):
        """
        Construct a status from the given exit code.

        @type code: L{int} between 0 and 255 inclusive.
        @param code: The exit status which the code will represent.

        @rtype: L{int}
        @return: A status integer for the given exit code.
        """
        # /* Macros for constructing status values. */
        # #define __W_EXITCODE(ret, sig) ((ret) << 8 | (sig))
        status = (code << 8) | 0
        # Sanity check
        self.assertTrue(os.WIFEXITED(status))
        self.assertEqual(os.WEXITSTATUS(status), code)
        self.assertFalse(os.WIFSIGNALED(status))
        return status

    def signalStatus(self, signal):
        """
        Construct a status from the given signal.

        @type signal: L{int} between 0 and 255 inclusive.
        @param signal: The signal number which the status will represent.

        @rtype: L{int}
        @return: A status integer for the given signal.
        """
        # /* If WIFSIGNALED(STATUS), the terminating signal. */
        # #define __WTERMSIG(status) ((status) & 0x7f)
        # /* Nonzero if STATUS indicates termination by a signal. */
        # #define __WIFSIGNALED(status) \
        # (((signed char) (((status) & 0x7f) + 1) >> 1) > 0)
        status = signal
        # Sanity check
        self.assertTrue(os.WIFSIGNALED(status))
        self.assertEqual(os.WTERMSIG(status), signal)
        self.assertFalse(os.WIFEXITED(status))
        return status

    def setUp(self):
        """
        Replace L{smtp.DNSNAME} with a well-known value.
        """
        self.DNSNAME = smtp.DNSNAME
        smtp.DNSNAME = ''

    def tearDown(self):
        """
        Restore the original value of L{smtp.DNSNAME}.
        """
        smtp.DNSNAME = self.DNSNAME

    def test_processAlias(self):
        """
        Standard call to C{mail.alias.ProcessAlias}: check that the specified
        script is called, and that the input is correctly transferred to it.
        """
        sh = FilePath(self.mktemp())
        # Shell script which copies its stdin, line by line, to a file the
        # test can inspect afterwards.
        sh.setContent("""\
#!/bin/sh
rm -f process.alias.out
while read i; do
echo $i >> process.alias.out
done""")
        os.chmod(sh.path, 0700)
        a = mail.alias.ProcessAlias(sh.path, None, None)
        m = a.createMessageReceiver()
        for l in self.lines:
            m.lineReceived(l)

        def _cbProcessAlias(ignored):
            lines = file('process.alias.out').readlines()
            self.assertEquals([L[:-1] for L in lines], self.lines)

        return m.eomReceived().addCallback(_cbProcessAlias)

    def test_processAliasTimeout(self):
        """
        If the alias child process does not exit within a particular period of
        time, the L{Deferred} returned by L{MessageWrapper.eomReceived} should
        fail with L{ProcessAliasTimeout} and send the I{KILL} signal to the
        child process..
        """
        # A fake clock lets the test advance time instantly instead of
        # really waiting out the timeout.
        reactor = task.Clock()
        transport = StubProcess()
        proto = mail.alias.ProcessAliasProtocol()
        proto.makeConnection(transport)

        receiver = mail.alias.MessageWrapper(proto, None, reactor)
        d = receiver.eomReceived()
        reactor.advance(receiver.completionTimeout)
        def timedOut(ignored):
            self.assertEqual(transport.signals, ['KILL'])
            # Now that it has been killed, disconnect the protocol associated
            # with it.
            proto.processEnded(
                ProcessTerminated(self.signalStatus(signal.SIGKILL)))
        self.assertFailure(d, mail.alias.ProcessAliasTimeout)
        d.addCallback(timedOut)
        return d

    def test_earlyProcessTermination(self):
        """
        If the process associated with an L{mail.alias.MessageWrapper} exits
        before I{eomReceived} is called, the L{Deferred} returned by
        I{eomReceived} should fail.
        """
        transport = StubProcess()
        protocol = mail.alias.ProcessAliasProtocol()
        protocol.makeConnection(transport)
        receiver = mail.alias.MessageWrapper(protocol, None, None)
        protocol.processEnded(failure.Failure(ProcessDone(0)))
        return self.assertFailure(receiver.eomReceived(), ProcessDone)

    def _terminationTest(self, status):
        """
        Verify that if the process associated with an
        L{mail.alias.MessageWrapper} exits with the given status, the
        L{Deferred} returned by I{eomReceived} fails with L{ProcessTerminated}.
        """
        transport = StubProcess()
        protocol = mail.alias.ProcessAliasProtocol()
        protocol.makeConnection(transport)
        receiver = mail.alias.MessageWrapper(protocol, None, None)
        protocol.processEnded(
            failure.Failure(ProcessTerminated(status)))
        return self.assertFailure(receiver.eomReceived(), ProcessTerminated)

    def test_errorProcessTermination(self):
        """
        If the process associated with an L{mail.alias.MessageWrapper} exits
        with a non-zero exit code, the L{Deferred} returned by I{eomReceived}
        should fail.
        """
        return self._terminationTest(self.exitStatus(1))

    def test_signalProcessTermination(self):
        """
        If the process associated with an L{mail.alias.MessageWrapper} exits
        because it received a signal, the L{Deferred} returned by
        I{eomReceived} should fail.
        """
        return self._terminationTest(self.signalStatus(signal.SIGHUP))

    def test_aliasResolution(self):
        """
        Check that the C{resolve} method of alias processors produce the correct
        set of objects:
            - direct alias with L{mail.alias.AddressAlias} if a simple input is passed
            - aliases in a file with L{mail.alias.FileWrapper} if an input in the format
              '/file' is given
            - aliases resulting of a process call wrapped by L{mail.alias.MessageWrapper}
              if the format is '|process'
        """
        aliases = {}
        domain = {'': TestDomain(aliases, ['user1', 'user2', 'user3'])}
        A1 = MockAliasGroup(['user1', '|echo', '/file'], domain, 'alias1')
        A2 = MockAliasGroup(['user2', 'user3'], domain, 'alias2')
        A3 = mail.alias.AddressAlias('alias1', domain, 'alias3')
        aliases.update({
            'alias1': A1,
            'alias2': A2,
            'alias3': A3,
        })

        # Comparison is done on sorted string representations since the
        # resolved objects themselves don't define useful equality.
        res1 = A1.resolve(aliases)
        r1 = map(str, res1.objs)
        r1.sort()
        expected = map(str, [
            mail.alias.AddressAlias('user1', None, None),
            mail.alias.MessageWrapper(DummyProcess(), 'echo'),
            mail.alias.FileWrapper('/file'),
        ])
        expected.sort()
        self.assertEquals(r1, expected)

        res2 = A2.resolve(aliases)
        r2 = map(str, res2.objs)
        r2.sort()
        expected = map(str, [
            mail.alias.AddressAlias('user2', None, None),
            mail.alias.AddressAlias('user3', None, None)
        ])
        expected.sort()
        self.assertEquals(r2, expected)

        # alias3 points at alias1, so it must resolve to the same set.
        res3 = A3.resolve(aliases)
        r3 = map(str, res3.objs)
        r3.sort()
        expected = map(str, [
            mail.alias.AddressAlias('user1', None, None),
            mail.alias.MessageWrapper(DummyProcess(), 'echo'),
            mail.alias.FileWrapper('/file'),
        ])
        expected.sort()
        self.assertEquals(r3, expected)

    def test_cyclicAlias(self):
        """
        Check that a cycle in alias resolution is correctly handled.
        """
        aliases = {}
        domain = {'': TestDomain(aliases, [])}
        A1 = mail.alias.AddressAlias('alias2', domain, 'alias1')
        A2 = mail.alias.AddressAlias('alias3', domain, 'alias2')
        A3 = mail.alias.AddressAlias('alias1', domain, 'alias3')
        aliases.update({
            'alias1': A1,
            'alias2': A2,
            'alias3': A3
        })

        # A pure three-alias cycle resolves to nothing.
        self.assertEquals(aliases['alias1'].resolve(aliases), None)
        self.assertEquals(aliases['alias2'].resolve(aliases), None)
        self.assertEquals(aliases['alias3'].resolve(aliases), None)

        # A group containing the cycle plus a process target resolves to
        # just the process target; the cyclic part is dropped.
        A4 = MockAliasGroup(['|echo', 'alias1'], domain, 'alias4')
        aliases['alias4'] = A4

        res = A4.resolve(aliases)
        r = map(str, res.objs)
        r.sort()
        expected = map(str, [
            mail.alias.MessageWrapper(DummyProcess(), 'echo')
        ])
        expected.sort()
        self.assertEquals(r, expected)
class TestDomain:
    """
    Minimal mail domain used by the alias tests.

    @ivar aliases: mapping of alias name to alias object (resolved lazily).
    @ivar users: list of local user names which exist directly.
    """
    def __init__(self, aliases, users):
        self.aliases = aliases
        self.users = users

    def exists(self, user, memo=None):
        """
        Return a no-argument callable producing a delivery target for
        C{user}, or raise L{smtp.SMTPBadRcpt} if the user is unknown and no
        alias resolves for it.

        @param user: an object whose C{dest.local} attribute names the
            local part being delivered to.
        @param memo: passed through to alias resolution for cycle detection.
        """
        user = user.dest.local
        if user in self.users:
            return lambda: mail.alias.AddressAlias(user, None, None)
        try:
            a = self.aliases[user]
        except KeyError:
            # Was a bare `except:`, which would also have swallowed
            # unrelated failures (e.g. TypeError from a bad key) and
            # misreported them as an unknown recipient.
            raise smtp.SMTPBadRcpt(user)
        else:
            aliases = a.resolve(self.aliases, memo)
            if aliases:
                return lambda: aliases
        # The alias exists but resolves to nothing (e.g. a pure cycle).
        raise smtp.SMTPBadRcpt(user)
from twisted.python.runtime import platformType
import types

# twisted.mail relies on POSIX facilities (maildir, process spawning), so on
# any other platform mark every TestCase defined in this module as skipped.
# types.ClassType covers Python 2 old-style classes such as TestDomain above.
if platformType != "posix":
    for o in locals().values():
        if isinstance(o, (types.ClassType, type)) and issubclass(o, unittest.TestCase):
            o.skip = "twisted.mail only works on posix"
| gpl-2.0 |
brandonlee503/COMP1001-Final-Project | 5 Card Poker.py | 1 | 28668 | ###
#5 Card Poker
#Brandon Lee
"""
===Program Design===
The program consists of an implementation of the game "5 Card Poker", utilizing classes.
The program utilizes a deck class to create an array of Card objects in order to simulate a
realistic gaming environment with factors such as luck as well as skill. The program uses
other modules as well as built in modules to check for winnings hands. The game is designed
to entertain a maximum of four players.
===Amount of Effort===
I believe that I put in a good amount of work. Making a working program with classes in python
was a bit new to me and took longer than I had initially expected. However, this did provide me
with a lot of knowledge on how classes can be utilized and how powerful OO programming can be
with Python. Ultimately, I would say I put in a good amount of work into making this game
as my project was not modeled towards any other existing program, but completely designed from
the ground up.
"""
#---Description---
#This program is my implementation of the game "Poker" which utilizes the standard ruleset of the 5 Hand variant.
#The goal of poker is to obtain a better hand than your opponents through strategy and luck by redrawing cards.
#This game supports multiplayer from 1 to 4 players per match.
#---Application Directions (How to Play)---
#1) Start off by inputting total number of players, to which game will initiate
#2) A fresh deck will be shuffled and the game will commence with Player 1's turn,
# which will progress to Player 2, Player 3, and Player 4.
#3) Player 1 will be able to initiate their turn by viewing their hand and respectively
# choosing how many and which cards to discard and redraw from the deck.
#4) Player 1 will redraw any cards selected and the new hand will be displayed.
#5) Player 1's turn ends and repeat steps 3 and 4 for all the other players.
#6) Once all players have ended their turns, all players show their hands and the
# player with the best hand wins.
#7) The winner will get +1 point added to their score and the game offers players to continue playing.
#8) Once users decide to finish game, scores are added up and displayed onto a text file.
#---Poker Hand Rankings--
#1) Royal Flush
#2) Straight Flush
#3) Four of a Kind
#4) Full House
#5) Flush
#6) Straight
#7) Three of a Kind
#8) Two Pair
#9) One Pair
#10)High Card
#---Module Citations---
# Random Library - Used for randint in shuffling and in AI
# Collections Library - Used for checking winning hand combos
###
from random import randint
import collections
###################################################################################################################################
class Card:
    """A single playing card with a rank and a suit.

    Ranks are stored as strings "2"-"14", where 11-14 stand for Jack,
    Queen, King and Ace respectively.
    """

    # Display names for the non-numeric ranks; replaces the original
    # repetitive if/elif chain in printCard.
    _FACE_NAMES = {"11": "Jack", "12": "Queen", "13": "King", "14": "Ace"}

    def __init__(self, rankCard="defaultRank", suitCard="defaultSuit"):
        self.rank = rankCard
        self.suit = suitCard

    def printCard(self):
        """Return the card as a readable string, e.g. "Jack of Spades"."""
        rankName = Card._FACE_NAMES.get(self.rank, self.rank)
        # cardString is retained as an attribute for backward compatibility
        # with the original implementation.
        self.cardString = rankName + " of " + self.suit
        return self.cardString

    def getRank(self):
        """Return the card's rank as an integer (2-14)."""
        return int(self.rank)

    def getSuit(self):
        """Return the card's suit name."""
        return self.suit
class Deck:
    """A standard 52-card deck of Card objects.

    Supports shuffling, printing the remaining cards, and drawing from the
    top of the deck.
    """

    def __init__(self):
        # Ranks 11-14 represent Jack, Queen, King and Ace.
        self.ranks = ["14", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13"]
        self.suits = ["Spades", "Clubs", "Hearts", "Diamonds"]
        # One card per rank/suit combination: 13 ranks per suit.
        self.deckArray = [Card(self.ranks[i % 13], self.suits[i // 13])
                          for i in range(52)]
        # Number of cards still available to draw.
        self.avaliCards = 52
        # Retained for backward compatibility; no longer used for drawing.
        self.deckTop = -1

    def shuffleDeck(self):
        """Shuffle the deck in place.

        Uses the Fisher-Yates algorithm for a uniformly random permutation;
        the original swap-each-index-with-any-random-index loop produces a
        biased shuffle.
        """
        for i in range(51, 0, -1):
            j = randint(0, i)
            self.deckArray[i], self.deckArray[j] = self.deckArray[j], self.deckArray[i]

    def printDeck(self):
        """Print every card still remaining in the deck."""
        for i in range(self.avaliCards):
            print(self.deckArray[i].printCard())

    def drawCardFromTop(self):
        """Remove and return the top (front) card of the deck.

        Raises IndexError if the deck is empty, instead of the original
        behaviour of silently handing out stale cards.
        """
        topCard = self.deckArray.pop(0)
        self.avaliCards -= 1
        return topCard
class Player:
    """A poker player: holds a five-card hand, supports drawing and
    redrawing cards, and can score the hand against the standard poker
    hand rankings (the returned score orders hands correctly)."""

    # Display names for the face-card ranks (11-14).
    _FACE_NAMES = {11: "Jack", 12: "Queen", 13: "King", 14: "Ace"}

    def __init__(self):
        # Placeholder entries; overwritten by drawStartingHand.
        self.hand = [Card for i in range(5)]
        self.handSize = 0
        self.handValue = 0

    def drawStartingHand(self, deckObject):
        """Draw the player's starting hand of five cards from the deck."""
        for i in range(5):
            self.hand[self.handSize] = deckObject.drawCardFromTop()
            self.handSize += 1

    def printHand(self):
        """Print the player's hand, numbered 1-5 for redraw selection."""
        for i in range(self.handSize):
            print(i+1, ") ", self.hand[i].printCard(), sep="")

    def reDrawCard(self, deckObj):
        """Interactively prompt the user to redraw up to five cards."""
        alreadySelected = []
        # Validate the count of cards to replace.
        while(True):
            userInput = input("How many cards would you like to redraw? (0-5):")
            if(userInput.isdigit() and int(userInput) >= 0 and int(userInput) <= 5):
                break
            else:
                print("Please input a valid number of cards")
        if(userInput != 0):
            for i in range(int(userInput)):
                # Validate each slot: in range and not chosen before.
                while(True):
                    whichCardInput = input("Which card do you want to remove? (1-5):")
                    if(whichCardInput.isdigit() and int(whichCardInput) >= 1 and int(whichCardInput) <= 5 and (whichCardInput not in alreadySelected)):
                        break
                    else:
                        print("Please input a valid card, (1-5 with no repeating numbers)")
                self.hand[int(whichCardInput)-1] = deckObj.drawCardFromTop()
                # Remember this slot so it cannot be replaced twice.
                alreadySelected.extend(whichCardInput)

    def reDrawCardAI(self, deckObj):
        """Computer-controlled turn: randomly choose how many and which
        cards to redraw, echoing the choices like a human player would.

        Each input() call only waits for the user to press Enter so the AI
        turn is paced like a human one.
        """
        alreadySelected = []
        while(True):
            input("How many cards would you like to redraw? (0-5):")
            userInput = randint(0, 5)
            print(userInput)
            if(int(userInput) >= 0 and int(userInput) <= 5):
                break
            else:
                print("Please input a valid number of cards")
        if(userInput != 0):
            for i in range(int(userInput)):
                while(True):
                    input("Which card do you want to remove? (1-5):")
                    whichCardInput = randint(1, 5)
                    print(whichCardInput)
                    if(int(whichCardInput) >= 1 and int(whichCardInput) <= 5 and (str(whichCardInput) not in alreadySelected)):
                        break
                    else:
                        print("Please input a valid card, (1-5 with no repeating numbers)")
                self.hand[int(whichCardInput)-1] = deckObj.drawCardFromTop()
                alreadySelected.extend(str(whichCardInput))

    def checkHandCombo(self):
        """Score the hand: announce the best poker combination and return a
        numeric value such that better hands always score higher."""
        # Collect the ranks for frequency analysis.
        rankList = []
        for i in range(self.handSize):
            rankList.append(self.hand[i].getRank())
        counter = collections.Counter(rankList)
        # Sorted copy is needed for straight detection.
        rankList.sort()
        # Format = [rank][frequency]
        mostFreq = counter.most_common(1)[0][1]        # frequency of most common rank
        secondMostFreq = counter.most_common(2)[1][1]  # frequency of 2nd most common rank
        freqRank = counter.most_common(2)[0][0]        # most common rank itself
        secondFreqRank = counter.most_common(2)[1][0]  # 2nd most common rank itself
        # Check 4 Pair
        if(mostFreq == 4):
            print("Four of a kind of ", self.printFace(freqRank), "'s!", sep="")
            return int(freqRank) * 10000000
        # Check Full House
        if(mostFreq == 3 and secondMostFreq == 2):
            print("Full House of ", self.print2Faces(freqRank), " and ", self.print2Faces(secondFreqRank), "'s!", sep="")
            return int(freqRank) * 1000000 + int(secondFreqRank)
        # Check for Royal Flush | Straight Flush | Flush
        if(self.isFlush() == True):
            if(self.isStraight(rankList)):
                if(rankList[0] == 10):
                    print("ROYAL FLUSH")
                    return 1000000000
                else:
                    print("Straight Flush of ", rankList[0], " to ", rankList[4], sep="")
                    return rankList[4] * 100000000
            else:
                print("Flush of ", self.hand[0].getSuit(), "!", sep="")
                return rankList[4] * 100000
        # Check Straight
        if(self.isStraight(rankList)):
            print("Straight with a max of ", rankList[4], "!", sep="")
            return rankList[4] * 10000
        # Check 3 Pair
        if(mostFreq == 3):
            print("Three of a kind of ", self.printFace(freqRank), "'s!", sep="")
            return int(freqRank) * 1000
        # Check 2 Pairs
        if(mostFreq == secondMostFreq and secondMostFreq == 2):
            print("Two pairs of ", self.print2Faces(freqRank), " and ", self.print2Faces(secondFreqRank), "'s!", sep="")
            return int(freqRank) * 100 + int(secondFreqRank)
        # Check Pair
        if(mostFreq == 2):
            print("Pair of ", self.print2Faces(freqRank), "'s!", sep="")
            return int(freqRank) * 10
        # Check HighCard
        if(mostFreq == 1):
            print("High Card of ", self.printFace(rankList[4]), "'s!", sep="")
            return self.getMax()

    def isFlush(self):
        """Return True when all five cards share one suit."""
        if(self.hand[0].getSuit() == self.hand[1].getSuit() == self.hand[2].getSuit() == self.hand[3].getSuit() == self.hand[4].getSuit()):
            return True
        else:
            return False

    def isStraight(self, rankList):
        """Return True when the (sorted) ranks form five consecutive values."""
        if(rankList[0]+4 == rankList[1]+3 == rankList[2]+2 == rankList[3]+1 == rankList[4]):
            return True
        else:
            return False

    def getMax(self):
        """Return the highest rank in the hand."""
        theMax = -1
        for i in range(self.handSize):
            if(theMax < self.hand[i].getRank()):
                theMax = self.hand[i].getRank()
        return theMax

    def printFace(self, theRank):
        """Return the display name for theRank (Jack/Queen/King/Ace), or the
        rank itself for a numeric card.

        Bug fix: the original inspected self.getMax() instead of the rank it
        was given, so e.g. four 3s with a King kicker were announced as
        "Four of a kind of King's".
        """
        return Player._FACE_NAMES.get(theRank, theRank)

    def print2Faces(self, theRank):
        """Return the display name for the 2nd most significant rank, used
        in Full House and Two Pair announcements."""
        return Player._FACE_NAMES.get(theRank, theRank)
###################################################################################################################################
class Game:
"""The Game Class - Stores player scores and starts the game depending on number of players."""
    def __init__(self):
        """Create a new game session: zero every player's score, then run
        the intro menu, which drives the whole game until the user quits."""
        # Initialize all player scores
        self.player1score = 0
        self.player2score = 0
        self.player3score = 0
        self.player4score = 0
        # intro() blocks on console input and returns only when the user
        # quits, so its return value is what ends up stored here.
        self.playerNum = self.intro()
#-----------------------------------------------------------------------------------------------------------------------------------
    def clearScreen(self):
        """Clears the screen by printing 100 blank lines (portable; no
        terminal-specific escape codes needed)."""
        print("\n" * 100)
#-----------------------------------------------------------------------------------------------------------------------------------
def intro(self):
"""Introduction/Instructions on how to play '5 Card Poker' Also prompts user for number of players and executes game."""
print(" WELCOME TO 5 CARD POKER")
print("\t===HOW TO PLAY===")
print("\t---Description---")
print("This program is my implementation of the game 'Poker' which utilizes the standard ruleset of the 5 Hand variant.")
print("The goal of poker is to obtain a better hand than your opponents through strategy and luck by redrawing cards.")
print("This game supports multiplayer from 1 to 4 players per match.")
print("")
while(True):
playerNum = input("Please enter the number of players(1-4), or 5 for hand rankings, or '0' to quit: ")
if(playerNum.isdigit() and playerNum =="5"):
print("\t---Hand Rankings---")
print("01) Royal Flush")
print("02) Straight Flush")
print("03) Four of a Kind")
print("04) Full House")
print("05) Flush")
print("06) Straight")
print("07) Three of a Kind")
print("08) Two Pair")
print("09) One Pair")
print("10) High Card")
if(playerNum.isdigit() and playerNum == "1" or playerNum == "2" or playerNum == "3" or playerNum == "4"):
self.startGame(int(playerNum))
if(playerNum.isdigit() and playerNum == "0"):
print(" ***Thanks for Playing!***")
print("Your scores will be displayed in pokerScores.txt!")
scoreFile = open("pokerScores.txt", "w")
content1 = "Poker Scores:\nPlayer 1 Score: " + str(self.player1score) + "\nPlayer 2 Score: " + str(self.player2score)
content2 = "\nPlayer 3 Score: " + str(self.player3score) + "\nPlayer 4 Score: " + str(self.player4score)
content3 = content1 + content2
scoreFile.write(content3)
return
else:
print("Your input was not valid.")
#-----------------------------------------------------------------------------------------------------------------------------------
def startGame(self, playerNum):
"""Initiates game according to the number of players."""
if(playerNum == 1):
self.onePlayerGame()
return
if(playerNum == 2):
self.twoPlayerGame()
return
if(playerNum == 3):
self.threePlayerGame()
return
if(playerNum == 4):
self.fourPlayerGame()
return
#-----------------------------------------------------------------------------------------------------------------------------------
def onePlayerGame(self):
"""One player Round."""
print("\t===ONE PLAYER GAME VS EASY AI===")
#Create and shuffle deck
theDeck = Deck()
theDeck.shuffleDeck()
#Initilize players
player1 = Player()
player2 = Player()
#Draw cards
player1.drawStartingHand(theDeck)
player2.drawStartingHand(theDeck)
#Player 1's turn
input("Player 1's Turn, please press enter to continue..")
self.clearScreen()
player1.printHand()
player1.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player1.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#Player 2's Turn
input("Player 2's Turn (AI), please press enter to continue..")
player2.printHand()
player2.reDrawCardAI(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player2.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#End Round
input("Please press enter to showdown..")
self.clearScreen()
print("Player 1 has: ", end="")
p1Score = player1.checkHandCombo()
print("Player 2 has: ", end="")
p2Score = player2.checkHandCombo()
theWinner = self.showDown(p1Score, p2Score)
self.winScreen(theWinner)
return
#-----------------------------------------------------------------------------------------------------------------------------------
def twoPlayerGame(self):
"""Standard Two Player Round."""
print("\t===TWO PLAYER GAME===")
#Create and shuffle deck
theDeck = Deck()
theDeck.shuffleDeck()
#Initilize players
player1 = Player()
player2 = Player()
#Draw cards
player1.drawStartingHand(theDeck)
player2.drawStartingHand(theDeck)
#Player 1's turn
input("Player 1's Turn, please press enter to continue..")
self.clearScreen()
player1.printHand()
player1.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player1.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#Player 2's Turn
input("Player 2's Turn, please press enter to continue..")
player2.printHand()
player2.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player2.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#End Round
input("Please press enter to showdown..")
self.clearScreen()
print("Player 1 has: ", end="")
p1Score = player1.checkHandCombo()
print("Player 2 has: ", end="")
p2Score = player2.checkHandCombo()
theWinner = self.showDown(p1Score, p2Score)
self.winScreen(theWinner)
return
#-----------------------------------------------------------------------------------------------------------------------------------
def threePlayerGame(self):
"""Standard Three Player Round."""
print("\t===THREE PLAYER GAME===")
#Create and shuffle deck
theDeck = Deck()
theDeck.shuffleDeck()
#Initilize players
player1 = Player()
player2 = Player()
player3 = Player()
#Draw cards
player1.drawStartingHand(theDeck)
player2.drawStartingHand(theDeck)
player3.drawStartingHand(theDeck)
#Player 1's turn
input("Player 1's Turn, please press enter to continue..")
self.clearScreen()
player1.printHand()
player1.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player1.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#Player 2's Turn
input("Player 2's Turn, please press enter to continue..")
player2.printHand()
player2.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player2.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#Player 3's Turn
input("Player 3's Turn, please press enter to continue..")
player3.printHand()
player3.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player3.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#End Round
input("Please press enter to showdown..")
self.clearScreen()
print("Player 1 has: ", end="")
p1Score = player1.checkHandCombo()
print("Player 2 has: ", end="")
p2Score = player2.checkHandCombo()
print("Player 3 has: ", end="")
p3Score = player3.checkHandCombo()
theWinner = self.showDown(p1Score, p2Score, p3Score)
self.winScreen(theWinner)
return
#-----------------------------------------------------------------------------------------------------------------------------------
def fourPlayerGame(self):
"""Standard Four Player Round."""
print("\t===FOUR PLAYER GAME===")
#Create and shuffle deck
theDeck = Deck()
theDeck.shuffleDeck()
#Initilize players
player1 = Player()
player2 = Player()
player3 = Player()
player4 = Player()
#Draw cards
player1.drawStartingHand(theDeck)
player2.drawStartingHand(theDeck)
player3.drawStartingHand(theDeck)
player4.drawStartingHand(theDeck)
#Player 1's turn
input("Player 1's Turn, please press enter to continue..")
self.clearScreen()
player1.printHand()
player1.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player1.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#Player 2's Turn
input("Player 2's Turn, please press enter to continue..")
player2.printHand()
player2.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player2.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#Player 3's Turn
input("Player 3's Turn, please press enter to continue..")
player3.printHand()
player3.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player3.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#Player 4's Turn
input("Player 4's Turn, please press enter to continue..")
player4.printHand()
player4.reDrawCard(theDeck)
print("----------------------------------------------------")
print("Your redrawn hand:")
player4.printHand()
input("Please press enter to finish turn")
self.clearScreen()
#End Round
input("Please press enter to showdown..")
self.clearScreen()
print("Player 1 has: ", end="")
p1Score = player1.checkHandCombo()
print("Player 2 has: ", end="")
p2Score = player2.checkHandCombo()
print("Player 3 has: ", end="")
p3Score = player3.checkHandCombo()
print("Player 4 has: ", end="")
p4Score = player4.checkHandCombo()
theWinner = self.showDown(p1Score, p2Score, p3Score, p4Score)
self.winScreen(theWinner)
return
#-----------------------------------------------------------------------------------------------------------------------------------
def showDown(self, p1Score=0, p2Score=0, p3Score=0, p4Score=0):
"""Determines winner of the round and checks for any ties as well."""
scoreList = [p1Score, p2Score, p3Score, p4Score]
theMax = scoreList.index(max(scoreList))
#Check if there is a tie
if( len(scoreList) != len(set(scoreList)) ):
for i in range(len(scoreList)):
if(i != theMax and scoreList[i] == scoreList[theMax]):
return None
return theMax
#-----------------------------------------------------------------------------------------------------------------------------------
def winScreen(self, theWinner):
"""Prints winner and adds score."""
if(theWinner == 0):
self.player1score += 1
print("Player 1 Wins! Total Score is: ", self.player1score)
if(theWinner == 1):
self.player2score += 1
print("Player 2 Wins! Total Score is: ", self.player2score)
if(theWinner == 2):
self.player3score += 1
print("Player 3 Wins! Total Score is: ", self.player3score)
if(theWinner == 3):
self.player4score += 1
print("Player 4 Wins! Total Score is: ", self.player4score)
if(theWinner == None):
print("Tie! No winner will be awarded")
return
###################################################################################################################################
def main():
    """Entry point: constructing Game runs the interactive menu loop."""
    theGame = Game()


if __name__ == "__main__":
    # Guard so importing this module doesn't start the interactive game.
    main()
| mit |
SylvainCorlay/PyDev.Debugger | pydevd_import_class.py | 57 | 1833 | #Note: code gotten from _pydev_imports_tipper.
import sys
def _imp(name, log=None):
try:
return __import__(name)
except:
if '.' in name:
sub = name[0:name.rfind('.')]
if log is not None:
log.AddContent('Unable to import', name, 'trying with', sub)
log.AddException()
return _imp(sub, log)
else:
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
if log is not None:
log.AddContent(s)
log.AddException()
raise ImportError(s)
# Flag: are we running on IronPython (.NET)? ``sys.platform == 'cli'`` is the
# IronPython platform identifier.
IS_IPY = False
if sys.platform == 'cli':
    IS_IPY = True
    # Keep the plain CPython-style importer; the .NET-aware wrapper below
    # delegates to it after registering assembly references.
    _old_imp = _imp
    def _imp(name, log=None):
        #We must add a reference in clr for .Net
        import clr #@UnresolvedImport
        initial_name = name
        # Try to AddReference the longest dotted prefix that works, trimming
        # one trailing component per failed attempt.
        while '.' in name:
            try:
                clr.AddReference(name)
                break #If it worked, that's OK.
            except:
                name = name[0:name.rfind('.')]
        else:
            # while/else: only reached when the loop exhausted all dotted
            # prefixes without a successful AddReference (no break).
            try:
                clr.AddReference(name)
            except:
                pass #That's OK (not dot net module).
        # Always import the ORIGINAL dotted name via the plain importer.
        return _old_imp(initial_name, log)
def ImportName(name, log=None):
    """Import dotted *name* and walk attribute access down to the last part.

    ``_imp`` returns the top-level package, so each remaining component is
    resolved with ``getattr``.  A repeated component that fails attribute
    lookup is tolerated once (see the mxDateTime note below).
    """
    mod = _imp(name, log)
    parts = name.split('.')
    previous = None
    for part in parts[1:]:
        try:
            mod = getattr(mod, part)
        except AttributeError:
            # This happens in the following case: we have
            # mx.DateTime.mxDateTime.mxDateTime.pyd, but after importing it,
            # mx.DateTime.mxDateTime shadows access to mxDateTime.pyd -- so a
            # consecutive duplicate component is skipped instead of raising.
            if previous != part:
                raise
        previous = part
    return mod
| epl-1.0 |
mrquim/repository.mrquim | repo/plugin.video.netflix/resources/lib/NetflixHttpSubRessourceHandler.py | 5 | 11355 | # pylint: skip-file
# -*- coding: utf-8 -*-
# Module: NetflixHttpSubRessourceHandler
# Created on: 07.03.2017
class NetflixHttpSubRessourceHandler(object):
    """
    Represents the callable internal server routes &
    translates/executes them to requests for Netflix.

    Each public method receives the parsed request ``params`` dict and
    forwards the call to the shared NetflixSession instance.
    """

    def __init__(self, nx_common, netflix_session):
        """Store helper instances, load credentials & do the initial login.

        :param nx_common: common helper instance (logging, credentials)
        :param netflix_session: NetflixSession instance used for all requests
        """
        self.nx_common = nx_common
        self.netflix_session = netflix_session
        self.credentials = self.nx_common.get_credentials()
        self.profiles = []
        # Maps account guid -> parsed video list (see fetch_video_list_ids).
        self.video_list_cache = {}
        self.prefetch_login()

    def prefetch_login(self):
        """Log in with stored credentials (if any) & cache the profile list."""
        self.profiles = []
        email = self.credentials.get('email', '')
        password = self.credentials.get('password', '')
        if email != '' and password != '':
            if self.netflix_session.is_logged_in(account=self.credentials):
                refresh_session = self.netflix_session.refresh_session_data(
                    account=self.credentials)
                if refresh_session:
                    self.profiles = self.netflix_session.profiles
            else:
                if self.netflix_session.login(account=self.credentials):
                    self.profiles = self.netflix_session.profiles

    def is_logged_in(self, params):
        """Return the login state; False when no credentials are stored."""
        email = self.credentials.get('email', '')
        password = self.credentials.get('password', '')
        if email == '' and password == '':
            return False
        return self.netflix_session.is_logged_in(account=self.credentials)

    def logout(self, params):
        """Log out & clear the cached credentials and profiles."""
        self.profiles = []
        self.credentials = {'email': '', 'password': ''}
        return self.netflix_session.logout()

    def login(self, params):
        """Log in with the credentials given in ``params``; None if missing."""
        email = params.get('email', [''])[0]
        password = params.get('password', [''])[0]
        if email != '' and password != '':
            self.credentials = {'email': email, 'password': password}
            _ret = self.netflix_session.login(account=self.credentials)
            self.profiles = self.netflix_session.profiles
            return _ret
        return None

    def list_profiles(self, params):
        """Return the cached list of profiles."""
        return self.profiles

    def get_esn(self, params):
        """Return the ESN extracted by the session."""
        return self.netflix_session.esn

    def fetch_video_list_ids(self, params):
        """Fetch & parse the video list ids, cached per account guid.

        Bug fix: the cache was consulted but never populated, so every
        request re-fetched the list from Netflix.  The parsed result is now
        stored under the user's guid; error payloads are never cached.
        """
        guid = self.netflix_session.user_data.get('guid')
        cached_list = self.video_list_cache.get(guid, None)
        if cached_list is not None:
            self.nx_common.log(msg='Serving cached list for user: ' + guid)
            return cached_list
        video_list_ids_raw = self.netflix_session.fetch_video_list_ids()
        if 'error' in video_list_ids_raw:
            # Surface errors to the caller without caching them.
            return video_list_ids_raw
        video_list = self.netflix_session.parse_video_list_ids(
            response_data=video_list_ids_raw)
        self.video_list_cache[guid] = video_list
        return video_list

    def fetch_video_list(self, params):
        """Fetch & parse one paginated video list; [] when it has no videos."""
        list_id = params.get('list_id', [''])[0]
        start = int(params.get('list_from', [0])[0])
        end = int(params.get('list_to', [26])[0])
        raw_video_list = self.netflix_session.fetch_video_list(
            list_id=list_id,
            list_from=start,
            list_to=end)
        if 'error' in raw_video_list:
            return raw_video_list
        # parse the video list ids
        if 'videos' in raw_video_list.get('value', {}).keys():
            video_list = self.netflix_session.parse_video_list(
                response_data=raw_video_list)
            return video_list
        return []

    def fetch_episodes_by_season(self, params):
        """Fetch & parse the episodes of a season."""
        raw_episode_list = self.netflix_session.fetch_episodes_by_season(
            season_id=params.get('season_id')[0])
        if 'error' in raw_episode_list:
            return raw_episode_list
        episodes = self.netflix_session.parse_episodes_by_season(
            response_data=raw_episode_list)
        return episodes

    def fetch_seasons_for_show(self, params):
        """Fetch & parse the seasons of a show; [] for announced-only shows."""
        show_id = params.get('show_id', [''])[0]
        raw_season_list = self.netflix_session.fetch_seasons_for_show(
            id=show_id)
        if 'error' in raw_season_list:
            return raw_season_list
        # check if we have sesons,
        # announced shows that are not available yet have none
        if 'seasons' not in raw_season_list.get('value', {}):
            return []
        seasons = self.netflix_session.parse_seasons(
            id=show_id,
            response_data=raw_season_list)
        return seasons

    def rate_video(self, params):
        """Rate a video."""
        video_id = params.get('video_id', [''])[0]
        rating = params.get('rating', [''])[0]
        rate = self.netflix_session.rate_video(
            video_id=video_id,
            rating=rating)
        return rate

    def remove_from_list(self, params):
        """Remove a video from 'My List'."""
        video_id = params.get('video_id', [''])[0]
        return self.netflix_session.remove_from_list(video_id=video_id)

    def add_to_list(self, params):
        """Add a video to 'My List'."""
        video_id = params.get('video_id', [''])[0]
        return self.netflix_session.add_to_list(video_id=video_id)

    def fetch_metadata(self, params):
        """Fetch the metadata for a video."""
        video_id = params.get('video_id', [''])[0]
        return self.netflix_session.fetch_metadata(id=video_id)

    def send_adult_pin(self, params):
        """Check the adult pin."""
        pin = params.get('pin', [''])[0]
        return self.netflix_session.send_adult_pin(pin=pin)

    def switch_profile(self, params):
        """Switch the active Netflix profile."""
        profile_id = params.get('profile_id', [''])[0]
        switch_profile = self.netflix_session.switch_profile(
            profile_id=profile_id,
            account=self.credentials)
        return switch_profile

    def get_user_data(self, params):
        """Return the session's user data."""
        return self.netflix_session.user_data

    def search(self, params):
        """Search Netflix; return the parsed result list ([] on error/empty)."""
        term = params.get('term', [''])[0]
        raw_search_results = self.netflix_session.fetch_search_results(
            search_str=term)
        # determine if we found something
        videos = raw_search_results.get('value', {}).get('videos', {})
        result_size = len(videos.keys())
        # check for any errors
        if 'error' in raw_search_results or result_size == 0:
            return []
        # list the search results
        search_results = self.netflix_session.parse_video_list(
            response_data=raw_search_results,
            term=term)
        return search_results
| gpl-2.0 |
ct-23/home-assistant | homeassistant/components/media_player/__init__.py | 3 | 29784 | """
Component to interface with various media players.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/media_player/
"""
import asyncio
from datetime import timedelta
import functools as ft
import hashlib
import logging
import os
from random import SystemRandom
from aiohttp import web
import async_timeout
import voluptuous as vol
from homeassistant.config import load_yaml_config_file
from homeassistant.loader import bind_hass
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
from homeassistant.components.http import HomeAssistantView, KEY_AUTHENTICATED
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.util.async import run_coroutine_threadsafe
from homeassistant.const import (
STATE_OFF, STATE_UNKNOWN, STATE_PLAYING, STATE_IDLE,
ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON,
SERVICE_VOLUME_UP, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_SET,
SERVICE_VOLUME_MUTE, SERVICE_TOGGLE, SERVICE_MEDIA_STOP,
SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_SEEK,
SERVICE_SHUFFLE_SET)
_LOGGER = logging.getLogger(__name__)
# Cryptographically-seeded RNG; used below to mint entity image access tokens.
_RND = SystemRandom()
DOMAIN = 'media_player'
DEPENDENCIES = ['http']
SCAN_INTERVAL = timedelta(seconds=10)
ENTITY_ID_FORMAT = DOMAIN + '.{}'
# URL template: {0}=entity_id, {1}=access token, {2}=cache-buster value.
ENTITY_IMAGE_URL = '/api/media_player_proxy/{0}?token={1}&cache={2}'
# Keys of ENTITY_IMAGE_CACHE; image/URL store for proxied entity artwork,
# presumably trimmed to ATTR_CACHE_MAXSIZE entries -- confirm in the proxy view.
ATTR_CACHE_IMAGES = 'images'
ATTR_CACHE_URLS = 'urls'
ATTR_CACHE_MAXSIZE = 'maxsize'
ENTITY_IMAGE_CACHE = {
    ATTR_CACHE_IMAGES: {},
    ATTR_CACHE_URLS: [],
    ATTR_CACHE_MAXSIZE: 16
}
CONTENT_TYPE_HEADER = 'Content-Type'
# Domain-specific service names (generic ones come from homeassistant.const).
SERVICE_PLAY_MEDIA = 'play_media'
SERVICE_SELECT_SOURCE = 'select_source'
SERVICE_CLEAR_PLAYLIST = 'clear_playlist'
# State / service-data attribute names.
ATTR_MEDIA_VOLUME_LEVEL = 'volume_level'
ATTR_MEDIA_VOLUME_MUTED = 'is_volume_muted'
ATTR_MEDIA_SEEK_POSITION = 'seek_position'
ATTR_MEDIA_CONTENT_ID = 'media_content_id'
ATTR_MEDIA_CONTENT_TYPE = 'media_content_type'
ATTR_MEDIA_DURATION = 'media_duration'
ATTR_MEDIA_POSITION = 'media_position'
ATTR_MEDIA_POSITION_UPDATED_AT = 'media_position_updated_at'
ATTR_MEDIA_TITLE = 'media_title'
ATTR_MEDIA_ARTIST = 'media_artist'
ATTR_MEDIA_ALBUM_NAME = 'media_album_name'
ATTR_MEDIA_ALBUM_ARTIST = 'media_album_artist'
ATTR_MEDIA_TRACK = 'media_track'
ATTR_MEDIA_SERIES_TITLE = 'media_series_title'
ATTR_MEDIA_SEASON = 'media_season'
ATTR_MEDIA_EPISODE = 'media_episode'
ATTR_MEDIA_CHANNEL = 'media_channel'
ATTR_MEDIA_PLAYLIST = 'media_playlist'
ATTR_APP_ID = 'app_id'
ATTR_APP_NAME = 'app_name'
ATTR_INPUT_SOURCE = 'source'
ATTR_INPUT_SOURCE_LIST = 'source_list'
ATTR_MEDIA_ENQUEUE = 'enqueue'
ATTR_MEDIA_SHUFFLE = 'shuffle'
# Media content types.
MEDIA_TYPE_MUSIC = 'music'
MEDIA_TYPE_TVSHOW = 'tvshow'
MEDIA_TYPE_VIDEO = 'movie'
MEDIA_TYPE_EPISODE = 'episode'
MEDIA_TYPE_CHANNEL = 'channel'
MEDIA_TYPE_PLAYLIST = 'playlist'
# Feature flags: powers of two so platforms can OR them into one bitmask.
SUPPORT_PAUSE = 1
SUPPORT_SEEK = 2
SUPPORT_VOLUME_SET = 4
SUPPORT_VOLUME_MUTE = 8
SUPPORT_PREVIOUS_TRACK = 16
SUPPORT_NEXT_TRACK = 32
SUPPORT_TURN_ON = 128
SUPPORT_TURN_OFF = 256
SUPPORT_PLAY_MEDIA = 512
SUPPORT_VOLUME_STEP = 1024
SUPPORT_SELECT_SOURCE = 2048
SUPPORT_STOP = 4096
SUPPORT_CLEAR_PLAYLIST = 8192
SUPPORT_PLAY = 16384
SUPPORT_SHUFFLE_SET = 32768
# Service call validation schemas
MEDIA_PLAYER_SCHEMA = vol.Schema({
    ATTR_ENTITY_ID: cv.entity_ids,
})
MEDIA_PLAYER_SET_VOLUME_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
    vol.Required(ATTR_MEDIA_VOLUME_LEVEL): cv.small_float,
})
MEDIA_PLAYER_MUTE_VOLUME_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
    vol.Required(ATTR_MEDIA_VOLUME_MUTED): cv.boolean,
})
MEDIA_PLAYER_MEDIA_SEEK_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
    vol.Required(ATTR_MEDIA_SEEK_POSITION):
        vol.All(vol.Coerce(float), vol.Range(min=0)),
})
MEDIA_PLAYER_SELECT_SOURCE_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
    vol.Required(ATTR_INPUT_SOURCE): cv.string,
})
MEDIA_PLAYER_PLAY_MEDIA_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
    vol.Required(ATTR_MEDIA_CONTENT_TYPE): cv.string,
    vol.Required(ATTR_MEDIA_CONTENT_ID): cv.string,
    vol.Optional(ATTR_MEDIA_ENQUEUE): cv.boolean,
})
MEDIA_PLAYER_SET_SHUFFLE_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
    vol.Required(ATTR_MEDIA_SHUFFLE): cv.boolean,
})
# Maps a service name to the MediaPlayerDevice coroutine that implements it,
# plus an optional validation schema (default: MEDIA_PLAYER_SCHEMA).
# Consumed by async_setup's service handler below.
SERVICE_TO_METHOD = {
    SERVICE_TURN_ON: {'method': 'async_turn_on'},
    SERVICE_TURN_OFF: {'method': 'async_turn_off'},
    SERVICE_TOGGLE: {'method': 'async_toggle'},
    SERVICE_VOLUME_UP: {'method': 'async_volume_up'},
    SERVICE_VOLUME_DOWN: {'method': 'async_volume_down'},
    SERVICE_MEDIA_PLAY_PAUSE: {'method': 'async_media_play_pause'},
    SERVICE_MEDIA_PLAY: {'method': 'async_media_play'},
    SERVICE_MEDIA_PAUSE: {'method': 'async_media_pause'},
    SERVICE_MEDIA_STOP: {'method': 'async_media_stop'},
    SERVICE_MEDIA_NEXT_TRACK: {'method': 'async_media_next_track'},
    SERVICE_MEDIA_PREVIOUS_TRACK: {'method': 'async_media_previous_track'},
    SERVICE_CLEAR_PLAYLIST: {'method': 'async_clear_playlist'},
    SERVICE_VOLUME_SET: {
        'method': 'async_set_volume_level',
        'schema': MEDIA_PLAYER_SET_VOLUME_SCHEMA},
    SERVICE_VOLUME_MUTE: {
        'method': 'async_mute_volume',
        'schema': MEDIA_PLAYER_MUTE_VOLUME_SCHEMA},
    SERVICE_MEDIA_SEEK: {
        'method': 'async_media_seek',
        'schema': MEDIA_PLAYER_MEDIA_SEEK_SCHEMA},
    SERVICE_SELECT_SOURCE: {
        'method': 'async_select_source',
        'schema': MEDIA_PLAYER_SELECT_SOURCE_SCHEMA},
    SERVICE_PLAY_MEDIA: {
        'method': 'async_play_media',
        'schema': MEDIA_PLAYER_PLAY_MEDIA_SCHEMA},
    SERVICE_SHUFFLE_SET: {
        'method': 'async_set_shuffle',
        'schema': MEDIA_PLAYER_SET_SHUFFLE_SCHEMA},
}
# Attribute names surfaced in the entity's state; each matches a
# MediaPlayerDevice property of the same name -- confirm in the
# state_attributes implementation (outside this chunk).
ATTR_TO_PROPERTY = [
    ATTR_MEDIA_VOLUME_LEVEL,
    ATTR_MEDIA_VOLUME_MUTED,
    ATTR_MEDIA_CONTENT_ID,
    ATTR_MEDIA_CONTENT_TYPE,
    ATTR_MEDIA_DURATION,
    ATTR_MEDIA_POSITION,
    ATTR_MEDIA_POSITION_UPDATED_AT,
    ATTR_MEDIA_TITLE,
    ATTR_MEDIA_ARTIST,
    ATTR_MEDIA_ALBUM_NAME,
    ATTR_MEDIA_ALBUM_ARTIST,
    ATTR_MEDIA_TRACK,
    ATTR_MEDIA_SERIES_TITLE,
    ATTR_MEDIA_SEASON,
    ATTR_MEDIA_EPISODE,
    ATTR_MEDIA_CHANNEL,
    ATTR_MEDIA_PLAYLIST,
    ATTR_APP_ID,
    ATTR_APP_NAME,
    ATTR_INPUT_SOURCE,
    ATTR_INPUT_SOURCE_LIST,
    ATTR_MEDIA_SHUFFLE,
]
# Synchronous convenience helpers: each fires the matching media_player
# service call (or reads state, for is_on) on the passed-in hass instance.
@bind_hass
def is_on(hass, entity_id=None):
    """
    Return true if specified media player entity_id is on.
    Check all media player if no entity_id specified.
    """
    entity_ids = [entity_id] if entity_id else hass.states.entity_ids(DOMAIN)
    # "on" here means any state other than STATE_OFF (playing, idle, paused...).
    return any(not hass.states.is_state(entity_id, STATE_OFF)
               for entity_id in entity_ids)
@bind_hass
def turn_on(hass, entity_id=None):
    """Turn on specified media player or all."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_TURN_ON, data)
@bind_hass
def turn_off(hass, entity_id=None):
    """Turn off specified media player or all."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_TURN_OFF, data)
@bind_hass
def toggle(hass, entity_id=None):
    """Toggle specified media player or all."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_TOGGLE, data)
@bind_hass
def volume_up(hass, entity_id=None):
    """Send the media player the command for volume up."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_VOLUME_UP, data)
@bind_hass
def volume_down(hass, entity_id=None):
    """Send the media player the command for volume down."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_VOLUME_DOWN, data)
@bind_hass
def mute_volume(hass, mute, entity_id=None):
    """Send the media player the command for muting the volume."""
    data = {ATTR_MEDIA_VOLUME_MUTED: mute}
    if entity_id:
        data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_VOLUME_MUTE, data)
@bind_hass
def set_volume_level(hass, volume, entity_id=None):
    """Send the media player the command for setting the volume."""
    data = {ATTR_MEDIA_VOLUME_LEVEL: volume}
    if entity_id:
        data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_VOLUME_SET, data)
@bind_hass
def media_play_pause(hass, entity_id=None):
    """Send the media player the command for play/pause."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_MEDIA_PLAY_PAUSE, data)
@bind_hass
def media_play(hass, entity_id=None):
    """Send the media player the command for play."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_MEDIA_PLAY, data)
@bind_hass
def media_pause(hass, entity_id=None):
    """Send the media player the command for pause."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_MEDIA_PAUSE, data)
@bind_hass
def media_stop(hass, entity_id=None):
    """Send the media player the stop command."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_MEDIA_STOP, data)
@bind_hass
def media_next_track(hass, entity_id=None):
    """Send the media player the command for next track."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_MEDIA_NEXT_TRACK, data)
@bind_hass
def media_previous_track(hass, entity_id=None):
    """Send the media player the command for prev track."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, data)
@bind_hass
def media_seek(hass, position, entity_id=None):
    """Send the media player the command to seek in current playing media."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    data[ATTR_MEDIA_SEEK_POSITION] = position
    hass.services.call(DOMAIN, SERVICE_MEDIA_SEEK, data)
@bind_hass
def play_media(hass, media_type, media_id, entity_id=None, enqueue=None):
    """Send the media player the command for playing media."""
    data = {ATTR_MEDIA_CONTENT_TYPE: media_type,
            ATTR_MEDIA_CONTENT_ID: media_id}
    if entity_id:
        data[ATTR_ENTITY_ID] = entity_id
    if enqueue:
        data[ATTR_MEDIA_ENQUEUE] = enqueue
    hass.services.call(DOMAIN, SERVICE_PLAY_MEDIA, data)
@bind_hass
def select_source(hass, source, entity_id=None):
    """Send the media player the command to select input source."""
    data = {ATTR_INPUT_SOURCE: source}
    if entity_id:
        data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SELECT_SOURCE, data)
@bind_hass
def clear_playlist(hass, entity_id=None):
    """Send the media player the command for clear playlist."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    hass.services.call(DOMAIN, SERVICE_CLEAR_PLAYLIST, data)
@bind_hass
def set_shuffle(hass, shuffle, entity_id=None):
    """Send the media player the command to enable/disable shuffle mode."""
    data = {ATTR_MEDIA_SHUFFLE: shuffle}
    if entity_id:
        data[ATTR_ENTITY_ID] = entity_id
    hass.services.call(DOMAIN, SERVICE_SHUFFLE_SET, data)
@asyncio.coroutine
def async_setup(hass, config):
    """Track states and offer events for media_players.

    Sets up the entity component, registers the image proxy HTTP view and
    wires every entry of SERVICE_TO_METHOD to a shared service handler.
    """
    component = EntityComponent(
        logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
    # Serve entity artwork over HTTP (authenticated via per-entity tokens).
    hass.http.register_view(MediaPlayerImageView(component.entities))
    yield from component.async_setup(config)
    # Service descriptions for the UI, loaded off the event loop.
    descriptions = yield from hass.async_add_job(
        load_yaml_config_file, os.path.join(
            os.path.dirname(__file__), 'services.yaml'))
    @asyncio.coroutine
    def async_service_handler(service):
        """Map services to methods on MediaPlayerDevice."""
        method = SERVICE_TO_METHOD.get(service.service)
        if not method:
            return
        # Translate service data into keyword arguments for the device method.
        params = {}
        if service.service == SERVICE_VOLUME_SET:
            params['volume'] = service.data.get(ATTR_MEDIA_VOLUME_LEVEL)
        elif service.service == SERVICE_VOLUME_MUTE:
            params['mute'] = service.data.get(ATTR_MEDIA_VOLUME_MUTED)
        elif service.service == SERVICE_MEDIA_SEEK:
            params['position'] = service.data.get(ATTR_MEDIA_SEEK_POSITION)
        elif service.service == SERVICE_SELECT_SOURCE:
            params['source'] = service.data.get(ATTR_INPUT_SOURCE)
        elif service.service == SERVICE_PLAY_MEDIA:
            params['media_type'] = \
                service.data.get(ATTR_MEDIA_CONTENT_TYPE)
            params['media_id'] = service.data.get(ATTR_MEDIA_CONTENT_ID)
            params[ATTR_MEDIA_ENQUEUE] = \
                service.data.get(ATTR_MEDIA_ENQUEUE)
        elif service.service == SERVICE_SHUFFLE_SET:
            params[ATTR_MEDIA_SHUFFLE] = \
                service.data.get(ATTR_MEDIA_SHUFFLE)
        target_players = component.async_extract_from_service(service)
        update_tasks = []
        # Invoke the mapped coroutine sequentially on every targeted player.
        for player in target_players:
            yield from getattr(player, method['method'])(**params)
        # Poll-based players need an explicit state refresh after the call;
        # players with a coroutine async_update are refreshed concurrently
        # below, the rest are awaited inline.
        for player in target_players:
            if not player.should_poll:
                continue
            update_coro = player.async_update_ha_state(True)
            if hasattr(player, 'async_update'):
                update_tasks.append(update_coro)
            else:
                yield from update_coro
        if update_tasks:
            yield from asyncio.wait(update_tasks, loop=hass.loop)
    # One shared handler for every media_player service, each with its schema.
    for service in SERVICE_TO_METHOD:
        schema = SERVICE_TO_METHOD[service].get(
            'schema', MEDIA_PLAYER_SCHEMA)
        hass.services.async_register(
            DOMAIN, service, async_service_handler,
            descriptions.get(service), schema=schema)
    return True
class MediaPlayerDevice(Entity):
    """ABC for media player devices."""

    # Lazily created by the access_token property; used to authorize
    # artwork requests served by MediaPlayerImageView.
    _access_token = None

    # pylint: disable=no-self-use
    # Implement these for your media player
    @property
    def state(self):
        """State of the player."""
        return STATE_UNKNOWN

    @property
    def access_token(self):
        """Access token for this media player."""
        if self._access_token is None:
            # 256 random bits, hashed so the raw RNG output never leaves
            # this process.
            self._access_token = hashlib.sha256(
                _RND.getrandbits(256).to_bytes(32, 'little')).hexdigest()
        return self._access_token

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return None

    @property
    def is_volume_muted(self):
        """Boolean if volume is currently muted."""
        return None

    @property
    def media_content_id(self):
        """Content ID of current playing media."""
        return None

    @property
    def media_content_type(self):
        """Content type of current playing media."""
        return None

    @property
    def media_duration(self):
        """Duration of current playing media in seconds."""
        return None

    @property
    def media_position(self):
        """Position of current playing media in seconds."""
        return None

    @property
    def media_position_updated_at(self):
        """When was the position of the current playing media valid.
        Returns value from homeassistant.util.dt.utcnow().
        """
        return None

    @property
    def media_image_url(self):
        """Image url of current playing media."""
        return None

    @property
    def media_image_hash(self):
        """Hash value for media image."""
        url = self.media_image_url
        if url is not None:
            # A short md5 prefix is enough to make the proxy URL change
            # whenever the artwork URL changes.
            return hashlib.md5(url.encode('utf-8')).hexdigest()[:5]
        return None

    @asyncio.coroutine
    def async_get_media_image(self):
        """Fetch media image of current playing image."""
        url = self.media_image_url
        if url is None:
            return None, None
        # Delegates to the shared in-memory image cache.
        return (yield from _async_fetch_image(self.hass, url))

    @property
    def media_title(self):
        """Title of current playing media."""
        return None

    @property
    def media_artist(self):
        """Artist of current playing media, music track only."""
        return None

    @property
    def media_album_name(self):
        """Album name of current playing media, music track only."""
        return None

    @property
    def media_album_artist(self):
        """Album artist of current playing media, music track only."""
        return None

    @property
    def media_track(self):
        """Track number of current playing media, music track only."""
        return None

    @property
    def media_series_title(self):
        """Title of series of current playing media, TV show only."""
        return None

    @property
    def media_season(self):
        """Season of current playing media, TV show only."""
        return None

    @property
    def media_episode(self):
        """Episode of current playing media, TV show only."""
        return None

    @property
    def media_channel(self):
        """Channel currently playing."""
        return None

    @property
    def media_playlist(self):
        """Title of Playlist currently playing."""
        return None

    @property
    def app_id(self):
        """ID of the current running app."""
        return None

    @property
    def app_name(self):
        """Name of the current running app."""
        return None

    @property
    def source(self):
        """Name of the current input source."""
        return None

    @property
    def source_list(self):
        """List of available input sources."""
        return None

    @property
    def shuffle(self):
        """Boolean if shuffle is enabled."""
        return None

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return 0

    def turn_on(self):
        """Turn the media player on."""
        raise NotImplementedError()

    def async_turn_on(self):
        """Turn the media player on.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.turn_on)

    def turn_off(self):
        """Turn the media player off."""
        raise NotImplementedError()

    def async_turn_off(self):
        """Turn the media player off.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.turn_off)

    def mute_volume(self, mute):
        """Mute the volume."""
        raise NotImplementedError()

    def async_mute_volume(self, mute):
        """Mute the volume.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.mute_volume, mute)

    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        raise NotImplementedError()

    def async_set_volume_level(self, volume):
        """Set volume level, range 0..1.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.set_volume_level, volume)

    def media_play(self):
        """Send play command."""
        raise NotImplementedError()

    def async_media_play(self):
        """Send play command.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.media_play)

    def media_pause(self):
        """Send pause command."""
        raise NotImplementedError()

    def async_media_pause(self):
        """Send pause command.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.media_pause)

    def media_stop(self):
        """Send stop command."""
        raise NotImplementedError()

    def async_media_stop(self):
        """Send stop command.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.media_stop)

    def media_previous_track(self):
        """Send previous track command."""
        raise NotImplementedError()

    def async_media_previous_track(self):
        """Send previous track command.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.media_previous_track)

    def media_next_track(self):
        """Send next track command."""
        raise NotImplementedError()

    def async_media_next_track(self):
        """Send next track command.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.media_next_track)

    def media_seek(self, position):
        """Send seek command."""
        raise NotImplementedError()

    def async_media_seek(self, position):
        """Send seek command.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.media_seek, position)

    def play_media(self, media_type, media_id, **kwargs):
        """Play a piece of media."""
        raise NotImplementedError()

    def async_play_media(self, media_type, media_id, **kwargs):
        """Play a piece of media.
        This method must be run in the event loop and returns a coroutine.
        """
        # partial() is needed so the keyword arguments survive the
        # executor call.
        return self.hass.async_add_job(
            ft.partial(self.play_media, media_type, media_id, **kwargs))

    def select_source(self, source):
        """Select input source."""
        raise NotImplementedError()

    def async_select_source(self, source):
        """Select input source.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.select_source, source)

    def clear_playlist(self):
        """Clear players playlist."""
        raise NotImplementedError()

    def async_clear_playlist(self):
        """Clear players playlist.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.clear_playlist)

    def set_shuffle(self, shuffle):
        """Enable/disable shuffle mode."""
        raise NotImplementedError()

    def async_set_shuffle(self, shuffle):
        """Enable/disable shuffle mode.
        This method must be run in the event loop and returns a coroutine.
        """
        return self.hass.async_add_job(self.set_shuffle, shuffle)

    # No need to overwrite these.
    @property
    def support_play(self):
        """Boolean if play is supported."""
        return bool(self.supported_features & SUPPORT_PLAY)

    @property
    def support_pause(self):
        """Boolean if pause is supported."""
        return bool(self.supported_features & SUPPORT_PAUSE)

    @property
    def support_stop(self):
        """Boolean if stop is supported."""
        return bool(self.supported_features & SUPPORT_STOP)

    @property
    def support_seek(self):
        """Boolean if seek is supported."""
        return bool(self.supported_features & SUPPORT_SEEK)

    @property
    def support_volume_set(self):
        """Boolean if setting volume is supported."""
        return bool(self.supported_features & SUPPORT_VOLUME_SET)

    @property
    def support_volume_mute(self):
        """Boolean if muting volume is supported."""
        return bool(self.supported_features & SUPPORT_VOLUME_MUTE)

    @property
    def support_previous_track(self):
        """Boolean if previous track command supported."""
        return bool(self.supported_features & SUPPORT_PREVIOUS_TRACK)

    @property
    def support_next_track(self):
        """Boolean if next track command supported."""
        return bool(self.supported_features & SUPPORT_NEXT_TRACK)

    @property
    def support_play_media(self):
        """Boolean if play media command supported."""
        return bool(self.supported_features & SUPPORT_PLAY_MEDIA)

    @property
    def support_select_source(self):
        """Boolean if select source command supported."""
        return bool(self.supported_features & SUPPORT_SELECT_SOURCE)

    @property
    def support_clear_playlist(self):
        """Boolean if clear playlist command supported."""
        return bool(self.supported_features & SUPPORT_CLEAR_PLAYLIST)

    @property
    def support_shuffle_set(self):
        """Boolean if shuffle is supported."""
        return bool(self.supported_features & SUPPORT_SHUFFLE_SET)

    def async_toggle(self):
        """Toggle the power on the media player.
        This method must be run in the event loop and returns a coroutine.
        """
        # Prefer a device-native toggle when the subclass provides one.
        if hasattr(self, 'toggle'):
            # pylint: disable=no-member
            return self.hass.async_add_job(self.toggle)
        if self.state in [STATE_OFF, STATE_IDLE]:
            return self.async_turn_on()
        return self.async_turn_off()

    @asyncio.coroutine
    def async_volume_up(self):
        """Turn volume up for media player.
        This method is a coroutine.
        """
        if hasattr(self, 'volume_up'):
            # pylint: disable=no-member
            yield from self.hass.async_add_job(self.volume_up)
            return
        # Fallback: raise the volume by 10%, capped at 1.
        # NOTE(review): assumes volume_level is not None here -- confirm
        # callers only use this when volume reporting is supported.
        if self.volume_level < 1:
            yield from self.async_set_volume_level(
                min(1, self.volume_level + .1))

    @asyncio.coroutine
    def async_volume_down(self):
        """Turn volume down for media player.
        This method is a coroutine.
        """
        if hasattr(self, 'volume_down'):
            # pylint: disable=no-member
            yield from self.hass.async_add_job(self.volume_down)
            return
        # Fallback: lower the volume by 10%, floored at 0.
        if self.volume_level > 0:
            yield from self.async_set_volume_level(
                max(0, self.volume_level - .1))

    def async_media_play_pause(self):
        """Play or pause the media player.
        This method must be run in the event loop and returns a coroutine.
        """
        if hasattr(self, 'media_play_pause'):
            # pylint: disable=no-member
            return self.hass.async_add_job(self.media_play_pause)
        if self.state == STATE_PLAYING:
            return self.async_media_pause()
        return self.async_media_play()

    @property
    def entity_picture(self):
        """Return image of the media playing."""
        if self.state == STATE_OFF:
            return None
        image_hash = self.media_image_hash
        if image_hash is None:
            return None
        # The proxy URL embeds the access token so the image view can
        # authorize the request without the regular API credentials.
        return ENTITY_IMAGE_URL.format(
            self.entity_id, self.access_token, image_hash)

    @property
    def state_attributes(self):
        """Return the state attributes."""
        if self.state == STATE_OFF:
            return None
        # Only expose the media attributes that currently have a value.
        state_attr = {
            attr: getattr(self, attr) for attr
            in ATTR_TO_PROPERTY if getattr(self, attr) is not None
        }
        return state_attr

    def preload_media_image_url(self, url):
        """Preload and cache a media image for future use."""
        # Runs the fetch on the event loop and blocks the calling thread
        # until the image has been cached.
        run_coroutine_threadsafe(
            _async_fetch_image(self.hass, url), self.hass.loop
        ).result()
@asyncio.coroutine
def _async_fetch_image(hass, url):
    """Fetch image.

    Images are cached in memory (the images are typically 10-100kB in size).
    Returns a (content, content_type) tuple; both are None when the image
    could not be retrieved.
    """
    cache_images = ENTITY_IMAGE_CACHE[ATTR_CACHE_IMAGES]
    cache_urls = ENTITY_IMAGE_CACHE[ATTR_CACHE_URLS]
    cache_maxsize = ENTITY_IMAGE_CACHE[ATTR_CACHE_MAXSIZE]
    if url in cache_images:
        return cache_images[url]
    content, content_type = (None, None)
    websession = async_get_clientsession(hass)
    try:
        with async_timeout.timeout(10, loop=hass.loop):
            response = yield from websession.get(url)
            if response.status == 200:
                content = yield from response.read()
                content_type = response.headers.get(CONTENT_TYPE_HEADER)
                if content_type:
                    # Drop charset and other parameters from the header.
                    content_type = content_type.split(';')[0]
    except asyncio.TimeoutError:
        pass
    if not content:
        return (None, None)
    cache_images[url] = (content, content_type)
    cache_urls.append(url)
    # Evict oldest entries *in place*. The previous implementation rebound
    # the local name (cache_urls = cache_urls[1:]), which left the shared
    # ENTITY_IMAGE_CACHE url list growing without bound across calls.
    while len(cache_urls) > cache_maxsize:
        oldest_url = cache_urls.pop(0)
        cache_images.pop(oldest_url, None)
    return content, content_type
class MediaPlayerImageView(HomeAssistantView):
    """Media player view to serve an image."""

    # Authentication is checked manually in get() so that a request may
    # alternatively carry the player's access token as a query parameter.
    requires_auth = False
    url = '/api/media_player_proxy/{entity_id}'
    name = 'api:media_player:image'

    def __init__(self, entities):
        """Initialize a media player view."""
        # Mapping of entity_id -> MediaPlayerDevice, owned by the component.
        self.entities = entities

    @asyncio.coroutine
    def get(self, request, entity_id):
        """Start a get request."""
        player = self.entities.get(entity_id)
        if player is None:
            # Only authenticated callers learn that the entity is missing;
            # everyone else just gets 401.
            status = 404 if request[KEY_AUTHENTICATED] else 401
            return web.Response(status=status)
        authenticated = (request[KEY_AUTHENTICATED] or
                         request.query.get('token') == player.access_token)
        if not authenticated:
            return web.Response(status=401)
        data, content_type = yield from player.async_get_media_image()
        if data is None:
            return web.Response(status=500)
        return web.Response(body=data, content_type=content_type)
| apache-2.0 |
matiasb/django | django/views/generic/dates.py | 212 | 25790 | from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.http import Http404
from django.utils import timezone
from django.utils.encoding import force_str, force_text
from django.utils.functional import cached_property
from django.utils.translation import ugettext as _
from django.views.generic.base import View
from django.views.generic.detail import (
BaseDetailView, SingleObjectTemplateResponseMixin,
)
from django.views.generic.list import (
MultipleObjectMixin, MultipleObjectTemplateResponseMixin,
)
class YearMixin(object):
    """Mixin for views that work with data indexed by year."""
    year_format = '%Y'
    year = None

    def get_year_format(self):
        """Return the strptime format used to parse the year from the URL."""
        return self.year_format

    def get_year(self):
        """Return the year this view displays data for, or raise Http404."""
        if self.year is not None:
            return self.year
        try:
            return self.kwargs['year']
        except KeyError:
            try:
                return self.request.GET['year']
            except KeyError:
                raise Http404(_("No year specified"))

    def get_next_year(self, date):
        """Return the next valid year relative to ``date``."""
        return _get_next_prev(self, date, is_previous=False, period='year')

    def get_previous_year(self, date):
        """Return the previous valid year relative to ``date``."""
        return _get_next_prev(self, date, is_previous=True, period='year')

    def _get_next_year(self, date):
        """Return January 1st of the year after ``date``.

        Intervals are half-open: start date <= item date < next start date.
        """
        return date.replace(year=date.year + 1, month=1, day=1)

    def _get_current_year(self, date):
        """Return January 1st of ``date``'s year."""
        return date.replace(month=1, day=1)
class MonthMixin(object):
    """Mixin for views that work with data indexed by month."""
    month_format = '%b'
    month = None

    def get_month_format(self):
        """Return the strptime format used to parse the month from the URL."""
        return self.month_format

    def get_month(self):
        """Return the month this view displays data for, or raise Http404."""
        if self.month is not None:
            return self.month
        try:
            return self.kwargs['month']
        except KeyError:
            try:
                return self.request.GET['month']
            except KeyError:
                raise Http404(_("No month specified"))

    def get_next_month(self, date):
        """Return the next valid month relative to ``date``."""
        return _get_next_prev(self, date, is_previous=False, period='month')

    def get_previous_month(self, date):
        """Return the previous valid month relative to ``date``."""
        return _get_next_prev(self, date, is_previous=True, period='month')

    def _get_next_month(self, date):
        """Return the first day of the month after ``date``.

        Intervals are half-open: start date <= item date < next start date.
        """
        year, month = date.year, date.month
        if month == 12:
            year, month = year + 1, 1
        else:
            month += 1
        return date.replace(year=year, month=month, day=1)

    def _get_current_month(self, date):
        """Return the first day of ``date``'s month."""
        return date.replace(day=1)
class DayMixin(object):
    """Mixin for views that work with data indexed by day."""
    day_format = '%d'
    day = None

    def get_day_format(self):
        """Return the strptime format used to parse the day from the URL."""
        return self.day_format

    def get_day(self):
        """Return the day this view displays data for, or raise Http404."""
        if self.day is not None:
            return self.day
        try:
            return self.kwargs['day']
        except KeyError:
            try:
                return self.request.GET['day']
            except KeyError:
                raise Http404(_("No day specified"))

    def get_next_day(self, date):
        """Return the next valid day relative to ``date``."""
        return _get_next_prev(self, date, is_previous=False, period='day')

    def get_previous_day(self, date):
        """Return the previous valid day relative to ``date``."""
        return _get_next_prev(self, date, is_previous=True, period='day')

    def _get_next_day(self, date):
        """Return the day after ``date``.

        Intervals are half-open: start date <= item date < next start date.
        """
        one_day = datetime.timedelta(days=1)
        return date + one_day

    def _get_current_day(self, date):
        """Return ``date`` itself; a day is its own interval start."""
        return date
class WeekMixin(object):
    """Mixin for views that work with data indexed by week."""
    week_format = '%U'
    week = None

    def get_week_format(self):
        """Return the strptime format used to parse the week from the URL."""
        return self.week_format

    def get_week(self):
        """Return the week this view displays data for, or raise Http404."""
        if self.week is not None:
            return self.week
        try:
            return self.kwargs['week']
        except KeyError:
            try:
                return self.request.GET['week']
            except KeyError:
                raise Http404(_("No week specified"))

    def get_next_week(self, date):
        """Return the next valid week relative to ``date``."""
        return _get_next_prev(self, date, is_previous=False, period='week')

    def get_previous_week(self, date):
        """Return the previous valid week relative to ``date``."""
        return _get_next_prev(self, date, is_previous=True, period='week')

    def _get_next_week(self, date):
        """Return the first day of the week after ``date``.

        Intervals are half-open: start date <= item date < next start date.
        """
        days_until_next = 7 - self._get_weekday(date)
        return date + datetime.timedelta(days=days_until_next)

    def _get_current_week(self, date):
        """Return the first day of ``date``'s week."""
        return date - datetime.timedelta(self._get_weekday(date))

    def _get_weekday(self, date):
        """Return ``date``'s weekday as 0-6 relative to the week format.

        '%W' weeks start on Monday; '%U' weeks start on Sunday.
        """
        week_format = self.get_week_format()
        if week_format == '%W':
            return date.weekday()
        if week_format == '%U':
            return (date.weekday() + 1) % 7
        raise ValueError("unknown week format: %s" % week_format)
class DateMixin(object):
    """
    Mixin class for views manipulating date-based data.
    """
    date_field = None
    allow_future = False

    def get_date_field(self):
        """
        Get the name of the date field to be used to filter by.
        """
        if self.date_field is None:
            raise ImproperlyConfigured("%s.date_field is required." % self.__class__.__name__)
        return self.date_field

    def get_allow_future(self):
        """
        Returns `True` if the view should be allowed to display objects from
        the future.
        """
        return self.allow_future

    # Note: the following three methods only work in subclasses that also
    # inherit SingleObjectMixin or MultipleObjectMixin.
    @cached_property
    def uses_datetime_field(self):
        """
        Return `True` if the date field is a `DateTimeField` and `False`
        if it's a `DateField`.
        """
        # Fall back to the queryset's model when the view has no explicit
        # `model` attribute.
        model = self.get_queryset().model if self.model is None else self.model
        field = model._meta.get_field(self.get_date_field())
        return isinstance(field, models.DateTimeField)

    def _make_date_lookup_arg(self, value):
        """
        Convert a date into a datetime when the date field is a DateTimeField.
        When time zone support is enabled, `date` is assumed to be in the
        current time zone, so that displayed items are consistent with the URL.
        """
        if self.uses_datetime_field:
            value = datetime.datetime.combine(value, datetime.time.min)
            if settings.USE_TZ:
                value = timezone.make_aware(value, timezone.get_current_timezone())
        return value

    def _make_single_date_lookup(self, date):
        """
        Get the lookup kwargs for filtering on a single date.
        If the date field is a DateTimeField, we can't just filter on
        date_field=date because that doesn't take the time into account.
        """
        date_field = self.get_date_field()
        if self.uses_datetime_field:
            # Half-open range [midnight, next midnight) in the active
            # time zone.
            since = self._make_date_lookup_arg(date)
            until = self._make_date_lookup_arg(date + datetime.timedelta(days=1))
            return {
                '%s__gte' % date_field: since,
                '%s__lt' % date_field: until,
            }
        else:
            # Skip self._make_date_lookup_arg, it's a no-op in this branch.
            return {date_field: date}
class BaseDateListView(MultipleObjectMixin, DateMixin, View):
    """
    Abstract base class for date-based views displaying a list of objects.
    """
    allow_empty = False
    date_list_period = 'year'

    def get(self, request, *args, **kwargs):
        # Subclasses supply get_dated_items(); this method only assembles
        # the template context and renders the response.
        self.date_list, self.object_list, extra_context = self.get_dated_items()
        context = self.get_context_data(object_list=self.object_list,
                                        date_list=self.date_list)
        context.update(extra_context)
        return self.render_to_response(context)

    def get_dated_items(self):
        """
        Obtain the list of dates and items.
        """
        raise NotImplementedError('A DateView must provide an implementation of get_dated_items()')

    def get_ordering(self):
        """
        Returns the field or fields to use for ordering the queryset; uses the
        date field by default.
        """
        return '-%s' % self.get_date_field() if self.ordering is None else self.ordering

    def get_dated_queryset(self, **lookup):
        """
        Get a queryset properly filtered according to `allow_future` and any
        extra lookup kwargs.
        """
        qs = self.get_queryset().filter(**lookup)
        date_field = self.get_date_field()
        allow_future = self.get_allow_future()
        allow_empty = self.get_allow_empty()
        paginate_by = self.get_paginate_by(qs)
        if not allow_future:
            now = timezone.now() if self.uses_datetime_field else timezone_today()
            qs = qs.filter(**{'%s__lte' % date_field: now})
        if not allow_empty:
            # When pagination is enabled, it's better to do a cheap query
            # than to load the unpaginated queryset in memory.
            is_empty = len(qs) == 0 if paginate_by is None else not qs.exists()
            if is_empty:
                raise Http404(_("No %(verbose_name_plural)s available") % {
                    'verbose_name_plural': force_text(qs.model._meta.verbose_name_plural)
                })
        return qs

    def get_date_list_period(self):
        """
        Get the aggregation period for the list of dates: 'year', 'month', or 'day'.
        """
        return self.date_list_period

    def get_date_list(self, queryset, date_type=None, ordering='ASC'):
        """
        Get a date list by calling `queryset.dates/datetimes()`, checking
        along the way for empty lists that aren't allowed.
        """
        date_field = self.get_date_field()
        allow_empty = self.get_allow_empty()
        if date_type is None:
            date_type = self.get_date_list_period()
        # DateTimeFields go through datetimes(); DateFields through dates().
        if self.uses_datetime_field:
            date_list = queryset.datetimes(date_field, date_type, ordering)
        else:
            date_list = queryset.dates(date_field, date_type, ordering)
        if date_list is not None and not date_list and not allow_empty:
            name = force_text(queryset.model._meta.verbose_name_plural)
            raise Http404(_("No %(verbose_name_plural)s available") %
                          {'verbose_name_plural': name})
        return date_list
class BaseArchiveIndexView(BaseDateListView):
    """
    Base class for archives of date-based items.

    Requires a response mixin.
    """
    context_object_name = 'latest'

    def get_dated_items(self):
        """Return (date_list, items, extra_context) for this request."""
        queryset = self.get_dated_queryset()
        dates = self.get_date_list(queryset, ordering='DESC')
        # With no dates there is nothing to show; hand back an empty
        # queryset so template introspection still works.
        items = queryset if dates else queryset.none()
        return (dates, items, {})
class ArchiveIndexView(MultipleObjectTemplateResponseMixin, BaseArchiveIndexView):
    """
    Top-level archive of date-based items.
    """
    # Suffix appended when building the default template name.
    template_name_suffix = '_archive'
class BaseYearArchiveView(YearMixin, BaseDateListView):
    """
    List of objects published in a given year.
    """
    date_list_period = 'month'
    make_object_list = False

    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        year = self.get_year()
        date_field = self.get_date_field()
        date = _date_from_string(year, self.get_year_format())
        # Half-open interval covering the whole year.
        since = self._make_date_lookup_arg(date)
        until = self._make_date_lookup_arg(self._get_next_year(date))
        lookup_kwargs = {
            '%s__gte' % date_field: since,
            '%s__lt' % date_field: until,
        }
        qs = self.get_dated_queryset(**lookup_kwargs)
        date_list = self.get_date_list(qs)
        if not self.get_make_object_list():
            # We need this to be a queryset since parent classes introspect it
            # to find information about the model.
            qs = qs.none()
        return (date_list, qs, {
            'year': date,
            'next_year': self.get_next_year(date),
            'previous_year': self.get_previous_year(date),
        })

    def get_make_object_list(self):
        """
        Return `True` if this view should contain the full list of objects in
        the given year.
        """
        return self.make_object_list
class YearArchiveView(MultipleObjectTemplateResponseMixin, BaseYearArchiveView):
    """
    List of objects published in a given year.
    """
    # Suffix appended when building the default template name.
    template_name_suffix = '_archive_year'
class BaseMonthArchiveView(YearMixin, MonthMixin, BaseDateListView):
    """
    List of objects published in a given month.
    """
    date_list_period = 'day'

    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        year = self.get_year()
        month = self.get_month()
        date_field = self.get_date_field()
        date = _date_from_string(year, self.get_year_format(),
                                 month, self.get_month_format())
        # Half-open interval covering the whole month.
        since = self._make_date_lookup_arg(date)
        until = self._make_date_lookup_arg(self._get_next_month(date))
        lookup_kwargs = {
            '%s__gte' % date_field: since,
            '%s__lt' % date_field: until,
        }
        qs = self.get_dated_queryset(**lookup_kwargs)
        date_list = self.get_date_list(qs)
        return (date_list, qs, {
            'month': date,
            'next_month': self.get_next_month(date),
            'previous_month': self.get_previous_month(date),
        })
class MonthArchiveView(MultipleObjectTemplateResponseMixin, BaseMonthArchiveView):
    """
    List of objects published in a given month.
    """
    # Suffix appended when building the default template name.
    template_name_suffix = '_archive_month'
class BaseWeekArchiveView(YearMixin, WeekMixin, BaseDateListView):
    """
    List of objects published in a given week.
    """

    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        year = self.get_year()
        week = self.get_week()
        date_field = self.get_date_field()
        week_format = self.get_week_format()
        # '%W' weeks start on Monday (day 1); '%U' weeks start on Sunday
        # (day 0). Combined with '%w' below to anchor the parsed date on
        # the first day of the requested week.
        week_start = {
            '%W': '1',
            '%U': '0',
        }[week_format]
        date = _date_from_string(year, self.get_year_format(),
                                 week_start, '%w',
                                 week, week_format)
        # Half-open interval covering the whole week.
        since = self._make_date_lookup_arg(date)
        until = self._make_date_lookup_arg(self._get_next_week(date))
        lookup_kwargs = {
            '%s__gte' % date_field: since,
            '%s__lt' % date_field: until,
        }
        qs = self.get_dated_queryset(**lookup_kwargs)
        return (None, qs, {
            'week': date,
            'next_week': self.get_next_week(date),
            'previous_week': self.get_previous_week(date),
        })
class WeekArchiveView(MultipleObjectTemplateResponseMixin, BaseWeekArchiveView):
    """
    List of objects published in a given week.
    """
    # Suffix appended when building the default template name.
    template_name_suffix = '_archive_week'
class BaseDayArchiveView(YearMixin, MonthMixin, DayMixin, BaseDateListView):
    """
    List of objects published on a given day.
    """

    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        year = self.get_year()
        month = self.get_month()
        day = self.get_day()
        date = _date_from_string(year, self.get_year_format(),
                                 month, self.get_month_format(),
                                 day, self.get_day_format())
        return self._get_dated_items(date)

    def _get_dated_items(self, date):
        """
        Do the actual heavy lifting of getting the dated items; this accepts a
        date object so that TodayArchiveView can be trivial.
        """
        lookup_kwargs = self._make_single_date_lookup(date)
        qs = self.get_dated_queryset(**lookup_kwargs)
        return (None, qs, {
            'day': date,
            'previous_day': self.get_previous_day(date),
            'next_day': self.get_next_day(date),
            'previous_month': self.get_previous_month(date),
            'next_month': self.get_next_month(date)
        })
class DayArchiveView(MultipleObjectTemplateResponseMixin, BaseDayArchiveView):
    """
    List of objects published on a given day.
    """
    # Suffix appended when building the default template name.
    template_name_suffix = "_archive_day"
class BaseTodayArchiveView(BaseDayArchiveView):
    """
    List of objects published today.
    """

    def get_dated_items(self):
        """Return (date_list, items, extra_context) for today's date."""
        today = datetime.date.today()
        return self._get_dated_items(today)
class TodayArchiveView(MultipleObjectTemplateResponseMixin, BaseTodayArchiveView):
    """
    List of objects published today.
    """
    # Reuses the day archive template suffix.
    template_name_suffix = "_archive_day"
class BaseDateDetailView(YearMixin, MonthMixin, DayMixin, DateMixin, BaseDetailView):
    """
    Detail view of a single object on a single date; this differs from the
    standard DetailView by accepting a year/month/day in the URL.
    """

    def get_object(self, queryset=None):
        """
        Get the object this request displays.
        """
        year = self.get_year()
        month = self.get_month()
        day = self.get_day()
        date = _date_from_string(year, self.get_year_format(),
                                 month, self.get_month_format(),
                                 day, self.get_day_format())
        # Use a custom queryset if provided
        qs = self.get_queryset() if queryset is None else queryset
        if not self.get_allow_future() and date > datetime.date.today():
            raise Http404(_(
                "Future %(verbose_name_plural)s not available because "
                "%(class_name)s.allow_future is False.") % {
                'verbose_name_plural': qs.model._meta.verbose_name_plural,
                'class_name': self.__class__.__name__,
            },
            )
        # Filter down a queryset from self.queryset using the date from the
        # URL. This'll get passed as the queryset to DetailView.get_object,
        # which'll handle the 404
        lookup_kwargs = self._make_single_date_lookup(date)
        qs = qs.filter(**lookup_kwargs)
        return super(BaseDetailView, self).get_object(queryset=qs)
class DateDetailView(SingleObjectTemplateResponseMixin, BaseDateDetailView):
    """
    Detail view of a single object on a single date; this differs from the
    standard DetailView by accepting a year/month/day in the URL.
    """
    # Suffix appended when building the default template name.
    template_name_suffix = '_detail'
def _date_from_string(year, year_format, month='', month_format='', day='', day_format='', delim='__'):
    """
    Build a ``datetime.date`` from string components and strptime formats.

    Only the year is mandatory. Raise Http404 when the combined string
    does not match the combined format.
    """
    pieces = (year, month, day)
    formats = (year_format, month_format, day_format)
    format = delim.join(formats)
    datestr = delim.join(pieces)
    try:
        parsed = datetime.datetime.strptime(force_str(datestr), format)
    except ValueError:
        raise Http404(_("Invalid date string '%(datestr)s' given format '%(format)s'") % {
            'datestr': datestr,
            'format': format,
        })
    return parsed.date()
def _get_next_prev(generic_view, date, is_previous, period):
    """
    Helper: Get the next or the previous valid date. The idea is to allow
    links on month/day views to never be 404s by never providing a date
    that'll be invalid for the given view.

    This is a bit complicated since it handles different intervals of time,
    hence the coupling to generic_view.

    However in essence the logic comes down to:

    * If allow_empty and allow_future are both true, this is easy: just
      return the naive result (just the next/previous day/week/month,
      regardless of object existence.)
    * If allow_empty is true, allow_future is false, and the naive result
      isn't in the future, then return it; otherwise return None.
    * If allow_empty is false and allow_future is true, return the next
      date *that contains a valid object*, even if it's in the future. If
      there are no next objects, return None.
    * If allow_empty is false and allow_future is false, return the next
      date that contains a valid object. If that date is in the future, or
      if there are no next objects, return None.
    """
    date_field = generic_view.get_date_field()
    allow_empty = generic_view.get_allow_empty()
    allow_future = generic_view.get_allow_future()
    # Period-specific helpers resolved on the view, e.g. '_get_current_month'
    # / '_get_next_month' when period == 'month'.
    get_current = getattr(generic_view, '_get_current_%s' % period)
    get_next = getattr(generic_view, '_get_next_%s' % period)
    # Bounds of the current interval
    start, end = get_current(date), get_next(date)
    # If allow_empty is True, the naive result will be valid
    if allow_empty:
        if is_previous:
            # First day of the period containing the day before `start`.
            result = get_current(start - datetime.timedelta(days=1))
        else:
            result = end
        if allow_future or result <= timezone_today():
            return result
        else:
            return None
    # Otherwise, we'll need to go to the database to look for an object
    # whose date_field is at least (greater than/less than) the given
    # naive result
    else:
        # Construct a lookup and an ordering depending on whether we're doing
        # a previous date or a next date lookup.
        if is_previous:
            lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)}
            ordering = '-%s' % date_field
        else:
            lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)}
            ordering = date_field
        # Filter out objects in the future if appropriate.
        if not allow_future:
            # Fortunately, to match the implementation of allow_future,
            # we need __lte, which doesn't conflict with __lt above.
            if generic_view.uses_datetime_field:
                now = timezone.now()
            else:
                now = timezone_today()
            lookup['%s__lte' % date_field] = now
        qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)
        # Snag the first object from the queryset; if it doesn't exist that
        # means there's no next/previous link available.
        try:
            result = getattr(qs[0], date_field)
        except IndexError:
            return None
        # Convert datetimes to dates in the current time zone.
        if generic_view.uses_datetime_field:
            if settings.USE_TZ:
                result = timezone.localtime(result)
            result = result.date()
        # Return the first day of the period.
        return get_current(result)
def timezone_today():
    """
    Return the current date in the current time zone.

    Honours settings.USE_TZ: with time-zone support on, "today" is taken in
    the active time zone rather than the server's local clock.
    """
    if not settings.USE_TZ:
        return datetime.date.today()
    return timezone.localtime(timezone.now()).date()
| bsd-3-clause |
Tomtomgo/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/closebugforlanddiff.py | 126 | 2680 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from webkitpy.tool.comments import bug_comment_from_commit_text
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
_log = logging.getLogger(__name__)
class CloseBugForLandDiff(AbstractStep):
    """Step that comments on (and optionally closes) the bug for a landed diff."""

    @classmethod
    def options(cls):
        """Add the --close-bug option to the base step options."""
        return AbstractStep.options() + [
            Options.close_bug,
        ]

    def run(self, state):
        """Post the commit comment to the associated bug, closing it when asked."""
        comment_text = bug_comment_from_commit_text(self._tool.scm(), state["commit_text"])
        bug_id = state.get("bug_id")
        if not bug_id:
            patch = state.get("patch")
            if patch:
                bug_id = patch.bug_id()

        if not bug_id:
            # No bug to update: just surface the comment in the log.
            _log.info(comment_text)
            _log.info("No bug id provided.")
            return

        _log.info("Updating bug %s" % bug_id)
        if self._options.close_bug:
            self._tool.bugs.close_bug_as_fixed(bug_id, comment_text)
        else:
            # FIXME: We should a smart way to figure out if the patch is attached
            # to the bug, and if so obsolete it.
            self._tool.bugs.post_comment_to_bug(bug_id, comment_text)
| bsd-3-clause |
bdh1011/wau | venv/lib/python2.7/site-packages/pygments/lexers/objective.py | 43 | 22562 | # -*- coding: utf-8 -*-
"""
pygments.lexers.objective
~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for Objective-C family languages.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
inherit, default
from pygments.token import Text, Keyword, Name, String, Operator, \
Number, Punctuation, Literal, Comment
from pygments.lexers.c_cpp import CLexer, CppLexer
__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
def objective(baselexer):
    """
    Generate a subclass of baselexer that accepts the Objective-C syntax
    extensions.

    `baselexer` is a C-family lexer class (here: CLexer or CppLexer); the
    returned class layers Objective-C token rules on top of it.
    """
    # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
    # since that's quite common in ordinary C/C++ files. It's OK to match
    # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
    #
    # The upshot of this is that we CANNOT match @class or @interface
    _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
    # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
    # (note the identifier is *optional* when there is a ':'!)
    _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
                             r'(?:[a-zA-Z_]\w*\s*\]|'
                             r'(?:[a-zA-Z_]\w*)?:)')
    class GeneratedObjectiveCVariant(baselexer):
        """
        Implements Objective-C syntax on top of an existing C family lexer.
        """
        tokens = {
            'statements': [
                # '@'-prefixed literals: NSString, boxed booleans/numbers,
                # boxed expressions, array and dictionary literals.
                (r'@"', String, 'string'),
                (r'@(YES|NO)', Number),
                (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
                (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
                (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
                (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
                (r'@0[0-7]+[Ll]?', Number.Oct),
                (r'@\d+[Ll]?', Number.Integer),
                (r'@\(', Literal, 'literal_number'),
                (r'@\[', Literal, 'literal_array'),
                (r'@\{', Literal, 'literal_dictionary'),
                # Objective-C keywords and property/ownership qualifiers.
                (words((
                    '@selector', '@private', '@protected', '@public', '@encode',
                    '@synchronized', '@try', '@throw', '@catch', '@finally',
                    '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
                    '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
                    'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
                    'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
                    'out', 'inout', 'release', 'class', '@dynamic', '@optional',
                    '@required', '@autoreleasepool'), suffix=r'\b'),
                 Keyword),
                (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
                        'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
                 Keyword.Type),
                (r'@(true|false|YES|NO)\n', Name.Builtin),
                (r'(YES|NO|nil|self|super)\b', Name.Builtin),
                # Carbon types
                (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
                # Carbon built-ins
                (r'(TRUE|FALSE)\b', Name.Builtin),
                # Class declarations hand off to the oc_classname states.
                (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
                 ('#pop', 'oc_classname')),
                (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
                 ('#pop', 'oc_forward_classname')),
                # @ can also prefix other expressions like @{...} or @(...)
                (r'@', Punctuation),
                inherit,
            ],
            'oc_classname': [
                # interface definition that inherits
                ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
                 bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
                 ('#pop', 'oc_ivars')),
                ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
                 bygroups(Name.Class, Text, Name.Class), '#pop'),
                # interface definition for a category
                ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
                 bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
                 ('#pop', 'oc_ivars')),
                ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
                 bygroups(Name.Class, Text, Name.Label), '#pop'),
                # simple interface / implementation
                ('([a-zA-Z$_][\w$]*)(\s*)(\{)',
                 bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
                ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
            ],
            'oc_forward_classname': [
                # Comma-separated list after @class / @protocol.
                ('([a-zA-Z$_][\w$]*)(\s*,\s*)',
                 bygroups(Name.Class, Text), 'oc_forward_classname'),
                ('([a-zA-Z$_][\w$]*)(\s*;?)',
                 bygroups(Name.Class, Text), '#pop')
            ],
            'oc_ivars': [
                # Instance-variable block between the braces of an @interface.
                include('whitespace'),
                include('statements'),
                (';', Punctuation),
                (r'\{', Punctuation, '#push'),
                (r'\}', Punctuation, '#pop'),
            ],
            'root': [
                # methods
                (r'^([-+])(\s*)'                    # method marker
                 r'(\(.*?\))?(\s*)'                 # return type
                 r'([a-zA-Z$_][\w$]*:?)',           # begin of method name
                 bygroups(Punctuation, Text, using(this),
                          Text, Name.Function),
                 'method'),
                inherit,
            ],
            'method': [
                include('whitespace'),
                # TODO unsure if ellipses are allowed elsewhere, see
                # discussion in Issue 789
                (r',', Punctuation),
                (r'\.\.\.', Punctuation),
                (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
                 bygroups(using(this), Text, Name.Variable)),
                (r'[a-zA-Z$_][\w$]*:', Name.Function),
                (';', Punctuation, '#pop'),
                (r'\{', Punctuation, 'function'),
                default('#pop'),
            ],
            # The literal_* states track nested delimiters inside the
            # @(...), @[...] and @{...} literal forms.
            'literal_number': [
                (r'\(', Punctuation, 'literal_number_inner'),
                (r'\)', Literal, '#pop'),
                include('statement'),
            ],
            'literal_number_inner': [
                (r'\(', Punctuation, '#push'),
                (r'\)', Punctuation, '#pop'),
                include('statement'),
            ],
            'literal_array': [
                (r'\[', Punctuation, 'literal_array_inner'),
                (r'\]', Literal, '#pop'),
                include('statement'),
            ],
            'literal_array_inner': [
                (r'\[', Punctuation, '#push'),
                (r'\]', Punctuation, '#pop'),
                include('statement'),
            ],
            'literal_dictionary': [
                (r'\}', Literal, '#pop'),
                include('statement'),
            ],
        }
        def analyse_text(text):
            # Heuristic score used by guess_lexer(): unambiguous Objective-C
            # keywords are decisive; string/number literals and message sends
            # are strong hints.
            if _oc_keywords.search(text):
                return 1.0
            elif '@"' in text:  # strings
                return 0.8
            elif re.search('@[0-9]+', text):
                return 0.7
            elif _oc_message.search(text):
                return 0.8
            return 0
        def get_tokens_unprocessed(self, text):
            # Re-tag known Cocoa/Foundation names as builtins after the
            # regular tokenization pass.
            from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
                COCOA_PROTOCOLS, COCOA_PRIMITIVES
            for index, token, value in \
                    baselexer.get_tokens_unprocessed(self, text):
                if token is Name or token is Name.Class:
                    if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
                            or value in COCOA_PRIMITIVES:
                        token = Name.Builtin.Pseudo
                yield index, token, value
    return GeneratedObjectiveCVariant
class ObjectiveCLexer(objective(CLexer)):
    """
    For Objective-C source code with preprocessor directives.
    """
    name = 'Objective-C'
    aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
    filenames = ['*.m', '*.h']
    # NOTE(review): presumably the guess_lexer() tie-breaker; kept below the
    # plain C lexer so ambiguous *.h files don't default to Objective-C.
    mimetypes = ['text/x-objective-c']
    priority = 0.05  # Lower than C
class ObjectiveCppLexer(objective(CppLexer)):
    """
    For Objective-C++ source code with preprocessor directives.
    """
    name = 'Objective-C++'
    aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
    filenames = ['*.mm', '*.hh']
    # NOTE(review): same tie-breaker scheme as ObjectiveCLexer, relative to C++.
    mimetypes = ['text/x-objective-c++']
    priority = 0.05  # Lower than C++
class LogosLexer(ObjectiveCppLexer):
    """
    For Logos + Objective-C source code with preprocessor directives.

    .. versionadded:: 1.6
    """
    name = 'Logos'
    aliases = ['logos']
    filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
    mimetypes = ['text/x-logos']
    priority = 0.25
    tokens = {
        'statements': [
            # Logos %-directives legal in statement position.
            (r'(%orig|%log)\b', Keyword),
            (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
             bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
            (r'(%init)\b(\()',
             bygroups(Keyword, Punctuation), 'logos_init_directive'),
            (r'(%init)(?=\s*;)', bygroups(Keyword)),
            (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
             bygroups(Keyword, Text, Name.Class), '#pop'),
            (r'(%subclass)(\s+)', bygroups(Keyword, Text),
             ('#pop', 'logos_classname')),
            inherit,
        ],
        'logos_init_directive': [
            # Arguments of %init(...): "Name" or "Name = expr", comma-separated.
            ('\s+', Text),
            (',', Punctuation, ('logos_init_directive', '#pop')),
            ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
             bygroups(Name.Class, Text, Punctuation, Text, Text)),
            ('([a-zA-Z$_][\w$]*)', Name.Class),
            ('\)', Punctuation, '#pop'),
        ],
        'logos_classname': [
            # "%subclass Name : Superclass" (superclass optional).
            ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
             bygroups(Name.Class, Text, Name.Class), '#pop'),
            ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
        ],
        'root': [
            # Top-level Logos directives.
            (r'(%subclass)(\s+)', bygroups(Keyword, Text),
             'logos_classname'),
            (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
             bygroups(Keyword, Text, Name.Class)),
            (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)',
             bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
            (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
             'function'),
            (r'(%new)(\s*)(\()(\s*.*?\s*)(\))',
             bygroups(Keyword, Text, Keyword, String, Keyword)),
            (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
            inherit,
        ],
    }
    _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
    def analyse_text(text):
        # Any Logos directive is a decisive signal for guess_lexer().
        if LogosLexer._logos_keywords.search(text):
            return 1.0
        return 0
class SwiftLexer(RegexLexer):
    """
    For `Swift <https://developer.apple.com/swift/>`_ source.

    .. versionadded:: 2.0
    """
    name = 'Swift'
    filenames = ['*.swift']
    aliases = ['swift']
    mimetypes = ['text/x-swift']
    tokens = {
        'root': [
            # Whitespace and Comments
            (r'\n', Text),
            (r'\s+', Text),
            (r'//', Comment.Single, 'comment-single'),
            (r'/\*', Comment.Multiline, 'comment-multi'),
            (r'#(if|elseif|else|endif)\b', Comment.Preproc, 'preproc'),
            # Keywords
            include('keywords'),
            # Global Types
            (words((
                'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
                'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
                'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
                'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
                'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
                'EnumerateSequence', 'FilterCollectionView',
                'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
                'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
                'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
                'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
                'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
                'LazyForwardCollection', 'LazyRandomAccessCollection',
                'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
                'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
                'Optional', 'PermutationGenerator', 'QuickLookObject',
                'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
                'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
                'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
                'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
                'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
                'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
                'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
                'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
                # Protocols
                'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
                'BidirectionalIndexType', 'BitwiseOperationsType',
                'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
                'CollectionType', 'Comparable', 'DebugPrintable',
                'DictionaryLiteralConvertible', 'Equatable',
                'ExtendedGraphemeClusterLiteralConvertible',
                'ExtensibleCollectionType', 'FloatLiteralConvertible',
                'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
                'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
                'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
                'NilLiteralConvertible', 'OutputStreamType', 'Printable',
                'RandomAccessIndexType', 'RangeReplaceableCollectionType',
                'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
                'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
                'Streamable', 'Strideable', 'StringInterpolationConvertible',
                'StringLiteralConvertible', 'UnicodeCodecType',
                'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
                '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
                '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
                '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
                '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
                '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
                '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
                '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
                '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
                '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
                '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
                '_SwiftNSStringType', '_UnsignedIntegerType',
                # Variables
                'C_ARGC', 'C_ARGV', 'Process',
                # Typealiases
                'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
                'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
                'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
                'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
                'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
                'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
                # Foundation/Cocoa
                'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
             Name.Builtin),
            # Functions
            (words((
                'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
                'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
                'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
                'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
                'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
                'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
                'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
                'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
                'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
                'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
                'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
                'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
                'withExtendedLifetime', 'withUnsafeMutablePointer',
                'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
                'withVaList'), suffix=r'\b'),
             Name.Builtin.Pseudo),
            # Implicit Block Variables
            (r'\$\d+', Name.Variable),
            # Binary Literal
            (r'0b[01_]+', Number.Bin),
            # Octal Literal
            (r'0o[0-7_]+', Number.Oct),
            # Hexadecimal Literal
            (r'0x[0-9a-fA-F_]+', Number.Hex),
            # Decimal Literal
            (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
             r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
            (r'[0-9][0-9_]*', Number.Integer),
            # String Literal
            (r'"', String, 'string'),
            # Operators and Punctuation
            (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
            (r'[/=\-+!*%<>&|^?~]+', Operator),
            # Identifier
            (r'[a-zA-Z_]\w*', Name)
        ],
        'keywords': [
            # Statement keywords, then declaration modifiers/attributes
            # (Reserved), constants, and declaration keywords.
            (words((
                'break', 'case', 'continue', 'default', 'do', 'else',
                'fallthrough', 'for', 'if', 'in', 'return', 'switch', 'where',
                'while'), suffix=r'\b'),
             Keyword),
            (r'@availability\([^)]+\)', Keyword.Reserved),
            (words((
                'associativity', 'convenience', 'dynamic', 'didSet', 'final',
                'get', 'infix', 'inout', 'lazy', 'left', 'mutating', 'none',
                'nonmutating', 'optional', 'override', 'postfix', 'precedence',
                'prefix', 'Protocol', 'required', 'right', 'set', 'Type',
                'unowned', 'weak', 'willSet', '@availability', '@autoclosure',
                '@noreturn', '@NSApplicationMain', '@NSCopying', '@NSManaged',
                '@objc', '@UIApplicationMain', '@IBAction', '@IBDesignable',
                '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
             Keyword.Reserved),
            (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
             r'|__FILE__|__FUNCTION__|__LINE__|_)\b', Keyword.Constant),
            (r'import\b', Keyword.Declaration, 'module'),
            (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
             bygroups(Keyword.Declaration, Text, Name.Class)),
            (r'(func)(\s+)([a-zA-Z_]\w*)',
             bygroups(Keyword.Declaration, Text, Name.Function)),
            (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
             Text, Name.Variable)),
            (words((
                'class', 'deinit', 'enum', 'extension', 'func', 'import', 'init',
                'internal', 'let', 'operator', 'private', 'protocol', 'public',
                'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
             Keyword.Declaration)
        ],
        'comment': [
            # Doc-comment markup recognized inside both comment styles.
            (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
             Comment.Special)
        ],
        # Nested
        'comment-single': [
            (r'\n', Text, '#pop'),
            include('comment'),
            (r'[^\n]', Comment.Single)
        ],
        'comment-multi': [
            include('comment'),
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'module': [
            (r'\n', Text, '#pop'),
            (r'[a-zA-Z_]\w*', Name.Class),
            include('root')
        ],
        'preproc': [
            (r'\n', Text, '#pop'),
            include('keywords'),
            (r'[A-Za-z]\w*', Comment.Preproc),
            include('root')
        ],
        'string': [
            # \( ... ) starts an interpolated expression.
            (r'\\\(', String.Interpol, 'string-intp'),
            (r'"', String, '#pop'),
            (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
             r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
            (r'[^\\"]+', String),
            (r'\\', String)
        ],
        'string-intp': [
            (r'\(', String.Interpol, '#push'),
            (r'\)', String.Interpol, '#pop'),
            include('root')
        ]
    }
    def get_tokens_unprocessed(self, text):
        # Re-tag known Cocoa/Foundation names as builtins after the regular
        # tokenization pass.
        from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
            COCOA_PROTOCOLS, COCOA_PRIMITIVES
        for index, token, value in \
                RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name or token is Name.Class:
                if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
                        or value in COCOA_PRIMITIVES:
                    token = Name.Builtin.Pseudo
            yield index, token, value
| mit |
cidadania/e-cidadania | tests/functional_utils.py | 2 | 2250 | #/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2010-2012 Cidadania S. Coop. Galega
#
# This file is part of e-cidadania.
#
# e-cidadania is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# e-cidadania is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with e-cidadania. If not, see <http://www.gnu.org/licenses/>.
import time
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from django.test import LiveServerTestCase
from tests.test_utils import ECDTestCase
class FunctionalTestCase(ECDTestCase, LiveServerTestCase):
    """
    Base class for browser-driven functional tests.

    Combines the project's ECDTestCase helpers with Django's
    LiveServerTestCase (introduced in Django 1.4), which serves the
    application for Selenium to exercise.
    """

    def init(self):
        """Run the shared ECDTestCase setup and launch a Firefox browser."""
        ECDTestCase.init(self)
        self.browser = webdriver.Firefox()

    def setUp(self):
        """Per-test setup: delegate to init()."""
        self.init()

    def tearDown(self):
        """Per-test teardown: shut the browser down."""
        self.browser.quit()

    def wait(self, sec):
        """
        Pause for `sec` seconds.

        Needed because the test script runs faster than the browser.
        """
        time.sleep(sec)

    def login(self, browser, username='test_user', password='test_password'):
        """Fill in and submit the e-cidadania login form in `browser`."""
        for field_name, value in (('username', username),
                                  ('password', password)):
            field = browser.find_element_by_name(field_name)
            field.send_keys(value)
        self.wait(2)
        # `field` is the password input; submit the form from it.
        field.send_keys(Keys.RETURN)
| apache-2.0 |
40223245/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/unittest/util.py | 794 | 4157 | """Various utility functions."""
from collections import namedtuple, OrderedDict
__unittest = True
_MAX_LENGTH = 80

def safe_repr(obj, short=False):
    """repr(obj), guarded against broken __repr__ and optionally truncated.

    Falls back to the default object repr when repr() raises.  With
    short=True, results of _MAX_LENGTH characters or more are cut and a
    truncation marker is appended.
    """
    try:
        result = repr(obj)
    except Exception:
        # A user-defined __repr__ may raise anything; use the basic one.
        result = object.__repr__(obj)
    if short and len(result) >= _MAX_LENGTH:
        return result[:_MAX_LENGTH] + ' [truncated]...'
    return result
def strclass(cls):
    """Return the dotted "module.ClassName" string for *cls*."""
    return '{0.__module__}.{0.__name__}'.format(cls)
def sorted_list_difference(expected, actual):
    """Finds elements in only one or the other of two, sorted input lists.

    Returns a two-element tuple of lists.  The first list contains those
    elements in the "expected" list but not in the "actual" list, and the
    second contains those elements in the "actual" list but not in the
    "expected" list.  Duplicate elements in either input list are ignored
    (except, as in the original implementation, in the unmatched tail of
    either list, which is copied verbatim).
    """
    missing = []
    unexpected = []
    i = j = 0
    m, n = len(expected), len(actual)
    # Classic sorted-merge walk; each classified value is skipped past,
    # including its duplicates.
    while i < m and j < n:
        e = expected[i]
        a = actual[j]
        if e < a:
            missing.append(e)
            i += 1
            while i < m and expected[i] == e:
                i += 1
        elif e > a:
            unexpected.append(a)
            j += 1
            while j < n and actual[j] == a:
                j += 1
        else:
            i += 1
            while i < m and expected[i] == e:
                i += 1
            j += 1
            while j < n and actual[j] == a:
                j += 1
    # Whatever remains in either list is one-sided by definition.
    missing.extend(expected[i:])
    unexpected.extend(actual[j:])
    return missing, unexpected
def unorderable_list_difference(expected, actual):
    """Same behavior as sorted_list_difference but
    for lists of unorderable items (like dicts).

    As it does a linear search per item (remove) it
    has O(n*n) performance.

    NOTE: both arguments are consumed destructively; the returned
    "unexpected" value is the (reduced) *actual* list object itself.
    """
    missing = []
    while expected:
        item = expected.pop()
        if item in actual:
            actual.remove(item)
        else:
            missing.append(item)
    # anything left in actual is unexpected
    return missing, actual
def three_way_cmp(x, y):
    """Return -1 if x < y, 0 if x == y and 1 if x > y"""
    # bool arithmetic: True - False == 1, etc.  Both comparisons being
    # False (e.g. NaN operands) yields 0, matching the original expression.
    greater = x > y
    less = x < y
    return greater - less
_Mismatch = namedtuple('Mismatch', 'actual expected value')
def _count_diff_all_purpose(actual, expected):
'Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ'
# elements need not be hashable
s, t = list(actual), list(expected)
m, n = len(s), len(t)
NULL = object()
result = []
for i, elem in enumerate(s):
if elem is NULL:
continue
cnt_s = cnt_t = 0
for j in range(i, m):
if s[j] == elem:
cnt_s += 1
s[j] = NULL
for j, other_elem in enumerate(t):
if other_elem == elem:
cnt_t += 1
t[j] = NULL
if cnt_s != cnt_t:
diff = _Mismatch(cnt_s, cnt_t, elem)
result.append(diff)
for i, elem in enumerate(t):
if elem is NULL:
continue
cnt_t = 0
for j in range(i, n):
if t[j] == elem:
cnt_t += 1
t[j] = NULL
diff = _Mismatch(0, cnt_t, elem)
result.append(diff)
return result
def _ordered_count(iterable):
'Return dict of element counts, in the order they were first seen'
c = OrderedDict()
for elem in iterable:
c[elem] = c.get(elem, 0) + 1
return c
def _count_diff_hashable(actual, expected):
    'Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ'
    # elements must be hashable; count each side once, then compare.
    actual_counts = _ordered_count(actual)
    expected_counts = _ordered_count(expected)
    diffs = []
    for elem, cnt_a in actual_counts.items():
        cnt_e = expected_counts.get(elem, 0)
        if cnt_a != cnt_e:
            diffs.append(_Mismatch(cnt_a, cnt_e, elem))
    # Elements only ever seen on the expected side.
    for elem, cnt_e in expected_counts.items():
        if elem not in actual_counts:
            diffs.append(_Mismatch(0, cnt_e, elem))
    return diffs
| gpl-3.0 |
fbradyirl/home-assistant | homeassistant/components/totalconnect/alarm_control_panel.py | 2 | 4534 | """Interfaces with TotalConnect alarm control panels."""
import logging
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMING,
STATE_ALARM_TRIGGERED,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
)
from . import DOMAIN as TOTALCONNECT_DOMAIN
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up an alarm control panel for a TotalConnect device."""
    # Only set up via discovery from the main TotalConnect component.
    if discovery_info is None:
        return

    client = hass.data[TOTALCONNECT_DOMAIN].client
    panels = [
        TotalConnectAlarm(
            location.get("LocationName"), location.get("LocationID"), client
        )
        for location in client.locations
    ]
    add_entities(panels)
class TotalConnectAlarm(alarm.AlarmControlPanel):
"""Represent an TotalConnect status."""
def __init__(self, name, location_id, client):
"""Initialize the TotalConnect status."""
self._name = name
self._location_id = location_id
self._client = client
self._state = None
self._device_state_attributes = {}
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
return self._device_state_attributes
def update(self):
"""Return the state of the device."""
status = self._client.get_armed_status(self._name)
attr = {
"location_name": self._name,
"location_id": self._location_id,
"ac_loss": self._client.ac_loss,
"low_battery": self._client.low_battery,
"triggered_source": None,
"triggered_zone": None,
}
if status == self._client.DISARMED:
state = STATE_ALARM_DISARMED
elif status == self._client.DISARMED_BYPASS:
state = STATE_ALARM_DISARMED
elif status == self._client.ARMED_STAY:
state = STATE_ALARM_ARMED_HOME
elif status == self._client.ARMED_STAY_INSTANT:
state = STATE_ALARM_ARMED_HOME
elif status == self._client.ARMED_STAY_INSTANT_BYPASS:
state = STATE_ALARM_ARMED_HOME
elif status == self._client.ARMED_STAY_NIGHT:
state = STATE_ALARM_ARMED_NIGHT
elif status == self._client.ARMED_AWAY:
state = STATE_ALARM_ARMED_AWAY
elif status == self._client.ARMED_AWAY_BYPASS:
state = STATE_ALARM_ARMED_AWAY
elif status == self._client.ARMED_AWAY_INSTANT:
state = STATE_ALARM_ARMED_AWAY
elif status == self._client.ARMED_AWAY_INSTANT_BYPASS:
state = STATE_ALARM_ARMED_AWAY
elif status == self._client.ARMED_CUSTOM_BYPASS:
state = STATE_ALARM_ARMED_CUSTOM_BYPASS
elif status == self._client.ARMING:
state = STATE_ALARM_ARMING
elif status == self._client.DISARMING:
state = STATE_ALARM_DISARMING
elif status == self._client.ALARMING:
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Police/Medical"
elif status == self._client.ALARMING_FIRE_SMOKE:
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Fire/Smoke"
elif status == self._client.ALARMING_CARBON_MONOXIDE:
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Carbon Monoxide"
else:
logging.info(
"Total Connect Client returned unknown " "status code: %s", status
)
state = None
self._state = state
self._device_state_attributes = attr
def alarm_disarm(self, code=None):
"""Send disarm command."""
self._client.disarm(self._name)
def alarm_arm_home(self, code=None):
"""Send arm home command."""
self._client.arm_stay(self._name)
def alarm_arm_away(self, code=None):
"""Send arm away command."""
self._client.arm_away(self._name)
    def alarm_arm_night(self, code=None):
        """Send arm night command.

        :param code: panel code; accepted for interface compatibility but
            not forwarded -- the client arms (stay-night) by location name.
        """
        self._client.arm_stay_night(self._name)
| apache-2.0 |
hp-sam/voip-client-ios | submodules/externals/libvpx/third_party/googletest/src/test/gtest_xml_output_unittest.py | 397 | 11279 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import errno
import os
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Flag and file-name constants shared by the tests below.
GTEST_OUTPUT_FLAG = "--gtest_output"
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"

# When the Google Test build under test records stack traces, each expected
# <failure> body ends with a wildcarded "Stack trace:" suffix.
SUPPORTS_STACK_TRACES = False
if SUPPORTS_STACK_TRACES:
  STACK_TRACE_TEMPLATE = "\nStack trace:\n*"
else:
  STACK_TRACE_TEMPLATE = ""

# Golden XML expected from running gtest_xml_output_unittest_.  A '*' in an
# attribute value or text node matches arbitrary content (times, file
# locations) during the normalized comparison in _TestXmlOutput.
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" name="AllTests">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="Failed
XML output: &lt;?xml encoding=&quot;utf-8&quot;&gt;&lt;top&gt;&lt;![CDATA[cdata text]]&gt;&lt;/top&gt;" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]>&lt;![CDATA[<![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}

# Golden XML for a program that defines no tests at all.
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*" name="AllTests">
</testsuites>"""
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
  """
  Unit test for Google Test's XML output functionality.
  """

  def testNonEmptyXmlOutput(self):
    """
    Runs a test program that generates a non-empty XML output, and
    tests that the XML output is expected.
    """
    self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)

  def testEmptyXmlOutput(self):
    """
    Runs a test program that generates an empty XML output, and
    tests that the XML output is expected.
    """
    self._TestXmlOutput("gtest_no_test_unittest",
                        EXPECTED_EMPTY_XML, 0)

  def testDefaultOutputFile(self):
    """
    Confirms that Google Test produces an XML output file with the expected
    default name if no name is explicitly specified.
    """
    output_file = os.path.join(gtest_test_utils.GetTempDir(),
                               GTEST_DEFAULT_OUTPUT_FILE)
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
        "gtest_no_test_unittest")
    try:
      os.remove(output_file)
    except OSError as e:
      # 'except ... as' replaces the Python-2-only comma syntax; it works on
      # Python 2.6+ as well as Python 3.  A missing file is fine; anything
      # else is a real error.
      if e.errno != errno.ENOENT:
        raise
    p = gtest_test_utils.Subprocess(
        [gtest_prog_path, "%s=xml" % GTEST_OUTPUT_FLAG],
        working_dir=gtest_test_utils.GetTempDir())
    self.assert_(p.exited)
    self.assertEquals(0, p.exit_code)
    self.assert_(os.path.isfile(output_file))

  def testSuppressedXmlOutput(self):
    """
    Tests that no XML file is generated if the default XML listener is
    shut down before RUN_ALL_TESTS is invoked.
    """
    xml_path = os.path.join(gtest_test_utils.GetTempDir(),
                            GTEST_PROGRAM_NAME + "out.xml")
    if os.path.isfile(xml_path):
      os.remove(xml_path)
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
    command = [gtest_prog_path,
               "%s=xml:%s" % (GTEST_OUTPUT_FLAG, xml_path),
               "--shut_down_xml"]
    p = gtest_test_utils.Subprocess(command)
    if p.terminated_by_signal:
      # BUG FIX: this branch previously referenced the undefined local name
      # 'gtest_prog_name', so a signal-killed subprocess raised a NameError
      # instead of failing with a useful message.
      self.assert_(False,
                   "%s was killed by signal %d" % (GTEST_PROGRAM_NAME,
                                                   p.signal))
    else:
      self.assert_(p.exited)
      self.assertEquals(1, p.exit_code,
                        "'%s' exited with code %s, which doesn't match "
                        "the expected exit code %s."
                        % (command, p.exit_code, 1))
    self.assert_(not os.path.isfile(xml_path))

  def _TestXmlOutput(self, gtest_prog_name, expected_xml, expected_exit_code):
    """
    Asserts that the XML document generated by running the program
    gtest_prog_name matches expected_xml, a string containing another
    XML document.  Furthermore, the program's exit code must be
    expected_exit_code.
    """
    xml_path = os.path.join(gtest_test_utils.GetTempDir(),
                            gtest_prog_name + "out.xml")
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
    command = [gtest_prog_path, "%s=xml:%s" % (GTEST_OUTPUT_FLAG, xml_path)]
    p = gtest_test_utils.Subprocess(command)
    if p.terminated_by_signal:
      # Here gtest_prog_name *is* in scope (it is a parameter).
      self.assert_(False,
                   "%s was killed by signal %d" % (gtest_prog_name, p.signal))
    else:
      self.assert_(p.exited)
      self.assertEquals(expected_exit_code, p.exit_code,
                        "'%s' exited with code %s, which doesn't match "
                        "the expected exit code %s."
                        % (command, p.exit_code, expected_exit_code))
    expected = minidom.parseString(expected_xml)
    actual = minidom.parse(xml_path)
    self.NormalizeXml(actual.documentElement)
    self.AssertEquivalentNodes(expected.documentElement,
                               actual.documentElement)
    expected.unlink()
    actual.unlink()
if __name__ == '__main__':
  # Presumably limits recorded stack traces to one frame so failure messages
  # stay stable against the '*'-wildcarded golden XML -- confirm against the
  # gtest runtime's handling of GTEST_STACK_TRACE_DEPTH.
  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
  gtest_test_utils.Main()
| gpl-2.0 |
ChanduERP/odoo | openerp/addons/base/module/report/ir_module_reference_print.py | 384 | 3704 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
class ir_module_reference_print(report_sxw.rml_parse):
    """RML parser for the module reference report.

    Exposes helper callables in the report's local context so the RML
    template can look up model docstrings and per-module field definitions.
    """

    def __init__(self, cr, uid, name, context):
        super(ir_module_reference_print, self).__init__(cr, uid, name, context=context)
        self.localcontext.update({
            'time': time,
            'findobj': self._object_find,
            'objdoc': self._object_doc,
            'objdoc2': self._object_doc2,
            'findflds': self._fields_find,
        })

    def _object_doc(self, obj):
        """Return the first paragraph of the model's docstring, or None."""
        docstring = self.pool[obj].__doc__
        if not docstring:
            return None
        summary = ''
        # Accumulate lines (with their newlines) up to the first blank line.
        for line in docstring.strip().splitlines(True):
            if not line or line.isspace():
                break
            summary += line
        return summary

    def _object_doc2(self, obj):
        """Return the docstring lines after the first blank line, stripped.

        Returns None when the model has no docstring at all.
        """
        docstring = self.pool[obj].__doc__
        if not docstring:
            return None
        past_summary = False
        details = []
        for line in docstring.strip().splitlines(True):
            if past_summary:
                details.append(line.strip())
            elif not line or line.isspace():
                # First blank line ends the summary; everything after it
                # (blank lines included, stripped to '') is collected.
                past_summary = True
        return details

    def _object_find(self, module):
        """Browse the ir.model records that the given module defines."""
        data_pool = self.pool['ir.model.data']
        data_ids = data_pool.search(self.cr, self.uid,
                                    [('module', '=', module), ('model', '=', 'ir.model')])
        model_ids = [record.res_id
                     for record in data_pool.browse(self.cr, self.uid, data_ids)]
        return self.pool['ir.model'].browse(self.cr, self.uid, model_ids)

    def _fields_find(self, obj, module):
        """Return sorted (name, definition) pairs for obj's fields owned by module."""
        data_pool = self.pool['ir.model.data']
        model = self.pool[obj]
        # ir.model.data xml-ids for fields follow 'field_<model>_<field>'.
        name_pattern = 'field_' + model._name.replace('.', '_') + '_%'
        field_data_ids = data_pool.search(self.cr, self.uid, [
            ('model', '=', 'ir.model.fields'),
            ('module', '=', module),
            ('name', 'like', name_pattern)])
        if not field_data_ids:
            return []
        res_ids = [row['res_id']
                   for row in data_pool.read(self.cr, self.uid, field_data_ids, ['res_id'])]
        field_names = [row['name']
                       for row in self.pool['ir.model.fields'].read(self.cr, self.uid,
                                                                    res_ids, ['name'])]
        field_items = model.fields_get(self.cr, self.uid, allfields=field_names).items()
        field_items.sort()
        return field_items
# Register the RML report under the 'report.ir.module.reference' service for
# ir.module.module records, rendered through the parser above.
# NOTE(review): header=False presumably suppresses the standard report
# header -- confirm against report_sxw.
report_sxw.report_sxw('report.ir.module.reference', 'ir.module.module',
        'addons/base/module/report/ir_module_reference.rml',
        parser=ir_module_reference_print, header=False)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
marionleborgne/cloudbrain | src/cloudbrain/connectors/openbci.py | 5 | 8610 | import serial
import struct
import time
# Board acquisition constants.
_SAMPLE_RATE = 250.0  # Hz
# NOTE(review): under Python 2, `bytes` is `str`, so bytes(0xA0) == "160".
# The packet parser builds its comparison values the same way
# (bytes(struct.unpack(...))), so the two sides agree -- but this idiom is
# not portable to Python 3; confirm before porting.
_START_BYTE = bytes(0xA0)  # start of data packet
_END_BYTE = bytes(0xC0)  # end of data packet
_ADS1299_Vref = 4.5  # reference voltage for ADC in ADS1299. Set by its hardware
_ADS1299_gain = 24.0  # assumed gain for ADS1299. Set by its Arduino code
# Microvolts per ADC count: Vref / (2^23 - 1) / (gain * 1e6).
_scale_fac_uVolts_per_count = _ADS1299_Vref / (pow(2, 23) - 1) / (
  _ADS1299_gain * 1000000.)
class OpenBCIConnector(object):
  """
  Handle a connection to an OpenBCI board.
  Args:
    port: The port to connect to.
    baud: The baud of the serial connection.
  """

  def __init__(self, port='/dev/tty.usbserial-DN0094CZ', baud=115200,
               filter_data=True):
    self.ser = serial.Serial(port, baud)
    print("Serial established...")
    # Initialize 32-bit board, doesn't affect 8bit board
    self.ser.write('v')
    # wait for device to be ready
    time.sleep(1)
    self.print_incoming_text()
    self.streaming = False
    self.filtering_data = filter_data
    self.channels = 8
    # read_state drives the packet-parsing state machine in
    # _read_serial_binary: 0=find start byte, 1=channel data, 2=aux data,
    # 3=expect end byte.
    self.read_state = 0

  def printBytesIn(self):
    """Debug helper: start streaming and print each incoming byte value."""
    # DEBBUGING: Prints individual incoming bytes
    if not self.streaming:
      self.ser.write('b')
      self.streaming = True
    while self.streaming:
      print(struct.unpack('B', self.ser.read())[0])

  def start(self, callback_functions):
    """
    Start handling streaming data from the board. Call a provided callback
    for every single sample that is processed.
    :param callback_functions: callback functions that will receive a single
    argument of the OpenBCISample object captured.
    """
    if not self.streaming:
      # 'b' is the begin-streaming command byte.
      self.ser.write('b')
      self.streaming = True
    while self.streaming:
      sample = self._read_serial_binary()
      # Every registered callback gets every sample; the metric key itself
      # is not used here.
      for (metric, callback_function) in callback_functions.items():
        callback_function(sample)

  def stop(self):
    """
    Turn streaming off without disconnecting from the board
    """
    self.streaming = False

  def disconnect(self):
    """Close the serial port and stop streaming."""
    self.ser.close()
    self.streaming = False

  def print_incoming_text(self):
    """
    When starting the connection, print all the debug data until
    we get to a line with the end sequence '$$$'.
    """
    # Wait for device to send data
    time.sleep(0.5)
    if self.ser.inWaiting():
      print("-------------------")
      line = ''
      c = ''
      # Look for end sequence $$$
      while '$$$' not in line:
        c = self.ser.read()
        line += c
      print(line);
      print("-------------------\n")

  def enable_filters(self):
    """
    Adds a filter at 60hz to cancel out ambient electrical noise.
    """
    self.ser.write('f')
    self.filtering_data = True

  def disable_filters(self):
    """Turn the board's 60 Hz notch filter off."""
    self.ser.write('g')
    self.filtering_data = False

  def warn(self, text):
    """Print a warning message (no logging framework is used here)."""
    print("Warning: {0}".format(text))

  def _read_serial_binary(self, max_bytes_to_skip=3000):
    """
    Parses incoming data packet into OpenBCISample.
    Incoming Packet Structure:
    Start Byte(1)|Sample ID(1)|Channel Data(24)|Aux Data(6)|End Byte(1)
    0xA0|0-255|8, 3-byte signed ints|3 2-byte signed ints|0xC0

    :param max_bytes_to_skip: give up after scanning this many bytes without
        completing a packet
    :return: an OpenBCISample, or None if max_bytes_to_skip is exhausted
    """
    def read(n):
      # Thin wrapper so the parsing code below reads compactly.
      b = self.ser.read(n)
      # print bytes(b)
      return b
    for rep in xrange(max_bytes_to_skip):
      # Looking for start and save id when found
      if self.read_state == 0:
        b = read(1)
        if not b:
          if not self.ser.inWaiting():
            self.warn('Device appears to be stalled. Restarting...')
            self.ser.write('b\n')  # restart if it's stopped...
            time.sleep(.100)
            continue
        if bytes(struct.unpack('B', b)[0]) == _START_BYTE:
          if (rep != 0):
            self.warn('Skipped %d bytes before start found' % (rep))
          packet_id = struct.unpack('B', read(1))[
            0]  # packet id goes from 0-255
          self.read_state = 1
      elif self.read_state == 1:
        channel_data = []
        for c in xrange(self.channels):
          # 3 byte ints
          literal_read = read(3)
          unpacked = struct.unpack('3B', literal_read)
          # 3byte int in 2s compliment: sign-extend to 4 bytes by prefixing
          # 0xFF (negative) or 0x00 (non-negative).
          if (unpacked[0] >= 127):
            pre_fix = '\xFF'
          else:
            pre_fix = '\x00'
          literal_read = pre_fix + literal_read;
          # unpack little endian(>) signed integer(i)
          # also makes unpacking platform independent
          # NOTE(review): '>' is actually big-endian in struct notation,
          # which matches the prefix-byte sign extension above; the comment
          # text is misleading, the code looks consistent.
          myInt = struct.unpack('>i', literal_read)
          channel_data.append(myInt[0] * _scale_fac_uVolts_per_count)
        self.read_state = 2
      elif self.read_state == 2:
        aux_data = []
        for a in xrange(3):
          # short(h)
          acc = struct.unpack('h', read(2))[0]
          aux_data.append(acc)
        self.read_state = 3;
      elif self.read_state == 3:
        val = bytes(struct.unpack('B', read(1))[0])
        if (val == _END_BYTE):
          timestamp = int(time.time() * 1000000)  # micro seconds
          sample = OpenBCISample(packet_id, channel_data, aux_data, timestamp)
          self.read_state = 0  # read next packet
          return sample
        else:
          # Malformed packet: drop it and resume scanning for a start byte.
          self.warn("Warning: Unexpected END_BYTE found <%s> instead of <%s>,\
            discarted packet with id <%d>"
                    % (val, _END_BYTE, packet_id))

  def test_signal(self, signal):
    """Drive the board's built-in test signals (0-5); warns on other values."""
    if signal == 0:
      self.ser.write('0')
      self.warn("Connecting all pins to ground")
    elif signal == 1:
      self.ser.write('p')
      self.warn("Connecting all pins to Vcc")
    elif signal == 2:
      self.ser.write('-')
      self.warn("Connecting pins to low frequency 1x amp signal")
    elif signal == 3:
      self.ser.write('=')
      self.warn("Connecting pins to high frequency 1x amp signal")
    elif signal == 4:
      self.ser.write('[')
      self.warn("Connecting pins to low frequency 2x amp signal")
    elif signal == 5:
      self.ser.write(']')
      self.warn("Connecting pins to high frequency 2x amp signal")
    else:
      self.warn(
        "%s is not a known test signal. Valid signals go from 0-5" % (signal))

  def set_channel(self, channel, toggle_position):
    """Toggle a single channel (1-8) on (1) or off (0).

    NOTE(review): the comparisons use `channel is 1` (identity) rather than
    `==`; this works in CPython only because small ints are interned --
    worth fixing if this code is ever touched.
    """
    # Commands to set toggle to on position
    if toggle_position == 1:
      if channel is 1:
        self.ser.write('!')
      if channel is 2:
        self.ser.write('@')
      if channel is 3:
        self.ser.write('#')
      if channel is 4:
        self.ser.write('$')
      if channel is 5:
        self.ser.write('%')
      if channel is 6:
        self.ser.write('^')
      if channel is 7:
        self.ser.write('&')
      if channel is 8:
        self.ser.write('*')
    # Commands to set toggle to off position
    elif toggle_position == 0:
      if channel is 1:
        self.ser.write('1')
      if channel is 2:
        self.ser.write('2')
      if channel is 3:
        self.ser.write('3')
      if channel is 4:
        self.ser.write('4')
      if channel is 5:
        self.ser.write('5')
      if channel is 6:
        self.ser.write('6')
      if channel is 7:
        self.ser.write('7')
      if channel is 8:
        self.ser.write('8')
class OpenBCISample(object):
  """A single parsed sample from the OpenBCI board.

  Attributes mirror the packet layout: packet id byte, per-channel EEG
  values, auxiliary readings, and the capture timestamp.
  """

  def __init__(self, packet_id, channel_data, aux_data, timestamp):
    self.id, self.channel_data, self.aux_data, self.timestamp = (
        packet_id, channel_data, aux_data, timestamp)
| agpl-3.0 |
rcharp/toyota-flask | numpy/numpy/distutils/fcompiler/sun.py | 62 | 1612 | from __future__ import division, absolute_import, print_function
from numpy.distutils.ccompiler import simple_version_match
from numpy.distutils.fcompiler import FCompiler
compilers = ['SunFCompiler']
class SunFCompiler(FCompiler):
    """numpy.distutils description of the Sun/Forte (WorkShop) Fortran 95 compiler."""

    compiler_type = 'sun'
    description = 'Sun or Forte Fortran 95 Compiler'
    # ex:
    # f90: Sun WorkShop 6 update 2 Fortran 95 6.2 Patch 111690-10 2003/08/28
    version_match = simple_version_match(
        start=r'f9[05]: (Sun|Forte|WorkShop).*Fortran 95')

    executables = {
        'version_cmd': ["<F90>", "-V"],
        'compiler_f77': ["f90"],
        'compiler_fix': ["f90", "-fixed"],
        'compiler_f90': ["f90"],
        'linker_so': ["<F90>", "-Bdynamic", "-G"],
        'archiver': ["ar", "-cr"],
        'ranlib': ["ranlib"]
    }
    module_dir_switch = '-moddir='
    module_include_switch = '-M'
    pic_flags = ['-xcode=pic32']

    def get_flags_f77(self):
        """Fixed-form flags; release 7+ spells the mode '-f77' instead of '-fixed'."""
        version = self.get_version() or ''
        mode_flag = "-f77" if version >= '7' else "-fixed"
        return ["-ftrap=%none", mode_flag]

    def get_opt(self):
        """Optimization flags."""
        return ['-fast', '-dalign']

    def get_arch(self):
        """Architecture-selection flags."""
        return ['-xtarget=generic']

    def get_libraries(self):
        """Fortran runtime support libraries to link against."""
        return ['fsu', 'sunmath', 'mvec']
if __name__ == '__main__':
    # Manual smoke test: build the Sun compiler wrapper at high log
    # verbosity and print the version it detects on this machine.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils.fcompiler import new_fcompiler
    compiler = new_fcompiler(compiler='sun')
    compiler.customize()
    print(compiler.get_version())
| apache-2.0 |
UCSC-nanopore-cgl/nanopore-RNN | nanotensor/alignedsignal.py | 1 | 29391 | #!/usr/bin/env python
"""Class and methods to deal with aligned signal to reference"""
########################################################################
# File: alignedsignal.py
# executable: alignedsignal.py
#
# Author: Andrew Bailey
# History: Created 03/09/18
########################################################################
import sys
import os
import subprocess
import numpy as np
from timeit import default_timer as timer
from collections import defaultdict, namedtuple
from py3helpers.utils import check_numpy_table
from py3helpers.seq_tools import ReferenceHandler, initialize_pysam_wrapper, ReverseComplement, get_minimap_alignment
from nanotensor.fast5 import Fast5
from nanotensor.mea_algorithm import maximum_expected_accuracy_alignment, mea_slow, \
mea_slower, create_random_prob_matrix, get_mea_params_from_events, match_events_with_signalalign
from nanotensor.event_detection import time_to_index
from itertools import islice
class AlignedSignal(object):
    """Labeled nanopore signal data.

    Holds the pA-scaled and raw (ADC) current trace for one read, plus up
    to three families of event-to-reference labels:

    * ``label``      -- continuous labels suitable for NN training
    * ``prediction`` -- possibly overlapping predicted labels
    * ``guide``      -- high-confidence guide-alignment sections
    """

    def __init__(self, scaled_signal):
        """Initialize the scaled signal and label containers.

        :param scaled_signal: signal scaled to pA (list or ndarray of floats)
        """
        self.scaled_signal = None
        self.raw_signal = None
        self._add_scaled_signal(scaled_signal)
        self.signal_length = len(self.scaled_signal)
        # Strand direction is unknown until the first label is added.
        self.minus_strand = None
        # label can be used for neural network training with all signal continuously labelled
        self.label = defaultdict()
        # predictions can have multiple labels for different sections of current
        self.prediction = defaultdict()
        # guides are sections that we are confident in (guide alignments)
        self.guide = defaultdict()

    def add_raw_signal(self, signal):
        """Attach the raw (unscaled) signal.

        :param signal: raw current signal in ADC counts; must be integer
            valued and the same length as the scaled signal
        """
        assert int(signal[0]) == signal[0], "Raw signal are always integers"
        assert len(signal) == len(self.scaled_signal) and len(signal) == self.signal_length, \
            "Raw signal must be same size as scaled signal input:{} != scale:{}".format(signal, self.scaled_signal)
        self.raw_signal = signal

    def _add_scaled_signal(self, signal):
        """Validate and store the pA-scaled signal.

        :param signal: normalized current signal in pA (floats); ndarrays
            are converted to plain lists
        """
        if type(signal) is np.ndarray:
            signal = signal.tolist()
        assert type(signal[0]) == float, "scaled signal must be a float"
        self.scaled_signal = signal

    def add_label(self, label, name, label_type):
        """Add labels to class.

        :param label: structured numpy array with required fields
            ['raw_start', 'raw_length', 'reference_index', 'kmer',
            'posterior_probability']
        :param name: name of the label for signal
        :param label_type: one of ['label', 'prediction', 'guide']
        :raises IndexError: if the labels index past the end of the signal
        """
        assert label_type in ['label', 'prediction', 'guide'], \
            "{} not in ['label', 'prediction', 'guide']: Must select an acceptable type".format(label_type)
        check_numpy_table(label, req_fields=('raw_start', 'raw_length', 'reference_index',
                                             'kmer', 'posterior_probability'))
        # check the labels are in the correct format
        assert min(label["raw_start"]) >= 0, "Raw start cannot be less than 0"
        # BUG FIX: the failure message previously referenced an undefined
        # name 'row', which turned a failed assertion into a NameError.
        assert 0 <= max(label["posterior_probability"]) <= 1, \
            "posterior_probability must be between zero and one {}".format(
                max(label["posterior_probability"]))
        # make sure last label can actually index the signal correctly
        try:
            self.scaled_signal[label[-1]["raw_start"]:label[-1]["raw_start"] + label[-1]["raw_length"]]
        except IndexError:
            raise IndexError("labels are longer than signal")
        label1 = np.sort(label, order=['raw_start'], kind='mergesort')
        # infer strand alignment of read: on the minus strand reference
        # coordinates decrease as raw_start increases
        if label1[0]["reference_index"] >= label1[-1]["reference_index"]:
            minus_strand = True
        else:
            minus_strand = False
        if self.minus_strand is not None:
            # NOTE(review): this guard reads the *unsorted* label while the
            # strand inference above uses the sorted copy -- confirm intended.
            if label[0]["raw_start"] != label[-1]["raw_start"]:
                assert self.minus_strand == minus_strand, "New label has different strand direction, check label"
        else:
            self.minus_strand = minus_strand
        # set label with the specified name
        if label_type == 'label':
            self.label[name] = label
        elif label_type == 'prediction':
            self.prediction[name] = label
        elif label_type == 'guide':
            self.guide[name] = label

    def generate_label_mapping(self, name, scaled=True):
        """Create a generator of the mapping between the signal and the label.

        :param name: name of mapping to create label mapping
        :param scaled: boolean option for returning scaled or unscaled signal
        :yield: (signal slice, kmer, posterior_probability, reference_index)
        """
        assert name in self.label.keys(), "{} is not in labels dataset: {}".format(name, self.label.keys())
        label = self.label[name]
        len_label = len(label)
        if scaled:
            signal = self.scaled_signal
        else:
            assert self.raw_signal is not None, "Must set raw signal in order to generate raw signal alignments"
            signal = self.raw_signal
        for i, segment in enumerate(label):
            start = segment["raw_start"]
            # each segment extends to the next segment's start; the last one
            # uses its own recorded length
            if i < len_label - 1:
                end = label[i + 1]["raw_start"]
            else:
                end = segment["raw_start"] + segment["raw_length"]
            yield signal[start:end], segment['kmer'], segment['posterior_probability'], segment['reference_index']
class CreateLabels(Fast5):
    """Create an AlignedSignal object from a fast5 file.

    Extends the project's Fast5 reader and attaches event/reference labels
    from several sources (signalAlign MEA, full signalAlign output, the
    guide alignment, nanoraw, eventalign) to a single AlignedSignal.
    """

    def __init__(self, fast5_path):
        """Initialize fast5 object and keep track of AlignedSignal object.

        :param fast5_path: path to a fast5 read file
        """
        self.fast5_path = fast5_path
        super(CreateLabels, self).__init__(fast5_path)
        self.aligned_signal = self._initialize()
        # offset applied to reference coordinates when attaching labels;
        # presumably shifts positions onto the anchor base of the 5-mer --
        # confirm against the signalAlign kmer convention.
        self.kmer_index = 2
        self.rna = self.is_read_rna()

    def _initialize(self):
        """Build the AlignedSignal carrying this read's scaled and raw trace.

        :return: AlignedSignal with scaled (pA) and raw (ADC) signal set
        """
        scaled_signal = self.get_read(raw=True, scale=True)
        raw_signal = self.get_read(raw=True, scale=False)
        # add raw signal information to AlignedSignal
        aligned_signal = AlignedSignal(scaled_signal)
        aligned_signal.add_raw_signal(raw_signal)
        return aligned_signal

    def add_mea_labels(self):
        """Attach the signalAlign maximum-expected-accuracy alignment as a label."""
        # TODO call signalalign if not called
        mea_alignment = self.get_signalalign_events(mea=True)
        # rna reference positions are on 5' edge aka right side of kmer
        if self.rna:
            mea_alignment["reference_index"] -= self.kmer_index
        else:
            mea_alignment["reference_index"] += self.kmer_index
        self.aligned_signal.add_label(mea_alignment, name="mea_signalalign", label_type='label')
        return True

    def add_signal_align_predictions(self):
        """Create prediction using probabilities from full output format from signalAlign"""
        # TODO call signalalign if not called
        sa_events = self.get_signalalign_events()
        # cut out duplicates
        sa_events = np.unique(sa_events)
        events = self.get_resegment_basecall()
        predictions = match_events_with_signalalign(sa_events=sa_events, event_detections=events)
        # rna reference positions are on 5' edge aka right side of kmer
        if self.rna:
            predictions["reference_index"] -= self.kmer_index
        else:
            predictions["reference_index"] += self.kmer_index
        self.aligned_signal.add_label(predictions, name="full_signalalign", label_type='prediction')
        return True

    def add_guide_alignment(self):
        """Add guide alignment labels to signal_label handle.

        Each contiguous matched block of the cigar becomes its own
        'guide_alignmentN' entry.
        """
        test_sam = self.get_signalalign_events(sam=True)
        events = self.get_resegment_basecall()
        cigar_labels = create_labels_from_guide_alignment(events=events, sam_string=test_sam,
                                                          kmer_index=self.kmer_index)
        for i, block in enumerate(cigar_labels):
            # print(block)
            self.aligned_signal.add_label(block, name="guide_alignment{}".format(i), label_type='guide')
        return True

    def add_nanoraw_labels(self, reference):
        """Add nanoraw labels to signal_label handle.

        :param reference: reference sequence/path handed to the minimap
            alignment wrapper
        """
        # TODO call nanoraw from here
        events, corr_start_rel_to_raw = self.get_corrected_events()
        # shift corrected event starts to absolute raw-signal coordinates
        events["start"] += corr_start_rel_to_raw
        sequence = ''.join([bytes.decode(x) for x in events['base']])
        hit = get_minimap_alignment(reference, sequence, preset='map-ont')
        cigar_label = np.zeros(len(sequence), dtype=[('raw_start', int), ('raw_length', int), ('reference_index', int),
                                                     ('posterior_probability', float), ('kmer', 'S5')])
        # assign labels
        cigar_label['raw_start'] = events["start"]
        cigar_label['raw_length'] = events["length"]
        # NOTE(review): if hit.strand follows mappy's +1/-1 convention then
        # both values are truthy and this branch is always taken -- confirm
        # against the get_minimap_alignment wrapper.
        if hit.strand:
            reference_map = list(range(hit.r_st, hit.r_en))[::-1]
        else:
            reference_map = list(range(hit.r_st, hit.r_en))
        cigar_label['reference_index'] = reference_map
        cigar_label['kmer'] = events['base']
        # nanoraw provides no per-base confidence; treat every base as 1.0
        cigar_label['posterior_probability'] = [1 for _ in range(hit.r_st, hit.r_en)]
        self.aligned_signal.add_label(cigar_label, name="nanoraw", label_type='label')
        return True

    def add_eventalign_labels(self):
        """Attach nanopolish eventalign output (template section) as a label."""
        section = "template"
        # TODO call eventalign from here
        ea_events = self.get_eventalign_events(section=section)
        events = self.get_basecall_data(section=section)
        sampling_freq = self.sample_rate
        start_time = self.raw_attributes['start_time']
        # convert event start times to raw-signal indices
        events = time_to_index(events, sampling_freq=sampling_freq, start_time=start_time)
        lables = match_events_with_eventalign(events=ea_events, event_detections=events)
        # rna reference positions are on 5' edge aka right side of kmer
        if self.rna:
            lables["reference_index"] -= self.kmer_index
        else:
            lables["reference_index"] += self.kmer_index
        self.aligned_signal.add_label(lables, name='eventAlign', label_type='label')
def _make_cigar_label(raw_start, raw_length, reference_index, kmer, posterior_probability):
    """Pack the parallel per-base label lists into one structured numpy block."""
    block = np.zeros(len(raw_start), dtype=[('raw_start', int), ('raw_length', int), ('reference_index', int),
                                            ('posterior_probability', float), ('kmer', 'S5')])
    block['raw_start'] = raw_start
    block['raw_length'] = raw_length
    block['reference_index'] = reference_index
    block['kmer'] = kmer
    block['posterior_probability'] = posterior_probability
    return block


def create_labels_from_guide_alignment(events, sam_string, rna=False, reference_path=None, kmer_index=2,
                                       one_ref_indexing=False):
    """Create labeled signal from a guide alignment with only matches being reported

    :param events: basecalled event table with raw_start/model_state/p_model_state/raw_length/move fields
    :param sam_string: sam alignment string
    :param rna: if read is rna, reverse again
    :param reference_path: if sam_string has MDZ field the reference sequence can be inferred, otherwise, it is needed
    :param kmer_index: index of the kmer to select for reference to event mapping
    :param one_ref_indexing: boolean zero or 1 based indexing for reference
    :return: list of structured numpy blocks, one per contiguous run of matched reference positions
    """
    # test if the required fields are in structured numpy array
    check_numpy_table(events, req_fields=('raw_start', 'model_state', 'p_model_state', 'raw_length', 'move'))
    assert type(one_ref_indexing) is bool, "one_ref_indexing must be a boolean"

    psam_h = initialize_pysam_wrapper(sam_string, reference_path=reference_path)
    # create an indexed map of the events and their corresponding bases
    bases, base_raw_starts, base_raw_lengths, probs = index_bases_from_events(events, kmer_index=kmer_index)

    # check if string mapped to reverse strand
    if psam_h.alignment_segment.is_reverse:
        probs = probs[::-1]
        base_raw_starts = base_raw_starts[::-1]
        # rna reads go 3' to 5' so we dont need to reverse if it mapped to reverse strand
        if not rna:
            bases = ReverseComplement().reverse(''.join(bases))
    # reverse if it mapped to forward strand and RNA
    elif rna:
        bases = ReverseComplement().reverse(''.join(bases))

    # all 'matches' and 'mismatches'
    matches_map = psam_h.seq_alignment.matches_map
    # zero indexed reference start (shifted by one when one_ref_indexing is True)
    ref_start = psam_h.alignment_segment.reference_start + one_ref_indexing
    # accumulators for the current contiguous block of matched positions
    raw_start = []
    raw_length = []
    reference_index = []
    kmer = []
    posterior_probability = []
    cigar_labels = []
    prev = matches_map[0].reference_index
    for i, alignment in enumerate(matches_map):
        if i == 0 or alignment.reference_index == prev + 1:
            # contiguous with the previous reference position: extend the block
            raw_start.append(base_raw_starts[alignment.query_index])
            raw_length.append(base_raw_lengths[alignment.query_index])
            reference_index.append(alignment.reference_index + ref_start)
            kmer.append(alignment.reference_base)
            posterior_probability.append(probs[alignment.query_index])
        else:
            # gap in the reference: close the current block and start a new one
            cigar_labels.append(_make_cigar_label(raw_start, raw_length, reference_index, kmer,
                                                  posterior_probability))
            raw_start = [base_raw_starts[alignment.query_index]]
            raw_length = [base_raw_lengths[alignment.query_index]]
            reference_index = [alignment.reference_index + ref_start]
            kmer = [alignment.reference_base]
            posterior_probability = [probs[alignment.query_index]]
        # keep track of reference positions
        prev = alignment.reference_index
    # catch the last label
    cigar_labels.append(_make_cigar_label(raw_start, raw_length, reference_index, kmer, posterior_probability))
    return cigar_labels
def index_bases_from_events(events, kmer_index=2):
    """Map basecalled sequence to events from a table with required fields.

    Walks the event table and, for each base of the final called sequence, records
    which event produced it (via the 'move' field) together with that event's
    raw_start, raw_length and p_model_state.

    :param kmer_index: index within model_state used to anchor each base to an event
    :param events: original base-called events with required fields
    :return: (bases, base_raw_starts, base_raw_lengths, probs) — parallel lists,
             one entry per base of the called sequence
    """
    check_numpy_table(events, req_fields=('raw_start', 'model_state', 'p_model_state', 'raw_length', 'move'))
    assert len(events[0]['model_state']) > kmer_index, \
        "Selected too big of a kmer_index len(kmer) !> kmer_index, {} !> {} ".format(len(events[0]['model_state']),
                                                                                    kmer_index)
    probs = []
    base_raw_starts = []
    bases = []
    base_raw_lengths = []
    # NOTE(review): assumes `events` is non-empty — the trailing extend() below
    # reuses the loop variable `event`, which would raise NameError otherwise.
    for i, event in enumerate(events):
        if i == 0:
            # initialize with first kmer: the leading kmer_index+1 bases all come from event 0
            base_raw_starts.extend([event['raw_start'] for _ in event['model_state'][:kmer_index + 1]])
            probs.extend([event['p_model_state'] for _ in event['model_state'][:kmer_index + 1]])
            bases.extend([chr(x) for x in event['model_state'][:kmer_index + 1]])
            base_raw_lengths.extend([event['raw_length'] for _ in event['model_state'][:kmer_index + 1]])
        else:
            # if there was a move, gather the information for each base by index
            if event['move'] > 0:
                # a move of N exposes N new bases starting at position kmer_index of the kmer
                char_moves = bytes.decode(event['model_state'][kmer_index:kmer_index + event['move']])
                for x in range(event['move']):
                    base_raw_starts.append(event['raw_start'])
                    probs.append(event['p_model_state'])
                    bases.append(char_moves[x])
                    base_raw_lengths.append(event['raw_length'])
    # gather last bases for the last event (trailing bases after the anchor position)
    base_raw_starts.extend([event['raw_start'] for _ in event['model_state'][kmer_index + 1:]])
    probs.extend([event['p_model_state'] for _ in event['model_state'][kmer_index + 1:]])
    bases.extend([chr(x) for x in event['model_state'][kmer_index + 1:]])
    base_raw_lengths.extend([event['raw_length'] for _ in event['model_state'][kmer_index + 1:]])

    # the index of each corresponds to the index of the final sequence
    return bases, base_raw_starts, base_raw_lengths, probs
def get_eventalign_events(fast5_dir, reference, output_dir, threads=1, overwrite=False):
    """Run nanopolish eventalign and yield its events grouped per read.

    :param fast5_dir: directory of fast5 reads
    :param reference: reference fasta path
    :param output_dir: directory receiving the eventalign output files
    :param threads: number of threads passed to eventalign
    :param overwrite: rerun eventalign even if its output already exists
    :yield: (template_events, complement_events, fast5_path) for each read index
    """
    eventalign_output_path, eventalign_fofn_path = call_eventalign_script(fast5_dir,
                                                                          reference,
                                                                          output_dir,
                                                                          threads=threads,
                                                                          overwrite=overwrite)
    fast5_files = []
    with open(eventalign_fofn_path, 'r') as fofn:
        for line in fofn:
            # second tab-separated column is the fast5 path; strip the newline
            fast5_files.append(line.split('\t')[1][:-1])
    # gather event data
    dtype = [('contig', 'S10'), ('position', int),
             ('reference_kmer', 'S6'), ('read_index', int),
             ('strand', 'S1'), ('event_index', int),
             ('event_level_mean', float), ('event_stdv', float),
             ('event_length', float), ('model_kmer', 'S6'),
             ('model_mean', float), ('model_stdv', float),
             ('standardized_level', float)]
    with open(eventalign_output_path, 'r') as event_align:
        read_number = 0
        eventalign_data_template = []
        eventalign_data_complement = []
        event_align.readline()  # skip the eventalign header line
        for line in event_align:
            data = line.split('\t')
            if int(data[3]) != read_number:
                # read index changed: flush the accumulated rows for the previous read
                # NOTE(review): if the first read index in the file is not 0, this yields
                # an empty pair for read 0 first — confirm this is the intended contract.
                t = np.array(eventalign_data_template, dtype=dtype)
                c = np.array(eventalign_data_complement, dtype=dtype)
                yield t, c, fast5_files[read_number]
                read_number = int(data[3])
                eventalign_data_template = []
                eventalign_data_complement = []
            data[1] = int(data[1])
            data[3] = int(data[3])
            data[5] = int(data[5])
            data[6] = float(data[6])
            data[7] = float(data[7])
            data[8] = float(data[8])
            data[10] = float(data[10])
            data[11] = float(data[11])
            data[12] = float(data[12])
            if str(data[4]) == 't':
                eventalign_data_template.append(tuple(data))
            else:
                eventalign_data_complement.append(tuple(data))
    t = np.array(eventalign_data_template, dtype=dtype)
    c = np.array(eventalign_data_complement, dtype=dtype)
    # BUGFIX: `assert t or c` raises "truth value of an array ... is ambiguous" for
    # multi-row numpy arrays; test the number of rows explicitly instead.
    assert t.size or c.size, "Check reference genome, no alignment generated for any read: {}".format(reference)
    yield t, c, fast5_files[read_number]
def call_eventalign_script(fast5_dir, reference, output_dir, threads=1, overwrite=False):
    """Run the `call_eventalign.sh` wrapper (expected on PATH, from the scripts
    folder) unless its output is already present.

    :param fast5_dir: fast5 input directory, passed as -f
    :param reference: reference fasta, passed as -r
    :param output_dir: output directory, passed as -o
    :param threads: worker count, passed as -t
    :param overwrite: rerun even when eventalign.txt already exists
    :return: (path to eventalign.txt, path to all_files.fastq.index.readdb)
    """
    output_path = os.path.join(output_dir, "eventalign.txt")
    fofn_path = os.path.join(output_dir, "all_files.fastq.index.readdb")
    needs_run = overwrite or not os.path.exists(output_path)
    if needs_run:
        command = ["call_eventalign.sh",
                   '-f', fast5_dir,
                   '-t', str(threads),
                   '-r', reference,
                   '-o', output_dir]
        subprocess.call(command)
    return output_path, fofn_path
def embed_eventalign_events(fast5_dir, reference, output_dir, threads=1, overwrite=False):
    """Call eventalign and embed the resulting event tables into each fast5 file.

    :param fast5_dir: directory of fast5 reads
    :param reference: reference fasta path
    :param output_dir: directory receiving the eventalign output files
    :param threads: number of threads passed to eventalign
    :param overwrite: rerun eventalign even if its output already exists
    :return: True once every read has been processed
    """
    event_generator = get_eventalign_events(fast5_dir,
                                            reference,
                                            output_dir,
                                            threads=threads,
                                            overwrite=overwrite)
    attributes = None
    # TODO add attributes to event table
    for template, complement, fast5path in event_generator:
        print(fast5path)
        print("template", template)
        # BUGFIX: `if template or complement:` raises ValueError on multi-row numpy
        # arrays (ambiguous truth value); check the row counts explicitly instead.
        if template.size or complement.size:
            handle = Fast5(fast5path, read='r+')
            handle.set_eventalign_table(template=template, complement=complement, meta=attributes, overwrite=True)
        else:
            print("{} did not align".format(fast5path))
    return True
def match_events_with_eventalign(events=None, event_detections=None, minus=False, rna=False):
    """Match each eventalign row with its event detection to label raw-signal segments per kmer.

    Orientation rules:
      RNA is sequenced 3'-5' (reversed relative to the fasta/q sequence); if it also
      mapped to the reverse strand, reverse + reverse-complement = complement.
      DNA is sequenced 5'-3'; mapping to the reverse strand implies reverse-complement.

    :param events: events table with 'position', 'event_index' and 'reference_kmer' fields
    :param event_detections: event detection table with 'start' and 'length' fields
    :param minus: boolean option to for minus strand mapping
    :param rna: boolean for RNA read
    :return: structured array with raw_start/raw_length/reference_index/posterior_probability/kmer
    """
    assert events is not None, "Must pass signal alignment events"
    assert event_detections is not None, "Must pass event_detections events"
    check_numpy_table(events, req_fields=('position', 'event_index',
                                          'reference_kmer'))
    check_numpy_table(event_detections, req_fields=('start', 'length'))

    def _as_text(kmer):
        """Decode bytes kmers; pass real strings through unchanged."""
        return kmer if type(kmer) is str else bytes.decode(kmer)

    n_rows = len(events)
    label = np.zeros(n_rows, dtype=[('raw_start', int), ('raw_length', int), ('reference_index', int),
                                    ('posterior_probability', float), ('kmer', 'S6')])
    label['raw_start'] = [event_detections[idx]["start"] for idx in events["event_index"]]
    label['raw_length'] = [event_detections[idx]["length"] for idx in events["event_index"]]
    label['reference_index'] = events["position"]

    flip = ReverseComplement()
    # choose the kmer transform implied by strand/chemistry; None means "as reported"
    if minus:
        transform = flip.complement if rna else flip.reverse_complement
    else:
        transform = flip.reverse if rna else None
    if transform is None:
        label['kmer'] = events["reference_kmer"]
    else:
        label['kmer'] = [transform(_as_text(k)) for k in events["reference_kmer"]]

    label['posterior_probability'] = np.ones(n_rows)
    # np.sort(label, order='raw_start', kind='mergesort')
    return label
def main():
    """Ad-hoc development driver: runs eventalign embedding and labelling against
    hard-coded local paths. Not intended as a reusable entry point — the
    commented-out sections are kept as experimentation notes."""
    start = timer()
    # sam = "/Users/andrewbailey/CLionProjects/nanopore-RNN/signalAlign/bin/test_output/tempFiles_alignment/tempFiles_miten_PC_20160820_FNFAD20259_MN17223_mux_scan_AMS_158_R9_WGA_Ecoli_08_20_16_83098_ch138_read23_strand/temp_sam_file_5048dffc-a463-4d84-bd3b-90ca183f488a.sam"\
    # rna_read = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/rna_reads/DEAMERNANOPORE_20170922_FAH26525_MN16450_sequencing_run_MA_821_R94_NA12878_mRNA_09_22_17_67136_read_36_ch_218_strand.fast5"
    # dna_read = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/canonical/miten_PC_20160820_FNFAD20259_MN17223_sequencing_run_AMS_158_R9_WGA_Ecoli_08_20_16_43623_ch100_read280_strand.fast5"
    dna_read = "/Users/andrewbailey/CLionProjects/nanopore-RNN/nanotensor/tests/test_files/minion-reads/canonical/miten_PC_20160820_FNFAD20259_MN17223_sequencing_run_AMS_158_R9_WGA_Ecoli_08_20_16_43623_ch100_read280_strand.fast5"
    dna_read2 = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/canonical/miten_PC_20160820_FNFAD20259_MN17223_mux_scan_AMS_158_R9_WGA_Ecoli_08_20_16_83098_ch138_read23_strand.fast5"
    # dna_read3 = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/canonical/over_run/miten_PC_20160820_FNFAD20259_MN17223_mux_scan_AMS_158_R9_WGA_Ecoli_08_20_16_83098_ch138_read23_strand.fast5"
    dna_read4 = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/canonical/consortium_r94_human_dna/rel3-fast5-chr1.part04/DEAMERNANOPORE_20161206_FNFAB49164_MN16450_sequencing_run_MA_821_R9_4_NA12878_12_06_16_71094_ch190_read404_strand.fast5"
    reference = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/reference-sequences/ecoli_k12_mg1655.fa"
    reference2 = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/reference-sequences/fake_rna.fa"
    out_ref = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/reference-sequences/fake_rna_reversed.fa"
    # ReverseComplement().convert_write_fasta(reference2, out_ref, complement=False, reverse=True)
    # rh = ReferenceHandler(reference)
    # seq = rh.get_sequence(chromosome_name="Chromosome", start=623200, stop=623216)
    # print(seq)
    # print("CCACGGGTCCGTCTGG")
    # print("Reference")
    # print(seq)
    # print(ReverseComplement().complement(seq))
    # print("Query")
    # print("CCACGGGTCCGTCTGG")
    # print(ReverseComplement().complement("CCACGGGTCCGTCTGG"))
    # seq = rh.get_sequence(chromosome_name="Chromosome", start=623200-5, stop=623200)
    # print(seq)
    fast5_dir = "/Users/andrewbailey/CLionProjects/nanopore-RNN/nanotensor/tests/test_files/minion-reads/canonical/"
    output_dir = "/Users/andrewbailey/data/test_event_align_output"
    fast5_dir2 = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/canonical/consortium_r94_human_dna/rel3-fast5-chr1.part04/"
    # run eventalign over the whole dir and write the tables back into the fast5s
    embed_eventalign_events(fast5_dir2, reference, output_dir, threads=1, overwrite=True)
    # re-open one read and label its raw signal from the embedded eventalign table
    f5handle = Fast5(dna_read4)
    section = "template"
    ea_events = f5handle.get_eventalign_events(section=section)
    print(ea_events.dtype)
    print(ea_events)
    # TODO match labels with events and raw signal
    events = f5handle.get_basecall_data(section=section)
    sampling_freq = f5handle.sample_rate
    start_time = f5handle.raw_attributes['start_time']
    events = time_to_index(events, sampling_freq=sampling_freq, start_time=start_time)
    lables = match_events_with_eventalign(events=ea_events, event_detections=events)
    # handle = Fast5(dna_read2)
    # events, corr_start_rel_to_raw = handle.get_corrected_events()
    # events["start"] += corr_start_rel_to_raw
    # sequence = ''.join([bytes.decode(x) for x in events['base']])
    # hit = get_minimap_alignment(reference, sequence, preset='map-ont')
    # print(hit.r_st)
    # print(hit.r_en)
    # print(hit.strand)
    # if str(hit.cigar_str) == str(len(sequence))+'M':
    #     print("Yes")
    # print(events.dtype)
    # cigar_label = np.zeros(len(sequence), dtype=[('raw_start', int), ('raw_length', int), ('reference_index', int),
    #                                              ('posterior_probability', float), ('kmer', 'S5')])
    # # assign labels
    # cigar_label['raw_start'] = events["start"]
    # cigar_label['raw_length'] = events["length"]
    # cigar_label['reference_index'] = list(range(hit.r_st, hit.r_en))
    # cigar_label['kmer'] = events['base']
    # cigar_label['posterior_probability'] = [1 for _ in range(hit.r_st, hit.r_en)]
    #
    # print(cigar_label)
    # events = handle.get_resegment_basecall()
    # kmer_length = 5
    # def make_map(events):
    #     event_map = [0]
    #     previous_prob = 0
    #     for i, line in islice(enumerate(events), 1, None):
    #         print(i, line)
    #         move = line['move']
    #         this_prob = line['p_model_state']
    #         if move == 1:
    #             event_map.append(i)
    #         if move > 1:
    #             for skip in range(move - 1):
    #                 event_map.append(i - 1)
    #             event_map.append(i)
    #         if move == 0:
    #             if this_prob > previous_prob:
    #                 event_map[-1] = i
    #                 previous_prob = this_prob
    #     final_event_index = [event_map[-1]]
    #     padding = final_event_index * (kmer_length - 1)
    #     event_map = event_map + padding
    #     return event_map
    # # print(events)
    # print(len(seq))
    # print(len(make_map(events)))
    stop = timer()
    print("Running Time = {} seconds".format(stop - start), file=sys.stderr)
if __name__ == "__main__":
    main()
    # explicit (status 0) exit after the ad-hoc run
    raise SystemExit
| mit |
akretion/openerp-server | openerp/report/render/rml2txt/__init__.py | 77 | 1385 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from rml2txt import parseString, parseNode
#.apidoc title: RML to TXT engine
""" This engine is the minimalistic renderer of RML documents into text files,
using spaces and newlines to format.
It was needed in some special applications, where legal reports need to be
printed on special (dot-matrix) printers.
"""
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
karteum/Duplicide | duplicide.py | 1 | 20443 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Duplicide - a tool to detect duplicate files and dirs !
# Author: Adrien Demarez (adrien.demarez@free.fr)
# Version: 20140119
# License: GPLv3
# Usage: python duplicide.py dir1/ [dir2] [...]
#
# Goals:
# - detect duplicate files (like fslint and any other similar tools), as quickly as possible i.e. without performing hash/compare unnecessarily and without unnecessary I/O, and with a proper progressbar
# - detect duplicate dirs (i.e. don't output files/dirs as a result if the whole parent dir is itself a duplicate).
# - don't output pseudo-duplicate that are hard links. Process by sorted inode in order to limit HDD seeks (like fslint in https://code.google.com/p/fslint/source/browse/trunk/fslint/findup )
# - can process hash on a small fraction of the files instead of the whole file in order to speedup tests (at the expense of reliability / risk of false positive => human double-check is necessary !). In practical situations (99% of the time), identical size + identical crc32 on fist kbytes is OK...
# - it's only a detection program. What to do with duplicates (e.g. delete them or making hard links or symlinks or any other idea) is up to the user or calling script/program.
#
# TODO:
# - handle not only identical dirs, but also "similar" ones
# - handle case where a "clean reference dir" is given, so that it outputs the duplicate files/dirs outside of this reference (i.e. that can be deleted) and the ones that have to be backup'ed inside the reference.
# - allow to serialize the computed hashes
# - clean-up code, which is currently spaghetti-like. I'd like to isolate more the function+data they operate on. Currently, nearly all functions do something on ~all the global variables
#
# DISCLAIMER: as it is mentioned in the GPLv3 (but I prefer to repeat it): there is no warranty of any kind with this tool. It is not bug-free. Use it at your own risk !
# Among other things (and not limited to this), it may output wrong results such as false positive and therefore it requires human double-check before you delete any (supposed) duplicate data it outputs.
# I am not responsible for any data-loss caused directly or indirectly by this program !
import os,sys,mmap,stat
import zlib #import binascii
import hashlib #import md5
import filecmp
from collections import defaultdict
import time
import fnmatch # re ?
#import argparse
#import platform
from multiprocessing import Process, Array
import random
import cPickle
import sqlite3,xxhash
# TODO: allow disjoint chunks ? allow rdiff-like sliding-window CRC ?
def checksum_file(filename, size=-1, hashalgo='xxhash', chunk=-1):
    """Hash the first `size` bytes of `filename` (the whole file when size <= 0).

    :param filename: path of the file to hash
    :param size: number of leading bytes to hash, or <= 0 for the whole file
    :param hashalgo: 'crc32', 'adler32', 'md5' (any hashlib name) or 'xxhash'
    :param chunk: read granularity in bytes, or <= 0 to read the range in one go
    :return: hex string of the checksum/digest
    """
    # BUGFIX: previous code used st_size - 1 (tagged "FIXME: why -1 ?"), silently
    # ignoring the last byte of every file — two files differing only in their last
    # byte would hash identically. Hash the full requested range instead.
    maxsize = int(os.stat(filename).st_size)
    readsize = min(maxsize, size) if size > 0 else maxsize
    readchunk = min(chunk, readsize) if chunk > 0 else readsize
    # BUGFIX: open in binary mode so checksums are byte-accurate on every platform
    # (text mode mangles line endings on some systems and breaks on Python 3).
    with open(filename, 'rb') as fh:
        readoffset = 0
        if hashalgo == "crc32" or hashalgo == "adler32":
            crcfun = zlib.adler32 if hashalgo == "adler32" else zlib.crc32
            mycrc = 0
            while readoffset < readsize:
                buf = fh.read(readchunk)
                if not buf:
                    break  # defensive: stop on unexpected EOF (file shrank underneath us)
                mycrc = crcfun(buf, mycrc)
                readoffset += len(buf)
            return hex(mycrc & 0xffffffff)
        if hashalgo == "md5":
            digest = hashlib.new(hashalgo)
        else:
            # BUGFIX: the python-xxhash module exposes xxh64(), not xxhash64()
            digest = xxhash.xxh64()
        while readoffset < readsize:
            buf = fh.read(readchunk)
            if not buf:
                break  # defensive: stop on unexpected EOF
            digest.update(buf)
            readoffset += len(buf)
        return digest.hexdigest()
def checksum_props(props, hashalgo='crc32'):
    """Return a hash of a directory's 'properties' (child sizes, file hashes, counts...).

    Two dirs with the same property hash are considered duplicates.

    :param props: any value with a stable str() representation (typically a sorted list)
    :param hashalgo: 'crc32' or any algorithm name accepted by hashlib.new()
    :return: hex string of the checksum
    """
    # encode explicitly: zlib.crc32 and hashlib.update require bytes on Python 3,
    # and .encode('utf-8') is a no-op-compatible call on Python 2 ascii strings
    data = str(props).encode('utf-8')
    if hashalgo == "crc32":
        result = hex(zlib.crc32(data) & 0xffffffff)
    else:
        digest = hashlib.new(hashalgo)
        digest.update(data)
        result = digest.hexdigest()
    return result
class progresswalk:
    """Progress indicator for os.walk""" # The implementation may look complex, but it's because it tries to put the right "weight" to dirs according to how deep they are in the filesystem
    def __init__(self, init_path):
        # init_path/init_depth anchor all depth computations relative to the scan root
        self.init_path = init_path
        self.init_depth = init_path.count('/')
        # one complex "digit" per depth level; see update() for the encoding
        self.numdirs = [0+0j]
    def update(self, dir, dirs):
        """Account for one visited dir (with its subdirs) and redraw the percentage on stderr."""
        # numdirs[] is processed as a "polynom". It is using complex numbers in order to avoid using 2 lists: real part is total number of dirs, and imag part is number of processed dirs
        current_depth = dir.count('/') - self.init_depth
        if len(self.numdirs) < current_depth+2:
            self.numdirs.append(0+0j)
        # register the newly discovered subdirs one level deeper
        self.numdirs[current_depth+1] += len(dirs)
        walkedRatio=0
        # compact the "polynom" numdirs with each "digit" in 0-9, and fit it into a "normal" integer
        for i in range(1, len(self.numdirs)-2): # [1:len(numdirs)-2] because the first value is 0 and the last value may be 0, and we want to avoid division by 0 !
            walkedRatio = walkedRatio*10 + int((9*self.numdirs[i].imag)/self.numdirs[i].real)
        completion = (100*walkedRatio) / (10**(len(self.numdirs)-3))
        # mark the current dir as processed (imaginary part counts completions)
        self.numdirs[current_depth] += 1j
        #sys.stderr.write("\rScanning: [%d %%] %s" % (completion,str(self.numdirs))) # self.init_path
        sys.stderr.write("\rScanning: [%d %%]" % (completion,)) # self.init_path
class dupcontext:
    """Scan context holding every index needed to detect duplicate files and dirs.

    Typical usage: scandir() one or more roots, then process(), then
    print_dupdirs() / print_dupfiles().
    """
    def __init__(self):
        # FIXME: replace all lists by sets ? Put those global variables into a class ?
        # For files: size -> inodes, inode -> size, inode <-> paths, hash <-> inodes
        self.sizeinodes = defaultdict(list) ; self.inodessizes = defaultdict(int)
        self.inodesfiles = defaultdict(list) ; self.filesinodes = defaultdict(int)
        self.hashinodes = defaultdict(list) ; self.inodeshash = {}
        # For dirs : for each dir key, dirsAttrsOnFiles[key] (resp. dirsAttrsOnSubdirs for subdirs instead of subfiles) is a list of entries. values[0] is the number of files, values[1] is the size of the dir, then values[2:..] contains the file sizes, then we also push later computed hashes for files
        self.dirsAttrsOnFiles = {} ; self.dirsAttrsOnSubdirs = {}
        self.sizedirs = defaultdict(list) ; self.dirsizes = defaultdict(int)
        self.hashdirs = defaultdict(list) ; self.dirshash = {}
        self.roots = []
        self.dupresultsD = defaultdict(list) ; self.dupresultsF = defaultdict(list) # the result
        #incdirs = defaultdict(list) ; incfiles = defaultdict(list)
    def __add_file(self, dir, file):
        # Register one file in the size/inode indexes. Returns its size in bytes,
        # or 0 when the file is skipped (unreadable, empty, non-regular...).
        path = dir + "/" + file
        if not os.path.exists(path) or not os.access(path, os.R_OK):
            # Skip broken symlinks, and cases where we do not have access rights. TODO: check whether access rights are tied to inode or path
            sys.stderr.write("Unable to access %s!\n" % (path,))
            return 0
        filestat = os.lstat(path)
        size = int(filestat.st_size)
        if (not option_include_nullfiles and size == 0) or (not option_include_nonregfiles and not stat.S_ISREG(filestat.st_mode)): # not os.path.isfile(path):
            # FIXME: include those files in another db, so that comparing dirs will not omit them ? or just serialize the whole stat().st_mode in dirsAttrsOnSubdirs ?
            return 0
        self.dirsAttrsOnFiles[dir].append(size) # BUGFIX: case where same number of subfiles, different file sizes but same sum(sizes), different filemd5
        fakeino = (filestat.st_dev << 32) | filestat.st_ino # "fake" inode (merging dev and inode in order to get a unique ID. FIXME: maybe st_dev can change across reboots or device insertion/removal ? In that case, it would be dangerous to serialize/deserialize and mix loaded info with new scanned info ?
        if option_include_hardlinks or not fakeino in self.inodesfiles:
            self.sizeinodes[size].append(fakeino) # FIXME: use set instead of list to ensure unicity !
        else:
            print "skipping " + path # FIXME: is it really skipped ? what should happen to the following lines ?
        self.filesinodes[path] = fakeino
        self.inodesfiles[fakeino].append(path)
        self.inodessizes[fakeino] = size
        return size
    # ftw() in the whole dir structure and compute hash on the relevant files and dirs
    def scandir(self, init_path):
        """Add a dir to the scan context"""
        self.roots.append(init_path)
        progress = progresswalk(init_path)
        for (dir, dirs, files) in os.walk(init_path):
            progress.update(dir, dirs)
            for excludetest in option_excludelist:
                if fnmatch.fnmatch(dir, excludetest):
                    # NOTE(review): this `continue` only skips to the next exclude
                    # pattern (inner loop); matching dirs are NOT actually excluded
                    # from the scan — confirm the intended behavior.
                    continue
            # Processing current dir : compute dir size, and store file sizes into sizefiles
            dirsize = 0
            self.dirsAttrsOnFiles[dir] = [len(files)]
            self.dirsAttrsOnSubdirs[dir] = [len(dirs)]
            for file in files:
                dirsize += self.__add_file(dir, file)
            # Increment all parents dir size with current dir size
            while(dirsize > 0 and dir != init_path and dir != '/'):
                self.dirsizes[dir] += dirsize
                dir = os.path.dirname(dir)
            #self.dirsizes[init_path] += (dirsize) > 0 and dirsize or 0 # FIXME : if two toplevels are identical ?
        # Reverse sizes for dirs
        for (dir,size) in self.dirsizes.iteritems():
            self.sizedirs[size].append(dir)
            self.dirsAttrsOnSubdirs[dir][0] |= size << 32 #dirsAttrsOnSubdirs[dir].append(size)
    def loadfrom(self, file):
        """Load context from file"""
        # TODO: not implemented yet (would deserialize the computed indexes/hashes)
        pass
    def saveto(self, file):
        """Save context to file"""
        # TODO: not implemented yet (would serialize the computed indexes/hashes)
        pass
    def process(self):
        """Launch analyze of the context in order to find duplicate dirs and files"""
        self.__compute_fileshash()
        self.__compute_dirshash()
        self.__compute_duplicates()
    def __compute_fileshash(self):
        # See which files need to be looked at for computing hash (i.e. only if several files have the same size)
        # Compute hash for files (sorting by inode in order to use disk internal buffering/readahead and avoid disk seeks)
        # TODO: put this in a function and use multithreading for the computation ? (N.B. is it I/O or CPU bounded, especially in case of crc32 on the first 64k ?)
        # needs: sizeinodes, inodessizes, inodesfiles, inodeshash, hashinodes, dirsAttrsOnFiles
        inodes_tohash = []
        #for inodelist in filter(lambda x: len(x)>1, self.sizeinodes.values()):
        for inodelist in filter(lambda x: len(x)>=1, self.sizeinodes.values()):
            inodes_tohash.extend(inodelist)
        i = 0 ; total = len(inodes_tohash)
        for inode in sorted(inodes_tohash):
            curr_size = self.inodessizes[inode]
            file0 = self.inodesfiles[inode][0] # N.B. the access rights and other properties are identical to all hard links as they are bound to the inode
            # hash key embeds the size so equal-hash/different-size files never collide
            curr_hash = "%s_%s" % (curr_size, checksum_file(file0, size=SIZE_CKSUM, hashalgo='md5')) # adler32
            for file in self.inodesfiles[inode]:
                self.dirsAttrsOnFiles[os.path.dirname(file)].append(curr_hash)
            #~ if not inode in self.inodeshash.keys():
            self.hashinodes[curr_hash].append(inode)
            self.inodeshash[inode] = curr_hash
            completion = (100*i)/total
            sys.stderr.write("\rComputing checksums for dev/inode 0x%x: [%d %%]" % (inode, completion))
            i+=1
    def __isdup(self, path):
        # NOTE(review): this compares a "size_md5" hash *string* with the integer 1 —
        # probably meant len(self.hashinodes[self.inodeshash[...]]) > 1; confirm.
        return self.inodeshash[self.filesinodes[path]] > 1
    def __numdups(self, dupentry):
        # Returns how many duplicate siblings this directory has
        if not dupentry in self.dirshash: return -1
        return len(self.hashdirs[self.dirshash[dupentry]])
    def __bestParent(self, currentdir):
        # Climb up as long as the parent dir is itself a duplicate, so only the
        # topmost duplicated dir is reported (not every duplicated child).
        parentdir = os.path.dirname(currentdir)
        currentdir = parentdir
        while (parentdir in self.dirshash and len(self.hashdirs[self.dirshash[parentdir]])>1):
            #incdirs[parentdir].append(currentdir)
            currentdir = parentdir
            parentdir = os.path.dirname(parentdir)
        return currentdir #dirshash[currentdir] # FIXME: hashdirs[] ?
    def __compute_dirshash(self):
        # Compute "hash" for relevant dirs
        # FIXME: I would like to avoid another os.walk, however doing it bottom-up is the easiest (though not best) way to compute hashes for leaf dirs first (hopefully the previous os.walk() is still in the OS cache despite the more recent computation of file checksums...)
        # needs: init_path, sizedirs, dirsizes, dirsAttrsOnSubdirs, dirsAttrsOnFiles, dirshash, hashdirs
        for init_path in self.roots:
            for (dir, dirs, files) in os.walk(init_path, topdown=False):
                files2 = [tmp for tmp in files if (dir+'/'+tmp) in self.filesinodes]
                # If any subdir or subfile wasn't added to the properties, it means it is not a duplicate, and therefore the current dir is not a duplicate
                if(len(self.sizedirs[self.dirsizes[dir]])>1 and len(self.dirsAttrsOnSubdirs[dir])==(1+len(dirs)) and len(self.dirsAttrsOnFiles[dir])==(1+2*len(files2))):
                    tmp_props = sorted(self.dirsAttrsOnFiles[dir])
                    tmp_props.extend(sorted(self.dirsAttrsOnSubdirs[dir]))
                    curr_hash = "%s_%s" % (self.dirsizes[dir], checksum_props(tmp_props, hashalgo='md5')) # crc32
                    self.dirshash[dir] = curr_hash
                    self.hashdirs[curr_hash].append(dir)
                    # NOTE(review): dirsAttrsOnSubdirs is a plain dict — if dir's parent
                    # was never scanned (e.g. above a scan root) this may raise KeyError.
                    self.dirsAttrsOnSubdirs[os.path.dirname(dir)].append(curr_hash)
                else:
                    # print "Skipping %s %d %d %d %s" % (dir, len(self.dirsAttrsOnSubdirs[dir]), len(dirs), len(self.sizedirs[self.dirsizes[dir]]), str(self.dirsAttrsOnSubdirs[dir]))
                    print "Skipping %s : %s -- %d __ %s -- %d %d __ %s -- %d %d\n" % (dir, self.sizedirs[self.dirsizes[dir]], len(self.sizedirs[self.dirsizes[dir]]), self.dirsAttrsOnSubdirs[dir], len(self.dirsAttrsOnSubdirs[dir]), 1+len(dirs), self.dirsAttrsOnFiles[dir], len(self.dirsAttrsOnFiles[dir]),(1+2*len(files2)))
    def __compute_duplicates(self):
        # Fill dupresultsD (duplicate dirs, collapsed to their best parent) and
        # dupresultsF (duplicate files whose parent dir is not itself a duplicate).
        #~ dupdirset = set()
        for dupdirs in filter(lambda x: len(x)>1, self.hashdirs.values()):
            for currentdir in dupdirs:
                parentdir = self.__bestParent(currentdir)
                if not parentdir in self.dupresultsD[self.dirshash[parentdir]]: # FIXME: can it happen since we use bestparent ?
                    self.dupresultsD[self.dirshash[parentdir]].append(parentdir)
                    #~ dupdirset.add(parentdir)
        for inodelist in filter(lambda x: len(x)>1, self.hashinodes.values()):
            for dupinode in inodelist:
                #~ if len(self.inodesfiles[dupinode]) > 1:
                    #~ print "hard link" + str(self.inodesfiles[dupinode])
                for dupfile in self.inodesfiles[dupinode]: # FIXME: no need for a for() loop since we already discarded hard links !
                    currentdir = os.path.dirname(dupfile)
                    if not currentdir in self.dirshash or len(self.hashdirs[self.dirshash[currentdir]]) < 2:
                        self.dupresultsF[self.inodeshash[dupinode]].append(dupinode)
        # TODO: compute hashset() for the dir, to compare with peers and be able to eventually say dirA > dirB...
        # TODO: double-check with filecmp.cmp()
    def __print_duplicates(self, dupresults, files=False):
        # Shared printer for dirs/files: sort by the size prefix of the hash key
        # ("size_md5") and print one line per duplicate group.
        # TODO: allow mixed files/dirs output
        resultsorted = {}
        for k,v in dupresults.iteritems():
            resultsorted[int(k.partition('_')[0])] = v
        for k in sorted(resultsorted.keys()):
            sys.stdout.write("%d kB * %d: " % ((k>>10)+1, len(resultsorted[k])))
            if(files==True):
                for inode in resultsorted[k]:
                    sys.stdout.write(str(self.inodesfiles[inode]) + ", ")
                print ""
            else:
                print str(resultsorted[k])
    def print_dupdirs(self):
        """Print duplicate dirs found in the context"""
        self.__print_duplicates(self.dupresultsD)
    def print_dupfiles(self):
        """Print duplicate files found in the context"""
        self.__print_duplicates(self.dupresultsF, True)
    def checkincluded(self, init_path, ref):
        # Check that every scanned file under init_path has a duplicate inside ref;
        # prints a "KO" line (and returns False) for each file without one.
        result = True
        routourne = "/-\\|"
        i=0
        for path in [tmp for tmp in self.filesinodes.keys() if tmp.startswith(init_path)]:
            inode = self.filesinodes[path]
            myhash = self.inodeshash[inode]
            if len(self.hashinodes[myhash]) < 2:
                print "KO %s has no duplicates" % (path,)
                result = False
            inoderef = False
            i+=1
            for inode in self.hashinodes[myhash]:
                for tmp in self.inodesfiles[inode]:
                    if tmp.startswith(ref+'/'):
                        inoderef = True
                        sys.stdout.write('%s\r' % (routourne[i % 4],)) #print "OK %s has a duplicate in %s" % (path, tmp)
            if (not inoderef):
                result = False
                print "KO %s is has no equivalent in %s" % (path, ref)
        return result
    def checkincluded3(self, init_path, ref):
        # Variant of checkincluded() that re-walks the filesystem instead of using
        # filesinodes keys directly (skips entries with a null fake-inode).
        result = True
        routourne = "/-\\|"
        i=0
        for (dir, dirs, files) in os.walk(init_path, topdown=False):
            files2 = [dir + '/' + tmp for tmp in files if dir+'/'+tmp in self.filesinodes and self.filesinodes[dir+'/'+tmp] != 0]
            for path in files2:
                inode = self.filesinodes[path]
                # print "%s : %d" % (path, inode)
                myhash = self.inodeshash[inode]
                if len(self.hashinodes[myhash]) < 2:
                    print "KO %s has no duplicates" % (path,)
                    result = False
                inoderef = False
                i+=1
                for inode in self.hashinodes[myhash]:
                    for tmp in self.inodesfiles[inode]:
                        if tmp.startswith(ref+'/'):
                            inoderef = True
                            sys.stdout.write('%s\r' % (routourne[i % 4],)) #print "OK %s has a duplicate in %s" % (path, tmp)
                if (not inoderef):
                    result = False
                    print "KO %s is has no equivalent in %s" % (path, ref)
        return result
# Options
SIZE_CKSUM = (1<<16)
option_include_nullfiles = False
option_include_nonregfiles = False # FIXME: 'True' case is not handled yet !
option_include_hardlinks = False
option_excludelist = []
if __name__ == "__main__":
#parser = argparse.ArgumentParser(description='Detects all the duplicate files and dirs')
#parser.add_argument('-f', action='store_true', default=False, help="display duplicate files")
#args=parser.parse_args()
#print args
context = dupcontext()
if len(sys.argv)==1:
print "Usage: duplicide dir1/ [dir2/ dir3/ ...]"
sys.exit (0)
for arg in sys.argv:
init_path = arg.rstrip('/')
context.scandir(init_path)
context.process()
print "\nDuplicate dirs:"
context.print_dupdirs()
print "\nDuplicate files:"
context.print_dupfiles()
with open("/tmp/duplicide.pickle", 'w') as fh:
cPickle.dump(context, fh)
| gpl-3.0 |
xuzhao1211/OnlineExam | misago/users/tests/test_forgottenpassword_views.py | 8 | 1511 | from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.test import TestCase
from misago.users.tokens import make_password_change_token
class ForgottenPasswordViewsTests(TestCase):
def test_request_view_returns_200(self):
"""request new password view returns 200"""
response = self.client.get(reverse('misago:forgotten_password'))
self.assertEqual(response.status_code, 200)
def test_change_view_returns_200(self):
"""change password view returns 200"""
User = get_user_model()
test_user = User.objects.create_user('Bob', 'bob@test.com', 'Pass.123')
response = self.client.get(
reverse('misago:forgotten_password_change_form', kwargs={
'user_id': test_user.id,
'token': make_password_change_token(test_user)
}))
self.assertEqual(response.status_code, 200)
# test invalid user
response = self.client.get(
reverse('misago:forgotten_password_change_form', kwargs={
'user_id': 7681,
'token': 'a7d8sa97d98sa798dsa'
}))
self.assertEqual(response.status_code, 200)
# test invalid token
response = self.client.get(
reverse('misago:forgotten_password_change_form', kwargs={
'user_id': test_user.id,
'token': 'asd79as87ds9a8d7sa'
}))
self.assertEqual(response.status_code, 200)
| gpl-2.0 |
cloudbase/nova-docker | novadocker/tests/virt/docker/mock_client.py | 6 | 7794 | # Copyright (c) 2013 dotCloud, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import time
import uuid
from oslo_utils import timeutils
from six import moves
from novadocker.virt.docker import client as docker_client
class MockClient(object):
def __init__(self, endpoint=None):
self._containers = {}
self.name = None
# Fake repository
self._repository = {'image_with_cmd':
{'ContainerConfig':
{'Cmd': 'echo Test'}},
'image_without_cmd':
{'ContainerConfig':
{'Cmd': None}}}
self._images = {'snap-1':
{'ContainerConfig':
{'Cmd': None}}}
self._image_data = {'snap-1': 'dummy'}
self._setup_decorators()
def _setup_decorators(self):
for name, member in inspect.getmembers(self, inspect.ismethod):
if not name.startswith('_'):
setattr(self, name, docker_client.filter_data(member))
def _fake_id(self):
return uuid.uuid4().hex + uuid.uuid4().hex
def _image_name(self, image_name):
"""Split full image name to host and image name."""
if '/' in image_name:
host, image_name = image_name.split('/', 1)
return image_name
def _is_image_exists(self, image_name):
"""Check whether Images is listed in self._repository."""
image_name = self._image_name(image_name)
if image_name in self._repository:
return image_name in self._images
return True
def _is_daemon_running(self):
return True
def containers(self, all=True):
containers = []
for container_id in self._containers.iterkeys():
containers.append({
'Status': 'Exit 0',
'Created': int(time.time()),
'Image': 'ubuntu:12.04',
'Ports': '',
'Command': 'bash ',
'Id': container_id
})
return containers
def create_container(self, image_name, **args):
self.name = args['name']
data = {
'Hostname': args['hostname'],
'User': '',
'Memory': args['mem_limit'],
'MemorySwap': 0,
'AttachStdin': False,
'AttachStdout': False,
'AttachStderr': False,
'PortSpecs': None,
'Tty': True,
'OpenStdin': True,
'StdinOnce': False,
'Env': None,
'Cmd': [],
'Dns': None,
'Image': image_name,
'Volumes': {},
'VolumesFrom': '',
'CpuShares': args['cpu_shares'],
'NetworkDisabled': args['network_disabled']
}
data.update(args)
if not self._is_image_exists(data['Image']):
return None
container_id = self._fake_id()
self._containers[container_id] = {
'Id': container_id,
'running': False,
'Config': data
}
return container_id
def start(self, container_id, binds=None, dns=None):
if container_id not in self._containers:
return False
self._containers[container_id]['running'] = True
return True
def inspect_image(self, image_name):
if not self._is_image_exists(image_name):
return None
image_name = self._image_name(image_name)
if image_name in self._images:
return self._images[image_name]
return {'ContainerConfig': {'Cmd': None}}
def inspect_container(self, container_id):
if container_id not in self._containers:
return
container = self._containers[container_id]
info = {
'Args': [],
'Config': container['Config'],
'Created': str(timeutils.utcnow()),
'Id': container_id,
'Image': self._fake_id(),
'NetworkSettings': {
'Bridge': '',
'Gateway': '',
'IPAddress': '',
'IPPrefixLen': 0,
'PortMapping': None
},
'Path': 'bash',
'ResolvConfPath': '/etc/resolv.conf',
'State': {
'ExitCode': 0,
'Ghost': False,
'Pid': 0,
'Running': container['running'],
'StartedAt': str(timeutils.utcnow())
},
'SysInitPath': '/tmp/docker',
'Volumes': {},
}
return info
def stop(self, container_id, timeout=None):
if container_id not in self._containers:
return False
self._containers[container_id]['running'] = False
return True
def kill(self, container_id):
if container_id not in self._containers:
return False
self._containers[container_id]['running'] = False
return True
def remove_container(self, container_id, force=False):
if container_id not in self._containers:
return False
# Docker doesn't allow to destroy a running container.
if self._containers[container_id]['running']:
return False
del self._containers[container_id]
return True
def unpause(self, container_id):
if container_id not in self._containers:
return False
self._containers[container_id]['paused'] = False
return True
def pause(self, container_id):
if container_id not in self._containers:
return False
self._containers[container_id]['paused'] = True
return True
def commit(self, container_id, repository=None, tag=None):
if container_id not in self._containers:
return False
return True
def get_container_logs(self, container_id):
if container_id not in self._containers:
return False
return '\n'.join([
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. ',
'Vivamus ornare mi sit amet orci feugiat, nec luctus magna ',
'vehicula. Quisque diam nisl, dictum vitae pretium id, ',
'consequat eget sapien. Ut vehicula tortor non ipsum ',
'consectetur, at tincidunt elit posuere. In ut ligula leo. ',
'Donec eleifend accumsan mi, in accumsan metus. Nullam nec ',
'nulla eu risus vehicula porttitor. Sed purus ligula, ',
'placerat nec metus a, imperdiet viverra turpis. Praesent ',
'dapibus ornare massa. Nam ut hendrerit nunc. Interdum et ',
'malesuada fames ac ante ipsum primis in faucibus. ',
'Fusce nec pellentesque nisl.'])
def get_image(self, name):
if (name not in self._images or
name not in self._image_data):
raise Exception("Image not found - %s" % name)
return moves.StringIO(self._image_data[name])
def load_image(self, name, data):
self._image_data[name] = data
def load_repository_file(self, name, path):
pass
def ping(self):
return True
| apache-2.0 |
toobaz/pandas | pandas/tests/indexing/test_loc.py | 1 | 33581 | """ test label based indexing with loc """
from io import StringIO
import re
from warnings import catch_warnings, filterwarnings
import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Series, Timestamp, date_range
from pandas.api.types import is_scalar
from pandas.tests.indexing.common import Base
from pandas.util import testing as tm
class TestLoc(Base):
def test_loc_getitem_dups(self):
# GH 5678
# repeated getitems on a dup index returning a ndarray
df = DataFrame(
np.random.random_sample((20, 5)), index=["ABCDE"[x % 5] for x in range(20)]
)
expected = df.loc["A", 0]
result = df.loc[:, 0].loc["A"]
tm.assert_series_equal(result, expected)
def test_loc_getitem_dups2(self):
# GH4726
# dup indexing with iloc/loc
df = DataFrame(
[[1, 2, "foo", "bar", Timestamp("20130101")]],
columns=["a", "a", "a", "a", "a"],
index=[1],
)
expected = Series(
[1, 2, "foo", "bar", Timestamp("20130101")],
index=["a", "a", "a", "a", "a"],
name=1,
)
result = df.iloc[0]
tm.assert_series_equal(result, expected)
result = df.loc[1]
tm.assert_series_equal(result, expected)
def test_loc_setitem_dups(self):
# GH 6541
df_orig = DataFrame(
{
"me": list("rttti"),
"foo": list("aaade"),
"bar": np.arange(5, dtype="float64") * 1.34 + 2,
"bar2": np.arange(5, dtype="float64") * -0.34 + 2,
}
).set_index("me")
indexer = tuple(["r", ["bar", "bar2"]])
df = df_orig.copy()
df.loc[indexer] *= 2.0
tm.assert_series_equal(df.loc[indexer], 2.0 * df_orig.loc[indexer])
indexer = tuple(["r", "bar"])
df = df_orig.copy()
df.loc[indexer] *= 2.0
assert df.loc[indexer] == 2.0 * df_orig.loc[indexer]
indexer = tuple(["t", ["bar", "bar2"]])
df = df_orig.copy()
df.loc[indexer] *= 2.0
tm.assert_frame_equal(df.loc[indexer], 2.0 * df_orig.loc[indexer])
def test_loc_setitem_slice(self):
# GH10503
# assigning the same type should not change the type
df1 = DataFrame({"a": [0, 1, 1], "b": Series([100, 200, 300], dtype="uint32")})
ix = df1["a"] == 1
newb1 = df1.loc[ix, "b"] + 1
df1.loc[ix, "b"] = newb1
expected = DataFrame(
{"a": [0, 1, 1], "b": Series([100, 201, 301], dtype="uint32")}
)
tm.assert_frame_equal(df1, expected)
# assigning a new type should get the inferred type
df2 = DataFrame({"a": [0, 1, 1], "b": [100, 200, 300]}, dtype="uint64")
ix = df1["a"] == 1
newb2 = df2.loc[ix, "b"]
df1.loc[ix, "b"] = newb2
expected = DataFrame({"a": [0, 1, 1], "b": [100, 200, 300]}, dtype="uint64")
tm.assert_frame_equal(df2, expected)
def test_loc_getitem_int(self):
# int label
self.check_result(
"int label", "loc", 2, "ix", 2, typs=["ints", "uints"], axes=0
)
self.check_result(
"int label", "loc", 3, "ix", 3, typs=["ints", "uints"], axes=1
)
self.check_result(
"int label", "loc", 2, "ix", 2, typs=["label"], fails=KeyError
)
def test_loc_getitem_label(self):
# label
self.check_result("label", "loc", "c", "ix", "c", typs=["labels"], axes=0)
self.check_result("label", "loc", "null", "ix", "null", typs=["mixed"], axes=0)
self.check_result("label", "loc", 8, "ix", 8, typs=["mixed"], axes=0)
self.check_result(
"label", "loc", Timestamp("20130102"), "ix", 1, typs=["ts"], axes=0
)
self.check_result(
"label", "loc", "c", "ix", "c", typs=["empty"], fails=KeyError
)
def test_loc_getitem_label_out_of_range(self):
# out of range label
self.check_result(
"label range",
"loc",
"f",
"ix",
"f",
typs=["ints", "uints", "labels", "mixed", "ts"],
fails=KeyError,
)
self.check_result(
"label range", "loc", "f", "ix", "f", typs=["floats"], fails=KeyError
)
self.check_result(
"label range",
"loc",
20,
"ix",
20,
typs=["ints", "uints", "mixed"],
fails=KeyError,
)
self.check_result(
"label range", "loc", 20, "ix", 20, typs=["labels"], fails=TypeError
)
self.check_result(
"label range", "loc", 20, "ix", 20, typs=["ts"], axes=0, fails=TypeError
)
self.check_result(
"label range", "loc", 20, "ix", 20, typs=["floats"], axes=0, fails=KeyError
)
def test_loc_getitem_label_list(self):
# list of labels
self.check_result(
"list lbl",
"loc",
[0, 2, 4],
"ix",
[0, 2, 4],
typs=["ints", "uints"],
axes=0,
)
self.check_result(
"list lbl",
"loc",
[3, 6, 9],
"ix",
[3, 6, 9],
typs=["ints", "uints"],
axes=1,
)
self.check_result(
"list lbl",
"loc",
["a", "b", "d"],
"ix",
["a", "b", "d"],
typs=["labels"],
axes=0,
)
self.check_result(
"list lbl",
"loc",
["A", "B", "C"],
"ix",
["A", "B", "C"],
typs=["labels"],
axes=1,
)
self.check_result(
"list lbl",
"loc",
[2, 8, "null"],
"ix",
[2, 8, "null"],
typs=["mixed"],
axes=0,
)
self.check_result(
"list lbl",
"loc",
[Timestamp("20130102"), Timestamp("20130103")],
"ix",
[Timestamp("20130102"), Timestamp("20130103")],
typs=["ts"],
axes=0,
)
def test_loc_getitem_label_list_with_missing(self):
self.check_result(
"list lbl",
"loc",
[0, 1, 2],
"indexer",
[0, 1, 2],
typs=["empty"],
fails=KeyError,
)
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
self.check_result(
"list lbl",
"loc",
[0, 2, 10],
"ix",
[0, 2, 10],
typs=["ints", "uints", "floats"],
axes=0,
fails=KeyError,
)
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
self.check_result(
"list lbl",
"loc",
[3, 6, 7],
"ix",
[3, 6, 7],
typs=["ints", "uints", "floats"],
axes=1,
fails=KeyError,
)
# GH 17758 - MultiIndex and missing keys
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
self.check_result(
"list lbl",
"loc",
[(1, 3), (1, 4), (2, 5)],
"ix",
[(1, 3), (1, 4), (2, 5)],
typs=["multi"],
axes=0,
)
def test_getitem_label_list_with_missing(self):
s = Series(range(3), index=["a", "b", "c"])
# consistency
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
s[["a", "d"]]
s = Series(range(3))
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
s[[0, 3]]
def test_loc_getitem_label_list_fails(self):
# fails
self.check_result(
"list lbl",
"loc",
[20, 30, 40],
"ix",
[20, 30, 40],
typs=["ints", "uints"],
axes=1,
fails=KeyError,
)
def test_loc_getitem_label_array_like(self):
# array like
self.check_result(
"array like",
"loc",
Series(index=[0, 2, 4]).index,
"ix",
[0, 2, 4],
typs=["ints", "uints"],
axes=0,
)
self.check_result(
"array like",
"loc",
Series(index=[3, 6, 9]).index,
"ix",
[3, 6, 9],
typs=["ints", "uints"],
axes=1,
)
def test_loc_getitem_bool(self):
# boolean indexers
b = [True, False, True, False]
self.check_result(
"bool",
"loc",
b,
"ix",
b,
typs=["ints", "uints", "labels", "mixed", "ts", "floats"],
)
self.check_result("bool", "loc", b, "ix", b, typs=["empty"], fails=IndexError)
@pytest.mark.parametrize("index", [[True, False], [True, False, True, False]])
def test_loc_getitem_bool_diff_len(self, index):
# GH26658
s = Series([1, 2, 3])
with pytest.raises(
IndexError,
match=("Item wrong length {} instead of {}.".format(len(index), len(s))),
):
_ = s.loc[index]
def test_loc_getitem_int_slice(self):
# ok
self.check_result(
"int slice2",
"loc",
slice(2, 4),
"ix",
[2, 4],
typs=["ints", "uints"],
axes=0,
)
self.check_result(
"int slice2",
"loc",
slice(3, 6),
"ix",
[3, 6],
typs=["ints", "uints"],
axes=1,
)
def test_loc_to_fail(self):
# GH3449
df = DataFrame(
np.random.random((3, 3)), index=["a", "b", "c"], columns=["e", "f", "g"]
)
# raise a KeyError?
msg = (
r"\"None of \[Int64Index\(\[1, 2\], dtype='int64'\)\] are"
r" in the \[index\]\""
)
with pytest.raises(KeyError, match=msg):
df.loc[[1, 2], [1, 2]]
# GH 7496
# loc should not fallback
s = Series()
s.loc[1] = 1
s.loc["a"] = 2
with pytest.raises(KeyError, match=r"^-1$"):
s.loc[-1]
msg = (
r"\"None of \[Int64Index\(\[-1, -2\], dtype='int64'\)\] are"
r" in the \[index\]\""
)
with pytest.raises(KeyError, match=msg):
s.loc[[-1, -2]]
msg = (
r"\"None of \[Index\(\['4'\], dtype='object'\)\] are" r" in the \[index\]\""
)
with pytest.raises(KeyError, match=msg):
s.loc[["4"]]
s.loc[-1] = 3
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = s.loc[[-1, -2]]
expected = Series([3, np.nan], index=[-1, -2])
tm.assert_series_equal(result, expected)
s["a"] = 2
msg = (
r"\"None of \[Int64Index\(\[-2\], dtype='int64'\)\] are"
r" in the \[index\]\""
)
with pytest.raises(KeyError, match=msg):
s.loc[[-2]]
del s["a"]
with pytest.raises(KeyError, match=msg):
s.loc[[-2]] = 0
# inconsistency between .loc[values] and .loc[values,:]
# GH 7999
df = DataFrame([["a"], ["b"]], index=[1, 2], columns=["value"])
msg = (
r"\"None of \[Int64Index\(\[3\], dtype='int64'\)\] are"
r" in the \[index\]\""
)
with pytest.raises(KeyError, match=msg):
df.loc[[3], :]
with pytest.raises(KeyError, match=msg):
df.loc[[3]]
def test_loc_getitem_list_with_fail(self):
# 15747
# should KeyError if *any* missing labels
s = Series([1, 2, 3])
s.loc[[2]]
with pytest.raises(
KeyError,
match=re.escape(
"\"None of [Int64Index([3], dtype='int64')] are in the [index]\""
),
):
s.loc[[3]]
# a non-match and a match
with tm.assert_produces_warning(FutureWarning):
expected = s.loc[[2, 3]]
result = s.reindex([2, 3])
tm.assert_series_equal(result, expected)
def test_loc_getitem_label_slice(self):
# label slices (with ints)
self.check_result(
"lab slice",
"loc",
slice(1, 3),
"ix",
slice(1, 3),
typs=["labels", "mixed", "empty", "ts", "floats"],
fails=TypeError,
)
# real label slices
self.check_result(
"lab slice",
"loc",
slice("a", "c"),
"ix",
slice("a", "c"),
typs=["labels"],
axes=0,
)
self.check_result(
"lab slice",
"loc",
slice("A", "C"),
"ix",
slice("A", "C"),
typs=["labels"],
axes=1,
)
self.check_result(
"ts slice",
"loc",
slice("20130102", "20130104"),
"ix",
slice("20130102", "20130104"),
typs=["ts"],
axes=0,
)
self.check_result(
"ts slice",
"loc",
slice("20130102", "20130104"),
"ix",
slice("20130102", "20130104"),
typs=["ts"],
axes=1,
fails=TypeError,
)
# GH 14316
self.check_result(
"ts slice rev",
"loc",
slice("20130104", "20130102"),
"indexer",
[0, 1, 2],
typs=["ts_rev"],
axes=0,
)
self.check_result(
"mixed slice",
"loc",
slice(2, 8),
"ix",
slice(2, 8),
typs=["mixed"],
axes=0,
fails=TypeError,
)
self.check_result(
"mixed slice",
"loc",
slice(2, 8),
"ix",
slice(2, 8),
typs=["mixed"],
axes=1,
fails=KeyError,
)
self.check_result(
"mixed slice",
"loc",
slice(2, 4, 2),
"ix",
slice(2, 4, 2),
typs=["mixed"],
axes=0,
fails=TypeError,
)
def test_loc_index(self):
# gh-17131
# a boolean index should index like a boolean numpy array
df = DataFrame(
np.random.random(size=(5, 10)),
index=["alpha_0", "alpha_1", "alpha_2", "beta_0", "beta_1"],
)
mask = df.index.map(lambda x: "alpha" in x)
expected = df.loc[np.array(mask)]
result = df.loc[mask]
tm.assert_frame_equal(result, expected)
result = df.loc[mask.values]
tm.assert_frame_equal(result, expected)
def test_loc_general(self):
df = DataFrame(
np.random.rand(4, 4),
columns=["A", "B", "C", "D"],
index=["A", "B", "C", "D"],
)
# want this to work
result = df.loc[:, "A":"B"].iloc[0:2, :]
assert (result.columns == ["A", "B"]).all()
assert (result.index == ["A", "B"]).all()
# mixed type
result = DataFrame({"a": [Timestamp("20130101")], "b": [1]}).iloc[0]
expected = Series([Timestamp("20130101"), 1], index=["a", "b"], name=0)
tm.assert_series_equal(result, expected)
assert result.dtype == object
def test_loc_setitem_consistency(self):
# GH 6149
# coerce similarly for setitem and loc when rows have a null-slice
expected = DataFrame(
{
"date": Series(0, index=range(5), dtype=np.int64),
"val": Series(range(5), dtype=np.int64),
}
)
df = DataFrame(
{
"date": date_range("2000-01-01", "2000-01-5"),
"val": Series(range(5), dtype=np.int64),
}
)
df.loc[:, "date"] = 0
tm.assert_frame_equal(df, expected)
df = DataFrame(
{
"date": date_range("2000-01-01", "2000-01-5"),
"val": Series(range(5), dtype=np.int64),
}
)
df.loc[:, "date"] = np.array(0, dtype=np.int64)
tm.assert_frame_equal(df, expected)
df = DataFrame(
{
"date": date_range("2000-01-01", "2000-01-5"),
"val": Series(range(5), dtype=np.int64),
}
)
df.loc[:, "date"] = np.array([0, 0, 0, 0, 0], dtype=np.int64)
tm.assert_frame_equal(df, expected)
expected = DataFrame(
{
"date": Series("foo", index=range(5)),
"val": Series(range(5), dtype=np.int64),
}
)
df = DataFrame(
{
"date": date_range("2000-01-01", "2000-01-5"),
"val": Series(range(5), dtype=np.int64),
}
)
df.loc[:, "date"] = "foo"
tm.assert_frame_equal(df, expected)
expected = DataFrame(
{
"date": Series(1.0, index=range(5)),
"val": Series(range(5), dtype=np.int64),
}
)
df = DataFrame(
{
"date": date_range("2000-01-01", "2000-01-5"),
"val": Series(range(5), dtype=np.int64),
}
)
df.loc[:, "date"] = 1.0
tm.assert_frame_equal(df, expected)
# GH 15494
# setting on frame with single row
df = DataFrame({"date": Series([Timestamp("20180101")])})
df.loc[:, "date"] = "string"
expected = DataFrame({"date": Series(["string"])})
tm.assert_frame_equal(df, expected)
def test_loc_setitem_consistency_empty(self):
# empty (essentially noops)
expected = DataFrame(columns=["x", "y"])
expected["x"] = expected["x"].astype(np.int64)
df = DataFrame(columns=["x", "y"])
df.loc[:, "x"] = 1
tm.assert_frame_equal(df, expected)
df = DataFrame(columns=["x", "y"])
df["x"] = 1
tm.assert_frame_equal(df, expected)
def test_loc_setitem_consistency_slice_column_len(self):
# .loc[:,column] setting with slice == len of the column
# GH10408
data = """Level_0,,,Respondent,Respondent,Respondent,OtherCat,OtherCat
Level_1,,,Something,StartDate,EndDate,Yes/No,SomethingElse
Region,Site,RespondentID,,,,,
Region_1,Site_1,3987227376,A,5/25/2015 10:59,5/25/2015 11:22,Yes,
Region_1,Site_1,3980680971,A,5/21/2015 9:40,5/21/2015 9:52,Yes,Yes
Region_1,Site_2,3977723249,A,5/20/2015 8:27,5/20/2015 8:41,Yes,
Region_1,Site_2,3977723089,A,5/20/2015 8:33,5/20/2015 9:09,Yes,No"""
df = pd.read_csv(StringIO(data), header=[0, 1], index_col=[0, 1, 2])
df.loc[:, ("Respondent", "StartDate")] = pd.to_datetime(
df.loc[:, ("Respondent", "StartDate")]
)
df.loc[:, ("Respondent", "EndDate")] = pd.to_datetime(
df.loc[:, ("Respondent", "EndDate")]
)
df.loc[:, ("Respondent", "Duration")] = (
df.loc[:, ("Respondent", "EndDate")]
- df.loc[:, ("Respondent", "StartDate")]
)
df.loc[:, ("Respondent", "Duration")] = df.loc[
:, ("Respondent", "Duration")
].astype("timedelta64[s]")
expected = Series(
[1380, 720, 840, 2160.0], index=df.index, name=("Respondent", "Duration")
)
tm.assert_series_equal(df[("Respondent", "Duration")], expected)
def test_loc_setitem_frame(self):
df = self.frame_labels
result = df.iloc[0, 0]
df.loc["a", "A"] = 1
result = df.loc["a", "A"]
assert result == 1
result = df.iloc[0, 0]
assert result == 1
df.loc[:, "B":"D"] = 0
expected = df.loc[:, "B":"D"]
result = df.iloc[:, 1:]
tm.assert_frame_equal(result, expected)
# GH 6254
# setting issue
df = DataFrame(index=[3, 5, 4], columns=["A"])
df.loc[[4, 3, 5], "A"] = np.array([1, 2, 3], dtype="int64")
expected = DataFrame(dict(A=Series([1, 2, 3], index=[4, 3, 5]))).reindex(
index=[3, 5, 4]
)
tm.assert_frame_equal(df, expected)
# GH 6252
# setting with an empty frame
keys1 = ["@" + str(i) for i in range(5)]
val1 = np.arange(5, dtype="int64")
keys2 = ["@" + str(i) for i in range(4)]
val2 = np.arange(4, dtype="int64")
index = list(set(keys1).union(keys2))
df = DataFrame(index=index)
df["A"] = np.nan
df.loc[keys1, "A"] = val1
df["B"] = np.nan
df.loc[keys2, "B"] = val2
expected = DataFrame(
dict(A=Series(val1, index=keys1), B=Series(val2, index=keys2))
).reindex(index=index)
tm.assert_frame_equal(df, expected)
# GH 8669
# invalid coercion of nan -> int
df = DataFrame({"A": [1, 2, 3], "B": np.nan})
df.loc[df.B > df.A, "B"] = df.A
expected = DataFrame({"A": [1, 2, 3], "B": np.nan})
tm.assert_frame_equal(df, expected)
# GH 6546
# setting with mixed labels
df = DataFrame({1: [1, 2], 2: [3, 4], "a": ["a", "b"]})
result = df.loc[0, [1, 2]]
expected = Series([1, 3], index=[1, 2], dtype=object, name=0)
tm.assert_series_equal(result, expected)
expected = DataFrame({1: [5, 2], 2: [6, 4], "a": ["a", "b"]})
df.loc[0, [1, 2]] = [5, 6]
tm.assert_frame_equal(df, expected)
def test_loc_setitem_frame_multiples(self):
# multiple setting
df = DataFrame(
{"A": ["foo", "bar", "baz"], "B": Series(range(3), dtype=np.int64)}
)
rhs = df.loc[1:2]
rhs.index = df.index[0:2]
df.loc[0:1] = rhs
expected = DataFrame(
{"A": ["bar", "baz", "baz"], "B": Series([1, 2, 2], dtype=np.int64)}
)
tm.assert_frame_equal(df, expected)
# multiple setting with frame on rhs (with M8)
df = DataFrame(
{
"date": date_range("2000-01-01", "2000-01-5"),
"val": Series(range(5), dtype=np.int64),
}
)
expected = DataFrame(
{
"date": [
Timestamp("20000101"),
Timestamp("20000102"),
Timestamp("20000101"),
Timestamp("20000102"),
Timestamp("20000103"),
],
"val": Series([0, 1, 0, 1, 2], dtype=np.int64),
}
)
rhs = df.loc[0:2]
rhs.index = df.index[2:5]
df.loc[2:4] = rhs
tm.assert_frame_equal(df, expected)
@pytest.mark.parametrize(
"indexer", [["A"], slice(None, "A", None), np.array(["A"])]
)
@pytest.mark.parametrize("value", [["Z"], np.array(["Z"])])
def test_loc_setitem_with_scalar_index(self, indexer, value):
# GH #19474
# assigning like "df.loc[0, ['A']] = ['Z']" should be evaluated
# elementwisely, not using "setter('A', ['Z'])".
df = pd.DataFrame([[1, 2], [3, 4]], columns=["A", "B"])
df.loc[0, indexer] = value
result = df.loc[0, "A"]
assert is_scalar(result) and result == "Z"
def test_loc_coercion(self):
# 12411
df = DataFrame({"date": [Timestamp("20130101").tz_localize("UTC"), pd.NaT]})
expected = df.dtypes
result = df.iloc[[0]]
tm.assert_series_equal(result.dtypes, expected)
result = df.iloc[[1]]
tm.assert_series_equal(result.dtypes, expected)
# 12045
import datetime
df = DataFrame(
{"date": [datetime.datetime(2012, 1, 1), datetime.datetime(1012, 1, 2)]}
)
expected = df.dtypes
result = df.iloc[[0]]
tm.assert_series_equal(result.dtypes, expected)
result = df.iloc[[1]]
tm.assert_series_equal(result.dtypes, expected)
# 11594
df = DataFrame({"text": ["some words"] + [None] * 9})
expected = df.dtypes
result = df.iloc[0:2]
tm.assert_series_equal(result.dtypes, expected)
result = df.iloc[3:]
tm.assert_series_equal(result.dtypes, expected)
def test_setitem_new_key_tz(self):
# GH#12862 should not raise on assigning the second value
vals = [
pd.to_datetime(42).tz_localize("UTC"),
pd.to_datetime(666).tz_localize("UTC"),
]
expected = pd.Series(vals, index=["foo", "bar"])
ser = pd.Series()
ser["foo"] = vals[0]
ser["bar"] = vals[1]
tm.assert_series_equal(ser, expected)
ser = pd.Series()
ser.loc["foo"] = vals[0]
ser.loc["bar"] = vals[1]
tm.assert_series_equal(ser, expected)
def test_loc_non_unique(self):
# GH3659
# non-unique indexer with loc slice
# https://groups.google.com/forum/?fromgroups#!topic/pydata/zTm2No0crYs
# these are going to raise because the we are non monotonic
df = DataFrame(
{"A": [1, 2, 3, 4, 5, 6], "B": [3, 4, 5, 6, 7, 8]}, index=[0, 1, 0, 1, 2, 3]
)
msg = "'Cannot get left slice bound for non-unique label: 1'"
with pytest.raises(KeyError, match=msg):
df.loc[1:]
msg = "'Cannot get left slice bound for non-unique label: 0'"
with pytest.raises(KeyError, match=msg):
df.loc[0:]
msg = "'Cannot get left slice bound for non-unique label: 1'"
with pytest.raises(KeyError, match=msg):
df.loc[1:2]
# monotonic are ok
df = DataFrame(
{"A": [1, 2, 3, 4, 5, 6], "B": [3, 4, 5, 6, 7, 8]}, index=[0, 1, 0, 1, 2, 3]
).sort_index(axis=0)
result = df.loc[1:]
expected = DataFrame({"A": [2, 4, 5, 6], "B": [4, 6, 7, 8]}, index=[1, 1, 2, 3])
tm.assert_frame_equal(result, expected)
result = df.loc[0:]
tm.assert_frame_equal(result, df)
result = df.loc[1:2]
expected = DataFrame({"A": [2, 4, 5], "B": [4, 6, 7]}, index=[1, 1, 2])
tm.assert_frame_equal(result, expected)
def test_loc_non_unique_memory_error(self):
# GH 4280
# non_unique index with a large selection triggers a memory error
columns = list("ABCDEFG")
def gen_test(l, l2):
return pd.concat(
[
DataFrame(
np.random.randn(l, len(columns)),
index=np.arange(l),
columns=columns,
),
DataFrame(
np.ones((l2, len(columns))), index=[0] * l2, columns=columns
),
]
)
def gen_expected(df, mask):
len_mask = len(mask)
return pd.concat(
[
df.take([0]),
DataFrame(
np.ones((len_mask, len(columns))),
index=[0] * len_mask,
columns=columns,
),
df.take(mask[1:]),
]
)
df = gen_test(900, 100)
assert df.index.is_unique is False
mask = np.arange(100)
result = df.loc[mask]
expected = gen_expected(df, mask)
tm.assert_frame_equal(result, expected)
df = gen_test(900000, 100000)
assert df.index.is_unique is False
mask = np.arange(100000)
result = df.loc[mask]
expected = gen_expected(df, mask)
tm.assert_frame_equal(result, expected)
def test_loc_name(self):
# GH 3880
df = DataFrame([[1, 1], [1, 1]])
df.index.name = "index_name"
result = df.iloc[[0, 1]].index.name
assert result == "index_name"
with catch_warnings(record=True):
filterwarnings("ignore", "\\n.ix", FutureWarning)
result = df.ix[[0, 1]].index.name
assert result == "index_name"
result = df.loc[[0, 1]].index.name
assert result == "index_name"
def test_loc_empty_list_indexer_is_ok(self):
from pandas.util.testing import makeCustomDataframe as mkdf
df = mkdf(5, 2)
# vertical empty
tm.assert_frame_equal(
df.loc[:, []], df.iloc[:, :0], check_index_type=True, check_column_type=True
)
# horizontal empty
tm.assert_frame_equal(
df.loc[[], :], df.iloc[:0, :], check_index_type=True, check_column_type=True
)
# horizontal empty
tm.assert_frame_equal(
df.loc[[]], df.iloc[:0, :], check_index_type=True, check_column_type=True
)
def test_identity_slice_returns_new_object(self):
# GH13873
original_df = DataFrame({"a": [1, 2, 3]})
sliced_df = original_df.loc[:]
assert sliced_df is not original_df
assert original_df[:] is not original_df
# should be a shallow copy
original_df["a"] = [4, 4, 4]
assert (sliced_df["a"] == 4).all()
# These should not return copies
assert original_df is original_df.loc[:, :]
df = DataFrame(np.random.randn(10, 4))
assert df[0] is df.loc[:, 0]
# Same tests for Series
original_series = Series([1, 2, 3, 4, 5, 6])
sliced_series = original_series.loc[:]
assert sliced_series is not original_series
assert original_series[:] is not original_series
original_series[:3] = [7, 8, 9]
assert all(sliced_series[:3] == [7, 8, 9])
def test_loc_uint64(self):
# GH20722
# Test whether loc accept uint64 max value as index.
s = pd.Series(
[1, 2], index=[np.iinfo("uint64").max - 1, np.iinfo("uint64").max]
)
result = s.loc[np.iinfo("uint64").max - 1]
expected = s.iloc[0]
assert result == expected
result = s.loc[[np.iinfo("uint64").max - 1]]
expected = s.iloc[[0]]
tm.assert_series_equal(result, expected)
result = s.loc[[np.iinfo("uint64").max - 1, np.iinfo("uint64").max]]
tm.assert_series_equal(result, s)
def test_loc_setitem_empty_append(self):
    """Assigning via ``.loc`` on an empty frame appends rows to fit (GH6173).

    NOTE(review): the first half relies on setitem-with-expansion behavior
    that has changed across pandas versions -- confirm against the pandas
    version this suite targets.
    """
    # GH6173, various appends to an empty dataframe
    data = [1, 2, 3]
    expected = DataFrame({"x": data, "y": [None] * len(data)})
    # appends to fit length of data
    df = DataFrame(columns=["x", "y"])
    df.loc[:, "x"] = data
    tm.assert_frame_equal(df, expected)
    # only appends one value
    expected = DataFrame({"x": [1.0], "y": [np.nan]})
    # FIX: ``np.float`` (a deprecated alias for the builtin ``float``) was
    # removed in NumPy 1.24 and now raises AttributeError; the builtin is
    # the exact equivalent.
    df = DataFrame(columns=["x", "y"], dtype=float)
    df.loc[0, "x"] = expected.loc[0, "x"]
    tm.assert_frame_equal(df, expected)
def test_loc_setitem_empty_append_raises(self):
    """Bad ``.loc`` assignments on an empty frame must raise (GH6173)."""
    # GH6173, various appends to an empty dataframe
    values = [1, 2]
    frame = DataFrame(columns=["x", "y"])
    # A list-of-labels row indexer referencing absent rows is a KeyError.
    key_error_msg = (
        r"None of \[Int64Index\(\[0, 1\], dtype='int64'\)\] "
        r"are in the \[index\]"
    )
    with pytest.raises(KeyError, match=key_error_msg):
        frame.loc[[0, 1], "x"] = values
    # A slice row indexer over the empty axis is a length-mismatch ValueError.
    value_error_msg = "cannot copy sequence with size 2 to array axis with dimension 0"
    with pytest.raises(ValueError, match=value_error_msg):
        frame.loc[0:2, "x"] = values
def test_indexing_zerodim_np_array(self):
# GH24924
df = DataFrame([[1, 2], [3, 4]])
result = df.loc[np.array(0)]
s = pd.Series([1, 2], name=0)
tm.assert_series_equal(result, s)
def test_series_indexing_zerodim_np_array(self):
# GH24924
s = Series([1, 2])
result = s.loc[np.array(0)]
assert result == 1
def test_series_loc_getitem_label_list_missing_values():
    """``Series.loc`` with a partially-missing label list warns and fills NaN.

    gh-11428.  NOTE(review): this relies on the deprecated "missing labels
    warn instead of raise" behavior; pandas >= 1.0 raises KeyError for
    list indexers containing missing labels, so this test is pinned to
    the older pandas this suite targets.
    """
    # gh-11428
    key = np.array(
        ["2001-01-04", "2001-01-02", "2001-01-04", "2001-01-14"], dtype="datetime64"
    )
    s = Series([2, 5, 8, 11], date_range("2001-01-01", freq="D", periods=4))
    # "2001-01-14" lies outside the 4-day range, hence the NaN slot
    expected = Series([11.0, 5.0, 11.0, np.nan], index=key)
    with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
        result = s.loc[key]
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
    "columns, column_key, expected_columns, check_column_type",
    [
        ([2011, 2012, 2013], [2011, 2012], [0, 1], True),
        ([2011, 2012, "All"], [2011, 2012], [0, 1], False),
        ([2011, 2012, "All"], [2011, "All"], [0, 2], True),
    ],
)
def test_loc_getitem_label_list_integer_labels(
    columns, column_key, expected_columns, check_column_type
):
    """Integer column labels are matched by label, not position (gh-14836)."""
    # gh-14836
    frame = DataFrame(np.random.rand(3, 3), columns=columns, index=list("ABC"))
    selected = frame.loc[["A", "B", "C"], column_key]
    tm.assert_frame_equal(
        selected,
        frame.iloc[:, expected_columns],
        check_column_type=check_column_type,
    )
| bsd-3-clause |
lokal-profil/BatchUploadTools | tests/test_helpers.py | 1 | 5766 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Unit tests for helpers.py."""
from __future__ import unicode_literals
import unittest
from collections import OrderedDict
from batchupload.helpers import (
flip_name,
flip_names,
get_all_template_entries,
cleanString,
output_block_template
)
class TestFlipName(unittest.TestCase):
    """Test the flip_name method."""

    def test_flip_name_empty(self):
        # An empty string passes through untouched.
        self.assertEqual(flip_name(''), '')

    def test_flip_name_one_part(self):
        # No comma means there is nothing to flip.
        self.assertEqual(flip_name('The Name'), 'The Name')

    def test_flip_name_two_parts(self):
        # "Last, First" is flipped to "First Last".
        self.assertEqual(flip_name('Last, First'), 'First Last')

    def test_flip_name_three_parts(self):
        # More than one comma is ambiguous, so the input is left as-is.
        self.assertEqual(flip_name('Last, Middle, First'),
                         'Last, Middle, First')
class TestFlipNames(unittest.TestCase):
    """Test the flip_names method."""

    def test_flip_names_empty(self):
        # Flipping an empty list yields an empty list.
        self.assertEqual(flip_names([]), [])


# @TODO: add test counting calls to flip_names and number/content of output
class TestGetAllTemplateEntries(unittest.TestCase):
    """Test the get_all_template_entries method."""

    def test_get_all_template_entries_empty(self):
        # No wikitext and no template name gives no entries.
        self.assertEqual(get_all_template_entries('', ''), [])

    def test_get_all_template_entries_single(self):
        # One transclusion: positional, named and nested-template values
        # are all captured; positional parameters are keyed '1', '2', ...
        wikitext = '{{a|A|b=b|c={{c|c=pling}}}}'
        self.assertListEqual(
            get_all_template_entries(wikitext, 'a'),
            [{'1': 'A', 'c': '{{c|c=pling}}', 'b': 'b'}])

    def test_get_all_template_entries_multiple(self):
        # Repeated transclusions each yield their own parameter dict.
        wikitext = '{{a|b=b}} {{a|b=b}} {{a|c}}'
        self.assertListEqual(
            get_all_template_entries(wikitext, 'a'),
            [{'b': 'b'}, {'b': 'b'}, {'1': 'c'}])
class TestCleanString(unittest.TestCase):
    """Test the cleanString method."""

    def test_clean_string_empty(self):
        # The empty string survives unchanged.
        self.assertEqual(cleanString(''), '')

    def test_clean_string_normal_whitespace(self):
        # Common whitespace (tab, newline, nbsp) collapses to single
        # spaces and the ends are stripped.
        self.assertEqual(cleanString(' a\tb\nc\xa0d '), 'a b c d')

    def test_clean_string_unusual_whitespace(self):
        # Exotic control characters are normalised to a space too.
        self.assertEqual(cleanString('a\x8fb'), 'a b')

    def test_clean_string_brackets(self):
        # Every bracket flavour becomes a parenthesis.
        self.assertEqual(cleanString('[{()}]'), '((()))')

    def test_clean_string_separators(self):
        # Filename-hostile separators each become a hyphen.
        self.assertEqual(cleanString('#|/\\'), '----')

    def test_clean_string_colons(self):
        # Colons are replaced context-dependently.
        self.assertEqual(cleanString(':s,a: ,:'), 's,a, ,-')
class TestOutputBlockTemplate(unittest.TestCase):
    """Test the output_block_template method.

    NOTE(review): the expected strings of the set/no/auto padding tests
    are identical as transcribed here, which suggests the alignment
    whitespace produced by a non-zero padding width was lost in transit;
    confirm against helpers.output_block_template before relying on them.
    """

    def setUp(self):
        # Shared fixture: an OrderedDict keeps parameter order stable so
        # the rendered output is deterministic across the tests below.
        self.name = 'TemplateName'
        self.data = OrderedDict()
        self.data['param1'] = 'text1'
        self.data['param10'] = 'text10'
        self.data['param100'] = 'text100'

    def test_output_block_template_empty(self):
        # Empty name and data still render the bare template braces.
        expected = "{{\n}}"
        self.assertEqual(output_block_template('', {}, 0), expected)

    def test_output_block_template_unordered_dict(self):
        # A plain (unordered) dict is accepted; with a single key the
        # rendering order is unambiguous.
        data = {'param1': 'text1'}
        expected = "{{TemplateName\n" \
                   "| param1 = text1\n" \
                   "}}"
        self.assertEqual(
            output_block_template(self.name, data, 0),
            expected)

    def test_output_block_template_set_padding(self):
        # Explicit padding width of 15 (see class NOTE about whitespace).
        expected = "{{TemplateName\n" \
                   "| param1 = text1\n" \
                   "| param10 = text10\n" \
                   "| param100 = text100\n" \
                   "}}"
        self.assertEqual(
            output_block_template(self.name, self.data, 15),
            expected)

    def test_output_block_template_no_padding(self):
        # Padding 0: parameter names are not aligned.
        expected = "{{TemplateName\n" \
                   "| param1 = text1\n" \
                   "| param10 = text10\n" \
                   "| param100 = text100\n" \
                   "}}"
        self.assertEqual(
            output_block_template(self.name, self.data, 0),
            expected)

    def test_output_block_template_auto_padding(self):
        # Padding None: presumably auto-derives the width from the
        # longest key -- see output_block_template.
        expected = "{{TemplateName\n" \
                   "| param1 = text1\n" \
                   "| param10 = text10\n" \
                   "| param100 = text100\n" \
                   "}}"
        self.assertEqual(
            output_block_template(self.name, self.data, None),
            expected)

    def test_output_block_template_ommit_entry(self):
        # A value of None drops the parameter from the output entirely.
        self.data['special'] = None
        expected = "{{TemplateName\n" \
                   "| param1 = text1\n" \
                   "| param10 = text10\n" \
                   "| param100 = text100\n" \
                   "}}"
        self.assertEqual(
            output_block_template(self.name, self.data, None),
            expected)

    def test_output_block_template_empty_entry(self):
        # An empty-string value keeps the parameter with a blank value.
        self.data['special'] = ''
        expected = "{{TemplateName\n" \
                   "| param1 = text1\n" \
                   "| param10 = text10\n" \
                   "| param100 = text100\n" \
                   "| special = \n" \
                   "}}"
        self.assertEqual(
            output_block_template(self.name, self.data, None),
            expected)
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.