repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
ibinti/intellij-community | python/helpers/pycharm/_jb_runner_tools.py | 2 | 14537 | # coding=utf-8
"""
Tools to implement runners (https://confluence.jetbrains.com/display/~link/PyCharm+test+runners+protocol)
"""
import atexit
import _jb_utils
import os
import re
import sys
from teamcity import teamcity_presence_env_var, messages
# Some runners need it to "detect" TC and start protocol
if teamcity_presence_env_var not in os.environ:
os.environ[teamcity_presence_env_var] = "LOCAL"
# Providing this env variable disables output buffering.
# anything sent to stdout/stderr goes to IDE directly, not after test is over like it is done by default.
# out and err are not in sync, so output may go to wrong test
JB_DISABLE_BUFFERING = "JB_DISABLE_BUFFERING" in os.environ
PROJECT_DIR = os.getcwd()
def _parse_parametrized(part):
"""
Support nose generators / py.test parameters and other functions that provides names like foo(1,2)
Until https://github.com/JetBrains/teamcity-messages/issues/121, all such tests are provided
with parentheses.
Tests with docstring are reported in similar way but they have space before parenthesis and should be ignored
by this function
"""
match = re.match("^([^\\s)(]+)(\\(.+\\))$", part)
if not match:
return [part]
else:
return [match.group(1), match.group(2)]
# Monkeypatching TC to pass location hint
class _TreeManager(object):
"""
Manages output tree by building it from flat test names.
"""
def __init__(self):
super(_TreeManager, self).__init__()
# Currently active branch as list. New nodes go to this branch
self.current_branch = []
# node unique name to its nodeId
self._node_ids_dict = {}
# Node id mast be incremented for each new branch
self._max_node_id = 0
def _calculate_relation(self, branch_as_list):
"""
Get relation of branch_as_list to current branch.
:return: tuple. First argument could be: "same", "child", "parent" or "sibling"(need to start new tree)
Second argument is relative path from current branch to child if argument is child
"""
if branch_as_list == self.current_branch:
return "same", None
hierarchy_name_len = len(branch_as_list)
current_branch_len = len(self.current_branch)
if hierarchy_name_len > current_branch_len and branch_as_list[0:current_branch_len] == self.current_branch:
return "child", branch_as_list[current_branch_len:]
if hierarchy_name_len < current_branch_len and self.current_branch[0:hierarchy_name_len] == branch_as_list:
return "parent", None
return "sibling", None
def _add_new_node(self, new_node_name):
"""
Adds new node to branch
"""
self.current_branch.append(new_node_name)
self._max_node_id += 1
self._node_ids_dict[".".join(self.current_branch)] = self._max_node_id
def level_opened(self, test_as_list, func_to_open):
"""
To be called on test start.
:param test_as_list: test name splitted as list
:param func_to_open: func to be called if test can open new level
:return: None if new level opened, or tuple of command client should execute and try opening level again
Command is "open" (open provided level) or "close" (close it). Second item is test name as list
"""
relation, relative_path = self._calculate_relation(test_as_list)
if relation == 'same':
return # Opening same level?
if relation == 'child':
# If one level -- open new level gracefully
if len(relative_path) == 1:
self._add_new_node(relative_path[0])
func_to_open()
return None
else:
# Open previous level
return "open", self.current_branch + relative_path[0:1]
if relation == "sibling":
if self.current_branch:
# Different tree, close whole branch
return "close", self.current_branch
else:
return None
if relation == 'parent':
# Opening parent? Insane
pass
def level_closed(self, test_as_list, func_to_close):
"""
To be called on test end or failure.
See level_opened doc.
"""
relation, relative_path = self._calculate_relation(test_as_list)
if relation == 'same':
# Closing current level
func_to_close()
self.current_branch.pop()
if relation == 'child':
return None
if relation == 'sibling':
pass
if relation == 'parent':
return "close", self.current_branch
@property
def parent_branch(self):
return self.current_branch[:-1] if self.current_branch else None
def _get_node_id(self, branch):
return self._node_ids_dict[".".join(branch)]
@property
def node_ids(self):
"""
:return: (current_node_id, parent_node_id)
"""
current = self._get_node_id(self.current_branch)
parent = self._get_node_id(self.parent_branch) if self.parent_branch else "0"
return str(current), str(parent)
def close_all(self):
if not self.current_branch:
return None
return "close", self.current_branch
# Shared tree state used by the patched service-messages class below.
TREE_MANAGER = _TreeManager()
# Original teamcity-messages implementation, kept so the subclass can
# delegate to the unpatched behavior.
_old_service_messages = messages.TeamcityServiceMessages
# Hook for a custom test-name splitting function (not used in this chunk;
# presumably installed by individual runners — TODO confirm against callers).
PARSE_FUNC = None
class NewTeamcityServiceMessages(_old_service_messages):
    """
    teamcity-messages writer extended for the PyCharm protocol: every
    test-related message is decorated with ``locationHint``, ``nodeId`` and
    ``parentNodeId`` attributes, and missing suite open/close messages are
    emitted via the module-level TREE_MANAGER so the IDE can build a tree
    from flat test names.
    """
    # Result of the most recent unittest subtest block ("Failure", "Error",
    # "Skip" or None); consumed and reset by blockClosed().
    _latest_subtest_result = None

    def message(self, messageName, **properties):
        """Emit a service message, decorating test messages with tree info."""
        if messageName in set(["enteredTheMatrix", "testCount"]):
            # Protocol-level messages are not bound to any test node:
            # pass them through untouched.
            _old_service_messages.message(self, messageName, **properties)
            return
        try:
            # Report directory so Java site knows which folder to resolve names against.
            # Tests with docstrings are reported in format "test.name (some test here)":
            # the docstring text should be part of the name, but not of the location,
            # so everything from the first "(" on is stripped from the hint.
            possible_location = str(properties["name"])
            loc = possible_location.find("(")
            if loc > 0:
                possible_location = possible_location[:loc].strip()
            properties["locationHint"] = "python<{0}>://{1}".format(PROJECT_DIR, possible_location)
        except KeyError:
            # If message does not have a name, then it is not a test.
            # Simply pass it through.
            _old_service_messages.message(self, messageName, **properties)
            return
        # Shortcut for name: report only the last dotted component.
        try:
            properties["name"] = str(properties["name"]).split(".")[-1]
        except IndexError:
            pass
        current, parent = TREE_MANAGER.node_ids
        properties["nodeId"] = str(current)
        properties["parentNodeId"] = str(parent)
        _old_service_messages.message(self, messageName, **properties)

    def _test_to_list(self, test_name):
        """
        Splits test name into parts to use it as a list.
        In most cases dot is used, but a runner may provide a custom function.
        """
        parts = test_name.split(".")
        result = []
        for part in parts:
            # Parametrized names like "foo(1,2)" expand to two parts.
            result += _parse_parametrized(part)
        return result

    def _fix_setup_teardown_name(self, test_name):
        """
        Hack to rename setup and teardown methods to match real python
        signatures.
        """
        try:
            return {"test setup": "setUpClass", "test teardown": "tearDownClass"}[test_name]
        except KeyError:
            return test_name

    # Blocks are used for 2 cases now:
    # 1) Unittest subtests (only closed here; opened by subTestBlockOpened)
    # 2) setup/teardown (does not work, see https://github.com/JetBrains/teamcity-messages/issues/114)
    # def blockOpened(self, name, flowId=None):
    #     self.testStarted(".".join(TREE_MANAGER.current_branch + [self._fix_setup_teardown_name(name)]))

    def blockClosed(self, name, flowId=None):
        """Close a subtest block; blocks coming from setup/teardown are ignored."""
        # If _latest_subtest_result is not set or does not exist we are
        # closing a setup method, not a subtest.
        try:
            if not self._latest_subtest_result:
                return
        except AttributeError:
            return
        # Closing a subtest: report its recorded result, then finish it.
        test_name = ".".join(TREE_MANAGER.current_branch)
        if self._latest_subtest_result in set(["Failure", "Error"]):
            self.testFailed(test_name)
        if self._latest_subtest_result == "Skip":
            self.testIgnored(test_name)
        self.testFinished(test_name)
        self._latest_subtest_result = None

    def subTestBlockOpened(self, name, subTestResult, flowId=None):
        """Open a unittest subtest and remember its result for blockClosed()."""
        self.testStarted(".".join(TREE_MANAGER.current_branch + [name]))
        self._latest_subtest_result = subTestResult

    def testStarted(self, testName, captureStandardOutput=None, flowId=None, is_suite=False):
        """Report a test (or suite) start, opening missing parent suites first."""
        test_name_as_list = self._test_to_list(testName)
        testName = ".".join(test_name_as_list)

        def _write_start_message():
            # testName, captureStandardOutput, flowId
            args = {"name": testName, "captureStandardOutput": captureStandardOutput}
            if is_suite:
                self.message("testSuiteStarted", **args)
            else:
                self.message("testStarted", **args)

        commands = TREE_MANAGER.level_opened(self._test_to_list(testName), _write_start_message)
        if commands:
            # The tree manager asked us to open/close another level first:
            # execute the command and retry.
            self.do_command(commands[0], commands[1])
            self.testStarted(testName, captureStandardOutput)

    def testFailed(self, testName, message='', details='', flowId=None):
        """Report a test failure."""
        testName = ".".join(self._test_to_list(testName))
        args = {"name": testName, "message": str(message),
                "details": details}
        self.message("testFailed", **args)

    def testFinished(self, testName, testDuration=None, flowId=None, is_suite=False):
        """Report a test (or suite) end, closing intermediate levels as needed."""
        testName = ".".join(self._test_to_list(testName))

        def _write_finished_message():
            # testName, captureStandardOutput, flowId
            current, parent = TREE_MANAGER.node_ids
            args = {"nodeId": current, "parentNodeId": parent, "name": testName}
            # TODO: Doc copy/paste with parent, extract
            if testDuration is not None:
                # testDuration is a timedelta; convert it to whole milliseconds.
                duration_ms = testDuration.days * 86400000 + \
                              testDuration.seconds * 1000 + \
                              int(testDuration.microseconds / 1000)
                args["duration"] = str(duration_ms)
            if is_suite:
                self.message("testSuiteFinished", **args)
            else:
                self.message("testFinished", **args)

        commands = TREE_MANAGER.level_closed(self._test_to_list(testName), _write_finished_message)
        if commands:
            # Close the level the tree manager pointed at, then retry.
            self.do_command(commands[0], commands[1])
            self.testFinished(testName, testDuration)

    def do_command(self, command, test):
        """
        Executes commands returned by level_closed and level_opened.
        """
        test_name = ".".join(test)
        # By executing commands we open or close suites (branches), since
        # tests (leaves) are always reported by the runner itself.
        if command == "open":
            self.testStarted(test_name, is_suite=True)
        else:
            self.testFinished(test_name, is_suite=True)

    def close_all(self):
        """
        Closes all still-open suites (used from the atexit hook).
        """
        commands = TREE_MANAGER.close_all()
        if commands:
            self.do_command(commands[0], commands[1])
            self.close_all()


# Monkeypatched: install the extended writer so runners that import
# teamcity-messages pick it up transparently.
messages.TeamcityServiceMessages = NewTeamcityServiceMessages
def jb_patch_separator(targets, fs_glue, python_glue, fs_to_python_glue):
    """
    Convert python targets of the Java-side form
    "/path/foo.py::parts.to.python" into the runner-specific format.

    :param targets: list of dot-separated targets
    :param fs_glue: glue for filesystem parts, i.e. module "eggs" in package
        "spam" becomes "spam[fs_glue]eggs"
    :param python_glue: glue between python parts (class, function, etc.)
    :param fs_to_python_glue: glue between the last fs part and the first
        python part
    :return: iterable of targets with patched separators ([] when no targets
        were provided)
    """
    if not targets:
        return []

    # Optional "<fs path>.py::" prefix followed by dotted python parts.
    target_pattern = re.compile("^(:?(.+)[.]py::)?(.+)$")

    def _convert(target):
        parsed = target_pattern.match(target)
        assert parsed, "unexpected string: {0}".format(target)
        path_part = parsed.group(2)
        py_part = parsed.group(3).replace(".", python_glue)
        if not path_part:
            return py_part
        return path_part.replace("/", fs_glue) + fs_to_python_glue + py_part

    return map(_convert, targets)
def jb_start_tests():
    """
    Parses the helper's command line, starts the test-reporting protocol and
    returns the parsed arguments.

    :return: tuple of (string with path or None, list of targets or None,
        list of additional arguments found after the "--" separator)
    """
    # Handle additional args after "--": they are stashed away and removed
    # from sys.argv so the option parser below never sees them.
    additional_args = []
    try:
        index = sys.argv.index("--")
        additional_args = sys.argv[index + 1:]
        del sys.argv[index:]
    except ValueError:
        # No "--" separator: nothing extra was passed.
        pass
    utils = _jb_utils.VersionAgnosticUtils()
    namespace = utils.get_options(
        _jb_utils.OptionDescription('--path', 'Path to file or folder to run'),
        _jb_utils.OptionDescription('--target', 'Python target to run', "append"))
    del sys.argv[1:]  # Remove all args
    # "enteredTheMatrix" switches the IDE into test-tree mode.
    NewTeamcityServiceMessages().message('enteredTheMatrix')
    # PyCharm helpers dir is first dir in sys.path because helper is launched.
    # But sys.path should be same as when launched with test runner directly,
    # so the helpers dir is dropped when the IDE identifies it via env var.
    try:
        if os.path.abspath(sys.path[0]) == os.path.abspath(os.environ["PYCHARM_HELPERS_DIR"]):
            sys.path.pop(0)
    except KeyError:
        # Env variable not provided: leave sys.path untouched.
        pass
    return namespace.path, namespace.target, additional_args
def _close_all_tests():
    # atexit hook: make sure every still-open suite is closed even if the
    # runner exits without reporting all test-finished events.
    NewTeamcityServiceMessages().close_all()


atexit.register(_close_all_tests)
def jb_doc_args(framework_name, args):
    """
    Report the runner's launch command to the user.

    Runners are encouraged to call this so users can see which framework was
    started, with which arguments and from which working directory.

    :param framework_name: human-readable framework name (e.g. "py.test")
    :param args: list of string arguments passed to the framework
    """
    joined_args = " ".join(args)
    print("Launching {0} with arguments {1} in {2}\n".format(
        framework_name, joined_args, PROJECT_DIR))
| apache-2.0 |
imsparsh/python-for-android | python-modules/twisted/twisted/web/wsgi.py | 52 | 14117 | # Copyright (c) 2008-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An implementation of
U{Web Resource Gateway Interface<http://www.python.org/dev/peps/pep-0333/>}.
"""
__metaclass__ = type
from sys import exc_info
from zope.interface import implements
from twisted.python.log import msg, err
from twisted.python.failure import Failure
from twisted.web.resource import IResource
from twisted.web.server import NOT_DONE_YET
from twisted.web.http import INTERNAL_SERVER_ERROR
class _ErrorStream:
    """
    File-like object instances of which are used as the value for the
    C{'wsgi.errors'} key in the C{environ} dictionary passed to the
    application object.

    This simply passes writes on to the L{logging<twisted.python.log>} system
    as error events from the C{'wsgi'} system.  In the future, it may be
    desirable to expose more information in the events it logs, such as the
    application object which generated the message.
    """
    def write(self, bytes):
        """
        Generate an event for the logging system with the given bytes as the
        message.

        This is called in a WSGI application thread, not the I/O thread.
        """
        msg(bytes, system='wsgi', isError=True)

    def writelines(self, iovec):
        """
        Join the given lines and pass them to C{write} to be handled in the
        usual way.

        This is called in a WSGI application thread, not the I/O thread.

        @param iovec: A C{list} of C{'\\n'}-terminated C{str} which will be
            logged.
        """
        self.write(''.join(iovec))

    def flush(self):
        """
        Nothing is buffered, so flushing does nothing.  This method is
        required to exist by PEP 333, though.

        This is called in a WSGI application thread, not the I/O thread.
        """
class _InputStream:
"""
File-like object instances of which are used as the value for the
C{'wsgi.input'} key in the C{environ} dictionary passed to the application
object.
This only exists to make the handling of C{readline(-1)} consistent across
different possible underlying file-like object implementations. The other
supported methods pass through directly to the wrapped object.
"""
def __init__(self, input):
"""
Initialize the instance.
This is called in the I/O thread, not a WSGI application thread.
"""
self._wrapped = input
def read(self, size=None):
"""
Pass through to the underlying C{read}.
This is called in a WSGI application thread, not the I/O thread.
"""
# Avoid passing None because cStringIO and file don't like it.
if size is None:
return self._wrapped.read()
return self._wrapped.read(size)
def readline(self, size=None):
"""
Pass through to the underlying C{readline}, with a size of C{-1} replaced
with a size of C{None}.
This is called in a WSGI application thread, not the I/O thread.
"""
# Check for -1 because StringIO doesn't handle it correctly. Check for
# None because files and tempfiles don't accept that.
if size == -1 or size is None:
return self._wrapped.readline()
return self._wrapped.readline(size)
def readlines(self, size=None):
"""
Pass through to the underlying C{readlines}.
This is called in a WSGI application thread, not the I/O thread.
"""
# Avoid passing None because cStringIO and file don't like it.
if size is None:
return self._wrapped.readlines()
return self._wrapped.readlines(size)
def __iter__(self):
"""
Pass through to the underlying C{__iter__}.
This is called in a WSGI application thread, not the I/O thread.
"""
return iter(self._wrapped)
class _WSGIResponse:
    """
    Helper for L{WSGIResource} which drives the WSGI application using a
    threadpool and hooks it up to the L{Request}.

    @ivar started: A C{bool} indicating whether or not the response status and
        headers have been written to the request yet.  This may only be read
        or written in the WSGI application thread.

    @ivar reactor: An L{IReactorThreads} provider which is used to call
        methods on the request in the I/O thread.

    @ivar threadpool: A L{ThreadPool} which is used to call the WSGI
        application object in a non-I/O thread.

    @ivar application: The WSGI application object.

    @ivar request: The L{Request} upon which the WSGI environment is based
        and to which the application's output will be sent.

    @ivar environ: The WSGI environment C{dict}.

    @ivar status: The HTTP response status C{str} supplied to the WSGI
        I{start_response} callable by the application.

    @ivar headers: A list of HTTP response headers supplied to the WSGI
        I{start_response} callable by the application.

    @ivar _requestFinished: A flag which indicates whether it is possible to
        generate more response data or not.  This is C{False} until
        L{Request.notifyFinish} tells us the request is done, then C{True}.
    """
    _requestFinished = False

    def __init__(self, reactor, threadpool, application, request):
        self.started = False
        self.reactor = reactor
        self.threadpool = threadpool
        self.application = application
        self.request = request
        # Flip _requestFinished as soon as the request ends for any reason
        # (success or client disconnect).
        self.request.notifyFinish().addBoth(self._finished)

        # SCRIPT_NAME: the portion of the URL that routed to this resource.
        if request.prepath:
            scriptName = '/' + '/'.join(request.prepath)
        else:
            scriptName = ''

        # PATH_INFO: everything below this resource, owned by the app.
        if request.postpath:
            pathInfo = '/' + '/'.join(request.postpath)
        else:
            pathInfo = ''

        parts = request.uri.split('?', 1)
        if len(parts) == 1:
            queryString = ''
        else:
            queryString = parts[1]

        self.environ = {
            'REQUEST_METHOD': request.method,
            'REMOTE_ADDR': request.getClientIP(),
            'SCRIPT_NAME': scriptName,
            'PATH_INFO': pathInfo,
            'QUERY_STRING': queryString,
            'CONTENT_TYPE': request.getHeader('content-type') or '',
            'CONTENT_LENGTH': request.getHeader('content-length') or '',
            'SERVER_NAME': request.getRequestHostname(),
            'SERVER_PORT': str(request.getHost().port),
            'SERVER_PROTOCOL': request.clientproto}

        # Expose every request header as an HTTP_* environ key, per PEP 333.
        for name, values in request.requestHeaders.getAllRawHeaders():
            name = 'HTTP_' + name.upper().replace('-', '_')
            # It might be preferable for http.HTTPChannel to clear out
            # newlines.
            self.environ[name] = ','.join([
                v.replace('\n', ' ') for v in values])

        self.environ.update({
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': request.isSecure() and 'https' or 'http',
            'wsgi.run_once': False,
            'wsgi.multithread': True,
            'wsgi.multiprocess': False,
            'wsgi.errors': _ErrorStream(),
            # Attend: request.content was owned by the I/O thread up until
            # this point.  By wrapping it and putting the result into the
            # environment dictionary, it is effectively being given to
            # another thread.  This means that whatever it is, it has to be
            # safe to access it from two different threads.  The access
            # *should* all be serialized (first the I/O thread writes to
            # it, then the WSGI thread reads from it, then the I/O thread
            # closes it).  However, since the request is made available to
            # arbitrary application code during resource traversal, it's
            # possible that some other code might decide to use it in the
            # I/O thread concurrently with its use in the WSGI thread.
            # More likely than not, this will break.  This seems like an
            # unlikely possibility to me, but if it is to be allowed,
            # something here needs to change. -exarkun
            'wsgi.input': _InputStream(request.content)})

    def _finished(self, ignored):
        """
        Record the end of the response generation for the request being
        serviced.
        """
        self._requestFinished = True

    def startResponse(self, status, headers, excInfo=None):
        """
        The WSGI I{start_response} callable.  The given values are saved
        until they are needed to generate the response.

        This will be called in a non-I/O thread.
        """
        # Per PEP 333: once output has started, start_response may only be
        # re-invoked with exc_info, in which case the error is re-raised.
        if self.started and excInfo is not None:
            raise excInfo[0], excInfo[1], excInfo[2]
        self.status = status
        self.headers = headers
        return self.write

    def write(self, bytes):
        """
        The WSGI I{write} callable returned by the I{start_response}
        callable.  The given bytes will be written to the response body,
        possibly flushing the status and headers first.

        This will be called in a non-I/O thread.
        """
        def wsgiWrite(started):
            # Headers are flushed lazily, on the first body write.
            if not started:
                self._sendResponseHeaders()
            self.request.write(bytes)
        # `started` is captured by value here because self.started may only
        # be touched in the WSGI thread; the actual request write happens in
        # the I/O thread.
        self.reactor.callFromThread(wsgiWrite, self.started)
        self.started = True

    def _sendResponseHeaders(self):
        """
        Set the response code and response headers on the request object, but
        do not flush them.  The caller is responsible for doing a write in
        order for anything to actually be written out in response to the
        request.

        This must be called in the I/O thread.
        """
        code, message = self.status.split(None, 1)
        code = int(code)
        self.request.setResponseCode(code, message)

        # twisted.web.server.Request.process always addes a content-type
        # response header.  That's not appropriate for us.
        self.request.responseHeaders.removeHeader('content-type')

        for name, value in self.headers:
            # Don't allow the application to control these required headers.
            if name.lower() not in ('server', 'date'):
                self.request.responseHeaders.addRawHeader(name, value)

    def start(self):
        """
        Start the WSGI application in the threadpool.

        This must be called in the I/O thread.
        """
        self.threadpool.callInThread(self.run)

    def run(self):
        """
        Call the WSGI application object, iterate it, and handle its output.

        This must be called in a non-I/O thread (ie, a WSGI application
        thread).
        """
        try:
            appIterator = self.application(self.environ, self.startResponse)
            for elem in appIterator:
                if elem:
                    self.write(elem)
                # Stop producing output once the request has gone away.
                if self._requestFinished:
                    break
            close = getattr(appIterator, 'close', None)
            if close is not None:
                close()
        except:
            # Catch-all: any application failure is logged and turned into a
            # 500 (or a dropped connection if output already started).
            def wsgiError(started, type, value, traceback):
                err(Failure(value, type, traceback), "WSGI application error")
                if started:
                    self.request.transport.loseConnection()
                else:
                    self.request.setResponseCode(INTERNAL_SERVER_ERROR)
                    self.request.finish()
            self.reactor.callFromThread(wsgiError, self.started, *exc_info())
        else:
            def wsgiFinish(started):
                if not self._requestFinished:
                    if not started:
                        self._sendResponseHeaders()
                    self.request.finish()
            self.reactor.callFromThread(wsgiFinish, self.started)
        self.started = True
class WSGIResource:
    """
    An L{IResource} implementation which hands every request at or beneath
    it over to a WSGI application.

    @ivar _reactor: An L{IReactorThreads} provider passed on to
        L{_WSGIResponse} so request methods can be scheduled in the I/O
        thread.

    @ivar _threadpool: A L{ThreadPool} passed on to L{_WSGIResponse} to run
        the WSGI application object.

    @ivar _application: The WSGI application object.
    """
    implements(IResource)

    # Further resource segments are left up to the WSGI application object to
    # handle.
    isLeaf = True

    def __init__(self, reactor, threadpool, application):
        self._reactor = reactor
        self._threadpool = threadpool
        self._application = application

    def render(self, request):
        """
        Build the WSGI C{environ} C{dict} for the request and hand it to the
        application object.

        The WSGI application object gets almost complete control of the
        rendering process: it dictates the status, headers, response body and
        when the response is complete, so C{NOT_DONE_YET} is always returned
        here.
        """
        handler = _WSGIResponse(
            self._reactor, self._threadpool, self._application, request)
        handler.start()
        return NOT_DONE_YET

    def getChildWithDefault(self, name, request):
        """
        Reject attempts to retrieve a child resource.  All path segments
        beyond the one which refers to this resource belong to the WSGI
        application object.
        """
        raise RuntimeError("Cannot get IResource children from WSGIResource")

    def putChild(self, path, child):
        """
        Reject attempts to add a child resource to this resource.  The WSGI
        application object handles all path segments beneath this resource,
        so L{IResource} children can never be found.
        """
        raise RuntimeError("Cannot put IResource children under WSGIResource")


__all__ = ['WSGIResource']
| apache-2.0 |
vimagick/youtube-dl | youtube_dl/extractor/helsinki.py | 165 | 1360 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import js_to_json
class HelsinkiIE(InfoExtractor):
    """Extractor for videos from the video.helsinki.fi Flash archive."""
    IE_DESC = 'helsinki.fi'
    _VALID_URL = r'https?://video\.helsinki\.fi/Arkisto/flash\.php\?id=(?P<id>\d+)'

    _TEST = {
        'url': 'http://video.helsinki.fi/Arkisto/flash.php?id=20258',
        'info_dict': {
            'id': '20258',
            'ext': 'mp4',
            'title': 'Tietotekniikkafoorumi-iltapäivä',
            'description': 'md5:f5c904224d43c133225130fe156a5ee0',
        },
        'params': {
            'skip_download': True,  # RTMP
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # The page initializes jwplayer with a JS object literal containing
        # the stream list; js_to_json turns that literal into parseable JSON.
        params = self._parse_json(self._html_search_regex(
            r'(?s)jwplayer\("player"\).setup\((\{.*?\})\);',
            webpage, 'player code'), video_id, transform_source=js_to_json)
        formats = [{
            'url': s['file'],
            'ext': 'mp4',
        } for s in params['sources']]
        self._sort_formats(formats)

        return {
            'id': video_id,
            # Page titles carry a "Video: " prefix which is not part of the
            # actual video title.
            'title': self._og_search_title(webpage).replace('Video: ', ''),
            'description': self._og_search_description(webpage),
            'formats': formats,
        }
| unlicense |
mooflu/iTerm2 | tests/esctest/tests/decrqm.py | 31 | 13222 | from esc import NUL
import escargs
import esccmd
import escio
import esclog
from escutil import AssertEQ, AssertScreenCharsInRectEqual, GetScreenSize, knownBug, optionRequired, vtLevel
from esctypes import Point, Rect
class DECRQMTests(object):
"""DECANM is not tested because there doesn't seem to be any way to
exit VT52 mode and subsequent tests are broken."""
    def requestAnsiMode(self, mode):
        """Send DECRQM for an ANSI mode and return the [mode, setting] reply."""
        esccmd.DECRQM(mode, DEC=False)
        return escio.ReadCSI('$y')

    def requestDECMode(self, mode):
        """Send DECRQM for a DEC private mode and return the [mode, setting] reply."""
        esccmd.DECRQM(mode, DEC=True)
        return escio.ReadCSI('$y', '?')

    def doModifiableAnsiTest(self, mode):
        """Toggle a settable ANSI mode both ways and verify DECRQM tracks it.

        Reply values: 1 = set, 2 = reset."""
        before = self.requestAnsiMode(mode)
        if before[1] == 2:
            # Mode is currently reset: set it first, then reset it.
            esccmd.SM(mode)
            AssertEQ(self.requestAnsiMode(mode), [ mode, 1 ])
            esccmd.RM(mode)
            AssertEQ(self.requestAnsiMode(mode), [ mode, 2 ])
        else:
            # Mode is currently set: reset it first, then set it.
            esccmd.RM(mode)
            AssertEQ(self.requestAnsiMode(mode), [ mode, 2 ])
            esccmd.SM(mode)
            AssertEQ(self.requestAnsiMode(mode), [ mode, 1 ])

    def doPermanentlyResetAnsiTest(self, mode):
        """Verify DECRQM reports the ANSI mode as permanently reset (4)."""
        AssertEQ(self.requestAnsiMode(mode), [ mode, 4 ])

    def doModifiableDecTest(self, mode):
        """Toggle a settable DEC private mode both ways and verify DECRQM tracks it."""
        before = self.requestDECMode(mode)
        if before[1] == 2:
            # Mode is currently reset: set it first, then reset it.
            esccmd.DECSET(mode)
            AssertEQ(self.requestDECMode(mode), [ mode, 1 ])
            esccmd.DECRESET(mode)
            AssertEQ(self.requestDECMode(mode), [ mode, 2 ])
        else:
            # Mode is currently set: reset it first, then set it.
            esccmd.DECRESET(mode)
            AssertEQ(self.requestDECMode(mode), [ mode, 2 ])
            esccmd.DECSET(mode)
            AssertEQ(self.requestDECMode(mode), [ mode, 1 ])

    def doPermanentlyResetDecTest(self, mode):
        """Verify DECRQM reports the DEC private mode as permanently reset (4)."""
        AssertEQ(self.requestDECMode(mode), [ mode, 4 ])
    # Modifiable ANSI modes ----------------------------------------------------

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.")
    def test_DECRQM(self):
        """See if DECRQM works at all. Unlike all the other tests, this one should
        never have shouldTry=False set. That way if a terminal with a knownBug
        begins supporting DECRQM, this will cease to fail, which is your sign to
        remove the 'DECRQM not supported' knownBug from other tests for that
        terminal."""
        AssertEQ(len(self.requestAnsiMode(esccmd.IRM)), 2)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_KAM(self):
        # Keyboard action mode.
        self.doModifiableAnsiTest(esccmd.KAM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_IRM(self):
        # Insert/replace mode.
        self.doModifiableAnsiTest(esccmd.IRM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_SRM(self):
        # Send/receive (local echo) mode.
        self.doModifiableAnsiTest(esccmd.SRM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_LNM(self):
        # Line feed/new line mode.
        self.doModifiableAnsiTest(esccmd.LNM)

    # Permanently reset ANSI modes ----------------------------------------------
    # These legacy ANSI modes are expected to always report "permanently
    # reset" (4).

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_GATM(self):
        self.doPermanentlyResetAnsiTest(esccmd.GATM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_SRTM(self):
        self.doPermanentlyResetAnsiTest(esccmd.SRTM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_VEM(self):
        self.doPermanentlyResetAnsiTest(esccmd.VEM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_HEM(self):
        self.doPermanentlyResetAnsiTest(esccmd.HEM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_PUM(self):
        self.doPermanentlyResetAnsiTest(esccmd.PUM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_FEAM(self):
        self.doPermanentlyResetAnsiTest(esccmd.FEAM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_FETM(self):
        self.doPermanentlyResetAnsiTest(esccmd.FETM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_MATM(self):
        self.doPermanentlyResetAnsiTest(esccmd.MATM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_TTM(self):
        self.doPermanentlyResetAnsiTest(esccmd.TTM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_SATM(self):
        self.doPermanentlyResetAnsiTest(esccmd.SATM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_TSM(self):
        self.doPermanentlyResetAnsiTest(esccmd.TSM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_ANSI_EBM(self):
        self.doPermanentlyResetAnsiTest(esccmd.EBM)
    # Modifiable DEC modes ------------------------------------------------------

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECCKM(self):
        # Cursor key mode.
        self.doModifiableDecTest(esccmd.DECCKM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECCOLM(self):
        # 80/132 column mode.  Some terminals only honor DECCOLM when the
        # "allow 80 -> 132" switch is set, so enable it for the duration.
        needsPermission = escargs.args.expected_terminal in [ "xterm", "iTerm2" ]
        if needsPermission:
            esccmd.DECSET(esccmd.Allow80To132)
        self.doModifiableDecTest(esccmd.DECCOLM)
        if needsPermission:
            esccmd.DECRESET(esccmd.Allow80To132)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECSCLM(self):
        self.doModifiableDecTest(esccmd.DECSCLM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECSCNM(self):
        # Reverse (light background) screen mode.
        self.doModifiableDecTest(esccmd.DECSCNM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECOM(self):
        # Origin mode.
        self.doModifiableDecTest(esccmd.DECOM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECAWM(self):
        # Auto-wrap mode.
        self.doModifiableDecTest(esccmd.DECAWM)

    @knownBug(terminal="xterm",
              reason="xterm always returns 4 (permanently reset)")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECARM(self):
        self.doModifiableDecTest(esccmd.DECARM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECPFF(self):
        self.doModifiableDecTest(esccmd.DECPFF)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECPEX(self):
        self.doModifiableDecTest(esccmd.DECPEX)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECTCEM(self):
        # Text cursor enable (cursor visibility) mode.
        self.doModifiableDecTest(esccmd.DECTCEM)

    @knownBug(terminal="xterm", reason="Not supported")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECRLM(self):
        self.doModifiableDecTest(esccmd.DECRLM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECHEBM(self):
        self.doModifiableDecTest(esccmd.DECHEBM)

    @knownBug(terminal="xterm", reason="Not supported")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECHEM(self):
        """Hebrew encoding mode."""
        self.doModifiableDecTest(esccmd.DECHEM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECNRCM(self):
        # National replacement character set mode.
        self.doModifiableDecTest(esccmd.DECNRCM)

    @knownBug(terminal="xterm", reason="Not supported")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECNAKB(self):
        self.doModifiableDecTest(esccmd.DECNAKB)

    @knownBug(terminal="xterm", reason="Not supported")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECVCCM(self):
        self.doModifiableDecTest(esccmd.DECVCCM)

    @knownBug(terminal="xterm", reason="Not supported")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECPCCM(self):
        self.doModifiableDecTest(esccmd.DECPCCM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECNKM(self):
        # Numeric keypad mode.
        self.doModifiableDecTest(esccmd.DECNKM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECBKM(self):
        # Backarrow key mode.
        self.doModifiableDecTest(esccmd.DECBKM)

    @knownBug(terminal="xterm", reason="Not supported")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECKBUM(self):
        self.doModifiableDecTest(esccmd.DECKBUM)

    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECVSSM(self):
        self.doModifiableDecTest(esccmd.DECVSSM)

    @knownBug(terminal="xterm", reason="Not supported")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECXRLM(self):
        self.doModifiableDecTest(esccmd.DECXRLM)

    @knownBug(terminal="xterm", reason="Not supported")
    @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
    def test_DECRQM_DEC_DECKPM(self):
        self.doModifiableDecTest(esccmd.DECKPM)
@vtLevel(5)
@knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
@optionRequired(terminal="xterm",
option=escargs.XTERM_WINOPS_ENABLED)
def test_DECRQM_DEC_DECNCSM(self):
needsPermission = escargs.args.expected_terminal in [ "xterm", "iTerm2" ]
if needsPermission:
esccmd.DECSET(esccmd.Allow80To132)
self.doModifiableDecTest(esccmd.DECNCSM)
needsPermission = escargs.args.expected_terminal in [ "xterm", "iTerm2" ]
if needsPermission:
esccmd.DECRESET(esccmd.Allow80To132)
  # Remaining modifiable-mode DECRQM checks; all of these modes are
  # unimplemented in xterm and DECRQM itself is unimplemented in iTerm2.
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECRLCM(self):
    self.doModifiableDecTest(esccmd.DECRLCM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECCRTSM(self):
    self.doModifiableDecTest(esccmd.DECCRTSM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECARSM(self):
    self.doModifiableDecTest(esccmd.DECARSM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECMCM(self):
    self.doModifiableDecTest(esccmd.DECMCM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECAAM(self):
    self.doModifiableDecTest(esccmd.DECAAM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECCANSM(self):
    self.doModifiableDecTest(esccmd.DECCANSM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECNULM(self):
    self.doModifiableDecTest(esccmd.DECNULM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECHDPXM(self):
    """Set duplex."""
    self.doModifiableDecTest(esccmd.DECHDPXM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECESKM(self):
    self.doModifiableDecTest(esccmd.DECESKM)
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECOSCNM(self):
    self.doModifiableDecTest(esccmd.DECOSCNM)
# Permanently Reset DEC Modes -----------------------------------------------
  # First of the "Permanently Reset" cases: DECHCCM is expected to report
  # status 4 (permanently reset), hence doPermanentlyResetDecTest() rather
  # than the modifiable-mode helper used above.
  @knownBug(terminal="xterm", reason="Not supported")
  @knownBug(terminal="iTerm2", reason="DECRQM not supported.", shouldTry=False)
  def test_DECRQM_DEC_DECHCCM(self):
    """Here's what the official docs have to say:
    Normally, when you horizontally change the size of your window or
    terminal (for example, from 132 columns to 80 columns), the cursor is not
    visible. You can change this default by clicking on the Horizontal
    Cursor Coupling option.
    I also found this on carleton.edu:
    Check the Horizontal Cursor Coupling check box if you want the horizontal
    scrollbar to be adjusted automatically when the cursor moves to always
    keep the column with the cursor on the visible portion of the display.
    I gather this is irrelevant if your terminal doesn't support horizontal
    scrolling."""
    self.doPermanentlyResetDecTest(esccmd.DECHCCM)
| gpl-2.0 |
Philippe12/external_chromium_org | tools/valgrind/memcheck/PRESUBMIT.py | 77 | 3224 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
import re
def CheckChange(input_api, output_api):
  """Checks the memcheck suppressions files for bad data.

  Walks every affected file matching suppressions*.txt, validates the
  suppression structure line by line, and collects duplicate or malformed
  entries. Returns a list with a single PresubmitError (or an empty list).
  """
  sup_regex = re.compile(r'suppressions.*\.txt$')
  suppressions = {}  # suppression name -> (AffectedFile, line number)
  errors = []
  check_for_memcheck = False
  # skip_next_line has 3 possible values:
  # - False: don't skip the next line.
  # - 'skip_suppression_name': the next line is a suppression name, skip.
  # - 'skip_param': the next line is a system call parameter error, skip.
  skip_next_line = False
  for f in filter(lambda x: sup_regex.search(x.LocalPath()),
                  input_api.AffectedFiles()):
    for line_num, line in enumerate(f.NewContents(), start=1):
      line = line.lstrip()
      if line.startswith('#') or not line:
        continue
      if skip_next_line:
        if skip_next_line == 'skip_suppression_name':
          if 'insert_a_suppression_name_here' in line:
            errors.append('"insert_a_suppression_name_here" is not a valid '
                          'suppression name')
          if line in suppressions:
            prev_file, prev_line_num = suppressions[line]
            # Compare file paths, not a path against a line number (the
            # original compared f.LocalPath() to the stored line number,
            # which made this branch unreachable).
            if f.LocalPath() == prev_file.LocalPath():
              errors.append('suppression with name "%s" at %s line %s '
                            'has already been defined at line %s' %
                            (line, f.LocalPath(), line_num, prev_line_num))
            else:
              errors.append('suppression with name "%s" at %s line %s '
                            'has already been defined at %s line %s' %
                            (line, f.LocalPath(), line_num,
                             prev_file.LocalPath(), prev_line_num))
          else:
            suppressions[line] = (f, line_num)
          # The line after a suppression name must be a "Memcheck:..." type.
          check_for_memcheck = True
        skip_next_line = False
        continue
      if check_for_memcheck:
        if not line.startswith('Memcheck:'):
          errors.append('"%s" should be "Memcheck:..." in %s line %s' %
                        (line, f.LocalPath(), line_num))
        check_for_memcheck = False
      if line == '{':
        skip_next_line = 'skip_suppression_name'
        continue
      if line == "Memcheck:Param":
        skip_next_line = 'skip_param'
        continue
      if (line.startswith('fun:') or line.startswith('obj:') or
          line.startswith('Memcheck:') or line == '}' or
          line == '...'):
        continue
      errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
                                                            line_num))
  if errors:
    return [output_api.PresubmitError('\n'.join(errors))]
  return []
# Presubmit entry points invoked by gcl/git-cl; upload and commit run the
# same suppression-file validation.
def CheckChangeOnUpload(input_api, output_api):
  return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  return CheckChange(input_api, output_api)
def GetPreferredTrySlaves():
  # Try bots that exercise these valgrind/memcheck suppressions.
  return ['linux_valgrind', 'mac_valgrind']
| bsd-3-clause |
hfp/tensorflow-xsmm | tensorflow/contrib/image/python/ops/interpolate_spline.py | 19 | 11651 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Polyharmonic spline interpolation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
# Floor used to keep sqrt/log arguments strictly positive (see _phi).
EPSILON = 0.0000000001
def _cross_squared_distance_matrix(x, y):
  """Pairwise squared distance between two (batch) matrices' rows (2nd dim).

  Computes the pairwise distances between rows of x and rows of y.

  Args:
    x: [batch_size, n, d] float `Tensor`
    y: [batch_size, m, d] float `Tensor`

  Returns:
    squared_dists: [batch_size, n, m] float `Tensor`, where
      squared_dists[b,i,j] = ||x[b,i,:] - y[b,j,:]||^2
  """
  # Expand ||u - v||^2 as ||u||^2 - 2 u.v + ||v||^2 and broadcast the two
  # squared-norm vectors against the [b, n, m] inner-product matrix.
  sq_norms_x = array_ops.expand_dims(
      math_ops.reduce_sum(math_ops.square(x), 2), 2)  # [b, n, 1]
  sq_norms_y = array_ops.expand_dims(
      math_ops.reduce_sum(math_ops.square(y), 2), 1)  # [b, 1, m]
  cross_terms = math_ops.matmul(x, y, adjoint_b=True)  # [b, n, m]
  return sq_norms_x - 2 * cross_terms + sq_norms_y
def _pairwise_squared_distance_matrix(x):
  """Pairwise squared distance among a (batch) matrix's rows (2nd dim).

  This saves a bit of computation vs. using _cross_squared_distance_matrix(x,x)

  Args:
    x: `[batch_size, n, d]` float `Tensor`

  Returns:
    squared_dists: `[batch_size, n, n]` float `Tensor`, where
      squared_dists[b,i,j] = ||x[b,i,:] - x[b,j,:]||^2
  """
  # One matmul gives the Gram matrix x x^T; its diagonal already holds the
  # squared row norms, so both pieces of the expansion come for free.
  gram = math_ops.matmul(x, x, adjoint_b=True)  # [b, n, n]
  sq_norms = array_ops.expand_dims(
      array_ops.matrix_diag_part(gram), 2)  # [b, n, 1]
  return sq_norms - 2 * gram + array_ops.transpose(sq_norms, [0, 2, 1])
def _solve_interpolation(train_points, train_values, order,
                         regularization_weight):
  """Solve for interpolation coefficients.
  Computes the coefficients of the polyharmonic interpolant for the 'training'
  data defined by (train_points, train_values) using the kernel phi.
  Args:
    train_points: `[b, n, d]` interpolation centers
    train_values: `[b, n, k]` function values
    order: order of the interpolation
    regularization_weight: weight to place on smoothness regularization term
  Returns:
    w: `[b, n, k]` weights on each interpolation center
    v: `[b, d, k]` weights on each input dimension
  Raises:
    ValueError: if d or k is not fully specified.
  """
  # These dimensions are set dynamically at runtime.
  b, n, _ = array_ops.unstack(array_ops.shape(train_points), num=3)
  d = train_points.shape[-1]
  if tensor_shape.dimension_value(d) is None:
    raise ValueError('The dimensionality of the input points (d) must be '
                     'statically-inferrable.')
  k = train_values.shape[-1]
  if tensor_shape.dimension_value(k) is None:
    raise ValueError('The dimensionality of the output values (k) must be '
                     'statically-inferrable.')
  # First, rename variables so that the notation (c, f, w, v, A, B, etc.)
  # follows https://en.wikipedia.org/wiki/Polyharmonic_spline.
  # To account for python style guidelines we use
  # matrix_a for A and matrix_b for B.
  c = train_points
  f = train_values
  # Next, construct the linear system.
  # The assembled system is [[A, B], [B^T, 0]] [w; v] = [f; 0]: the top block
  # rows enforce exact interpolation at the centers, the bottom rows the
  # orthogonality side conditions on w.
  with ops.name_scope('construct_linear_system'):
    matrix_a = _phi(_pairwise_squared_distance_matrix(c), order)  # [b, n, n]
    if regularization_weight > 0:
      batch_identity_matrix = array_ops.expand_dims(
          linalg_ops.eye(n, dtype=c.dtype), 0)
      matrix_a += regularization_weight * batch_identity_matrix
    # Append ones to the feature values for the bias term in the linear model.
    ones = array_ops.ones_like(c[..., :1], dtype=c.dtype)
    matrix_b = array_ops.concat([c, ones], 2)  # [b, n, d + 1]
    # [b, n + d + 1, n]
    left_block = array_ops.concat(
        [matrix_a, array_ops.transpose(matrix_b, [0, 2, 1])], 1)
    num_b_cols = matrix_b.get_shape()[2]  # d + 1
    lhs_zeros = array_ops.zeros([b, num_b_cols, num_b_cols], train_points.dtype)
    right_block = array_ops.concat([matrix_b, lhs_zeros],
                                   1)  # [b, n + d + 1, d + 1]
    lhs = array_ops.concat([left_block, right_block],
                           2)  # [b, n + d + 1, n + d + 1]
    rhs_zeros = array_ops.zeros([b, d + 1, k], train_points.dtype)
    rhs = array_ops.concat([f, rhs_zeros], 1)  # [b, n + d + 1, k]
  # Then, solve the linear system and unpack the results.
  with ops.name_scope('solve_linear_system'):
    w_v = linalg_ops.matrix_solve(lhs, rhs)
    w = w_v[:, :n, :]
    v = w_v[:, n:, :]  # note: v is [b, d + 1, k], last row is the bias b
  return w, v
def _apply_interpolation(query_points, train_points, w, v, order):
  """Apply polyharmonic interpolation model to data.

  Given coefficients w and v for the interpolation model, we evaluate
  interpolated function values at query_points.

  Args:
    query_points: `[b, m, d]` x values to evaluate the interpolation at
    train_points: `[b, n, d]` x values that act as the interpolation centers
      ( the c variables in the wikipedia article)
    w: `[b, n, k]` weights on each interpolation center
    v: `[b, d, k]` weights on each input dimension
    order: order of the interpolation

  Returns:
    Polyharmonic interpolation evaluated at points defined in query_points.
  """
  # RBF contribution: phi applied to every query/center squared distance,
  # weighted by the per-center coefficients w.
  kernel_values = _phi(
      _cross_squared_distance_matrix(query_points, train_points), order)
  rbf_part = math_ops.matmul(kernel_values, w)
  # Linear contribution: apply v to the queries augmented with a trailing
  # column of ones that stands in for the bias term.
  bias_column = array_ops.ones_like(query_points[..., :1], train_points.dtype)
  augmented_queries = array_ops.concat([query_points, bias_column], 2)
  return rbf_part + math_ops.matmul(augmented_queries, v)
def _phi(r, order):
  """Coordinate-wise nonlinearity used to define the order of the interpolation.
  See https://en.wikipedia.org/wiki/Polyharmonic_spline for the definition.
  Args:
    r: input op (holds *squared* distances, see the callers)
    order: interpolation order
  Returns:
    phi_k evaluated coordinate-wise on r, for k = order
  """
  # using EPSILON prevents log(0), sqrt(0), etc.
  # sqrt(0) is well-defined, but its gradient is not
  # Since r holds squared distances, sqrt(r) / pow(r, 0.5 * order) below
  # yield the distance raised to the requested power.
  with ops.name_scope('phi'):
    if order == 1:
      r = math_ops.maximum(r, EPSILON)
      r = math_ops.sqrt(r)
      return r
    elif order == 2:
      return 0.5 * r * math_ops.log(math_ops.maximum(r, EPSILON))
    elif order == 4:
      return 0.5 * math_ops.square(r) * math_ops.log(
          math_ops.maximum(r, EPSILON))
    elif order % 2 == 0:
      r = math_ops.maximum(r, EPSILON)
      return 0.5 * math_ops.pow(r, 0.5 * order) * math_ops.log(r)
    else:
      r = math_ops.maximum(r, EPSILON)
      return math_ops.pow(r, 0.5 * order)
def interpolate_spline(train_points,
                       train_values,
                       query_points,
                       order,
                       regularization_weight=0.0,
                       name='interpolate_spline'):
  r"""Interpolate signal using polyharmonic interpolation.
  The interpolant has the form
  $$f(x) = \sum_{i = 1}^n w_i \phi(||x - c_i||) + v^T x + b.$$
  This is a sum of two terms: (1) a weighted sum of radial basis function (RBF)
  terms, with the centers \\(c_1, ... c_n\\), and (2) a linear term with a bias.
  The \\(c_i\\) vectors are 'training' points. In the code, b is absorbed into v
  by appending 1 as a final dimension to x. The coefficients w and v are
  estimated such that the interpolant exactly fits the value of the function at
  the \\(c_i\\) points, the vector w is orthogonal to each \\(c_i\\), and the
  vector w sums to 0. With these constraints, the coefficients can be obtained
  by solving a linear system.
  \\(\phi\\) is an RBF, parametrized by an interpolation
  order. Using order=2 produces the well-known thin-plate spline.
  We also provide the option to perform regularized interpolation. Here, the
  interpolant is selected to trade off between the squared loss on the training
  data and a certain measure of its curvature
  ([details](https://en.wikipedia.org/wiki/Polyharmonic_spline)).
  Using a regularization weight greater than zero has the effect that the
  interpolant will no longer exactly fit the training data. However, it may be
  less vulnerable to overfitting, particularly for high-order interpolation.
  Note the interpolation procedure is differentiable with respect to all inputs
  besides the order parameter.
  We support dynamically-shaped inputs, where batch_size, n, and m are None
  at graph construction time. However, d and k must be known.
  Args:
    train_points: `[batch_size, n, d]` float `Tensor` of n d-dimensional
      locations. These do not need to be regularly-spaced.
    train_values: `[batch_size, n, k]` float `Tensor` of n c-dimensional values
      evaluated at train_points.
    query_points: `[batch_size, m, d]` `Tensor` of m d-dimensional locations
      where we will output the interpolant's values.
    order: order of the interpolation. Common values are 1 for
      \\(\phi(r) = r\\), 2 for \\(\phi(r) = r^2 * log(r)\\) (thin-plate spline),
      or 3 for \\(\phi(r) = r^3\\).
    regularization_weight: weight placed on the regularization term.
      This will depend substantially on the problem, and it should always be
      tuned. For many problems, it is reasonable to use no regularization.
      If using a non-zero value, we recommend a small value like 0.001.
    name: name prefix for ops created by this function
  Returns:
    `[b, m, k]` float `Tensor` of query values. We use train_points and
    train_values to perform polyharmonic interpolation. The query values are
    the values of the interpolant evaluated at the locations specified in
    query_points.
  """
  with ops.name_scope(name):
    train_points = ops.convert_to_tensor(train_points)
    train_values = ops.convert_to_tensor(train_values)
    query_points = ops.convert_to_tensor(query_points)
    # First, fit the spline to the observed data.
    with ops.name_scope('solve'):
      w, v = _solve_interpolation(train_points, train_values, order,
                                  regularization_weight)
    # w: [b, n, k] RBF weights; v: [b, d + 1, k] linear weights (incl. bias).
    # Then, evaluate the spline at the query locations.
    with ops.name_scope('predict'):
      query_values = _apply_interpolation(query_points, train_points, w, v,
                                          order)
  return query_values
| apache-2.0 |
rob-smallshire/asq | asq/test/test_single.py | 1 | 1353 | import unittest
from asq.queryables import Queryable
__author__ = "Sixty North"
class TestSingle(unittest.TestCase):
    """Exercises Queryable.single() with and without a predicate."""

    def test_single(self):
        values = [5]
        self.assertEqual(Queryable(values).single(), 5)

    def test_single_empty(self):
        # single() on an empty sequence is an error.
        with self.assertRaises(ValueError):
            Queryable([]).single()

    def test_single_multiple(self):
        # single() on a multi-element sequence is an error.
        with self.assertRaises(ValueError):
            Queryable([4, 7]).single()

    def test_single_predicate(self):
        animals = ["Aardvark", "Cat", "Dog", "Elephant"]
        self.assertEqual(
            Queryable(animals).single(lambda x: x.startswith('D')), "Dog")

    def test_single_predicate_not_callable(self):
        animals = ["Aardvark", "Cat", "Dog", "Elephant"]
        with self.assertRaises(TypeError):
            Queryable(animals).single("not callable")

    def test_single_predicate_empty(self):
        with self.assertRaises(ValueError):
            Queryable([]).single(lambda x: x.startswith('D'))

    def test_single_predicate_multiple(self):
        # Two elements satisfy the predicate, so single() must fail.
        animals = ["Aardvark", "Cat", "Dog", "Elephant", "Dolphin"]
        with self.assertRaises(ValueError):
            Queryable(animals).single(lambda x: x.startswith('D'))

    def test_single_closed(self):
        query = Queryable([5])
        query.close()
        with self.assertRaises(ValueError):
            query.single()
| mit |
muratcansahin/itucsdb1626 | foodle/controllers/post_comments_controller.py | 1 | 3956 | #!/usr/bin/env python3
import foodle
import psycopg2
from psycopg2.extras import DictCursor
from flask import Blueprint, render_template, current_app, request, redirect, make_response, g
from foodle.utils.auth_hook import auth_hook_functor
post_comments_controller = Blueprint('post_comments_controller', __name__)
@post_comments_controller.route('/', methods=['GET'])
def index():
    """List post comments with their authors, paginated via ?limit=/?offset=.

    Renders the index template with the selected page of comments plus the
    total comment count for pagination.
    """
    # Coerce the pagination params to int: type=int yields None for missing
    # or non-numeric values, so bad input falls back to the defaults instead
    # of passing a raw string to the SQL LIMIT/OFFSET placeholders.
    limit = request.args.get('limit', type=int) or 20
    offset = request.args.get('offset', type=int) or 0
    with psycopg2.connect(foodle.app.config['dsn']) as conn:
        with conn.cursor(cursor_factory=DictCursor) as curs:
            curs.execute(
                """
                SELECT pc.id, u.username, pc.post_id, pc.body
                FROM post_comments AS pc
                INNER JOIN users AS u ON pc.user_id = u.id
                LIMIT %s
                OFFSET %s
                """,
                [limit, offset])
            post_comments = curs.fetchall()
            curs.execute(
                """
                SELECT count(id)
                FROM post_comments
                """)
            count = curs.fetchone()[0]
    return render_template('/post_comments/index.html', post_comments=post_comments, count=count)
@post_comments_controller.route('/<int:id>', methods=['GET'])
def show(id):
    """Render a single post comment, or 404 when no row matches ``id``."""
    with psycopg2.connect(foodle.app.config['dsn']) as conn:
        with conn.cursor(cursor_factory=DictCursor) as curs:
            curs.execute(
                """
                SELECT *
                FROM post_comments
                WHERE id = %s
                """,
                [id])
            row = curs.fetchone()
            # Guard clause: bail out early on a miss.
            if row is None:
                return "Entity not found.", 404
            return render_template('/post_comments/show.html', post_comment=row)
@post_comments_controller.route('/<int:post_id>/comments/', methods=['POST'])
@auth_hook_functor
def create(post_id):
    """Insert a comment on a post; 201 with a Location header on success."""
    # g.current_user is populated by the auth hook (auth_hook_functor) —
    # presumably the logged-in user; verify against the hook implementation.
    user_id = g.current_user['id']
    body = request.json['body']
    if not isinstance(body, str) or not isinstance(user_id, int):
        return "Request body is unprocessable", 422
    with psycopg2.connect(foodle.app.config['dsn']) as conn:
        with conn.cursor(cursor_factory=DictCursor) as curs:
            curs.execute(
                """
                INSERT INTO post_comments
                (user_id, post_id, body)
                VALUES (%s, %s, %s)
                RETURNING id
                """,
                [user_id, post_id, body])
            post_comment = curs.fetchone()
    # Point the client at the newly created resource.
    resp = make_response()
    resp.headers['location'] = '/post_comments/' + str(post_comment['id'])
    return resp, 201
@post_comments_controller.route('/<int:id>', methods=['PUT', 'PATCH'])
def update(id):
    """Update the body of an existing comment.

    Rejects payloads that carry their own 'id' (the URL is authoritative) or
    whose 'body' is not a string. Returns 200 with a Location header on
    success, 404 when no row matched, 422 for an unprocessable payload.
    """
    if request.json.get('id') is not None or not isinstance(request.json.get('body'), str):
        return "Request is unprocessable.", 422
    # Build the bind parameters explicitly instead of mutating request.json.
    params = {'id': id, 'body': request.json['body']}
    with psycopg2.connect(foodle.app.config['dsn']) as conn:
        with conn.cursor(cursor_factory=DictCursor) as curs:
            curs.execute(
                """
                UPDATE post_comments
                SET body = %(body)s
                WHERE id = %(id)s
                """, params)
            # The original used "is not 0", which relies on CPython's
            # small-int identity caching; compare by value instead.
            if curs.rowcount != 0:
                resp = make_response()
                resp.headers['location'] = '/post_comments/' + str(id)
                return resp, 200
            else:
                return "Entity not found.", 404
@post_comments_controller.route('/<int:post_id>/comments/<int:id>/', methods=['DELETE'])
def delete(post_id, id):
    """Delete a comment by id; 204 on success, 404 when no row matched.

    NOTE(review): post_id is bound from the route but not used to scope the
    DELETE — the comment id alone identifies the row; confirm that is
    intentional.
    """
    with psycopg2.connect(foodle.app.config['dsn']) as conn:
        with conn.cursor(cursor_factory=DictCursor) as curs:
            curs.execute(
                """
                DELETE FROM post_comments
                WHERE id = %s
                """,
                [id])
            # The original used "is not 0" (identity, not equality); compare
            # by value.
            if curs.rowcount != 0:
                return "", 204
            else:
                return "Entity not found.", 404
| gpl-3.0 |
illicitonion/givabit | lib/sdks/google_appengine_1.7.1/google_appengine/lib/django_0_96/django/test/testcases.py | 32 | 2036 | import re, doctest, unittest
from django.db import transaction
from django.core import management
from django.db.models import get_apps
# Strip the Python 2 long-integer suffix ("22L" -> "22") when the digits are
# not part of a larger identifier.
normalize_long_ints = lambda s: re.sub(r'(?<![\w])(\d+)L(?![\w])', '\\1', s)
class OutputChecker(doctest.OutputChecker):
    """Doctest output checker that treats long literals ("22L") as ints."""
    def check_output(self, want, got, optionflags):
        ok = doctest.OutputChecker.check_output(self, want, got, optionflags)
        # Doctest does an exact string comparison of output, which means long
        # integers aren't equal to normal integers ("22L" vs. "22"). The
        # following code normalizes long integers so that they equal normal
        # integers.
        if not ok:
            return normalize_long_ints(want) == normalize_long_ints(got)
        return ok
class DocTestRunner(doctest.DocTestRunner):
    """Doctest runner that enables ELLIPSIS and rolls back on DB errors."""
    def __init__(self, *args, **kwargs):
        doctest.DocTestRunner.__init__(self, *args, **kwargs)
        # Allow "..." wildcards in expected doctest output by default.
        self.optionflags = doctest.ELLIPSIS
    def report_unexpected_exception(self, out, test, example, exc_info):
        doctest.DocTestRunner.report_unexpected_exception(self,out,test,example,exc_info)
        # Rollback, in case of database errors. Otherwise they'd have
        # side effects on other tests.
        from django.db import transaction
        transaction.rollback_unless_managed()
class TestCase(unittest.TestCase):
    """unittest.TestCase that flushes the DB and loads fixtures per test."""
    def install_fixtures(self):
        """If the Test Case class has a 'fixtures' member, clear the database and
        install the named fixtures at the start of each test.
        """
        # Flush unconditionally so each test starts from a clean database.
        management.flush(verbosity=0, interactive=False)
        if hasattr(self, 'fixtures'):
            management.load_data(self.fixtures, verbosity=0)
    def run(self, result=None):
        """Wrapper around default run method so that user-defined Test Cases
        automatically call install_fixtures without having to include a call to
        super().
        """
        self.install_fixtures()
        super(TestCase, self).run(result)
| apache-2.0 |
chirilo/mozillians | vendor-local/lib/python/unidecode/x010.py | 252 | 4110 | data = (
'k', # 0x00
'kh', # 0x01
'g', # 0x02
'gh', # 0x03
'ng', # 0x04
'c', # 0x05
'ch', # 0x06
'j', # 0x07
'jh', # 0x08
'ny', # 0x09
'nny', # 0x0a
'tt', # 0x0b
'tth', # 0x0c
'dd', # 0x0d
'ddh', # 0x0e
'nn', # 0x0f
'tt', # 0x10
'th', # 0x11
'd', # 0x12
'dh', # 0x13
'n', # 0x14
'p', # 0x15
'ph', # 0x16
'b', # 0x17
'bh', # 0x18
'm', # 0x19
'y', # 0x1a
'r', # 0x1b
'l', # 0x1c
'w', # 0x1d
's', # 0x1e
'h', # 0x1f
'll', # 0x20
'a', # 0x21
'[?]', # 0x22
'i', # 0x23
'ii', # 0x24
'u', # 0x25
'uu', # 0x26
'e', # 0x27
'[?]', # 0x28
'o', # 0x29
'au', # 0x2a
'[?]', # 0x2b
'aa', # 0x2c
'i', # 0x2d
'ii', # 0x2e
'u', # 0x2f
'uu', # 0x30
'e', # 0x31
'ai', # 0x32
'[?]', # 0x33
'[?]', # 0x34
'[?]', # 0x35
'N', # 0x36
'\'', # 0x37
':', # 0x38
'', # 0x39
'[?]', # 0x3a
'[?]', # 0x3b
'[?]', # 0x3c
'[?]', # 0x3d
'[?]', # 0x3e
'[?]', # 0x3f
'0', # 0x40
'1', # 0x41
'2', # 0x42
'3', # 0x43
'4', # 0x44
'5', # 0x45
'6', # 0x46
'7', # 0x47
'8', # 0x48
'9', # 0x49
' / ', # 0x4a
' // ', # 0x4b
'n*', # 0x4c
'r*', # 0x4d
'l*', # 0x4e
'e*', # 0x4f
'sh', # 0x50
'ss', # 0x51
'R', # 0x52
'RR', # 0x53
'L', # 0x54
'LL', # 0x55
'R', # 0x56
'RR', # 0x57
'L', # 0x58
'LL', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'[?]', # 0x5c
'[?]', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'[?]', # 0x60
'[?]', # 0x61
'[?]', # 0x62
'[?]', # 0x63
'[?]', # 0x64
'[?]', # 0x65
'[?]', # 0x66
'[?]', # 0x67
'[?]', # 0x68
'[?]', # 0x69
'[?]', # 0x6a
'[?]', # 0x6b
'[?]', # 0x6c
'[?]', # 0x6d
'[?]', # 0x6e
'[?]', # 0x6f
'[?]', # 0x70
'[?]', # 0x71
'[?]', # 0x72
'[?]', # 0x73
'[?]', # 0x74
'[?]', # 0x75
'[?]', # 0x76
'[?]', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'[?]', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'[?]', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'[?]', # 0x81
'[?]', # 0x82
'[?]', # 0x83
'[?]', # 0x84
'[?]', # 0x85
'[?]', # 0x86
'[?]', # 0x87
'[?]', # 0x88
'[?]', # 0x89
'[?]', # 0x8a
'[?]', # 0x8b
'[?]', # 0x8c
'[?]', # 0x8d
'[?]', # 0x8e
'[?]', # 0x8f
'[?]', # 0x90
'[?]', # 0x91
'[?]', # 0x92
'[?]', # 0x93
'[?]', # 0x94
'[?]', # 0x95
'[?]', # 0x96
'[?]', # 0x97
'[?]', # 0x98
'[?]', # 0x99
'[?]', # 0x9a
'[?]', # 0x9b
'[?]', # 0x9c
'[?]', # 0x9d
'[?]', # 0x9e
'[?]', # 0x9f
'A', # 0xa0
'B', # 0xa1
'G', # 0xa2
'D', # 0xa3
'E', # 0xa4
'V', # 0xa5
'Z', # 0xa6
'T`', # 0xa7
'I', # 0xa8
'K', # 0xa9
'L', # 0xaa
'M', # 0xab
'N', # 0xac
'O', # 0xad
'P', # 0xae
'Zh', # 0xaf
'R', # 0xb0
'S', # 0xb1
'T', # 0xb2
'U', # 0xb3
'P`', # 0xb4
'K`', # 0xb5
'G\'', # 0xb6
'Q', # 0xb7
'Sh', # 0xb8
'Ch`', # 0xb9
'C`', # 0xba
'Z\'', # 0xbb
'C', # 0xbc
'Ch', # 0xbd
'X', # 0xbe
'J', # 0xbf
'H', # 0xc0
'E', # 0xc1
'Y', # 0xc2
'W', # 0xc3
'Xh', # 0xc4
'OE', # 0xc5
'[?]', # 0xc6
'[?]', # 0xc7
'[?]', # 0xc8
'[?]', # 0xc9
'[?]', # 0xca
'[?]', # 0xcb
'[?]', # 0xcc
'[?]', # 0xcd
'[?]', # 0xce
'[?]', # 0xcf
'a', # 0xd0
'b', # 0xd1
'g', # 0xd2
'd', # 0xd3
'e', # 0xd4
'v', # 0xd5
'z', # 0xd6
't`', # 0xd7
'i', # 0xd8
'k', # 0xd9
'l', # 0xda
'm', # 0xdb
'n', # 0xdc
'o', # 0xdd
'p', # 0xde
'zh', # 0xdf
'r', # 0xe0
's', # 0xe1
't', # 0xe2
'u', # 0xe3
'p`', # 0xe4
'k`', # 0xe5
'g\'', # 0xe6
'q', # 0xe7
'sh', # 0xe8
'ch`', # 0xe9
'c`', # 0xea
'z\'', # 0xeb
'c', # 0xec
'ch', # 0xed
'x', # 0xee
'j', # 0xef
'h', # 0xf0
'e', # 0xf1
'y', # 0xf2
'w', # 0xf3
'xh', # 0xf4
'oe', # 0xf5
'f', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
' // ', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
| bsd-3-clause |
StackStorm/python-mistralclient | mistralclient/tests/unit/v2/test_cli_actions.py | 1 | 6033 | # Copyright 2014 Mirantis, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
import six
from mistralclient.api.v2 import actions
from mistralclient.commands.v2 import actions as action_cmd
from mistralclient.commands.v2 import base as cmd_base
from mistralclient.tests.unit import base
ACTION_DICT = {
'id': '1234-4567-7894-7895',
'name': 'a',
'is_system': True,
'input': "param1",
'description': 'My cool action',
'tags': ['test'],
'created_at': '1',
'updated_at': '1'
}
ACTION_DEF = """
---
version: '2.0'
base: std.echo
base-parameters:
output: "<% $.str1 %><% $.str2 %>"
output: "<% $ %><% $ %>"
"""
ACTION_WITH_DEF_DICT = ACTION_DICT.copy()
ACTION_WITH_DEF_DICT.update({'definition': ACTION_DEF})
ACTION = actions.Action(mock, ACTION_DICT)
ACTION_WITH_DEF = actions.Action(mock, ACTION_WITH_DEF_DICT)
class TestCLIActionsV2(base.BaseCommandTest):
    """Tests for the v2 action CLI commands (create/update/list/get/...)."""

    # Row layout produced by the action formatters for ACTION:
    # (id, name, is_system, input, description, tags, created_at, updated_at)
    _EXPECTED_ROW = ('1234-4567-7894-7895', 'a', True, "param1",
                     'My cool action', 'test', '1', '1')

    @mock.patch('argparse.open', create=True)
    def test_create(self, mock_open):
        self.client.actions.create.return_value = [ACTION]
        result = self.call(action_cmd.Create, app_args=['1.txt'])
        self.assertEqual([self._EXPECTED_ROW], result[1])

    @mock.patch('argparse.open', create=True)
    def test_create_public(self, mock_open):
        self.client.actions.create.return_value = [ACTION]
        result = self.call(
            action_cmd.Create,
            app_args=['1.txt', '--public']
        )
        self.assertEqual([self._EXPECTED_ROW], result[1])
        # The --public flag must be forwarded as scope='public'.
        self.assertEqual(
            'public',
            self.client.actions.create.call_args[1]['scope']
        )

    @mock.patch('argparse.open', create=True)
    def test_create_long_input(self, mock_open):
        # An over-long "input" column is truncated by cmd_base.cut().
        long_input = ', '.join('var%s' % i for i in six.moves.xrange(10))
        long_input_dict = dict(ACTION_DICT, input=long_input)
        self.client.actions.create.return_value = [
            actions.Action(mock.Mock(), long_input_dict)
        ]
        result = self.call(action_cmd.Create, app_args=['1.txt'])
        expected_row = ('1234-4567-7894-7895', 'a', True,
                        cmd_base.cut(long_input),
                        'My cool action', 'test', '1', '1')
        self.assertEqual([expected_row], result[1])

    @mock.patch('argparse.open', create=True)
    def test_update(self, mock_open):
        self.client.actions.update.return_value = [ACTION]
        result = self.call(action_cmd.Update, app_args=['my_action.yaml'])
        self.assertEqual([self._EXPECTED_ROW], result[1])

    @mock.patch('argparse.open', create=True)
    def test_update_public(self, mock_open):
        self.client.actions.update.return_value = [ACTION]
        result = self.call(
            action_cmd.Update,
            app_args=['my_action.yaml', '--public']
        )
        self.assertEqual([self._EXPECTED_ROW], result[1])
        self.assertEqual(
            'public',
            self.client.actions.update.call_args[1]['scope']
        )

    def test_list(self):
        self.client.actions.list.return_value = [ACTION]
        result = self.call(action_cmd.List)
        self.assertEqual([self._EXPECTED_ROW], result[1])

    def test_get(self):
        self.client.actions.get.return_value = ACTION
        result = self.call(action_cmd.Get, app_args=['name'])
        # Get yields a single row, not a list of rows.
        self.assertEqual(self._EXPECTED_ROW, result[1])

    def test_delete(self):
        self.call(action_cmd.Delete, app_args=['name'])
        self.client.actions.delete.assert_called_once_with('name')

    def test_delete_with_multi_names(self):
        self.call(action_cmd.Delete, app_args=['name1', 'name2'])
        self.assertEqual(2, self.client.actions.delete.call_count)
        self.assertEqual(
            [mock.call('name1'), mock.call('name2')],
            self.client.actions.delete.call_args_list
        )

    def test_get_definition(self):
        self.client.actions.get.return_value = ACTION_WITH_DEF
        self.call(action_cmd.GetDefinition, app_args=['name'])
        # The raw definition is written straight to stdout.
        self.app.stdout.write.assert_called_with(ACTION_DEF)

    @mock.patch('argparse.open', create=True)
    def test_validate(self, mock_open):
        self.client.actions.validate.return_value = {'valid': True}
        result = self.call(action_cmd.Validate, app_args=['action.yaml'])
        self.assertEqual((True, None), result[1])

    @mock.patch('argparse.open', create=True)
    def test_validate_failed(self, mock_open):
        self.client.actions.validate.return_value = {
            'valid': False,
            'error': 'Invalid DSL...'
        }
        result = self.call(action_cmd.Validate, app_args=['action.yaml'])
        self.assertEqual((False, 'Invalid DSL...'), result[1])
| apache-2.0 |
MAKOSCAFEE/oppia | core/domain/user_jobs_one_off_test.py | 3 | 49652 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for user-related one-off computations."""
import ast
import datetime
import re
from constants import constants
from core.domain import collection_domain
from core.domain import collection_services
from core.domain import exp_services
from core.domain import event_services
from core.domain import feedback_services
from core.domain import rating_services
from core.domain import rights_manager
from core.domain import subscription_services
from core.domain import user_jobs_one_off
from core.domain import user_jobs_continuous_test
from core.domain import user_services
from core.platform import models
from core.tests import test_utils
import feconf
(user_models, feedback_models) = models.Registry.import_models(
[models.NAMES.user, models.NAMES.feedback])
taskqueue_services = models.Registry.import_taskqueue_services()
search_services = models.Registry.import_search_services()
class UserContributionsOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off user contributions job."""

    EXP_ID_1 = 'exp_id_1'
    EXP_ID_2 = 'exp_id_2'

    USER_A_EMAIL = 'a@example.com'
    USER_A_USERNAME = 'a'
    USER_B_EMAIL = 'b@example.com'
    USER_B_USERNAME = 'b'
    USER_C_EMAIL = 'c@example.com'
    USER_C_USERNAME = 'c'
    USER_D_EMAIL = 'd@example.com'
    USER_D_USERNAME = 'd'

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job and flushes its pending tasks."""
        job_id = user_jobs_one_off.UserContributionsOneOffJob.create_new()
        user_jobs_one_off.UserContributionsOneOffJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()

    def setUp(self):
        """Creates four users covering the created/edited combinations."""
        super(UserContributionsOneOffJobTests, self).setUp()
        # User A has no created or edited explorations
        # User B has one created exploration
        # User C has one edited exploration
        # User D has created an exploration and then edited it.
        # (This is used to check that there are no duplicate
        # entries in the contribution lists.)
        self.signup(self.USER_A_EMAIL, self.USER_A_USERNAME)
        self.user_a_id = self.get_user_id_from_email(self.USER_A_EMAIL)
        self.signup(self.USER_B_EMAIL, self.USER_B_USERNAME)
        self.user_b_id = self.get_user_id_from_email(self.USER_B_EMAIL)
        self.signup(self.USER_C_EMAIL, self.USER_C_USERNAME)
        self.user_c_id = self.get_user_id_from_email(self.USER_C_EMAIL)
        self.signup(self.USER_D_EMAIL, self.USER_D_USERNAME)
        self.user_d_id = self.get_user_id_from_email(self.USER_D_EMAIL)
        self.save_new_valid_exploration(
            self.EXP_ID_1, self.user_b_id, end_state_name='End')
        exp_services.update_exploration(self.user_c_id, self.EXP_ID_1, [{
            'cmd': 'edit_exploration_property',
            'property_name': 'objective',
            'new_value': 'the objective'
        }], 'Test edit')
        self.save_new_valid_exploration(
            self.EXP_ID_2, self.user_d_id, end_state_name='End')
        exp_services.update_exploration(self.user_d_id, self.EXP_ID_2, [{
            'cmd': 'edit_exploration_property',
            'property_name': 'objective',
            'new_value': 'the objective'
        }], 'Test edit')

    def test_null_case(self):
        """Tests the case where user has no created or edited explorations."""
        self._run_one_off_job()
        user_a_contributions_model = user_models.UserContributionsModel.get(
            self.user_a_id, strict=False)
        self.assertEqual(user_a_contributions_model.created_exploration_ids, [])
        self.assertEqual(user_a_contributions_model.edited_exploration_ids, [])

    def test_created_exp(self):
        """Tests the case where user has created (and therefore edited)
        an exploration."""
        self._run_one_off_job()
        user_b_contributions_model = user_models.UserContributionsModel.get(
            self.user_b_id)
        self.assertEqual(
            user_b_contributions_model.created_exploration_ids, [self.EXP_ID_1])
        self.assertEqual(
            user_b_contributions_model.edited_exploration_ids, [self.EXP_ID_1])

    def test_edited_exp(self):
        """Tests the case where user has an edited exploration."""
        self._run_one_off_job()
        user_c_contributions_model = user_models.UserContributionsModel.get(
            self.user_c_id)
        self.assertEqual(
            user_c_contributions_model.created_exploration_ids, [])
        self.assertEqual(
            user_c_contributions_model.edited_exploration_ids, [self.EXP_ID_1])

    def test_for_duplicates(self):
        """Tests the case where user has an edited exploration, and edits
        it again making sure it is not duplicated."""
        self._run_one_off_job()
        user_d_contributions_model = user_models.UserContributionsModel.get(
            self.user_d_id)
        self.assertEqual(
            user_d_contributions_model.edited_exploration_ids,
            [self.EXP_ID_2])
        self.assertEqual(
            user_d_contributions_model.created_exploration_ids,
            [self.EXP_ID_2])
class UserDefaultDashboardOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off default dashboard assignment job."""

    CREATOR_USER_EMAIL = 'creator@example.com'
    CREATOR_USER_USERNAME = 'creator'
    LEARNER_USER_EMAIL = 'learner@example.com'
    LEARNER_USER_USERNAME = 'learner'
    EXP_ID_1 = 'exp_id_1'
    EXP_ID_2 = 'exp_id_2'

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job and flushes its pending tasks."""
        job_id = (
            user_jobs_one_off.UserDefaultDashboardOneOffJob.create_new())
        user_jobs_one_off.UserDefaultDashboardOneOffJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()

    def test_default_dashboard(self):
        """Tests whether the one off jobs assigns the correct dashboard
        to the user.
        """
        self.signup(self.CREATOR_USER_EMAIL, self.CREATOR_USER_USERNAME)
        creator_user_id = self.get_user_id_from_email(
            self.CREATOR_USER_EMAIL)
        self.signup(self.LEARNER_USER_EMAIL, self.LEARNER_USER_USERNAME)
        learner_user_id = self.get_user_id_from_email(
            self.LEARNER_USER_EMAIL)
        # Only the creator user has created an exploration.
        self.save_new_valid_exploration(
            self.EXP_ID_1, creator_user_id, end_state_name='End')
        self._run_one_off_job()
        creator_settings = user_services.get_user_settings(creator_user_id)
        learner_settings = user_services.get_user_settings(learner_user_id)
        self.assertEqual(
            creator_settings.default_dashboard,
            constants.DASHBOARD_TYPE_CREATOR)
        self.assertEqual(
            learner_settings.default_dashboard,
            constants.DASHBOARD_TYPE_LEARNER)
class UsernameLengthDistributionOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off username length distribution job."""

    USER_A_EMAIL = 'a@example.com'
    USER_A_USERNAME = 'a'
    USER_B_EMAIL = 'ab@example.com'
    USER_B_USERNAME = 'ab'
    USER_C_EMAIL = 'bc@example.com'
    USER_C_USERNAME = 'bc'
    USER_D_EMAIL = 'bcd@example.com'
    USER_D_USERNAME = 'bcd'

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job.

        Returns:
            dict. Maps each username length (as a string) to the number of
            users whose username has that length.
        """
        job_id = (
            user_jobs_one_off.UsernameLengthDistributionOneOffJob.create_new())
        user_jobs_one_off.UsernameLengthDistributionOneOffJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_jobs_one_off.UsernameLengthDistributionOneOffJob.get_output(
                job_id))
        output = {}
        for stringified_distribution in stringified_output:
            # Each output line contains two integers: the length and the count.
            value = re.findall(r'\d+', stringified_distribution)
            # output['username length'] = number of users
            output[value[0]] = int(value[1])
        return output

    def test_null_case(self):
        """Tests the case when there are no signed up users but there is one
        default user having the username - 'tmpsuperadm1n'.
        """
        output = self._run_one_off_job()
        # number of users = 1.
        # length of usernames = 13 (tmpsuperadm1n).
        self.assertEqual(output['13'], 1)

    def test_single_user_case(self):
        """Tests the case when there is only one signed up user and a default
        user - 'tmpsuperadm1n'.
        """
        self.signup(self.USER_A_EMAIL, self.USER_A_USERNAME)
        output = self._run_one_off_job()
        # number of users = 2.
        # length of usernames = 13 (tmpsuperadm1n), 1 (a).
        self.assertEqual(output['13'], 1)
        self.assertEqual(output['1'], 1)

    def test_multiple_users_case(self):
        """Tests the case when there are multiple signed up users and a
        default user - 'tmpsuperadm1n'.
        """
        self.signup(self.USER_A_EMAIL, self.USER_A_USERNAME)
        self.signup(self.USER_B_EMAIL, self.USER_B_USERNAME)
        output = self._run_one_off_job()
        # number of users = 3
        # length of usernames = 13 (tmpsuperadm1n), 2 (ab), 1 (a).
        self.assertEqual(output['13'], 1)
        self.assertEqual(output['2'], 1)
        self.assertEqual(output['1'], 1)
        self.signup(self.USER_C_EMAIL, self.USER_C_USERNAME)
        self.signup(self.USER_D_EMAIL, self.USER_D_USERNAME)
        output = self._run_one_off_job()
        # number of users = 5
        # length of usernames = 13 (tmpsuperadm1n), 3 (bcd), 2 (ab, bc), 1 (a).
        self.assertEqual(output['13'], 1)
        self.assertEqual(output['3'], 1)
        self.assertEqual(output['2'], 2)
        self.assertEqual(output['1'], 1)
class LongUserBiosOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off long userbio length job."""

    USER_A_EMAIL = 'a@example.com'
    USER_A_USERNAME = 'a'
    USER_A_BIO = 'I am less than 500'
    USER_B_EMAIL = 'b@example.com'
    USER_B_USERNAME = 'b'
    USER_B_BIO = 'Long Bio' * 100
    USER_C_EMAIL = 'c@example.com'
    USER_C_USERNAME = 'c'
    USER_C_BIO = 'Same Bio' * 100
    USER_D_EMAIL = 'd@example.com'
    USER_D_USERNAME = 'd'
    USER_D_BIO = 'Diff Bio' * 300

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job.

        Returns:
            list. Each element is a [bio_length, usernames] pair; the tests
            below indicate that bios shorter than 500 characters are omitted.
        """
        job_id = (
            user_jobs_one_off.LongUserBiosOneOffJob.create_new())
        user_jobs_one_off.LongUserBiosOneOffJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()
        stringified_output = (
            user_jobs_one_off.LongUserBiosOneOffJob.get_output(
                job_id))
        eval_output = [ast.literal_eval(stringified_item)
                       for stringified_item in stringified_output]
        # Normalize the bio length from string to int for easy comparison.
        output = [[int(eval_item[0]), eval_item[1]]
                  for eval_item in eval_output]
        return output

    def test_no_userbio_returns_empty_list(self):
        """Tests the case when userbio is None."""
        self.signup(self.USER_C_EMAIL, self.USER_C_USERNAME)
        result = self._run_one_off_job()
        self.assertEqual(result, [])

    def test_short_userbio_returns_empty_list(self):
        """Tests the case where the userbio is less than 500 characters."""
        self.signup(self.USER_A_EMAIL, self.USER_A_USERNAME)
        user_id_a = self.get_user_id_from_email(self.USER_A_EMAIL)
        user_services.update_user_bio(user_id_a, self.USER_A_BIO)
        result = self._run_one_off_job()
        self.assertEqual(result, [])

    def test_long_userbio_length(self):
        """Tests the case where the userbio is more than 500 characters."""
        self.signup(self.USER_B_EMAIL, self.USER_B_USERNAME)
        user_id_b = self.get_user_id_from_email(self.USER_B_EMAIL)
        user_services.update_user_bio(user_id_b, self.USER_B_BIO)
        result = self._run_one_off_job()
        expected_result = [[800, ['b']]]
        self.assertEqual(result, expected_result)

    def test_same_userbio_length(self):
        """Tests the case where two users have same userbio length."""
        self.signup(self.USER_B_EMAIL, self.USER_B_USERNAME)
        user_id_b = self.get_user_id_from_email(self.USER_B_EMAIL)
        user_services.update_user_bio(user_id_b, self.USER_B_BIO)
        self.signup(self.USER_C_EMAIL, self.USER_C_USERNAME)
        user_id_c = self.get_user_id_from_email(self.USER_C_EMAIL)
        user_services.update_user_bio(user_id_c, self.USER_C_BIO)
        result = self._run_one_off_job()
        # Sort the usernames since the job's grouping order is not guaranteed.
        result[0][1].sort()
        expected_result = [[800, ['b', 'c']]]
        self.assertEqual(result, expected_result)

    def test_diff_userbio_length(self):
        """Tests the case where two users have different userbio lengths."""
        self.signup(self.USER_D_EMAIL, self.USER_D_USERNAME)
        user_id_d = self.get_user_id_from_email(self.USER_D_EMAIL)
        user_services.update_user_bio(user_id_d, self.USER_D_BIO)
        self.signup(self.USER_C_EMAIL, self.USER_C_USERNAME)
        user_id_c = self.get_user_id_from_email(self.USER_C_EMAIL)
        user_services.update_user_bio(user_id_c, self.USER_C_BIO)
        result = self._run_one_off_job()
        expected_result = [[800, ['c']], [2400, ['d']]]
        self.assertEqual(result, expected_result)
class DashboardSubscriptionsOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off dashboard subscriptions job."""

    EXP_ID_1 = 'exp_id_1'
    EXP_ID_2 = 'exp_id_2'
    COLLECTION_ID_1 = 'col_id_1'
    COLLECTION_ID_2 = 'col_id_2'
    EXP_ID_FOR_COLLECTION_1 = 'id_of_exp_in_collection_1'
    USER_A_EMAIL = 'a@example.com'
    USER_A_USERNAME = 'a'
    USER_B_EMAIL = 'b@example.com'
    USER_B_USERNAME = 'b'
    USER_C_EMAIL = 'c@example.com'
    USER_C_USERNAME = 'c'

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job."""
        job_id = user_jobs_one_off.DashboardSubscriptionsOneOffJob.create_new()
        user_jobs_one_off.DashboardSubscriptionsOneOffJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()

    def _null_fn(self, *args, **kwargs):
        """A mock for functions of the form subscribe_to_*() to represent
        behavior prior to the implementation of subscriptions.
        """
        pass

    def setUp(self):
        """Creates three users; user A creates an exploration with the
        subscription hooks disabled, simulating pre-subscriptions data.
        """
        super(DashboardSubscriptionsOneOffJobTests, self).setUp()
        self.signup(self.USER_A_EMAIL, self.USER_A_USERNAME)
        self.user_a_id = self.get_user_id_from_email(self.USER_A_EMAIL)
        self.signup(self.USER_B_EMAIL, self.USER_B_USERNAME)
        self.user_b_id = self.get_user_id_from_email(self.USER_B_EMAIL)
        self.signup(self.USER_C_EMAIL, self.USER_C_USERNAME)
        self.user_c_id = self.get_user_id_from_email(self.USER_C_EMAIL)
        self.user_a = user_services.UserActionsInfo(self.user_a_id)
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ):
            # User A creates and saves a new valid exploration.
            self.save_new_valid_exploration(
                self.EXP_ID_1, self.user_a_id, end_state_name='End')

    def test_null_case(self):
        """A user with no activity gets no subscriptions model."""
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id, strict=False)
        self.assertEqual(user_b_subscriptions_model, None)
        self._run_one_off_job()
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id, strict=False)
        self.assertEqual(user_b_subscriptions_model, None)

    def test_feedback_thread_subscription(self):
        """Both the thread creator and a replier get subscribed to it."""
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id, strict=False)
        user_c_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_c_id, strict=False)
        self.assertEqual(user_b_subscriptions_model, None)
        self.assertEqual(user_c_subscriptions_model, None)
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ):
            # User B starts a feedback thread.
            feedback_services.create_thread(
                self.EXP_ID_1, None, self.user_b_id, 'subject', 'text')
            # User C adds to that thread.
            thread_id = feedback_services.get_all_threads(
                self.EXP_ID_1, False)[0].get_thread_id()
            feedback_services.create_message(
                self.EXP_ID_1, thread_id, self.user_c_id, None, None,
                'more text')
        self._run_one_off_job()
        # Both users are subscribed to the feedback thread.
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id)
        user_c_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_c_id)
        self.assertEqual(user_b_subscriptions_model.activity_ids, [])
        self.assertEqual(user_c_subscriptions_model.activity_ids, [])
        full_thread_id = (
            feedback_models.FeedbackThreadModel.generate_full_thread_id(
                self.EXP_ID_1, thread_id))
        self.assertEqual(
            user_b_subscriptions_model.feedback_thread_ids, [full_thread_id])
        self.assertEqual(
            user_c_subscriptions_model.feedback_thread_ids, [full_thread_id])

    def test_exploration_subscription(self):
        """Owners and editors get subscribed; viewers do not."""
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ):
            # User A adds user B as an editor to the exploration.
            rights_manager.assign_role_for_exploration(
                self.user_a, self.EXP_ID_1, self.user_b_id,
                rights_manager.ROLE_EDITOR)
            # User A adds user C as a viewer of the exploration.
            rights_manager.assign_role_for_exploration(
                self.user_a, self.EXP_ID_1, self.user_c_id,
                rights_manager.ROLE_VIEWER)
        self._run_one_off_job()
        # Users A and B are subscribed to the exploration. User C is not.
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id)
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id)
        user_c_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_c_id, strict=False)
        self.assertEqual(
            user_a_subscriptions_model.activity_ids, [self.EXP_ID_1])
        self.assertEqual(
            user_b_subscriptions_model.activity_ids, [self.EXP_ID_1])
        self.assertEqual(user_a_subscriptions_model.feedback_thread_ids, [])
        self.assertEqual(user_b_subscriptions_model.feedback_thread_ids, [])
        self.assertEqual(user_c_subscriptions_model, None)

    def test_two_explorations(self):
        """A creator of two explorations is subscribed to both."""
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ):
            # User A creates and saves another valid exploration.
            self.save_new_valid_exploration(self.EXP_ID_2, self.user_a_id)
        self._run_one_off_job()
        # User A is subscribed to two explorations.
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id)
        self.assertEqual(
            sorted(user_a_subscriptions_model.activity_ids),
            sorted([self.EXP_ID_1, self.EXP_ID_2]))

    def test_community_owned_exploration(self):
        """Pre-release owners/editors stay subscribed after release;
        later editors of the community-owned exploration are not.
        """
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ):
            # User A adds user B as an editor to the exploration.
            rights_manager.assign_role_for_exploration(
                self.user_a, self.EXP_ID_1, self.user_b_id,
                rights_manager.ROLE_EDITOR)
            # The exploration becomes community-owned.
            rights_manager.publish_exploration(self.user_a, self.EXP_ID_1)
            rights_manager.release_ownership_of_exploration(
                self.user_a, self.EXP_ID_1)
            # User C edits the exploration.
            exp_services.update_exploration(
                self.user_c_id, self.EXP_ID_1, [], 'Update exploration')
        self._run_one_off_job()
        # User A and user B are subscribed to the exploration; user C is not.
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id)
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id)
        user_c_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_c_id, strict=False)
        self.assertEqual(
            user_a_subscriptions_model.activity_ids, [self.EXP_ID_1])
        self.assertEqual(
            user_b_subscriptions_model.activity_ids, [self.EXP_ID_1])
        self.assertEqual(user_c_subscriptions_model, None)

    def test_deleted_exploration(self):
        """Deleted explorations produce no subscriptions."""
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ):
            # User A deletes the exploration.
            exp_services.delete_exploration(self.user_a_id, self.EXP_ID_1)
        self._run_one_off_job()
        # User A is not subscribed to the exploration.
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id, strict=False)
        self.assertEqual(user_a_subscriptions_model, None)

    def test_collection_subscription(self):
        """Collection owners and editors get subscribed; viewers do not."""
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_collection', self._null_fn
        ):
            # User A creates and saves a new valid collection.
            self.save_new_valid_collection(
                self.COLLECTION_ID_1, self.user_a_id,
                exploration_id=self.EXP_ID_FOR_COLLECTION_1)
            # User A adds user B as an editor to the collection.
            rights_manager.assign_role_for_collection(
                self.user_a, self.COLLECTION_ID_1, self.user_b_id,
                rights_manager.ROLE_EDITOR)
            # User A adds user C as a viewer of the collection.
            rights_manager.assign_role_for_collection(
                self.user_a, self.COLLECTION_ID_1, self.user_c_id,
                rights_manager.ROLE_VIEWER)
        self._run_one_off_job()
        # Users A and B are subscribed to the collection. User C is not.
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id)
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id)
        user_c_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_c_id, strict=False)
        self.assertEqual(
            user_a_subscriptions_model.collection_ids, [self.COLLECTION_ID_1])
        # User A is also subscribed to the exploration within the collection
        # because they created both.
        self.assertEqual(
            sorted(user_a_subscriptions_model.activity_ids), [
                self.EXP_ID_1, self.EXP_ID_FOR_COLLECTION_1])
        self.assertEqual(
            user_b_subscriptions_model.collection_ids, [self.COLLECTION_ID_1])
        self.assertEqual(user_a_subscriptions_model.feedback_thread_ids, [])
        self.assertEqual(user_b_subscriptions_model.feedback_thread_ids, [])
        self.assertEqual(user_c_subscriptions_model, None)

    def test_two_collections(self):
        """A creator of two collections is subscribed to both."""
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_collection', self._null_fn
        ):
            # User A creates and saves a new valid collection.
            self.save_new_valid_collection(
                self.COLLECTION_ID_1, self.user_a_id,
                exploration_id=self.EXP_ID_FOR_COLLECTION_1)
            # User A creates and saves another valid collection.
            self.save_new_valid_collection(
                self.COLLECTION_ID_2, self.user_a_id,
                exploration_id=self.EXP_ID_FOR_COLLECTION_1)
        self._run_one_off_job()
        # User A is subscribed to two collections.
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id)
        self.assertEqual(
            sorted(user_a_subscriptions_model.collection_ids),
            sorted([self.COLLECTION_ID_1, self.COLLECTION_ID_2]))

    def test_deleted_collection(self):
        """Deleted collections produce no subscriptions."""
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_exploration', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_collection', self._null_fn
        ):
            # User A creates and saves a new collection.
            self.save_new_default_collection(
                self.COLLECTION_ID_1, self.user_a_id)
            # User A deletes the collection.
            collection_services.delete_collection(
                self.user_a_id, self.COLLECTION_ID_1)
            # User A deletes the exploration from earlier.
            exp_services.delete_exploration(self.user_a_id, self.EXP_ID_1)
        self._run_one_off_job()
        # User A is not subscribed to the collection.
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id, strict=False)
        self.assertEqual(user_a_subscriptions_model, None)

    def test_adding_exploration_to_collection(self):
        """Adding someone else's exploration to your collection subscribes
        you to the collection only, and them to the exploration only.
        """
        with self.swap(
            subscription_services, 'subscribe_to_thread', self._null_fn
        ), self.swap(
            subscription_services, 'subscribe_to_collection', self._null_fn
        ):
            # User B creates and saves a new collection.
            self.save_new_default_collection(
                self.COLLECTION_ID_1, self.user_b_id)
            # User B adds the exploration created by user A to the collection.
            collection_services.update_collection(
                self.user_b_id, self.COLLECTION_ID_1, [{
                    'cmd': collection_domain.CMD_ADD_COLLECTION_NODE,
                    'exploration_id': self.EXP_ID_1
                }], 'Add new exploration to collection.')
        # Users A and B have no subscriptions (to either explorations or
        # collections).
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id, strict=False)
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id, strict=False)
        self.assertEqual(user_a_subscriptions_model, None)
        self.assertEqual(user_b_subscriptions_model, None)
        self._run_one_off_job()
        user_a_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_a_id)
        user_b_subscriptions_model = user_models.UserSubscriptionsModel.get(
            self.user_b_id)
        # User B should be subscribed to the collection and user A to the
        # exploration.
        self.assertEqual(
            user_a_subscriptions_model.activity_ids, [self.EXP_ID_1])
        self.assertEqual(
            user_a_subscriptions_model.collection_ids, [])
        self.assertEqual(
            user_b_subscriptions_model.activity_ids, [])
        self.assertEqual(
            user_b_subscriptions_model.collection_ids, [self.COLLECTION_ID_1])
class DashboardStatsOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off dashboard stats job.

    Fix: the deprecated ``assertEquals`` alias (removed in Python 3.12) is
    replaced with ``assertEqual`` throughout.
    """

    CURRENT_DATE_AS_STRING = user_services.get_current_date_as_string()
    DATE_AFTER_ONE_WEEK = (
        (datetime.datetime.utcnow() + datetime.timedelta(7)).strftime(
            feconf.DASHBOARD_STATS_DATETIME_STRING_FORMAT))
    USER_SESSION_ID = 'session1'
    EXP_ID_1 = 'exp_id_1'
    EXP_ID_2 = 'exp_id_2'
    EXP_VERSION = 1

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job and flushes its pending tasks."""
        job_id = user_jobs_one_off.DashboardStatsOneOffJob.create_new()
        user_jobs_one_off.DashboardStatsOneOffJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()

    def setUp(self):
        """Creates the owner user whose dashboard stats are examined."""
        super(DashboardStatsOneOffJobTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)

    def _mock_get_current_date_as_string(self):
        """Returns a fixed date string so stats land in a known bucket."""
        return self.CURRENT_DATE_AS_STRING

    def _rate_exploration(self, user_id, exp_id, rating):
        """Assigns a rating to the given exploration on behalf of a user."""
        rating_services.assign_rating_to_exploration(user_id, exp_id, rating)

    def _record_play(self, exp_id, state):
        """Records a 'start exploration' event for the given state."""
        event_services.StartExplorationEventHandler.record(
            exp_id, self.EXP_VERSION, state, self.USER_SESSION_ID, {},
            feconf.PLAY_TYPE_NORMAL)

    def test_weekly_stats_if_continuous_stats_job_has_not_been_run(self):
        """Without the continuous aggregator, stats default to zeros."""
        exploration = self.save_new_valid_exploration(
            self.EXP_ID_1, self.owner_id)
        exp_id = exploration.id
        init_state_name = exploration.init_state_name
        self._record_play(exp_id, init_state_name)
        self._rate_exploration('user1', exp_id, 5)
        weekly_stats = user_services.get_weekly_dashboard_stats(self.owner_id)
        self.assertEqual(weekly_stats, None)
        self.assertEqual(
            user_services.get_last_week_dashboard_stats(self.owner_id), None)
        with self.swap(user_services,
                       'get_current_date_as_string',
                       self._mock_get_current_date_as_string):
            self._run_one_off_job()
        weekly_stats = user_services.get_weekly_dashboard_stats(self.owner_id)
        expected_results_list = [{
            self._mock_get_current_date_as_string(): {
                'num_ratings': 0,
                'average_ratings': None,
                'total_plays': 0
            }
        }]
        self.assertEqual(weekly_stats, expected_results_list)
        self.assertEqual(
            user_services.get_last_week_dashboard_stats(self.owner_id),
            expected_results_list[0])

    def test_weekly_stats_if_no_explorations(self):
        """An owner with no explorations gets an all-zero stats entry."""
        (user_jobs_continuous_test.ModifiedUserStatsAggregator.
         start_computation())
        self.process_and_flush_pending_tasks()
        with self.swap(user_services,
                       'get_current_date_as_string',
                       self._mock_get_current_date_as_string):
            self._run_one_off_job()
        weekly_stats = user_services.get_weekly_dashboard_stats(self.owner_id)
        self.assertEqual(weekly_stats, [{
            self._mock_get_current_date_as_string(): {
                'num_ratings': 0,
                'average_ratings': None,
                'total_plays': 0
            }
        }])

    def test_weekly_stats_for_single_exploration(self):
        """One play and one rating are reflected in the weekly stats."""
        exploration = self.save_new_valid_exploration(
            self.EXP_ID_1, self.owner_id)
        exp_id = exploration.id
        init_state_name = exploration.init_state_name
        self._record_play(exp_id, init_state_name)
        self._rate_exploration('user1', exp_id, 5)
        (user_jobs_continuous_test.ModifiedUserStatsAggregator.
         start_computation())
        self.process_and_flush_pending_tasks()
        with self.swap(user_services,
                       'get_current_date_as_string',
                       self._mock_get_current_date_as_string):
            self._run_one_off_job()
        weekly_stats = user_services.get_weekly_dashboard_stats(self.owner_id)
        self.assertEqual(weekly_stats, [{
            self._mock_get_current_date_as_string(): {
                'num_ratings': 1,
                'average_ratings': 5.0,
                'total_plays': 1
            }
        }])

    def test_weekly_stats_for_multiple_explorations(self):
        """Ratings across explorations are aggregated (avg of 5 and 4)."""
        exploration_1 = self.save_new_valid_exploration(
            self.EXP_ID_1, self.owner_id)
        exp_id_1 = exploration_1.id
        exploration_2 = self.save_new_valid_exploration(
            self.EXP_ID_2, self.owner_id)
        exp_id_2 = exploration_2.id
        init_state_name_1 = exploration_1.init_state_name
        self._record_play(exp_id_1, init_state_name_1)
        self._rate_exploration('user1', exp_id_1, 5)
        self._rate_exploration('user2', exp_id_2, 4)
        (user_jobs_continuous_test.ModifiedUserStatsAggregator.
         start_computation())
        self.process_and_flush_pending_tasks()
        with self.swap(user_services,
                       'get_current_date_as_string',
                       self._mock_get_current_date_as_string):
            self._run_one_off_job()
        weekly_stats = user_services.get_weekly_dashboard_stats(self.owner_id)
        self.assertEqual(weekly_stats, [{
            self._mock_get_current_date_as_string(): {
                'num_ratings': 2,
                'average_ratings': 4.5,
                'total_plays': 1
            }
        }])

    def test_stats_for_multiple_weeks(self):
        """Running the job in two different weeks appends a second entry,
        and the latest entry is returned as last week's stats.
        """
        exploration = self.save_new_valid_exploration(
            self.EXP_ID_1, self.owner_id)
        exp_id = exploration.id
        init_state_name = exploration.init_state_name
        self._rate_exploration('user1', exp_id, 4)
        self._record_play(exp_id, init_state_name)
        self._record_play(exp_id, init_state_name)
        (user_jobs_continuous_test.ModifiedUserStatsAggregator.
         start_computation())
        self.process_and_flush_pending_tasks()
        with self.swap(user_services,
                       'get_current_date_as_string',
                       self._mock_get_current_date_as_string):
            self._run_one_off_job()
        weekly_stats = user_services.get_weekly_dashboard_stats(self.owner_id)
        self.assertEqual(weekly_stats, [{
            self._mock_get_current_date_as_string(): {
                'num_ratings': 1,
                'average_ratings': 4.0,
                'total_plays': 2
            }
        }])
        # Restart the aggregator so it picks up the second rating.
        (user_jobs_continuous_test.ModifiedUserStatsAggregator.
         stop_computation(self.owner_id))
        self.process_and_flush_pending_tasks()
        self._rate_exploration('user2', exp_id, 2)
        (user_jobs_continuous_test.ModifiedUserStatsAggregator.
         start_computation())
        self.process_and_flush_pending_tasks()

        def _mock_get_date_after_one_week():
            """Returns the date of the next week."""
            return self.DATE_AFTER_ONE_WEEK

        with self.swap(user_services,
                       'get_current_date_as_string',
                       _mock_get_date_after_one_week):
            self._run_one_off_job()
        expected_results_list = [
            {
                self._mock_get_current_date_as_string(): {
                    'num_ratings': 1,
                    'average_ratings': 4.0,
                    'total_plays': 2
                }
            },
            {
                _mock_get_date_after_one_week(): {
                    'num_ratings': 2,
                    'average_ratings': 3.0,
                    'total_plays': 2
                }
            }
        ]
        weekly_stats = user_services.get_weekly_dashboard_stats(self.owner_id)
        self.assertEqual(weekly_stats, expected_results_list)
        self.assertEqual(
            user_services.get_last_week_dashboard_stats(self.owner_id),
            expected_results_list[1])
class UserFirstContributionMsecOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off job that backfills each user's
    first_contribution_msec timestamp from published explorations.
    """

    # Exploration id used throughout these tests.
    EXP_ID = 'test_exp'

    def setUp(self):
        """Creates an admin, an owner and an editor user."""
        super(UserFirstContributionMsecOneOffJobTests, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.set_admins([self.ADMIN_USERNAME])
        self.admin = user_services.UserActionsInfo(self.admin_id)
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.owner = user_services.UserActionsInfo(self.owner_id)
        self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
        self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)

    def test_contribution_msec_updates_on_published_explorations(self):
        """Publishing and editing an exploration should set the contribution
        timestamp for everyone involved when the job runs.
        """
        exploration = self.save_new_valid_exploration(
            self.EXP_ID, self.admin_id, end_state_name='End')
        init_state_name = exploration.init_state_name

        # Test that no contribution time is set.
        job_id = (
            user_jobs_one_off.UserFirstContributionMsecOneOffJob.create_new())
        user_jobs_one_off.UserFirstContributionMsecOneOffJob.enqueue(job_id)
        self.process_and_flush_pending_tasks()
        self.assertIsNone(
            user_services.get_user_settings(
                self.admin_id).first_contribution_msec)

        # Test all owners and editors of exploration after publication have
        # updated times.
        exp_services.publish_exploration_and_update_user_profiles(
            self.admin, self.EXP_ID)
        rights_manager.release_ownership_of_exploration(
            self.admin, self.EXP_ID)
        exp_services.update_exploration(
            self.editor_id, self.EXP_ID, [{
                'cmd': 'edit_state_property',
                'state_name': init_state_name,
                'property_name': 'widget_id',
                'new_value': 'MultipleChoiceInput'
            }], 'commit')
        job_id = (
            user_jobs_one_off.UserFirstContributionMsecOneOffJob.create_new())
        user_jobs_one_off.UserFirstContributionMsecOneOffJob.enqueue(job_id)
        self.process_and_flush_pending_tasks()
        self.assertIsNotNone(user_services.get_user_settings(
            self.admin_id).first_contribution_msec)
        self.assertIsNotNone(user_services.get_user_settings(
            self.editor_id).first_contribution_msec)

    def test_contribution_msec_does_not_update_on_unpublished_explorations(
            self):
        """The job must skip explorations that were unpublished again."""
        self.save_new_valid_exploration(
            self.EXP_ID, self.owner_id, end_state_name='End')
        exp_services.publish_exploration_and_update_user_profiles(
            self.owner, self.EXP_ID)
        # We now manually reset the user's first_contribution_msec to None.
        # This is to test that the one off job skips over the unpublished
        # exploration and does not reset the user's first_contribution_msec.
        user_services._update_first_contribution_msec(  # pylint: disable=protected-access
            self.owner_id, None)
        rights_manager.unpublish_exploration(self.admin, self.EXP_ID)

        # Test that first contribution time is not set for unpublished
        # explorations.
        job_id = (
            user_jobs_one_off.UserFirstContributionMsecOneOffJob.create_new())
        user_jobs_one_off.UserFirstContributionMsecOneOffJob.enqueue(job_id)
        self.process_and_flush_pending_tasks()
        self.assertIsNone(user_services.get_user_settings(
            self.owner_id).first_contribution_msec)
class UserProfilePictureOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off job that backfills missing profile pictures."""

    # Sentinel value returned by the mocked gravatar fetcher.
    FETCHED_GRAVATAR = 'fetched_gravatar'

    def setUp(self):
        super(UserProfilePictureOneOffJobTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)

    def test_new_profile_picture_is_generated_if_it_does_not_exist(self):
        """A missing data URL should be replaced by a fetched gravatar."""
        user_services.update_profile_picture_data_url(self.owner_id, None)

        # Before the job runs, the data URL is None.
        user_settings = user_services.get_user_settings(self.owner_id)
        self.assertIsNone(user_settings.profile_picture_data_url)

        job_id = (
            user_jobs_one_off.UserProfilePictureOneOffJob.create_new())
        user_jobs_one_off.UserProfilePictureOneOffJob.enqueue(job_id)

        def _mock_fetch_gravatar(unused_email):
            """Stub fetcher that returns a fixed sentinel value."""
            return self.FETCHED_GRAVATAR

        with self.swap(user_services, 'fetch_gravatar', _mock_fetch_gravatar):
            self.process_and_flush_pending_tasks()

        # After the job runs, the data URL has been updated.
        new_user_settings = user_services.get_user_settings(self.owner_id)
        self.assertEqual(
            new_user_settings.profile_picture_data_url, self.FETCHED_GRAVATAR)

    def test_profile_picture_is_not_regenerated_if_it_already_exists(self):
        """An existing data URL must be left untouched by the job."""
        user_services.update_profile_picture_data_url(
            self.owner_id, 'manually_added_data_url')

        # Before the job runs, the data URL is the manually-added one.
        user_settings = user_services.get_user_settings(self.owner_id)
        self.assertEqual(
            user_settings.profile_picture_data_url, 'manually_added_data_url')

        job_id = (
            user_jobs_one_off.UserProfilePictureOneOffJob.create_new())
        user_jobs_one_off.UserProfilePictureOneOffJob.enqueue(job_id)

        def _mock_fetch_gravatar(unused_email):
            """Stub fetcher that returns a fixed sentinel value."""
            return self.FETCHED_GRAVATAR

        with self.swap(user_services, 'fetch_gravatar', _mock_fetch_gravatar):
            self.process_and_flush_pending_tasks()

        # After the job runs, the data URL is still the manually-added one.
        new_user_settings = user_services.get_user_settings(self.owner_id)
        self.assertEqual(
            new_user_settings.profile_picture_data_url,
            'manually_added_data_url')
class UserLastExplorationActivityOneOffJobTests(test_utils.GenericTestBase):
    """Tests for the one-off job that backfills users'
    last_created_an_exploration / last_edited_an_exploration timestamps.
    """

    def setUp(self):
        super(UserLastExplorationActivityOneOffJobTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
        self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
        self.exp_id = 'exp'

    def _run_one_off_job(self):
        """Runs the one-off MapReduce job."""
        job_id = (
            user_jobs_one_off.UserLastExplorationActivityOneOffJob.create_new())
        user_jobs_one_off.UserLastExplorationActivityOneOffJob.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        self.process_and_flush_pending_tasks()

    def test_that_last_created_time_is_updated(self):
        """Creating an exploration should backfill both timestamps for the
        creator after the job runs.
        """
        self.login(self.OWNER_EMAIL)
        self.save_new_valid_exploration(
            self.exp_id, self.owner_id, end_state_name='End')
        self.logout()

        # Manually clear the timestamp so the job has something to backfill.
        user_settings = user_services.get_user_settings(self.owner_id)
        user_settings.last_created_an_exploration = None
        user_services._save_user_settings(user_settings)  # pylint: disable=protected-access

        owner_settings = user_services.get_user_settings(self.owner_id)
        self.assertIsNone(owner_settings.last_created_an_exploration)
        self.assertIsNone(owner_settings.last_edited_an_exploration)

        self._run_one_off_job()

        owner_settings = user_services.get_user_settings(self.owner_id)
        self.assertIsNotNone(owner_settings.last_created_an_exploration)
        self.assertIsNotNone(owner_settings.last_edited_an_exploration)

    def test_that_last_edited_time_is_updated(self):
        """Editing someone else's exploration should backfill only the edit
        timestamp for the editor.
        """
        self.login(self.OWNER_EMAIL)
        self.save_new_valid_exploration(
            self.exp_id, self.owner_id, end_state_name='End')
        self.logout()
        self.login(self.EDITOR_EMAIL)
        exp_services.update_exploration(self.editor_id, self.exp_id, [{
            'cmd': 'edit_exploration_property',
            'property_name': 'objective',
            'new_value': 'the objective'
        }], 'Test edit')
        self.logout()

        # Manually clear the timestamp so the job has something to backfill.
        user_settings = user_services.get_user_settings(self.editor_id)
        user_settings.last_edited_an_exploration = None
        user_services._save_user_settings(user_settings)  # pylint: disable=protected-access

        editor_settings = user_services.get_user_settings(self.editor_id)
        self.assertIsNone(editor_settings.last_created_an_exploration)
        self.assertIsNone(editor_settings.last_edited_an_exploration)

        self._run_one_off_job()

        editor_settings = user_services.get_user_settings(self.editor_id)
        self.assertIsNotNone(editor_settings.last_edited_an_exploration)
        self.assertIsNone(editor_settings.last_created_an_exploration)

    def test_that_last_edited_and_created_time_both_updated(self):
        """Owner (creator + editor) gets both timestamps; the editor only
        gets the edit timestamp.
        """
        self.login(self.OWNER_EMAIL)
        self.save_new_valid_exploration(
            self.exp_id, self.owner_id, end_state_name='End')
        exp_services.update_exploration(self.owner_id, self.exp_id, [{
            'cmd': 'edit_exploration_property',
            'property_name': 'objective',
            'new_value': 'the objective'
        }], 'Test edit')
        self.logout()
        self.login(self.EDITOR_EMAIL)
        exp_services.update_exploration(self.editor_id, self.exp_id, [{
            'cmd': 'edit_exploration_property',
            'property_name': 'objective',
            'new_value': 'new objective'
        }], 'Test edit new')
        self.logout()

        # Manually clear all timestamps so the job has work to do.
        user_settings = user_services.get_user_settings(self.owner_id)
        user_settings.last_created_an_exploration = None
        user_settings.last_edited_an_exploration = None
        user_services._save_user_settings(user_settings)  # pylint: disable=protected-access

        user_settings = user_services.get_user_settings(self.editor_id)
        user_settings.last_edited_an_exploration = None
        user_services._save_user_settings(user_settings)  # pylint: disable=protected-access

        owner_settings = user_services.get_user_settings(self.owner_id)
        editor_settings = user_services.get_user_settings(self.editor_id)

        self.assertIsNone(owner_settings.last_created_an_exploration)
        self.assertIsNone(owner_settings.last_edited_an_exploration)
        self.assertIsNone(editor_settings.last_created_an_exploration)
        self.assertIsNone(editor_settings.last_edited_an_exploration)

        self._run_one_off_job()

        owner_settings = user_services.get_user_settings(self.owner_id)
        editor_settings = user_services.get_user_settings(self.editor_id)

        self.assertIsNotNone(owner_settings.last_edited_an_exploration)
        self.assertIsNotNone(owner_settings.last_created_an_exploration)
        self.assertIsNotNone(editor_settings.last_edited_an_exploration)
        self.assertIsNone(editor_settings.last_created_an_exploration)

    def test_that_last_edited_and_created_time_are_not_updated(self):
        """With no exploration activity the job must leave both timestamps
        unset.
        """
        user_settings = user_services.get_user_settings(self.owner_id)
        user_settings.last_created_an_exploration = None
        user_settings.last_edited_an_exploration = None
        user_services._save_user_settings(user_settings)  # pylint: disable=protected-access

        owner_settings = user_services.get_user_settings(self.owner_id)

        self.assertIsNone(owner_settings.last_created_an_exploration)
        self.assertIsNone(owner_settings.last_edited_an_exploration)

        self._run_one_off_job()

        owner_settings = user_services.get_user_settings(self.owner_id)
        self.assertIsNone(owner_settings.last_created_an_exploration)
        self.assertIsNone(owner_settings.last_edited_an_exploration)
| apache-2.0 |
darkryder/django | django/contrib/contenttypes/fields.py | 49 | 26120 | from __future__ import unicode_literals
from collections import defaultdict
from django.contrib.contenttypes.models import ContentType
from django.core import checks
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist
from django.db import DEFAULT_DB_ALIAS, models, router, transaction
from django.db.models import DO_NOTHING
from django.db.models.base import ModelBase, make_foreign_order_accessors
from django.db.models.fields.related import (
ForeignObject, ForeignObjectRel, ReverseManyToOneDescriptor,
lazy_related_operation,
)
from django.db.models.query_utils import PathInfo
from django.utils.encoding import python_2_unicode_compatible, smart_text
from django.utils.functional import cached_property
@python_2_unicode_compatible
class GenericForeignKey(object):
    """
    Provide a generic many-to-one relation through the ``content_type`` and
    ``object_id`` fields.

    This class also doubles as an accessor to the related object (similar to
    ForwardManyToOneDescriptor) by adding itself as a model attribute.
    """

    # Field flags
    auto_created = False
    concrete = False
    editable = False
    hidden = False

    is_relation = True
    many_to_many = False
    many_to_one = True
    one_to_many = False
    one_to_one = False
    related_model = None
    remote_field = None

    def __init__(self, ct_field='content_type', fk_field='object_id', for_concrete_model=True):
        # ct_field/fk_field are the *names* of the concrete model fields
        # that back this virtual relation.
        self.ct_field = ct_field
        self.fk_field = fk_field
        self.for_concrete_model = for_concrete_model
        self.editable = False
        self.rel = None
        self.column = None

    def contribute_to_class(self, cls, name, **kwargs):
        """Register this field on model class `cls` under attribute `name`."""
        self.name = name
        self.model = cls
        # Per-instance attribute used to cache the resolved related object.
        self.cache_attr = "_%s_cache" % name
        cls._meta.add_field(self, private=True)
        # Install this object as a descriptor on the model class.
        setattr(cls, name, self)

    def get_filter_kwargs_for_object(self, obj):
        """See corresponding method on Field"""
        return {
            self.fk_field: getattr(obj, self.fk_field),
            self.ct_field: getattr(obj, self.ct_field),
        }

    def get_forward_related_filter(self, obj):
        """See corresponding method on RelatedField"""
        return {
            self.fk_field: obj.pk,
            self.ct_field: ContentType.objects.get_for_model(obj).pk,
        }

    def __str__(self):
        model = self.model
        app = model._meta.app_label
        return '%s.%s.%s' % (app, model._meta.object_name, self.name)

    def check(self, **kwargs):
        """Run system checks for this field; returns a list of check Errors."""
        errors = []
        errors.extend(self._check_field_name())
        errors.extend(self._check_object_id_field())
        errors.extend(self._check_content_type_field())
        return errors

    def _check_field_name(self):
        # Trailing underscores clash with Django's query-lookup syntax.
        if self.name.endswith("_"):
            return [
                checks.Error(
                    'Field names must not end with an underscore.',
                    obj=self,
                    id='fields.E001',
                )
            ]
        else:
            return []

    def _check_object_id_field(self):
        # The fk_field must exist on the model.
        try:
            self.model._meta.get_field(self.fk_field)
        except FieldDoesNotExist:
            return [
                checks.Error(
                    "The GenericForeignKey object ID references the non-existent field '%s'." % self.fk_field,
                    obj=self,
                    id='contenttypes.E001',
                )
            ]
        else:
            return []

    def _check_content_type_field(self):
        """
        Check if field named `field_name` in model `model` exists and is a
        valid content_type field (is a ForeignKey to ContentType).
        """
        try:
            field = self.model._meta.get_field(self.ct_field)
        except FieldDoesNotExist:
            return [
                checks.Error(
                    "The GenericForeignKey content type references the non-existent field '%s.%s'." % (
                        self.model._meta.object_name, self.ct_field
                    ),
                    obj=self,
                    id='contenttypes.E002',
                )
            ]
        else:
            if not isinstance(field, models.ForeignKey):
                return [
                    checks.Error(
                        "'%s.%s' is not a ForeignKey." % (
                            self.model._meta.object_name, self.ct_field
                        ),
                        hint=(
                            "GenericForeignKeys must use a ForeignKey to "
                            "'contenttypes.ContentType' as the 'content_type' field."
                        ),
                        obj=self,
                        id='contenttypes.E003',
                    )
                ]
            elif field.remote_field.model != ContentType:
                return [
                    checks.Error(
                        "'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'." % (
                            self.model._meta.object_name, self.ct_field
                        ),
                        hint=(
                            "GenericForeignKeys must use a ForeignKey to "
                            "'contenttypes.ContentType' as the 'content_type' field."
                        ),
                        obj=self,
                        id='contenttypes.E004',
                    )
                ]
            else:
                return []

    def get_content_type(self, obj=None, id=None, using=None):
        """Resolve the ContentType either from an instance or from a raw id."""
        if obj is not None:
            return ContentType.objects.db_manager(obj._state.db).get_for_model(
                obj, for_concrete_model=self.for_concrete_model)
        elif id is not None:
            return ContentType.objects.db_manager(using).get_for_id(id)
        else:
            # This should never happen. I love comments like this, don't you?
            raise Exception("Impossible arguments to GFK.get_content_type!")

    def get_prefetch_queryset(self, instances, queryset=None):
        """Return the 5-tuple used by prefetch_related() for this GFK."""
        if queryset is not None:
            raise ValueError("Custom queryset can't be used for this lookup.")

        # For efficiency, group the instances by content type and then do one
        # query per model
        fk_dict = defaultdict(set)
        # We need one instance for each group in order to get the right db:
        instance_dict = {}
        ct_attname = self.model._meta.get_field(self.ct_field).get_attname()
        for instance in instances:
            # We avoid looking for values if either ct_id or fkey value is None
            ct_id = getattr(instance, ct_attname)
            if ct_id is not None:
                fk_val = getattr(instance, self.fk_field)
                if fk_val is not None:
                    fk_dict[ct_id].add(fk_val)
                    instance_dict[ct_id] = instance

        ret_val = []
        for ct_id, fkeys in fk_dict.items():
            instance = instance_dict[ct_id]
            ct = self.get_content_type(id=ct_id, using=instance._state.db)
            ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys))

        # For doing the join in Python, we have to match both the FK val and the
        # content type, so we use a callable that returns a (fk, class) pair.
        def gfk_key(obj):
            ct_id = getattr(obj, ct_attname)
            if ct_id is None:
                return None
            else:
                model = self.get_content_type(id=ct_id,
                                              using=obj._state.db).model_class()
                return (model._meta.pk.get_prep_value(getattr(obj, self.fk_field)),
                        model)

        return (ret_val,
                lambda obj: (obj._get_pk_val(), obj.__class__),
                gfk_key,
                True,
                self.name)

    def is_cached(self, instance):
        # True when the related object has already been resolved and cached.
        return hasattr(instance, self.cache_attr)

    def __get__(self, instance, cls=None):
        """Descriptor read: resolve (and cache) the related object."""
        if instance is None:
            return self

        # Don't use getattr(instance, self.ct_field) here because that might
        # reload the same ContentType over and over (#5570). Instead, get the
        # content type ID here, and later when the actual instance is needed,
        # use ContentType.objects.get_for_id(), which has a global cache.
        f = self.model._meta.get_field(self.ct_field)
        ct_id = getattr(instance, f.get_attname(), None)
        pk_val = getattr(instance, self.fk_field)

        try:
            rel_obj = getattr(instance, self.cache_attr)
        except AttributeError:
            rel_obj = None
        else:
            # Invalidate the cache if the underlying ct/fk values changed
            # since the object was cached.
            if rel_obj and (ct_id != self.get_content_type(obj=rel_obj, using=instance._state.db).id or
                            rel_obj._meta.pk.to_python(pk_val) != rel_obj._get_pk_val()):
                rel_obj = None

        if rel_obj is not None:
            return rel_obj

        if ct_id is not None:
            ct = self.get_content_type(id=ct_id, using=instance._state.db)
            try:
                rel_obj = ct.get_object_for_this_type(pk=pk_val)
            except ObjectDoesNotExist:
                # A dangling reference resolves to None rather than raising.
                pass
        setattr(instance, self.cache_attr, rel_obj)
        return rel_obj

    def __set__(self, instance, value):
        """Descriptor write: store ct/fk values and prime the cache."""
        ct = None
        fk = None
        if value is not None:
            ct = self.get_content_type(obj=value)
            fk = value._get_pk_val()

        setattr(instance, self.ct_field, ct)
        setattr(instance, self.fk_field, fk)
        setattr(instance, self.cache_attr, value)
class GenericRel(ForeignObjectRel):
    """
    Used by GenericRelation to store information about the relation.
    """

    def __init__(self, field, to, related_name=None, related_query_name=None, limit_choices_to=None):
        # Hide the reverse accessor ('+') unless an explicit query name
        # was supplied by the caller.
        reverse_name = related_query_name or '+'
        super(GenericRel, self).__init__(
            field,
            to,
            related_name=reverse_name,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
            on_delete=DO_NOTHING,
        )
class GenericRelation(ForeignObject):
    """
    Provide a reverse to a relation created by a GenericForeignKey.
    """

    # Field flags
    auto_created = False

    many_to_many = False
    many_to_one = False
    one_to_many = True
    one_to_one = False

    rel_class = GenericRel

    def __init__(self, to, object_id_field='object_id', content_type_field='content_type',
                 for_concrete_model=True, related_query_name=None, limit_choices_to=None, **kwargs):
        kwargs['rel'] = self.rel_class(
            self, to,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
        )

        kwargs['blank'] = True
        kwargs['on_delete'] = models.CASCADE
        kwargs['editable'] = False
        kwargs['serialize'] = False

        # This construct is somewhat of an abuse of ForeignObject. This field
        # represents a relation from pk to object_id field. But, this relation
        # isn't direct, the join is generated reverse along foreign key. So,
        # the from_field is object_id field, to_field is pk because of the
        # reverse join.
        super(GenericRelation, self).__init__(
            to, from_fields=[object_id_field], to_fields=[], **kwargs)

        self.object_id_field_name = object_id_field
        self.content_type_field_name = content_type_field
        self.for_concrete_model = for_concrete_model

    def check(self, **kwargs):
        """Run system checks; adds a check for a matching GFK on the target."""
        errors = super(GenericRelation, self).check(**kwargs)
        errors.extend(self._check_generic_foreign_key_existence())
        return errors

    def _is_matching_generic_foreign_key(self, field):
        """
        Return True if field is a GenericForeignKey whose content type and
        object id fields correspond to the equivalent attributes on this
        GenericRelation.
        """
        return (
            isinstance(field, GenericForeignKey) and
            field.ct_field == self.content_type_field_name and
            field.fk_field == self.object_id_field_name
        )

    def _check_generic_foreign_key_existence(self):
        # Only resolved model classes can be inspected; string references
        # (lazy relations) are skipped.
        target = self.remote_field.model
        if isinstance(target, ModelBase):
            fields = target._meta.private_fields
            if any(self._is_matching_generic_foreign_key(field) for field in fields):
                return []
            else:
                return [
                    checks.Error(
                        "The GenericRelation defines a relation with the model "
                        "'%s.%s', but that model does not have a GenericForeignKey." % (
                            target._meta.app_label, target._meta.object_name
                        ),
                        obj=self,
                        id='contenttypes.E004',
                    )
                ]
        else:
            return []

    def resolve_related_fields(self):
        # Join the remote object_id column against this model's primary key.
        self.to_fields = [self.model._meta.pk.name]
        return [(self.remote_field.model._meta.get_field(self.object_id_field_name), self.model._meta.pk)]

    def _get_path_info_with_parent(self):
        """
        Return the path that joins the current model through any parent models.
        The idea is that if you have a GFK defined on a parent model then we
        need to join the parent model first, then the child model.
        """
        # With an inheritance chain ChildTag -> Tag and Tag defines the
        # GenericForeignKey, and a TaggedItem model has a GenericRelation to
        # ChildTag, then we need to generate a join from TaggedItem to Tag
        # (as Tag.object_id == TaggedItem.pk), and another join from Tag to
        # ChildTag (as that is where the relation is to). Do this by first
        # generating a join to the parent model, then generating joins to the
        # child models.
        path = []
        opts = self.remote_field.model._meta
        parent_opts = opts.get_field(self.object_id_field_name).model._meta
        target = parent_opts.pk
        path.append(PathInfo(self.model._meta, parent_opts, (target,), self.remote_field, True, False))
        # Collect joins needed for the parent -> child chain. This is easiest
        # to do if we collect joins for the child -> parent chain and then
        # reverse the direction (call to reverse() and use of
        # field.remote_field.get_path_info()).
        parent_field_chain = []
        while parent_opts != opts:
            field = opts.get_ancestor_link(parent_opts.model)
            parent_field_chain.append(field)
            opts = field.remote_field.model._meta
        parent_field_chain.reverse()
        for field in parent_field_chain:
            path.extend(field.remote_field.get_path_info())
        return path

    def get_path_info(self):
        opts = self.remote_field.model._meta
        object_id_field = opts.get_field(self.object_id_field_name)
        # A GFK inherited from a parent model needs the multi-hop join.
        if object_id_field.model != opts.model:
            return self._get_path_info_with_parent()
        else:
            target = opts.pk
            return [PathInfo(self.model._meta, opts, (target,), self.remote_field, True, False)]

    def get_reverse_path_info(self):
        opts = self.model._meta
        from_opts = self.remote_field.model._meta
        return [PathInfo(from_opts, opts, (opts.pk,), self, not self.unique, False)]

    def value_to_string(self, obj):
        # Serialized form: list of related objects' primary keys.
        qs = getattr(obj, self.name).all()
        return smart_text([instance._get_pk_val() for instance in qs])

    def contribute_to_class(self, cls, name, **kwargs):
        """Register the relation on `cls` and install the reverse descriptor."""
        kwargs['private_only'] = True
        super(GenericRelation, self).contribute_to_class(cls, name, **kwargs)
        self.model = cls
        setattr(cls, self.name, ReverseGenericManyToOneDescriptor(self.remote_field))

        # Add get_RELATED_order() and set_RELATED_order() to the model this
        # field belongs to, if the model on the other end of this relation
        # is ordered with respect to its corresponding GenericForeignKey.
        if not cls._meta.abstract:
            def make_generic_foreign_order_accessors(related_model, model):
                if self._is_matching_generic_foreign_key(model._meta.order_with_respect_to):
                    make_foreign_order_accessors(model, related_model)
            lazy_related_operation(make_generic_foreign_order_accessors, self.model, self.remote_field.model)

    def set_attributes_from_rel(self):
        # Intentionally a no-op: this field keeps its own attributes.
        pass

    def get_internal_type(self):
        return "ManyToManyField"

    def get_content_type(self):
        """
        Return the content type associated with this field's model.
        """
        return ContentType.objects.get_for_model(self.model,
                                                 for_concrete_model=self.for_concrete_model)

    def get_extra_restriction(self, where_class, alias, remote_alias):
        # Add "content_type = <this model's CT>" to the generated JOIN.
        field = self.remote_field.model._meta.get_field(self.content_type_field_name)
        contenttype_pk = self.get_content_type().pk
        cond = where_class()
        lookup = field.get_lookup('exact')(field.get_col(remote_alias), contenttype_pk)
        cond.add(lookup, 'AND')
        return cond

    def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS):
        """
        Return all objects related to ``objs`` via this ``GenericRelation``.
        """
        return self.remote_field.model._base_manager.db_manager(using).filter(**{
            "%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(
                self.model, for_concrete_model=self.for_concrete_model).pk,
            "%s__in" % self.object_id_field_name: [obj.pk for obj in objs]
        })
class ReverseGenericManyToOneDescriptor(ReverseManyToOneDescriptor):
    """
    Accessor to the related objects manager on the one-to-many relation created
    by GenericRelation.

    In the example::

        class Post(Model):
            comments = GenericRelation(Comment)

    ``post.comments`` is a ReverseGenericManyToOneDescriptor instance.
    """

    @cached_property
    def related_manager_cls(self):
        # Build (once per descriptor) a manager subclass that layers the
        # generic-relation filtering on top of the model's default manager.
        base_manager_cls = self.rel.model._default_manager.__class__
        return create_generic_related_manager(base_manager_cls, self.rel)
def create_generic_related_manager(superclass, rel):
    """
    Factory function to create a manager that subclasses another manager
    (generally the default manager of a given model) and adds behaviors
    specific to generic relations.
    """

    class GenericRelatedObjectManager(superclass):
        def __init__(self, instance=None):
            super(GenericRelatedObjectManager, self).__init__()

            self.instance = instance

            self.model = rel.model

            # Resolve the content type on the instance's database so
            # multi-db setups look it up in the right place.
            content_type = ContentType.objects.db_manager(instance._state.db).get_for_model(
                instance, for_concrete_model=rel.field.for_concrete_model)
            self.content_type = content_type
            self.content_type_field_name = rel.field.content_type_field_name
            self.object_id_field_name = rel.field.object_id_field_name
            self.prefetch_cache_name = rel.field.attname
            self.pk_val = instance._get_pk_val()

            # Filters applied to every queryset produced by this manager.
            self.core_filters = {
                '%s__pk' % self.content_type_field_name: content_type.id,
                self.object_id_field_name: self.pk_val,
            }

        def __call__(self, **kwargs):
            # We use **kwargs rather than a kwarg argument to enforce the
            # `manager='manager_name'` syntax.
            manager = getattr(self.model, kwargs.pop('manager'))
            manager_class = create_generic_related_manager(manager.__class__, rel)
            return manager_class(instance=self.instance)
        do_not_call_in_templates = True

        def __str__(self):
            return repr(self)

        def _apply_rel_filters(self, queryset):
            """
            Filter the queryset for the instance this manager is bound to.
            """
            db = self._db or router.db_for_read(self.model, instance=self.instance)
            return queryset.using(db).filter(**self.core_filters)

        def get_queryset(self):
            # Prefer results already fetched by prefetch_related(), if any.
            try:
                return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
            except (AttributeError, KeyError):
                queryset = super(GenericRelatedObjectManager, self).get_queryset()
                return self._apply_rel_filters(queryset)

        def get_prefetch_queryset(self, instances, queryset=None):
            """Return the 5-tuple used by prefetch_related() for this relation."""
            if queryset is None:
                queryset = super(GenericRelatedObjectManager, self).get_queryset()

            queryset._add_hints(instance=instances[0])
            queryset = queryset.using(queryset._db or self._db)

            query = {
                '%s__pk' % self.content_type_field_name: self.content_type.id,
                '%s__in' % self.object_id_field_name: set(obj._get_pk_val() for obj in instances)
            }

            # We (possibly) need to convert object IDs to the type of the
            # instances' PK in order to match up instances:
            object_id_converter = instances[0]._meta.pk.to_python
            return (queryset.filter(**query),
                    lambda relobj: object_id_converter(getattr(relobj, self.object_id_field_name)),
                    lambda obj: obj._get_pk_val(),
                    False,
                    self.prefetch_cache_name)

        def add(self, *objs, **kwargs):
            """Associate `objs` with the bound instance (bulk UPDATE or saves)."""
            bulk = kwargs.pop('bulk', True)
            db = router.db_for_write(self.model, instance=self.instance)

            def check_and_update_obj(obj):
                if not isinstance(obj, self.model):
                    raise TypeError("'%s' instance expected, got %r" % (
                        self.model._meta.object_name, obj
                    ))
                setattr(obj, self.content_type_field_name, self.content_type)
                setattr(obj, self.object_id_field_name, self.pk_val)

            if bulk:
                pks = []
                for obj in objs:
                    # Bulk mode updates rows by pk, so unsaved or cross-db
                    # objects cannot be handled.
                    if obj._state.adding or obj._state.db != db:
                        raise ValueError(
                            "%r instance isn't saved. Use bulk=False or save "
                            "the object first." % obj
                        )
                    check_and_update_obj(obj)
                    pks.append(obj.pk)

                self.model._base_manager.using(db).filter(pk__in=pks).update(**{
                    self.content_type_field_name: self.content_type,
                    self.object_id_field_name: self.pk_val,
                })
            else:
                with transaction.atomic(using=db, savepoint=False):
                    for obj in objs:
                        check_and_update_obj(obj)
                        obj.save()
        add.alters_data = True

        def remove(self, *objs, **kwargs):
            """Delete the given related objects (generic relations own rows)."""
            if not objs:
                return
            bulk = kwargs.pop('bulk', True)
            self._clear(self.filter(pk__in=[o.pk for o in objs]), bulk)
        remove.alters_data = True

        def clear(self, **kwargs):
            """Delete all related objects."""
            bulk = kwargs.pop('bulk', True)
            self._clear(self, bulk)
        clear.alters_data = True

        def _clear(self, queryset, bulk):
            db = router.db_for_write(self.model, instance=self.instance)
            queryset = queryset.using(db)
            if bulk:
                # `QuerySet.delete()` creates its own atomic block which
                # contains the `pre_delete` and `post_delete` signal handlers.
                queryset.delete()
            else:
                with transaction.atomic(using=db, savepoint=False):
                    for obj in queryset:
                        obj.delete()
        _clear.alters_data = True

        def set(self, objs, **kwargs):
            """Replace the related set with `objs`."""
            # Force evaluation of `objs` in case it's a queryset whose value
            # could be affected by `manager.clear()`. Refs #19816.
            objs = tuple(objs)

            bulk = kwargs.pop('bulk', True)
            clear = kwargs.pop('clear', False)

            db = router.db_for_write(self.model, instance=self.instance)
            with transaction.atomic(using=db, savepoint=False):
                if clear:
                    self.clear()
                    self.add(*objs, bulk=bulk)
                else:
                    # Diff against the current set and only add/remove the
                    # difference.
                    old_objs = set(self.using(db).all())
                    new_objs = []
                    for obj in objs:
                        if obj in old_objs:
                            old_objs.remove(obj)
                        else:
                            new_objs.append(obj)

                    self.remove(*old_objs)
                    self.add(*new_objs, bulk=bulk)
        set.alters_data = True

        def create(self, **kwargs):
            """Create a related object pre-bound to this instance."""
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).create(**kwargs)
        create.alters_data = True

        def get_or_create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).get_or_create(**kwargs)
        get_or_create.alters_data = True

        def update_or_create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).update_or_create(**kwargs)
        update_or_create.alters_data = True

    return GenericRelatedObjectManager
| bsd-3-clause |
lfblogs/aiopy | aiopy/required/aiohttp/protocol.py | 1 | 27699 | """Http related parsers and protocol."""
import collections
import functools
import http.server
import itertools
import re
import string
import sys
import zlib
from wsgiref.handlers import format_date_time
from aiopy.required import aiohttp
from aiopy.required.aiohttp import hdrs, errors
from .multidict import CIMultiDict
from .log import internal_logger
# Public API of this module. Several names are defined further down in the
# file, outside this excerpt.
__all__ = ('HttpMessage', 'Request', 'Response',
           'HttpVersion', 'HttpVersion10', 'HttpVersion11',
           'RawRequestMessage', 'RawResponseMessage',
           'HttpPrefixParser', 'HttpRequestParser', 'HttpResponseParser',
           'HttpPayloadParser')
ASCIISET = set(string.printable)
# Token pattern for HTTP method names. NOTE(review): '$-_' inside the class
# is a character *range* (0x24-0x5F), not three literal characters; this
# mirrors the historical upstream pattern -- confirm before tightening.
METHRE = re.compile(r'[A-Z0-9$-_.]+')
# BUG FIX: the version separator dot must be escaped. The previous pattern
# 'HTTP/(\d+).(\d+)' used an unescaped '.', which matched any character and
# accepted invalid version strings such as 'HTTP/1x1'.
VERSRE = re.compile(r'HTTP/(\d+)\.(\d+)')
# Characters that are illegal inside a header field name (RFC 7230 tchar
# complement: controls, separators, DEL).
HDRRE = re.compile('[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]')
# Leading whitespace that marks a folded (continuation) header line.
CONTINUATION = (' ', '\t')
# Unique sentinel objects used by the feed/parse machinery.
EOF_MARKER = object()
EOL_MARKER = object()
STATUS_LINE_READY = object()

# Mapping of status code -> (short reason, long description), reused from
# the standard library.
RESPONSES = http.server.BaseHTTPRequestHandler.responses

HttpVersion = collections.namedtuple(
    'HttpVersion', ['major', 'minor'])
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)

RawStatusLineMessage = collections.namedtuple(
    'RawStatusLineMessage', ['method', 'path', 'version'])

RawRequestMessage = collections.namedtuple(
    'RawRequestMessage',
    ['method', 'path', 'version', 'headers', 'should_close', 'compression'])

RawResponseMessage = collections.namedtuple(
    'RawResponseMessage',
    ['version', 'code', 'reason', 'headers', 'should_close', 'compression'])
class HttpParser:
    """Base class for HTTP parsers holding the shared size limits."""

    def __init__(self, max_line_size=8190, max_headers=32768,
                 max_field_size=8190):
        # max_line_size: maximum length of the start line.
        # max_headers: maximum total size of the header section.
        # max_field_size: maximum length of a single header field value.
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size

    def parse_headers(self, lines):
        """Parses RFC2822 headers from a stream.

        Line continuations are supported. Returns list of header name
        and value pairs. Header name is in upper case.
        """
        close_conn = None
        encoding = None
        headers = CIMultiDict()

        # lines[0] is the start line; header lines begin at index 1.
        lines_idx = 1
        line = lines[1]

        while line:
            header_length = len(line)

            # Parse initial header name : value pair.
            try:
                name, value = line.split(':', 1)
            except ValueError:
                raise errors.InvalidHeader(line) from None

            name = name.strip(' \t').upper()
            if HDRRE.search(name):
                raise errors.InvalidHeader(name)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = line and line[0] in CONTINUATION

            if continuation:
                value = [value]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise errors.LineTooLong(
                            'limit request headers fields size')
                    value.append(line)

                    # next line
                    lines_idx += 1
                    line = lines[lines_idx]
                    continuation = line[0] in CONTINUATION
                # Folded lines are rejoined with CRLF separators.
                value = '\r\n'.join(value)
            else:
                if header_length > self.max_field_size:
                    raise errors.LineTooLong(
                        'limit request headers fields size')

                value = value.strip()

            # keep-alive and encoding
            if name == hdrs.CONNECTION:
                v = value.lower()
                if v == 'close':
                    close_conn = True
                elif v == 'keep-alive':
                    close_conn = False
            elif name == hdrs.CONTENT_ENCODING:
                enc = value.lower()
                if enc in ('gzip', 'deflate'):
                    encoding = enc

            headers.add(name, value)

        return headers, close_conn, encoding
class HttpPrefixParser:
    """Peeks at the beginning of an incoming message and emits the HTTP
    method token without consuming it from the buffer (non destructive)."""
    def __init__(self, allowed_methods=()):
        # Normalise the whitelist once so comparisons below are cheap.
        self.allowed_methods = [m.upper() for m in allowed_methods]
    def __call__(self, out, buf):
        raw = yield from buf.waituntil(b' ', 12)
        token = raw.decode('ascii', 'surrogateescape').strip().upper()
        # Reject anything that does not look like a method token.
        if METHRE.match(token) is None:
            raise errors.BadStatusLine(token)
        # Enforce the optional method whitelist.
        if self.allowed_methods and token not in self.allowed_methods:
            raise errors.HttpMethodNotAllowed(message=token)
        out.feed_data(token, len(token))
        out.feed_eof()
class HttpRequestParser(HttpParser):
    """Read request status line. Exception errors.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """
    def __call__(self, out, buf):
        # read http message (request line + headers)
        try:
            raw_data = yield from buf.readuntil(
                b'\r\n\r\n', self.max_headers)
        except errors.LineLimitExceededParserError as exc:
            raise errors.LineTooLong(exc.limit) from None
        lines = raw_data.decode(
            'utf-8', 'surrogateescape').split('\r\n')
        # request line, e.g. "GET /path HTTP/1.1"
        line = lines[0]
        try:
            method, path, version = line.split(None, 2)
        except ValueError:
            raise errors.BadStatusLine(line) from None
        # method
        method = method.upper()
        if not METHRE.match(method):
            raise errors.BadStatusLine(method)
        # version -- parse "HTTP/x.y" into an HttpVersion tuple.
        # BUGFIX: this used a bare ``except:`` which swallowed *every*
        # exception (including KeyboardInterrupt/SystemExit).  Only
        # ValueError can occur while splitting / int()-converting.
        if not version.startswith('HTTP/'):
            raise errors.BadStatusLine(version)
        try:
            n1, n2 = version[5:].split('.', 1)
            version = HttpVersion(int(n1), int(n2))
        except ValueError:
            raise errors.BadStatusLine(version) from None
        # read headers
        headers, close, compression = self.parse_headers(lines)
        if close is None:  # then the headers weren't set in the request
            if version <= HttpVersion10:  # HTTP 1.0 must asks to not close
                close = True
            else:  # HTTP 1.1 must ask to close.
                close = False
        out.feed_data(
            RawRequestMessage(
                method, path, version, headers, close, compression),
            len(raw_data))
        out.feed_eof()
class HttpResponseParser(HttpParser):
    """Read response status line and headers.
    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage"""
    def __call__(self, out, buf):
        # read http message (response line + headers)
        try:
            raw_data = yield from buf.readuntil(
                b'\r\n\r\n', self.max_line_size + self.max_headers)
        except errors.LineLimitExceededParserError as exc:
            raise errors.LineTooLong(exc.limit) from None
        lines = raw_data.decode(
            'utf-8', 'surrogateescape').split('\r\n')
        # status line, e.g. "HTTP/1.1 200 OK"
        line = lines[0]
        try:
            version, status = line.split(None, 1)
        except ValueError:
            raise errors.BadStatusLine(line) from None
        else:
            try:
                # the reason phrase is optional
                status, reason = status.split(None, 1)
            except ValueError:
                reason = ''
        # version
        match = VERSRE.match(version)
        if match is None:
            raise errors.BadStatusLine(line)
        version = HttpVersion(int(match.group(1)), int(match.group(2)))
        # The status code is a three-digit number
        try:
            status = int(status)
        except ValueError:
            raise errors.BadStatusLine(line) from None
        if status < 100 or status > 999:
            raise errors.BadStatusLine(line)
        # read headers
        headers, close, compression = self.parse_headers(lines)
        if close is None:
            # no explicit Connection header: HTTP/1.0 closes by default
            close = version <= HttpVersion10
        out.feed_data(
            RawResponseMessage(
                version, status, reason.strip(),
                headers, close, compression),
            len(raw_data))
        out.feed_eof()
class HttpPayloadParser:
    """Parses an HTTP message payload (Content-Length delimited, chunked
    transfer encoding, or read-until-EOF) and feeds the decoded bytes into
    an output stream, optionally decompressing gzip/deflate content."""
    def __init__(self, message, length=None, compression=True,
                 readall=False, response_with_body=True):
        # message: a parsed RawRequestMessage / RawResponseMessage.
        # length: fallback payload length when no Content-Length header.
        # compression: transparently decompress gzip/deflate payloads.
        # readall: read until EOF when no explicit length is available.
        # response_with_body: False when no body is expected (e.g. HEAD).
        self.message = message
        self.length = length
        self.compression = compression
        self.readall = readall
        self.response_with_body = response_with_body
    def __call__(self, out, buf):
        # payload params
        length = self.message.headers.get(hdrs.CONTENT_LENGTH, self.length)
        if hdrs.SEC_WEBSOCKET_KEY1 in self.message.headers:
            # old (hixie-76) websocket handshake: body is exactly 8 bytes
            length = 8
        # payload decompression wrapper
        if self.compression and self.message.compression:
            out = DeflateBuffer(out, self.message.compression)
        # payload parser
        if not self.response_with_body:
            # don't parse payload if it's not expected to be received
            pass
        elif 'chunked' in self.message.headers.get(
                hdrs.TRANSFER_ENCODING, ''):
            yield from self.parse_chunked_payload(out, buf)
        elif length is not None:
            try:
                length = int(length)
            except ValueError:
                raise errors.InvalidHeader(hdrs.CONTENT_LENGTH) from None
            if length < 0:
                raise errors.InvalidHeader(hdrs.CONTENT_LENGTH)
            elif length > 0:
                yield from self.parse_length_payload(out, buf, length)
        else:
            # no framing information at all
            if self.readall and getattr(self.message, 'code', 0) != 204:
                yield from self.parse_eof_payload(out, buf)
            elif getattr(self.message, 'method', None) in ('PUT', 'POST'):
                internal_logger.warning(  # pragma: no cover
                    'Content-Length or Transfer-Encoding header is required')
        out.feed_eof()
    def parse_chunked_payload(self, out, buf):
        """Chunked transfer encoding parser."""
        while True:
            # read next chunk size
            line = yield from buf.readuntil(b'\r\n', 8192)
            i = line.find(b';')
            if i >= 0:
                line = line[:i]  # strip chunk-extensions
            else:
                line = line.strip()
            try:
                # chunk size is a hexadecimal number
                size = int(line, 16)
            except ValueError:
                raise errors.TransferEncodingError(line) from None
            if size == 0:  # eof marker
                break
            # read chunk and feed buffer
            while size:
                chunk = yield from buf.readsome(size)
                out.feed_data(chunk, len(chunk))
                size = size - len(chunk)
            # toss the CRLF at the end of the chunk
            yield from buf.skip(2)
        # read and discard trailer up to the CRLF terminator
        yield from buf.skipuntil(b'\r\n')
    def parse_length_payload(self, out, buf, length=0):
        """Read specified amount of bytes."""
        required = length
        while required:
            chunk = yield from buf.readsome(required)
            out.feed_data(chunk, len(chunk))
            required -= len(chunk)
    def parse_eof_payload(self, out, buf):
        """Read all bytes until eof."""
        try:
            while True:
                chunk = yield from buf.readsome()
                out.feed_data(chunk, len(chunk))
        except aiohttp.EofStream:
            # normal termination of an EOF-delimited payload
            pass
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""
    def __init__(self, out, encoding):
        self.out = out
        # Remember the declared encoding so error messages are accurate.
        self._encoding = encoding
        # 16 + MAX_WBITS selects the gzip container; -MAX_WBITS selects a
        # raw deflate stream (no zlib header).
        zlib_mode = (16 + zlib.MAX_WBITS
                     if encoding == 'gzip' else -zlib.MAX_WBITS)
        self.zlib = zlib.decompressobj(wbits=zlib_mode)
    def feed_data(self, chunk, size):
        """Decompress *chunk* and forward the result downstream."""
        try:
            chunk = self.zlib.decompress(chunk)
        except Exception as exc:
            # BUGFIX: chain the underlying zlib error and report the actual
            # encoding in use (previously this always claimed 'deflate',
            # even when decoding a gzip stream).
            raise errors.ContentEncodingError(self._encoding) from exc
        if chunk:
            self.out.feed_data(chunk, len(chunk))
    def feed_eof(self):
        """Flush the decompressor and signal EOF downstream."""
        chunk = self.zlib.flush()
        self.out.feed_data(chunk, len(chunk))
        if not self.zlib.eof:
            # The stream ended before the compressed data was complete.
            raise errors.ContentEncodingError(self._encoding)
        self.out.feed_eof()
def wrap_payload_filter(func):
    """Wraps payload filter and piped filters.
    Filter is a generator that accepts arbitrary chunks of data,
    modify data and emit new stream of data.
    For example we have stream of chunks: ['1', '2', '3', '4', '5'],
    we can apply chunking filter to this stream:
    ['1', '2', '3', '4', '5']
      |
    response.add_chunking_filter(2)
      |
    ['12', '34', '5']
    It is possible to use different filters at the same time.
    For a example to compress incoming stream with 'deflate' encoding
    and then split data and emit chunks of 8192 bytes size chunks:
    >>> response.add_compression_filter('deflate')
    >>> response.add_chunking_filter(8192)
    Filters do not alter transfer encoding.
    Filter can receive types types of data, bytes object or EOF_MARKER.
    1. If filter receives bytes object, it should process data
       and yield processed data then yield EOL_MARKER object.
    2. If Filter received EOF_MARKER, it should yield remaining
       data (buffered) and then yield EOF_MARKER.
    """
    @functools.wraps(func)
    def wrapper(self, *args, **kw):
        new_filter = func(self, *args, **kw)
        filter = self.filter
        if filter is not None:
            # A filter is already installed: prime the new one and chain
            # it after the existing one.
            next(new_filter)
            self.filter = filter_pipe(filter, new_filter)
        else:
            self.filter = new_filter
        # Prime the (possibly piped) filter generator so it is ready to
        # receive data via send().
        next(self.filter)
    return wrapper
def filter_pipe(filter, filter2, *,
                EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
    """Creates pipe between two filters.
    filter_pipe() feeds first filter with incoming data and then
    send yielded from first filter data into filter2, results of
    filter2 are being emitted.
      1. If filter_pipe receives bytes object, it sends it to the first filter.
      2. Reads yielded values from the first filter until it receives
         EOF_MARKER or EOL_MARKER.
      3. Each of this values is being send to second filter.
      4. Reads yielded values from second filter until it receives EOF_MARKER
         or EOL_MARKER. Each of this values yields to writer.
    """
    chunk = yield
    while True:
        eof = chunk is EOF_MARKER
        chunk = filter.send(chunk)
        while chunk is not EOL_MARKER:
            # forward everything the first filter produced into the second
            chunk = filter2.send(chunk)
            while chunk not in (EOF_MARKER, EOL_MARKER):
                yield chunk
                chunk = next(filter2)
            if chunk is not EOF_MARKER:
                if eof:
                    # propagate end-of-stream through the first filter
                    chunk = EOF_MARKER
                else:
                    chunk = next(filter)
            else:
                break
        # wait for the next chunk from the caller
        chunk = yield EOL_MARKER
class HttpMessage:
    """HttpMessage allows to write headers and payload to a stream.
    For example, lets say we want to read file then compress it with deflate
    compression and then send it with chunked transfer encoding, code may look
    like this:
    >>> response = aiohttp.Response(transport, 200)
    We have to use deflate compression first:
    >>> response.add_compression_filter('deflate')
    Then we want to split output stream into chunks of 1024 bytes size:
    >>> response.add_chunking_filter(1024)
    We can add headers to response with add_headers() method. add_headers()
    does not send data to transport, send_headers() sends request/response
    line and then sends headers:
    >>> response.add_headers(
    ...     ('Content-Disposition', 'attachment; filename="..."'))
    >>> response.send_headers()
    Now we can use chunked writer to write stream to a network stream.
    First call to write() method sends response status line and headers,
    add_header() and add_headers() method unavailable at this stage:
    >>> with open('...', 'rb') as f:
    ...     chunk = fp.read(8192)
    ...     while chunk:
    ...         response.write(chunk)
    ...         chunk = fp.read(8192)
    >>> response.write_eof()
    """
    writer = None
    # 'filter' is being used for altering write() behaviour,
    # add_compression_filter adds deflate/gzip compression and
    # add_chunking_filter splits incoming data into chunks
    # (NOTE: the original comment had these two descriptions swapped).
    filter = None
    HOP_HEADERS = None  # Must be set by subclass.
    SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
        sys.version_info, aiohttp.__version__)
    status = None
    status_line = b''
    upgrade = False  # Connection: UPGRADE
    websocket = False  # Upgrade: WEBSOCKET
    has_chunked_hdr = False  # Transfer-encoding: chunked
    # subclass can enable auto sending headers with write() call,
    # this is useful for wsgi's start_response implementation.
    _send_headers = False
    def __init__(self, transport, version, close):
        self.transport = transport
        self.version = version
        self.closing = close
        self.keepalive = None   # tri-state: None means "not decided yet"
        self.chunked = False    # chunked transfer encoding enabled
        self.length = None      # explicit Content-Length, if any
        self.headers = CIMultiDict()
        self.headers_sent = False
        self.output_length = 0  # total bytes written to the transport
        self._output_size = 0   # bytes written since the last drain
    def force_close(self):
        """Mark the connection to be closed after this message."""
        self.closing = True
        self.keepalive = False
    def enable_chunked_encoding(self):
        """Force chunked transfer encoding for the payload."""
        self.chunked = True
    def keep_alive(self):
        """Return True if the connection should be kept open."""
        if self.keepalive is None:
            if self.version < HttpVersion10:
                # keep alive not supported at all
                return False
            if self.version == HttpVersion10:
                if self.headers.get(hdrs.CONNECTION) == 'keep-alive':
                    return True
                else:  # no headers means we close for Http 1.0
                    return False
            else:
                return not self.closing
        else:
            return self.keepalive
    def is_headers_sent(self):
        """Return True once send_headers() has been called."""
        return self.headers_sent
    def add_header(self, name, value):
        """Analyze headers. Calculate content length,
        removes hop headers, etc."""
        assert not self.headers_sent, 'headers have been sent already'
        assert isinstance(name, str), \
            'Header name should be a string, got {!r}'.format(name)
        assert set(name).issubset(ASCIISET), \
            'Header name should contain ASCII chars, got {!r}'.format(name)
        assert isinstance(value, str), \
            'Header {!r} should have string value, got {!r}'.format(
                name, value)
        name = name.strip().upper()
        value = value.strip()
        if name == hdrs.CONTENT_LENGTH:
            self.length = int(value)
        if name == hdrs.TRANSFER_ENCODING:
            self.has_chunked_hdr = value.lower().strip() == 'chunked'
        if name == hdrs.CONNECTION:
            val = value.lower()
            # handle websocket
            if 'upgrade' in val:
                self.upgrade = True
            # connection keep-alive
            elif 'close' in val:
                self.keepalive = False
            elif 'keep-alive' in val:
                self.keepalive = True
        elif name == hdrs.UPGRADE:
            if 'websocket' in value.lower():
                self.websocket = True
            self.headers[name] = value
        elif name not in self.HOP_HEADERS:
            # ignore hop-by-hop headers
            self.headers.add(name, value)
    def add_headers(self, *headers):
        """Adds headers to a http message."""
        for name, value in headers:
            self.add_header(name, value)
    def send_headers(self, _sep=': ', _end='\r\n'):
        """Writes headers to a stream. Constructs payload writer."""
        # Chunked response is only for HTTP/1.1 clients or newer
        # and there is no Content-Length header is set.
        # Do not use chunked responses when the response is guaranteed to
        # not have a response body (304, 204).
        assert not self.headers_sent, 'headers have been sent already'
        self.headers_sent = True
        if self.chunked or (self.length is None and
                            self.version >= HttpVersion11 and
                            self.status not in (304, 204)):
            self.writer = self._write_chunked_payload()
            self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'
        elif self.length is not None:
            self.writer = self._write_length_payload(self.length)
        else:
            self.writer = self._write_eof_payload()
        # prime the writer generator so it is ready to receive data
        next(self.writer)
        self._add_default_headers()
        # status + headers
        headers = ''.join(itertools.chain(
            (self.status_line,),
            *((k, _sep, v, _end) for k, v in self.headers.items())))
        headers = headers.encode('utf-8') + b'\r\n'
        self.output_length += len(headers)
        self.transport.write(headers)
    def _add_default_headers(self):
        # set the connection header
        if self.upgrade:
            connection = 'upgrade'
        elif not self.closing if self.keepalive is None else self.keepalive:
            connection = 'keep-alive'
        else:
            connection = 'close'
        self.headers[hdrs.CONNECTION] = connection
    def write(self, chunk, *,
              drain=False, EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
        """Writes chunk of data to a stream by using different writers.
        writer uses filter to modify chunk of data.
        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        assert (isinstance(chunk, (bytes, bytearray)) or
                chunk is EOF_MARKER), chunk
        size = self.output_length
        if self._send_headers and not self.headers_sent:
            self.send_headers()
        assert self.writer is not None, 'send_headers() is not called.'
        if self.filter:
            # pass the chunk through the installed filter pipeline
            chunk = self.filter.send(chunk)
            while chunk not in (EOF_MARKER, EOL_MARKER):
                self.writer.send(chunk)
                chunk = next(self.filter)
        else:
            if chunk is not EOF_MARKER:
                self.writer.send(chunk)
        self._output_size += self.output_length - size
        if self._output_size > 64 * 1024:
            # flow control: let the caller wait for the transport buffer
            if drain:
                self._output_size = 0
                return self.transport.drain()
        return ()
    def write_eof(self):
        """Finish the payload and flush the transport."""
        self.write(EOF_MARKER)
        try:
            self.writer.throw(aiohttp.EofStream())
        except StopIteration:
            pass
        return self.transport.drain()
    def _write_chunked_payload(self):
        """Write data in chunked transfer encoding."""
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                # final zero-length chunk terminates the body
                self.transport.write(b'0\r\n\r\n')
                self.output_length += 5
                break
            chunk = bytes(chunk)
            chunk_len = '{:x}\r\n'.format(len(chunk)).encode('ascii')
            self.transport.write(chunk_len)
            self.transport.write(chunk)
            self.transport.write(b'\r\n')
            self.output_length += len(chunk_len) + len(chunk) + 2
    def _write_length_payload(self, length):
        """Write specified number of bytes to a stream."""
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                break
            if length:
                l = len(chunk)
                if length >= l:
                    self.transport.write(chunk)
                    self.output_length += l
                    length = length-l
                else:
                    # truncate anything beyond the declared length
                    self.transport.write(chunk[:length])
                    self.output_length += length
                    length = 0
    def _write_eof_payload(self):
        """Write raw bytes; the peer detects the end via connection close."""
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                break
            self.transport.write(chunk)
            self.output_length += len(chunk)
    @wrap_payload_filter
    def add_chunking_filter(self, chunk_size=16*1024, *,
                            EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
        """Split incoming stream into chunks."""
        buf = bytearray()
        chunk = yield
        while True:
            if chunk is EOF_MARKER:
                # flush whatever is buffered, then signal end-of-stream
                if buf:
                    yield buf
                yield EOF_MARKER
            else:
                buf.extend(chunk)
                while len(buf) >= chunk_size:
                    chunk = bytes(buf[:chunk_size])
                    del buf[:chunk_size]
                    yield chunk
                chunk = yield EOL_MARKER
    @wrap_payload_filter
    def add_compression_filter(self, encoding='deflate', *,
                               EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
        """Compress incoming stream with deflate or gzip encoding."""
        # 16 + MAX_WBITS selects the gzip container; -MAX_WBITS raw deflate.
        zlib_mode = (16 + zlib.MAX_WBITS
                     if encoding == 'gzip' else -zlib.MAX_WBITS)
        zcomp = zlib.compressobj(wbits=zlib_mode)
        chunk = yield
        while True:
            if chunk is EOF_MARKER:
                yield zcomp.flush()
                chunk = yield EOF_MARKER
            else:
                yield zcomp.compress(chunk)
                chunk = yield EOL_MARKER
class Response(HttpMessage):
    """Create http response message.
    Transport is a socket stream transport. status is a response status code,
    status has to be integer value. http_version is a tuple that represents
    http version, (1, 0) stands for HTTP/1.0 and (1, 1) is for HTTP/1.1
    """
    HOP_HEADERS = ()
    @staticmethod
    def calc_reason(status):
        """Return the standard reason phrase for *status*, or the status
        number itself (as a string) when the code is unknown."""
        record = RESPONSES.get(status)
        if record is not None:
            reason = record[0]
        else:
            reason = str(status)
        return reason
    def __init__(self, transport, status,
                 http_version=HttpVersion11, close=False, reason=None):
        super().__init__(transport, http_version, close)
        self.status = status
        if reason is None:
            reason = self.calc_reason(status)
        self.reason = reason
        # e.g. "HTTP/1.1 200 OK\r\n"
        self.status_line = 'HTTP/{}.{} {} {}\r\n'.format(
            http_version[0], http_version[1], status, reason)
    def _add_default_headers(self):
        super()._add_default_headers()
        if hdrs.DATE not in self.headers:
            # format_date_time(None) is quite expensive
            self.headers.setdefault(hdrs.DATE, format_date_time(None))
        self.headers.setdefault(hdrs.SERVER, self.SERVER_SOFTWARE)
class Request(HttpMessage):
    """Create http request message (client side)."""
    HOP_HEADERS = ()
    def __init__(self, transport, method, path,
                 http_version=HttpVersion11, close=False):
        # set the default for HTTP 1.0 to be different
        # will only be overwritten with keep-alive header
        if http_version < HttpVersion11:
            close = True
        super().__init__(transport, http_version, close)
        self.method = method
        self.path = path
        # e.g. "GET /index.html HTTP/1.1\r\n"
        self.status_line = '{0} {1} HTTP/{2[0]}.{2[1]}\r\n'.format(
            method, path, http_version)
    def _add_default_headers(self):
        super()._add_default_headers()
        self.headers.setdefault(hdrs.USER_AGENT, self.SERVER_SOFTWARE)
| gpl-3.0 |
catmaid/catpy | tests/test_catmaid_client_application.py | 1 | 1603 | from __future__ import absolute_import
try:
import mock
except ImportError:
from unittest import mock
import pytest
from catpy.client import CatmaidClient
from catpy.applications.base import CatmaidClientApplication
# Fixed values exposed by the mock CatmaidClient used in the fixtures below.
PROJECT_ID = 10
BASE_URL = "http://not-catmaid.org"
@pytest.fixture
def catmaid_mock():
    """Return a mock CatmaidClient exposing project_id and base_url."""
    client = mock.Mock()
    client.project_id = PROJECT_ID
    client.base_url = BASE_URL
    return client
@pytest.fixture
def ConcreteApp():
    """Return a minimal concrete subclass of CatmaidClientApplication."""
    class Subclass(CatmaidClientApplication):
        pass
    return Subclass
def test_property_passthrough(catmaid_mock, ConcreteApp):
    """project_id and base_url should be proxied from the wrapped client."""
    application = ConcreteApp(catmaid_mock)
    assert application.project_id == PROJECT_ID
    assert catmaid_mock.project_id == PROJECT_ID
    assert application.base_url == BASE_URL
    assert catmaid_mock.base_url == BASE_URL
def test_get_post_call_fetch(catmaid_mock, ConcreteApp):
    """get()/post() should delegate to fetch() with the right HTTP method."""
    app = ConcreteApp(catmaid_mock)
    rel_url = "potato"
    app.get(rel_url, params=None, raw=False)
    catmaid_mock.fetch.assert_called_with(rel_url, method="GET", data=None, raw=False)
    app.post(rel_url, data=None, raw=False)
    catmaid_mock.fetch.assert_called_with(rel_url, method="POST", data=None, raw=False)
def test_fetch_passthrough(catmaid_mock, ConcreteApp):
    """fetch() should forward all positional and keyword args unchanged."""
    app = ConcreteApp(catmaid_mock)
    positional = (1, 2)
    keyword = {"a": 1}
    app.fetch(*positional, **keyword)
    catmaid_mock.fetch.assert_called_with(*positional, **keyword)
def test_from_json(ConcreteApp):
    """from_json() should delegate to CatmaidClient.from_json."""
    cred_path = "cred/path.json"
    with mock.patch.object(CatmaidClient, "from_json") as from_json:
        ConcreteApp.from_json(cred_path)
        from_json.assert_called_with(cred_path)
| mit |
devsar/ae-people | apps/stats/views.py | 1 | 1376 | """
Stats views
"""
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils import simplejson
from django.utils.translation import ugettext_lazy as _
from google.appengine.api import taskqueue
from google.appengine.ext.deferred import deferred
from country.models import Country, COUNTRIES_CODE
from users.models import Developer
from country.models import Country
from stats.models import DeveloperStats, TagStats
def update_stats(request):
    """
    Update stats trigger view
    """
    # Recompute the developer statistics snapshot; an empty 200 response
    # signals success to the cron/taskqueue caller.
    DeveloperStats.update()
    return HttpResponse("")
def view_stats(request):
    """Render the AppEngine general statistics page."""
    # Countries with at least one developer, busiest first (capped at 250).
    countries = Country.all().filter("total >", 0).order("-total").fetch(250)
    # Most recent stats snapshot.
    latest_stats = DeveloperStats.all().order("-timestamp").get()
    # Top 20 tags belonging to that snapshot.
    top_tags = (TagStats.all()
                .filter("developer_stats =", latest_stats)
                .order("-total")
                .fetch(20))
    context = {'stats': latest_stats,
               'countries': countries,
               'tags': top_tags}
    return render_to_response("stats/stats.html", context,
                              RequestContext(request))
| apache-2.0 |
BIT-SYS/gem5-spm-module | src/mem/cache/prefetch/Prefetcher.py | 10 | 3558 | # Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ron Dreslinski
from ClockedObject import ClockedObject
from m5.params import *
from m5.proxy import *
class BasePrefetcher(ClockedObject):
    # Abstract base SimObject shared by all hardware prefetcher models;
    # concrete subclasses bind to their C++ implementation below.
    type = 'BasePrefetcher'
    abstract = True
    cxx_header = "mem/cache/prefetch/base.hh"
    size = Param.Int(100,
        "Number of entries in the hardware prefetch queue")
    cross_pages = Param.Bool(False,
        "Allow prefetches to cross virtual page boundaries")
    serial_squash = Param.Bool(False,
        "Squash prefetches with a later time on a subsequent miss")
    degree = Param.Int(1,
        "Degree of the prefetch depth")
    latency = Param.Cycles('1', "Latency of the prefetcher")
    use_master_id = Param.Bool(True,
        "Use the master id to separate calculations of prefetches")
    data_accesses_only = Param.Bool(False,
        "Only prefetch on data not on instruction accesses")
    sys = Param.System(Parent.any, "System this device belongs to")
class GHBPrefetcher(BasePrefetcher):
    # Binds to the GHB prefetcher C++ model.
    type = 'GHBPrefetcher'
    cxx_class = 'GHBPrefetcher'
    cxx_header = "mem/cache/prefetch/ghb.hh"
class StridePrefetcher(BasePrefetcher):
    # Binds to the stride prefetcher C++ model.
    type = 'StridePrefetcher'
    cxx_class = 'StridePrefetcher'
    cxx_header = "mem/cache/prefetch/stride.hh"
class TaggedPrefetcher(BasePrefetcher):
    # Binds to the tagged prefetcher C++ model.
    type = 'TaggedPrefetcher'
    cxx_class = 'TaggedPrefetcher'
    cxx_header = "mem/cache/prefetch/tagged.hh"
| bsd-3-clause |
ryanbauman/pybombs | app_store.py | 6 | 6805 | #!/usr/bin/env python2
#
# Copyright 2013 Tim O'Shea
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from PyQt4.QtCore import Qt;
import PyQt4.QtCore as QtCore;
from PyQt4 import QtCore
import PyQt4.QtGui as QtGui
import sys,time,threading
import os.path
from mod_pybombs import *;
# Ensure the "static" config option exists; default it to "False" if the
# lookup fails (e.g. on first run when no config entry is present yet).
try:
    print pybombs_ops.config_get("static")
except:
    pybombs_ops.config_set("static","False")
# Load every known recipe into the global recipe registry at import time.
recipe_loader.load_all();
class AppList(QtGui.QWidget):
    """Grid of per-package buttons shown inside one tab page."""
    def __init__(self, parent, name):
        super(AppList, self).__init__()
        self.parent = parent;
        self.lay = QtGui.QGridLayout();
        self.setLayout(self.lay);
        self.idx = 0;   # next free cell in the grid
        self.cbd = {};
    def cb(self):
        # Invoke the most recently registered callback.
        self._cb();
    def addButton(self, name, callback, descr):
        """Append a tool button for package *name* that triggers *callback*;
        *descr* (may be None) becomes the tooltip."""
        self._cb = callback;
        # Use the package's own icon when available, a placeholder otherwise.
        pkgimg = "img/" + name + ".png";
        if os.path.exists(pkgimg):
            pixmap = QtGui.QPixmap(pkgimg);
        else:
            defaultimg = "img/unknown.png";
            pixmap = QtGui.QPixmap(defaultimg);
        icon = QtGui.QIcon(pixmap);
        button = QtGui.QToolButton();
        action = QtGui.QAction( icon, str(name), self );
        if descr is not None:
            action.setToolTip(descr)
        action.setStatusTip('Install App')
        button.setDefaultAction(action);
        button.setToolButtonStyle(Qt.ToolButtonTextUnderIcon);
        button.setIconSize(QtCore.QSize(self.parent.iconsize,self.parent.iconsize));
        button.setAutoRaise(True);
        self.connect(action, QtCore.SIGNAL("triggered()"), callback);
        # Fill the grid row by row, parent.width buttons per row.
        self.lay.addWidget(button, self.idx/self.parent.width, self.idx%self.parent.width);
        self.idx = self.idx + 1;
class Task(QtCore.QThread):
    """Background worker that runs a package operation on a QThread while
    a modal progress dialog is shown; subclasses implement run()."""
    def __init__(self, parent, name, message):
        QtCore.QThread.__init__(self, parent=parent)
        self.parent = parent;
        self.name = name;     # package name this task operates on
        self.message = message  # verb shown in the dialog, e.g. "Installing"
    def cb(self):
        # set up dialog...
        self.progress = QtGui.QProgressDialog("%s %s, please wait..."%(self.message,self.name), "Abort", 0, 100, self.parent)
        self.progress.setWindowModality(Qt.WindowModal)
        self.progress.setValue(50);
        self.progress.show()
        self.progress.raise_()
        self.progress.activateWindow()
        # connect signal for when we are finished ...
        QtCore.QObject.connect(self,
            QtCore.SIGNAL("finished()"),
            self.progress.close)
        QtCore.QObject.connect(self,
            QtCore.SIGNAL("finished()"),
            self.parent.refresh)
        # Kick off run() on the worker thread.
        self.start();
class Installer(Task):
    """Background task that installs a package via pybombs install()."""
    def __init__(self, parent, name):
        Task.__init__(self, parent, name, "Installing")
    def run(self):
        # Executed on the worker thread started by Task.cb().
        print "installing "+ self.name;
        install(self.name);
class Remover(Task):
def __init__(self, parent, name):
Task.__init__(self, parent, name, "Removing")
def cb(self):
print "removing "+ self.name;
remove(self.name);
class ASMain(QtGui.QWidget):
    """Main window: menu bar, logo banner and a tab widget holding one
    AppList page per (category, installed/available) combination."""
    __pyqtSignals__ = ("refresh()")
    def __init__(self, app):
        self.app = app
        self.width = 8;      # buttons per grid row in each AppList
        self.iconsize = 72   # icon edge length in pixels
        super(ASMain, self).__init__()
        self.setWindowTitle("Python Build Overlay Managed Bundle System - APP STORE GUI");
        self.layout = QtGui.QVBoxLayout(self);
        self.setLayout(self.layout);
        self.menu = QtGui.QMenuBar(self);
        # Official GNU Radio logo
        # please see http://www.gnuradio.org
        pixmap = QtGui.QPixmap("img/logo.png")
        lbl = QtGui.QLabel(self)
        lbl.setPixmap(pixmap)
        # Center the logo with spacer labels on both sides.
        l2 = QtGui.QHBoxLayout();
        l2.addWidget(QtGui.QLabel(" "));
        l2.addWidget(lbl);
        l2.addWidget(QtGui.QLabel(" "));
        self.tw = QtGui.QTabWidget(self);
        self.scrollArea = QtGui.QScrollArea()
        self.scrollArea.setWidget(self.tw)
        self.scrollArea.setWidgetResizable(True);
        self.layout.setMargin(0);
        self.layout.addWidget(self.menu);
        self.layout.addLayout(l2);
        self.layout.addWidget(self.scrollArea);
        # Populate Apps
        self.populate_tabs();
        # Populate the menu
        exitAction = QtGui.QAction(QtGui.QIcon('exit.png'), '&Exit', self)
        exitAction.triggered.connect(QtGui.qApp.quit)
        fileMenu = self.menu.addMenu('&File');
        fileMenu.addAction(exitAction);
        reloadAction = QtGui.QAction('&Refresh State', self)
        reloadAction.triggered.connect(self.reload_op)
        toolsMenu = self.menu.addMenu('&Tools');
        toolsMenu.addAction(reloadAction);
        self.resize(self.iconsize * self.width * 2, self.iconsize * self.width)
        self.show();
    def reload_op(self):
        # Reload the inventory and recipes from disk, then redraw the tabs.
        inv.loadc();
        recipe_loader.load_all();
        self.refresh();
    def refresh(self):
        # Rebuild all tab pages (also connected to each Task's finished()).
        self.populate_tabs();
    def populate_tabs(self):
        """Rebuild one "Available"/"Installed" AppList page per category
        and add an install/remove button for every known package."""
        self.tw.clear();
        #categories = ["baseline", "common"]
        categories = ["common", "hardware", "application"]
        cbs = {};
        pages = [];
        # Two pages per category: available (even index), installed (odd).
        for c in categories:
            pages.append( "Available %s Apps"%(c) );
            pages.append( "Installed %s Apps"%(c) );
        #pages = ["Available Apps", "Installed Apps"];
        tabw = [];
        for p in pages:
            pp = AppList(self, p);
            tabw.append(pp);
            self.tw.addTab(pp, p);
        catpkg = get_catpkgs()
        for c in categories:
            cbs[c] = {};
            cidx = categories.index(c);
            pkgs = catpkg[c];
            pkgs.sort();
            for p in pkgs:
                # Installed packages get a Remover, others an Installer,
                # and the button goes onto the matching tab page.
                installed = global_recipes[p].satisfy();
                if(installed):
                    cbs[c][p] = Remover(self, p);
                    pcidx = 2*cidx+1;
                else:
                    cbs[c][p] = Installer(self, p);
                    pcidx = 2*cidx;
                tabw[pcidx].addButton(p, cbs[c][p].cb, global_recipes[p].description);
        # Keep the task objects alive while the window exists.
        self.cbs = cbs;
# Application entry point: build the Qt application, show the main window
# and run the event loop until the window is closed.
app = QtGui.QApplication(sys.argv)
mw = ASMain(app);
sys.exit(app.exec_());
| gpl-3.0 |
dc3-plaso/plaso | tests/engine/engine.py | 1 | 4086 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests the engine."""
import unittest
try:
from guppy import hpy
except ImportError:
hpy = None
from dfvfs.helpers import fake_file_system_builder
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.path import path_spec
from dfvfs.resolver import context
from dfvfs.vfs import file_system
from plaso.engine import engine
from tests import test_lib as shared_test_lib
class TestEngine(engine.BaseEngine):
    """Class that defines the processing engine for testing."""
    def __init__(self):
        """Initialize a test engine object."""
        # Build a fake file system seeded with the SOFTWARE and SYSTEM
        # Windows Registry test files.
        file_system_builder = fake_file_system_builder.FakeFileSystemBuilder()
        test_file_path = shared_test_lib.GetTestFilePath([u'SOFTWARE'])
        file_system_builder.AddFileReadData(
            u'/Windows/System32/config/SOFTWARE', test_file_path)
        test_file_path = shared_test_lib.GetTestFilePath([u'SYSTEM'])
        file_system_builder.AddFileReadData(
            u'/Windows/System32/config/SYSTEM', test_file_path)
        super(TestEngine, self).__init__()
        self._file_system = file_system_builder.file_system
        # The fake file system is mounted at its root.
        self._mount_point = path_spec_factory.Factory.NewPathSpec(
            dfvfs_definitions.TYPE_INDICATOR_FAKE, location=u'/')
    def GetSourceFileSystem(self, source_path_spec, resolver_context=None):
        """Retrieves the file system of the source.
        Args:
          source_path_spec (dfvfs.PathSpec): path specifications of the sources
              to process.
          resolver_context (dfvfs.Context): resolver context.
        Returns:
          tuple: containing:
            dfvfs.FileSystem: file system
            path.PathSpec: mount point path specification. The mount point path
                specification refers to either a directory or a volume on a storage
                media device or image. It is needed by the dfVFS file system
                searcher (FileSystemSearcher) to indicate the base location of
                the file system
        """
        self._file_system.Open(self._mount_point)
        return self._file_system, self._mount_point
class BaseEngineTest(shared_test_lib.BaseTestCase):
  """Tests for the engine object."""

  # pylint: disable=protected-access

  @shared_test_lib.skipUnlessHasTestFile([u'ímynd.dd'])
  def testGetSourceFileSystem(self):
    """Tests the GetSourceFileSystem function."""
    test_engine = engine.BaseEngine()

    # Open a TSK file system stored inside the ímynd.dd storage media image.
    source_path = self._GetTestFilePath([u'ímynd.dd'])
    os_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
    source_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
        parent=os_path_spec)

    resolver_context = context.Context()
    test_file_system, test_mount_point = test_engine.GetSourceFileSystem(
        source_path_spec, resolver_context=resolver_context)

    self.assertIsNotNone(test_file_system)
    self.assertIsInstance(test_file_system, file_system.FileSystem)

    self.assertIsNotNone(test_mount_point)
    self.assertIsInstance(test_mount_point, path_spec.PathSpec)

    test_file_system.Close()

    # A missing source path specification is reported as a RuntimeError.
    with self.assertRaises(RuntimeError):
      test_engine.GetSourceFileSystem(None)

  @shared_test_lib.skipUnlessHasTestFile([u'SOFTWARE'])
  @shared_test_lib.skipUnlessHasTestFile([u'SYSTEM'])
  def testPreprocessSources(self):
    """Tests the PreprocessSources function."""
    test_engine = TestEngine()

    source_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_FAKE, location=u'/')

    test_engine.PreprocessSources([source_path_spec])

    self.assertEqual(test_engine.knowledge_base.platform, u'Windows')

    # Preprocessing an invalid source should not raise.
    test_engine.PreprocessSources([None])

  def testSupportsMemoryProfiling(self):
    """Tests the SupportsMemoryProfiling function."""
    test_engine = engine.BaseEngine()

    # Memory profiling is only available when guppy (hpy) is importable.
    expected_result = hpy is not None
    result = test_engine.SupportsMemoryProfiling()
    self.assertEqual(result, expected_result)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
svensaeger/Hatschi | wetterstation2.py | 1 | 1364 | from tkinter import *
import serial
import time
def ausgeben(daten):
    """Display one full-screen Tk window with the current weather readings.

    daten: one line read from the serial port (bytes or str), expected to
    contain five whitespace-separated readings in the order temperature,
    humidity (Luftfeuchtigkeit), pressure (Luftdruck), gas, rain (Regen)
    -- TODO confirm the field order against the Arduino sketch.

    Blocks in Tk's mainloop until the window is closed.
    """
    # Bug fix: the original code split the line into `datenEinzeln` but never
    # used it -- every label always displayed the hard-coded "0".
    if isinstance(daten, bytes):
        daten = daten.decode('ascii', 'replace')
    datenEinzeln = daten.split()
    # Missing fields fall back to "0", the previous constant display value.
    temp, luftfeu, luftdruck, gas, regen = (datenEinzeln + ['0'] * 5)[:5]

    root = Tk()
    root.attributes("-fullscreen", True)
    root.config(bg="light green")
    frameo = Frame(root)
    frameu = Frame(root, bg="light green")
    Label(frameo,
          text="Die Daten der Hatschi Wetterstation:",
          fg="green",
          bg="dark blue",
          font="Times 50").pack()
    Label(frameu,
          text="Temperatur: " + temp,
          fg="green",
          bg="yellow",
          font="Times 50").pack()
    Label(frameu,
          text="Luftfeuchtigkeit: " + luftfeu,
          fg="green",
          bg="red",
          font="Times 50").pack()
    Label(frameu,
          text="Luftdruck: " + luftdruck,
          fg="green",
          bg="light blue",
          font="Times 50").pack()
    Label(frameu,
          text="Gas: " + gas,
          fg="green",
          bg="blue",
          font="Times 50").pack()
    Label(frameu,
          text="Regen: " + regen,
          fg="green",
          bg="yellow",
          font="Times 50").pack()
    frameo.pack()
    frameu.pack(pady=20)
    root.mainloop()
# Open the Arduino's USB serial port.  Bug fixes: the device is ttyACM0
# (digit zero) -- the original had the letter "O", which never exists on
# Linux -- and the original `except serial-seralutil.SerialException:` was a
# SyntaxError (minus sign instead of attribute access).
s = serial.Serial('/dev/ttyACM0', 9600)
try:
    # pyserial opens the port in the constructor when a port name is given,
    # so this open() can fail with "port already open".
    s.open()
except serial.serialutil.SerialException:
    # Reopen cleanly.
    s.close()
    s.open()
# Give the Arduino time to reset after the port toggles DTR.
time.sleep(5)
try:
    while True:
        response = s.readline()
        ausgeben(response)
except KeyboardInterrupt:
    s.close()
| gpl-3.0 |
MrHamdulay/daily-problems | yaseen/problem-3-binary-search.py | 1 | 1162 | class Node:
def __init__(self, value):
self.value = float(value)
self.parent = None
self.children = []
def addChild(self, child):
self.children.append(child)
def __repr__(self):
return '<value: %s children: (%s)>' % (self.value, len(self.children))
@property
def right(self):
return self.children[1] if len(self.children) >= 2 else None
@property
def left(self):
return self.children[0] if len(self.children) >= 1 else None
# Edge list for the sample tree: each line is "<parent-value> <child-value>".
# Bug fix: the variable was called `input`, shadowing the builtin.
# NOTE(review): nodes are keyed by their value string, so duplicate values in
# the input would collide -- acceptable for this fixed sample.
tree_spec = '''
1 0
1 2
0 -10
2 1.5
'''
nodes = {'1': Node('1')}
for line in tree_spec.split('\n'):
    if not line:
        continue
    parent, value = line.split()
    if value in nodes:
        node = nodes[value]
    else:
        node = Node(value)
        nodes[value] = node
    node.parent = nodes[parent]
    nodes[parent].addChild(node)
def isBinarySearchTree(node, left=-1e10, right=1e10):
    """Return whether the subtree rooted at *node* satisfies the BST invariant.

    Every value in the subtree must lie within [left, right]; the bounds
    tighten as the recursion descends.
    """
    if not (left <= node.value <= right):
        return False
    lo = node.left
    hi = node.right
    if lo is not None and not isBinarySearchTree(lo, left, min(node.value, right)):
        return False
    if hi is not None and not isBinarySearchTree(hi, max(node.value, left), right):
        return False
    return True
# Python 2 print statement; the sample tree above is a valid BST, so this
# prints True.
print isBinarySearchTree(nodes['1'])
| mit |
mozvip/CouchPotatoServer | libs/html5lib/treebuilders/simpletree.py | 100 | 7480 | import _base
from html5lib.constants import voidElements, namespaces, prefixes
from xml.sax.saxutils import escape
# Really crappy basic implementation of a DOM-core like thing
class Node(_base.Node):
    """Base class for all simpletree node types (a minimal DOM-core clone)."""

    # Numeric node-type tag; each concrete subclass overrides this.
    type = -1

    def __init__(self, name):
        self.name = name
        self.parent = None
        self.value = None
        self.childNodes = []
        self._flags = []

    def __iter__(self):
        # Depth-first, pre-order traversal over all descendants.
        for node in self.childNodes:
            yield node
            for item in node:
                yield item

    def __unicode__(self):
        return self.name

    def toxml(self):
        # Serialization is subclass-specific.
        raise NotImplementedError

    def printTree(self, indent=0):
        # Indented one-line-per-node debug dump (used by the test serializer).
        tree = '\n|%s%s' % (' '* indent, unicode(self))
        for child in self.childNodes:
            tree += child.printTree(indent + 2)
        return tree

    def appendChild(self, node):
        """Append *node* as the last child, merging adjacent text nodes."""
        assert isinstance(node, Node)
        if (isinstance(node, TextNode) and self.childNodes and
            isinstance(self.childNodes[-1], TextNode)):
            self.childNodes[-1].value += node.value
        else:
            self.childNodes.append(node)
        node.parent = self

    def insertText(self, data, insertBefore=None):
        """Insert character data, wrapped in a TextNode."""
        assert isinstance(data, unicode), "data %s is of type %s expected unicode"%(repr(data), type(data))
        if insertBefore is None:
            self.appendChild(TextNode(data))
        else:
            self.insertBefore(TextNode(data), insertBefore)

    def insertBefore(self, node, refNode):
        """Insert *node* before *refNode*, merging adjacent text nodes."""
        index = self.childNodes.index(refNode)
        if (isinstance(node, TextNode) and index > 0 and
            isinstance(self.childNodes[index - 1], TextNode)):
            self.childNodes[index - 1].value += node.value
        else:
            self.childNodes.insert(index, node)
        node.parent = self

    def removeChild(self, node):
        """Detach *node* from this node's child list."""
        try:
            self.childNodes.remove(node)
        except:
            # XXX
            raise
        node.parent = None

    def cloneNode(self):
        raise NotImplementedError

    def hasContent(self):
        """Return true if the node has children or text"""
        return bool(self.childNodes)

    def getNameTuple(self):
        # (namespace, name); nodes without a namespace default to HTML.
        if self.namespace == None:
            return namespaces["html"], self.name
        else:
            return self.namespace, self.name

    nameTuple = property(getNameTuple)
class Document(Node):
    """Root node of a complete parsed document."""
    type = 1

    def __init__(self):
        Node.__init__(self, None)

    def __str__(self):
        return "#document"

    def __unicode__(self):
        return str(self)

    def appendChild(self, child):
        Node.appendChild(self, child)

    def toxml(self, encoding="utf-8"):
        """Serialize the whole tree; returns an encoded byte string."""
        # Bug fix: the default codec name was "utf=8"; it only worked because
        # Python's codec lookup normalizes '=' to '_'.  Spell it correctly.
        result = ""
        for child in self.childNodes:
            result += child.toxml()
        return result.encode(encoding)

    def hilite(self, encoding="utf-8"):
        """Serialize as syntax-highlighted HTML wrapped in a <pre> element."""
        result = "<pre>"
        for child in self.childNodes:
            result += child.hilite()
        return result.encode(encoding) + "</pre>"

    def printTree(self):
        # Debug dump of the whole tree, starting at this document node.
        tree = unicode(self)
        for child in self.childNodes:
            tree += child.printTree(2)
        return tree

    def cloneNode(self):
        return Document()
class DocumentFragment(Document):
    """Container for a parsed fragment that has no single document root."""
    type = 2

    def __str__(self):
        return "#document-fragment"

    def __unicode__(self):
        return str(self)

    def cloneNode(self):
        return DocumentFragment()
class DocumentType(Node):
    """A <!DOCTYPE ...> node storing the doctype name and public/system ids."""
    type = 3

    def __init__(self, name, publicId, systemId):
        Node.__init__(self, name)
        self.publicId = publicId
        self.systemId = systemId

    def __unicode__(self):
        if self.publicId or self.systemId:
            publicId = self.publicId or ""
            systemId = self.systemId or ""
            return """<!DOCTYPE %s "%s" "%s">"""%(
                self.name, publicId, systemId)
        else:
            return u"<!DOCTYPE %s>" % self.name

    # Serialization uses the same textual form as the debug view.
    toxml = __unicode__

    def hilite(self):
        # Bug fix: hilite() output is HTML that *displays* the markup, so the
        # angle brackets must be entity-encoded (they had degraded to raw <, >).
        return '<code class="markup doctype">&lt;!DOCTYPE %s&gt;</code>' % self.name

    def cloneNode(self):
        return DocumentType(self.name, self.publicId, self.systemId)
class TextNode(Node):
    """A run of character data; *value* holds the text."""
    type = 4

    def __init__(self, value):
        Node.__init__(self, None)
        self.value = value

    def __unicode__(self):
        return u"\"%s\"" % self.value

    def toxml(self):
        # XML-escape the character data on serialization.
        return escape(self.value)

    # The highlighted form is identical to the serialized form.
    hilite = toxml

    def cloneNode(self):
        return TextNode(self.value)
class Element(Node):
    """An element node: tag name, optional namespace, attributes, children."""
    type = 5

    def __init__(self, name, namespace=None):
        Node.__init__(self, name)
        self.namespace = namespace
        self.attributes = {}

    def __unicode__(self):
        if self.namespace == None:
            return u"<%s>" % self.name
        else:
            return u"<%s %s>"%(prefixes[self.namespace], self.name)

    def toxml(self):
        """Serialize this element and its subtree to markup."""
        result = '<' + self.name
        if self.attributes:
            for name, value in self.attributes.iteritems():
                # Bug fix: double quotes inside a double-quoted attribute value
                # must become &quot; -- the escape map had degraded to the
                # identity replacement {'"': '"'}.
                result += u' %s="%s"' % (name, escape(value, {'"': '&quot;'}))
        if self.childNodes:
            result += '>'
            for child in self.childNodes:
                result += child.toxml()
            result += u'</%s>' % self.name
        else:
            result += u'/>'
        return result

    def hilite(self):
        """Serialize as HTML that *displays* the markup with span styling."""
        # Bug fix: the literal angle brackets of the displayed markup must be
        # entity-encoded (they had degraded to raw < and >).
        result = '&lt;<code class="markup element-name">%s</code>' % self.name
        if self.attributes:
            for name, value in self.attributes.iteritems():
                result += ' <code class="markup attribute-name">%s</code>=<code class="markup attribute-value">"%s"</code>' % (name, escape(value, {'"': '&quot;'}))
        if self.childNodes:
            result += "&gt;"
            for child in self.childNodes:
                result += child.hilite()
        elif self.name in voidElements:
            # Void elements have no closing tag.
            return result + "&gt;"
        return result + '&lt;/<code class="markup element-name">%s</code>&gt;' % self.name

    def printTree(self, indent):
        # Debug dump: the element line followed by its attributes, sorted for
        # deterministic output, then the children.
        tree = '\n|%s%s' % (' '*indent, unicode(self))
        indent += 2
        if self.attributes:
            for name, value in sorted(self.attributes.iteritems()):
                if isinstance(name, tuple):
                    name = "%s %s"%(name[0], name[1])
                tree += '\n|%s%s="%s"' % (' ' * indent, name, value)
        for child in self.childNodes:
            tree += child.printTree(indent)
        return tree

    def cloneNode(self):
        """Shallow clone: copies name, namespace and attributes, not children."""
        newNode = Element(self.name)
        if hasattr(self, 'namespace'):
            newNode.namespace = self.namespace
        for attr, value in self.attributes.iteritems():
            newNode.attributes[attr] = value
        return newNode
class CommentNode(Node):
    """A comment node; *data* holds the comment text."""
    type = 6

    def __init__(self, data):
        Node.__init__(self, None)
        self.data = data

    def __unicode__(self):
        return "<!-- %s -->" % self.data

    def toxml(self):
        return "<!--%s-->" % self.data

    def hilite(self):
        # Bug fix: hilite() output is HTML that *displays* the markup, so the
        # comment delimiters must be entity-encoded (they had degraded to raw
        # <!-- -->).
        return '<code class="markup comment">&lt;!--%s--&gt;</code>' % escape(self.data)

    def cloneNode(self):
        return CommentNode(self.data)
class TreeBuilder(_base.TreeBuilder):
    """Concrete tree builder wiring the simpletree node classes together."""
    documentClass = Document
    doctypeClass = DocumentType
    elementClass = Element
    commentClass = CommentNode
    fragmentClass = DocumentFragment

    def testSerializer(self, node):
        # The html5lib test suite compares trees via their indented debug dump.
        return node.printTree()
| gpl-3.0 |
rogerscristo/BotFWD | env/lib/python3.6/site-packages/future/backports/urllib/parse.py | 69 | 35794 | """
Ported using Python-Future from the Python 3.3 standard library.
Parse (absolute and relative) URLs.
urlparse module is based upon the following RFC specifications.
RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
and L. Masinter, January 2005.
RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter
and L.Masinter, December 1999.
RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T.
Berners-Lee, R. Fielding, and L. Masinter, August 1998.
RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zawinski, July 1998.
RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
1995.
RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
McCahill, December 1994
RFC 3986 is considered the current standard and any future changes to
urlparse module should conform with it. The urlparse module is
currently not entirely compliant with this RFC due to defacto
scenarios for parsing, and for backward compatibility purposes, some
parsing quirks from older RFCs are retained. The testcases in
test_urlparse.py provides a good indicator of parsing behavior.
"""
from __future__ import absolute_import, division, unicode_literals
from future.builtins import bytes, chr, dict, int, range, str
from future.utils import raise_with_traceback
import re
import sys
import collections
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
"urlsplit", "urlunsplit", "urlencode", "parse_qs",
"parse_qsl", "quote", "quote_plus", "quote_from_bytes",
"unquote", "unquote_plus", "unquote_to_bytes"]
# A classification of schemes ('' means apply by default)
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
'wais', 'file', 'https', 'shttp', 'mms',
'prospero', 'rtsp', 'rtspu', '', 'sftp',
'svn', 'svn+ssh']
uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
'imap', 'wais', 'file', 'mms', 'https', 'shttp',
'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh']
uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
'mms', '', 'sftp', 'tel']
# These are not actually used anymore, but should stay for backwards
# compatibility. (They are undocumented, but have a public-looking name.)
non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
'nntp', 'wais', 'https', 'shttp', 'snews',
'file', 'prospero', '']
# Characters valid in scheme names
scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789'
'+-.')
# XXX: Consider replacing with functools.lru_cache
MAX_CACHE_SIZE = 20
_parse_cache = {}
def clear_cache():
    """Clear the parse cache and the quoters cache."""
    # _parse_cache memoizes urlsplit() results; _safe_quoters (defined further
    # down in this module) caches Quoter instances keyed by their safe set.
    _parse_cache.clear()
    _safe_quoters.clear()
# Helpers for bytes handling
# For 3.2, we deliberately require applications that
# handle improperly quoted URLs to do their own
# decoding and encoding. If valid use cases are
# presented, we may relax this by using latin-1
# decoding internally for 3.3
_implicit_encoding = 'ascii'
_implicit_errors = 'strict'
def _noop(obj):
return obj
def _encode_result(obj, encoding=_implicit_encoding,
                   errors=_implicit_errors):
    # Coerce a str result back to bytes, for callers that passed bytes in.
    return obj.encode(encoding, errors)
def _decode_args(args, encoding=_implicit_encoding,
                 errors=_implicit_errors):
    # Decode each bytes argument to str; empty/None components become ''.
    return tuple(x.decode(encoding, errors) if x else '' for x in args)
def _coerce_args(*args):
    """Coerce *args* to str and return them plus a result-coercion callable.

    Mixing str and bytes arguments raises TypeError; all-bytes input is
    decoded and the returned coercion re-encodes the final result.
    """
    # Invokes decode if necessary to create str args
    # and returns the coerced inputs along with
    # an appropriate result coercion function
    #   - noop for str inputs
    #   - encoding function otherwise
    str_input = isinstance(args[0], str)
    for arg in args[1:]:
        # We special-case the empty string to support the
        # "scheme=''" default argument to some functions
        if arg and isinstance(arg, str) != str_input:
            raise TypeError("Cannot mix str and non-str arguments")
    if str_input:
        return args + (_noop,)
    return _decode_args(args) + (_encode_result,)
# Result objects are more helpful than simple tuples
class _ResultMixinStr(object):
    """Standard approach to encoding parsed results from str to bytes"""
    __slots__ = ()

    def encode(self, encoding='ascii', errors='strict'):
        # Encode every component and rebuild the parallel bytes result type
        # (the counterpart link is wired up by _fix_result_transcoding).
        return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self))
class _ResultMixinBytes(object):
    """Standard approach to decoding parsed results from bytes to str"""
    __slots__ = ()

    def decode(self, encoding='ascii', errors='strict'):
        # Decode every component and rebuild the parallel str result type.
        return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self))
class _NetlocResultMixinBase(object):
    """Shared methods for the parsed result objects containing a netloc element"""
    __slots__ = ()

    @property
    def username(self):
        return self._userinfo[0]

    @property
    def password(self):
        return self._userinfo[1]

    @property
    def hostname(self):
        hostname = self._hostinfo[0]
        if not hostname:
            # Empty host is reported as None, not ''.
            hostname = None
        elif hostname is not None:
            # Host names are case-insensitive; normalize to lowercase.
            hostname = hostname.lower()
        return hostname

    @property
    def port(self):
        port = self._hostinfo[1]
        if port is not None:
            port = int(port, 10)
            # Return None on an illegal port
            if not ( 0 <= port <= 65535):
                return None
        return port
class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr):
    """str flavour of the netloc helpers: splits userinfo and host/port."""
    __slots__ = ()

    @property
    def _userinfo(self):
        # netloc grammar: [user[:password]@]host[:port]
        netloc = self.netloc
        userinfo, have_info, hostinfo = netloc.rpartition('@')
        if have_info:
            username, have_password, password = userinfo.partition(':')
            if not have_password:
                password = None
        else:
            username = password = None
        return username, password

    @property
    def _hostinfo(self):
        netloc = self.netloc
        _, _, hostinfo = netloc.rpartition('@')
        # IPv6 literals are bracketed, e.g. [::1]:8080 -- the port separator
        # must be looked for after the closing bracket.
        _, have_open_br, bracketed = hostinfo.partition('[')
        if have_open_br:
            hostname, _, port = bracketed.partition(']')
            _, have_port, port = port.partition(':')
        else:
            hostname, have_port, port = hostinfo.partition(':')
        if not have_port:
            port = None
        return hostname, port
class _NetlocResultMixinBytes(_NetlocResultMixinBase, _ResultMixinBytes):
    """bytes flavour of the netloc helpers; mirrors _NetlocResultMixinStr."""
    __slots__ = ()

    @property
    def _userinfo(self):
        netloc = self.netloc
        userinfo, have_info, hostinfo = netloc.rpartition(b'@')
        if have_info:
            username, have_password, password = userinfo.partition(b':')
            if not have_password:
                password = None
        else:
            username = password = None
        return username, password

    @property
    def _hostinfo(self):
        netloc = self.netloc
        _, _, hostinfo = netloc.rpartition(b'@')
        # Bracketed IPv6 literal: the port separator follows the ']'.
        _, have_open_br, bracketed = hostinfo.partition(b'[')
        if have_open_br:
            hostname, _, port = bracketed.partition(b']')
            _, have_port, port = port.partition(b':')
        else:
            hostname, have_port, port = hostinfo.partition(b':')
        if not have_port:
            port = None
        return hostname, port
from collections import namedtuple
_DefragResultBase = namedtuple('DefragResult', 'url fragment')
_SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment')
_ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment')
# For backwards compatibility, alias _NetlocResultMixinStr
# ResultBase is no longer part of the documented API, but it is
# retained since deprecating it isn't worth the hassle
ResultBase = _NetlocResultMixinStr
# Structured result objects for string data
class DefragResult(_DefragResultBase, _ResultMixinStr):
    """str result of urldefrag(): (url, fragment)."""
    __slots__ = ()

    def geturl(self):
        # Re-attach the fragment only when one was present.
        if self.fragment:
            return self.url + '#' + self.fragment
        else:
            return self.url
class SplitResult(_SplitResultBase, _NetlocResultMixinStr):
    """str result of urlsplit(): (scheme, netloc, path, query, fragment)."""
    __slots__ = ()

    def geturl(self):
        return urlunsplit(self)
class ParseResult(_ParseResultBase, _NetlocResultMixinStr):
    """str result of urlparse(): adds the params component to SplitResult."""
    __slots__ = ()

    def geturl(self):
        return urlunparse(self)
# Structured result objects for bytes data
class DefragResultBytes(_DefragResultBase, _ResultMixinBytes):
    """bytes result of urldefrag(): (url, fragment)."""
    __slots__ = ()

    def geturl(self):
        # Re-attach the fragment only when one was present.
        if self.fragment:
            return self.url + b'#' + self.fragment
        else:
            return self.url
class SplitResultBytes(_SplitResultBase, _NetlocResultMixinBytes):
    """bytes result of urlsplit()."""
    __slots__ = ()

    def geturl(self):
        return urlunsplit(self)
class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes):
    """bytes result of urlparse()."""
    __slots__ = ()

    def geturl(self):
        return urlunparse(self)
# Set up the encode/decode result pairs
def _fix_result_transcoding():
    """Cross-link each str result class with its bytes counterpart.

    This lets _ResultMixinStr.encode() / _ResultMixinBytes.decode() rebuild
    the matching result type.
    """
    _result_pairs = (
        (DefragResult, DefragResultBytes),
        (SplitResult, SplitResultBytes),
        (ParseResult, ParseResultBytes),
    )
    for _decoded, _encoded in _result_pairs:
        _decoded._encoded_counterpart = _encoded
        _encoded._decoded_counterpart = _decoded
def urlparse(url, scheme='', allow_fragments=True):
    """Parse a URL into 6 components:
    <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    url, scheme, _coerce_result = _coerce_args(url, scheme)
    splitresult = urlsplit(url, scheme, allow_fragments)
    scheme, netloc, url, query, fragment = splitresult
    # Only split out ;params for schemes that actually use them.
    if scheme in uses_params and ';' in url:
        url, params = _splitparams(url)
    else:
        params = ''
    result = ParseResult(scheme, netloc, url, params, query, fragment)
    return _coerce_result(result)
def _splitparams(url):
if '/' in url:
i = url.find(';', url.rfind('/'))
if i < 0:
return url, ''
else:
i = url.find(';')
return url[:i], url[i+1:]
def _splitnetloc(url, start=0):
delim = len(url) # position of end of domain part of url, default is end
for c in '/?#': # look for delimiters; the order is NOT important
wdelim = url.find(c, start) # find first of this delim
if wdelim >= 0: # if found
delim = min(delim, wdelim) # use earliest delim position
return url[start:delim], url[delim:] # return (domain, rest)
def urlsplit(url, scheme='', allow_fragments=True):
    """Parse a URL into 5 components:
    <scheme>://<netloc>/<path>?<query>#<fragment>
    Return a 5-tuple: (scheme, netloc, path, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    url, scheme, _coerce_result = _coerce_args(url, scheme)
    allow_fragments = bool(allow_fragments)
    # Results are memoized; the key includes the argument types because str
    # and bytes inputs produce different result objects.
    key = url, scheme, allow_fragments, type(url), type(scheme)
    cached = _parse_cache.get(key, None)
    if cached:
        return _coerce_result(cached)
    if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
        clear_cache()
    netloc = query = fragment = ''
    i = url.find(':')
    if i > 0:
        # Fast path for 'http' that skips the scheme-character validation
        # below; the general path duplicates this logic.
        if url[:i] == 'http': # optimize the common case
            scheme = url[:i].lower()
            url = url[i+1:]
            if url[:2] == '//':
                netloc, url = _splitnetloc(url, 2)
                # A bracket without its partner is a malformed IPv6 literal.
                if (('[' in netloc and ']' not in netloc) or
                        (']' in netloc and '[' not in netloc)):
                    raise ValueError("Invalid IPv6 URL")
            if allow_fragments and '#' in url:
                url, fragment = url.split('#', 1)
            if '?' in url:
                url, query = url.split('?', 1)
            v = SplitResult(scheme, netloc, url, query, fragment)
            _parse_cache[key] = v
            return _coerce_result(v)
        for c in url[:i]:
            if c not in scheme_chars:
                break
        else:
            # make sure "url" is not actually a port number (in which case
            # "scheme" is really part of the path)
            rest = url[i+1:]
            if not rest or any(c not in '0123456789' for c in rest):
                # not a port number
                scheme, url = url[:i].lower(), rest

    if url[:2] == '//':
        netloc, url = _splitnetloc(url, 2)
        if (('[' in netloc and ']' not in netloc) or
                (']' in netloc and '[' not in netloc)):
            raise ValueError("Invalid IPv6 URL")
    if allow_fragments and '#' in url:
        url, fragment = url.split('#', 1)
    if '?' in url:
        url, query = url.split('?', 1)
    v = SplitResult(scheme, netloc, url, query, fragment)
    _parse_cache[key] = v
    return _coerce_result(v)
def urlunparse(components):
    """Put a parsed URL back together again.  This may result in a
    slightly different, but equivalent URL, if the URL that was parsed
    originally had redundant delimiters, e.g. a ? with an empty query
    (the draft states that these are equivalent)."""
    scheme, netloc, url, params, query, fragment, _coerce_result = (
        _coerce_args(*components))
    # Re-attach ;params to the path, then delegate to urlunsplit().
    if params:
        url = "%s;%s" % (url, params)
    return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment)))
def urlunsplit(components):
    """Combine the elements of a tuple as returned by urlsplit() into a
    complete URL as a string. The data argument can be any five-item iterable.
    This may result in a slightly different, but equivalent URL, if the URL that
    was parsed originally had unnecessary delimiters (for example, a ? with an
    empty query; the RFC states that these are equivalent)."""
    scheme, netloc, url, query, fragment, _coerce_result = (
        _coerce_args(*components))
    if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'):
        # A path that does not start with '/' must be separated from the
        # authority component by one.
        if url and url[:1] != '/': url = '/' + url
        url = '//' + (netloc or '') + url
    if scheme:
        url = scheme + ':' + url
    if query:
        url = url + '?' + query
    if fragment:
        url = url + '#' + fragment
    return _coerce_result(url)
def urljoin(base, url, allow_fragments=True):
    """Join a base URL and a possibly relative URL to form an absolute
    interpretation of the latter."""
    if not base:
        return url
    if not url:
        return base
    url, _coerce_result = _coerce_args(base, url)[1:] if False else (url, None)  # placeholder
def urldefrag(url):
    """Removes any existing fragment from URL.

    Returns a tuple of the defragmented URL and the fragment.  If
    the URL contained no fragments, the second element is the
    empty string.
    """
    url, _coerce_result = _coerce_args(url)
    if '#' in url:
        # Round-trip through urlparse/urlunparse with an empty fragment.
        s, n, p, a, q, frag = urlparse(url)
        defrag = urlunparse((s, n, p, a, q, ''))
    else:
        frag = ''
        defrag = url
    return _coerce_result(DefragResult(defrag, frag))
_hexdig = '0123456789ABCDEFabcdef'
_hextobyte = dict(((a + b).encode(), bytes([int(a + b, 16)]))
for a in _hexdig for b in _hexdig)
def unquote_to_bytes(string):
    """unquote_to_bytes('abc%20def') -> b'abc def'."""
    # Note: strings are encoded as UTF-8. This is only an issue if it contains
    # unescaped non-ASCII characters, which URIs should not.
    if not string:
        # Is it a string-like object?  (Raises AttributeError early for
        # non-string input such as None.)
        string.split
        return bytes(b'')
    if isinstance(string, str):
        string = string.encode('utf-8')
    ### For Python-Future:
    # It is already a byte-string object, but force it to be newbytes here on
    # Py2:
    string = bytes(string)
    ###
    bits = string.split(b'%')
    if len(bits) == 1:
        # No percent escapes at all; return unchanged.
        return string
    res = [bits[0]]
    append = res.append
    for item in bits[1:]:
        try:
            # The first two bytes after each '%' are a hex escape; anything
            # that is not a valid escape is passed through literally.
            append(_hextobyte[item[:2]])
            append(item[2:])
        except KeyError:
            append(b'%')
            append(item)
    return bytes(b'').join(res)
_asciire = re.compile('([\x00-\x7f]+)')
def unquote(string, encoding='utf-8', errors='replace'):
    """Replace %xx escapes by their single-character equivalent. The optional
    encoding and errors parameters specify how to decode percent-encoded
    sequences into Unicode characters, as accepted by the bytes.decode()
    method.
    By default, percent-encoded sequences are decoded with UTF-8, and invalid
    sequences are replaced by a placeholder character.

    unquote('abc%20def') -> 'abc def'.
    """
    if '%' not in string:
        # Raises AttributeError early for non-string input such as None.
        string.split
        return string
    if encoding is None:
        encoding = 'utf-8'
    if errors is None:
        errors = 'replace'
    # Split into ASCII runs so that only the escaped parts are decoded; the
    # non-ASCII text between them is appended untouched.
    bits = _asciire.split(string)
    res = [bits[0]]
    append = res.append
    for i in range(1, len(bits), 2):
        append(unquote_to_bytes(bits[i]).decode(encoding, errors))
        append(bits[i + 1])
    return ''.join(res)
def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
             encoding='utf-8', errors='replace'):
    """Parse a query given as a string argument.

    Arguments:

    qs: percent-encoded query string to be parsed

    keep_blank_values: flag indicating whether blank values in
        percent-encoded queries should be treated as blank strings;
        a true value keeps them as blank strings, the default false
        value drops them entirely.

    strict_parsing: if false (the default) parsing errors are silently
        ignored; if true they raise a ValueError.

    encoding and errors: specify how to decode percent-encoded sequences
        into Unicode characters, as accepted by the bytes.decode() method.

    Returns a dict mapping each name to the list of its values.
    """
    parsed_result = {}
    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing,
                                 encoding=encoding, errors=errors):
        parsed_result.setdefault(name, []).append(value)
    return parsed_result
def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
              encoding='utf-8', errors='replace'):
    """Parse a query given as a string argument.

    Arguments:

    qs: percent-encoded query string to be parsed

    keep_blank_values: flag indicating whether blank values in
        percent-encoded queries should be treated as blank strings.  A
        true value indicates that blanks should be retained as blank
        strings.  The default false value indicates that blank values
        are to be ignored and treated as if they were  not included.

    strict_parsing: flag indicating what to do with parsing errors. If
        false (the default), errors are silently ignored. If true,
        errors raise a ValueError exception.

    encoding and errors: specify how to decode percent-encoded sequences
        into Unicode characters, as accepted by the bytes.decode() method.

    Returns a list of (name, value) pairs.
    """
    qs, _coerce_result = _coerce_args(qs)
    # Both '&' and ';' are accepted as pair separators.
    pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
    r = []
    for name_value in pairs:
        if not name_value and not strict_parsing:
            continue
        nv = name_value.split('=', 1)
        if len(nv) != 2:
            if strict_parsing:
                raise ValueError("bad query field: %r" % (name_value,))
            # Handle case of a control-name with no equal sign
            if keep_blank_values:
                nv.append('')
            else:
                continue
        if len(nv[1]) or keep_blank_values:
            # '+' encodes a space in form data; decode it before unquoting.
            name = nv[0].replace('+', ' ')
            name = unquote(name, encoding=encoding, errors=errors)
            name = _coerce_result(name)
            value = nv[1].replace('+', ' ')
            value = unquote(value, encoding=encoding, errors=errors)
            value = _coerce_result(value)
            r.append((name, value))
    return r
def unquote_plus(string, encoding='utf-8', errors='replace'):
    """Like unquote(), but also replace plus signs by spaces, as required for
    unquoting HTML form values.

    unquote_plus('%7e/abc+def') -> '~/abc def'
    """
    return unquote(string.replace('+', ' '), encoding, errors)
_ALWAYS_SAFE = frozenset(bytes(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
b'abcdefghijklmnopqrstuvwxyz'
b'0123456789'
b'_.-'))
_ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE)
_safe_quoters = {}
class Quoter(collections.defaultdict):
    """A mapping from bytes (in range(0,256)) to strings.

    String values are percent-encoded byte values, unless the key < 128, and
    in the "safe" set (either the specified safe set, or default set).
    """
    # Keeps a cache internally, using defaultdict, for efficiency (lookups
    # of cached keys don't call Python code at all).
    def __init__(self, safe):
        """safe: bytes object."""
        self.safe = _ALWAYS_SAFE.union(bytes(safe))

    def __repr__(self):
        # Without this, will just display as a defaultdict
        return "<Quoter %r>" % dict(self)

    def __missing__(self, b):
        # Handle a cache miss. Store quoted string in cache and return.
        res = chr(b) if b in self.safe else '%{0:02X}'.format(b)
        self[b] = res
        return res
def quote(string, safe='/', encoding=None, errors=None):
    """quote('abc def') -> 'abc%20def'

    Percent-encode the characters of *string* that are reserved or unsafe
    in a URL component.  Characters listed in *safe* (default: '/') are
    left untouched, which makes the default suitable for quoting the path
    section of a URL where slashes act as separators.

    *string* and *safe* may be either str or bytes objects.  For str input,
    *encoding* (default 'utf-8') and *errors* (default 'strict') control the
    str-to-bytes conversion, exactly as for str.encode().  They must not be
    given for bytes input.
    """
    # bytes input: no text decoding is involved, so the encoding/errors
    # knobs are meaningless -- reject them loudly rather than ignore them.
    if not isinstance(string, str):
        if encoding is not None:
            raise TypeError("quote() doesn't support 'encoding' for bytes")
        if errors is not None:
            raise TypeError("quote() doesn't support 'errors' for bytes")
        return quote_from_bytes(string, safe)
    # Empty text needs no work; returning it unchanged also preserves the
    # input's type.
    if not string:
        return string
    raw = string.encode('utf-8' if encoding is None else encoding,
                        'strict' if errors is None else errors)
    return quote_from_bytes(raw, safe)
def quote_plus(string, safe='', encoding=None, errors=None):
    """Like quote(), but also replace ' ' with '+', as required for quoting
    HTML form values.  Plus signs in the original string are escaped unless
    they are included in safe.  It also does not have safe default to '/'.
    """
    # Fast path: with no spaces present, plain quote() already produces
    # the right answer.
    has_space = (' ' in string) if isinstance(string, str) else (b' ' in string)
    if not has_space:
        return quote(string, safe, encoding, errors)
    # Otherwise, treat the space as "safe" while quoting so it survives,
    # then swap it for '+' afterwards.  The space literal must match the
    # type of *safe* for the concatenation below.
    space = str(' ') if isinstance(safe, str) else bytes(b' ')
    quoted = quote(string, safe + space, encoding, errors)
    return quoted.replace(' ', '+')
def quote_from_bytes(bs, safe='/'):
    """Like quote(), but accepts a bytes object rather than a str, and does
    not perform string-to-bytes encoding.  It always returns an ASCII string.
    quote_from_bytes(b'abc def\x3f') -> 'abc%20def%3f'
    """
    if not isinstance(bs, (bytes, bytearray)):
        raise TypeError("quote_from_bytes() expected bytes")
    if not bs:
        return str('')
    ### For Python-Future:
    bs = bytes(bs)
    ###
    if isinstance(safe, str):
        # Normalize 'safe' by converting to bytes and removing non-ASCII chars
        safe = str(safe).encode('ascii', 'ignore')
    else:
        ### For Python-Future:
        safe = bytes(safe)
        ###
        # Non-ASCII byte values can never be "safe" in a URL.
        safe = bytes([c for c in safe if c < 128])
    # Fast path: if every byte is already safe, decode and return as-is.
    if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe):
        return bs.decode()
    # Reuse (or build and cache) the per-safe-set Quoter lookup; the cached
    # value is the bound __getitem__ so the hot loop below is a plain call.
    try:
        quoter = _safe_quoters[safe]
    except KeyError:
        _safe_quoters[safe] = quoter = Quoter(safe).__getitem__
    return str('').join([quoter(char) for char in bs])
def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
    """Encode a sequence of two-element tuples or dictionary into a URL query string.

    If any values in the query arg are sequences and doseq is true, each
    sequence element is converted to a separate parameter.

    If the query arg is a sequence of two-element tuples, the order of the
    parameters in the output will match the order of parameters in the
    input.

    The query arg may be either a string or a bytes type. When query arg is a
    string, the safe, encoding and error parameters are sent the quote_plus for
    encoding.
    """
    if hasattr(query, "items"):
        # Mapping: iterate its (key, value) pairs.
        query = query.items()
    else:
        # It's a bother at times that strings and string-like objects are
        # sequences.
        try:
            # non-sequence items should not work with len()
            # non-empty strings will fail this
            if len(query) and not isinstance(query[0], tuple):
                raise TypeError
            # Zero-length sequences of all types will get here and succeed,
            # but that's a minor nit. Since the original implementation
            # allowed empty dicts that type of behavior probably should be
            # preserved for consistency
        except TypeError:
            ty, va, tb = sys.exc_info()
            raise_with_traceback(TypeError("not a valid non-string sequence "
                                           "or mapping object"), tb)
    l = []
    if not doseq:
        # Simple mode: every value is stringified and quoted as one unit.
        for k, v in query:
            if isinstance(k, bytes):
                k = quote_plus(k, safe)
            else:
                k = quote_plus(str(k), safe, encoding, errors)
            if isinstance(v, bytes):
                v = quote_plus(v, safe)
            else:
                v = quote_plus(str(v), safe, encoding, errors)
            l.append(k + '=' + v)
    else:
        # doseq mode: sequence values expand into one k=elt pair per element.
        for k, v in query:
            if isinstance(k, bytes):
                k = quote_plus(k, safe)
            else:
                k = quote_plus(str(k), safe, encoding, errors)
            if isinstance(v, bytes):
                v = quote_plus(v, safe)
                l.append(k + '=' + v)
            elif isinstance(v, str):
                v = quote_plus(v, safe, encoding, errors)
                l.append(k + '=' + v)
            else:
                try:
                    # Is this a sufficient test for sequence-ness?
                    x = len(v)
                except TypeError:
                    # not a sequence
                    v = quote_plus(str(v), safe, encoding, errors)
                    l.append(k + '=' + v)
                else:
                    # loop over the sequence
                    for elt in v:
                        if isinstance(elt, bytes):
                            elt = quote_plus(elt, safe)
                        else:
                            elt = quote_plus(str(elt), safe, encoding, errors)
                        l.append(k + '=' + elt)
    return str('&').join(l)
# Utilities to parse URLs (most of these return None for missing parts):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
# splittype('type:opaquestring') --> 'type', 'opaquestring'
# splithost('//host[:port]/path') --> 'host[:port]', '/path'
# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
# splitpasswd('user:passwd') -> 'user', 'passwd'
# splitport('host:port') --> 'host', 'port'
# splitquery('/path?query') --> '/path', 'query'
# splittag('/path#tag') --> '/path', 'tag'
# splitattr('/path;attr1=value1;attr2=value2;...') ->
# '/path', ['attr1=value1', 'attr2=value2', ...]
# splitvalue('attr=value') --> 'attr', 'value'
# urllib.parse.unquote('abc%20def') -> 'abc def'
# quote('abc def') -> 'abc%20def')
def to_bytes(url):
    """to_bytes(u"URL") --> 'URL'.

    Verify that a str URL contains only ASCII characters; raise
    UnicodeError otherwise.  Non-str input is returned unchanged.
    """
    # Most URL schemes require ASCII. If that changes, the conversion
    # can be relaxed.
    # XXX get rid of to_bytes()
    if not isinstance(url, str):
        return url
    try:
        return url.encode("ASCII").decode()
    except UnicodeError:
        raise UnicodeError("URL " + repr(url) +
                           " contains non-ASCII characters")
def unwrap(url):
    """unwrap('<URL:type://host/path>') --> 'type://host/path'."""
    # Strip surrounding whitespace, an optional <...> wrapper, and an
    # optional leading 'URL:' marker.
    text = str(url).strip()
    if text.startswith('<') and text.endswith('>'):
        text = text[1:-1].strip()
    if text[:4] == 'URL:':
        text = text[4:].strip()
    return text
# Compiled lazily on first use, then cached at module level.
_typeprog = None
def splittype(url):
    """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
    global _typeprog
    if _typeprog is None:
        import re
        _typeprog = re.compile('^([^/:]+):')
    m = _typeprog.match(url)
    if not m:
        # No scheme prefix present.
        return None, url
    scheme = m.group(1)
    # Schemes are case-insensitive; normalize to lowercase.
    return scheme.lower(), url[len(scheme) + 1:]
# Compiled lazily on first use, then cached at module level.
_hostprog = None
def splithost(url):
    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
    global _hostprog
    if _hostprog is None:
        import re
        _hostprog = re.compile('^//([^/?]*)(.*)$')
    m = _hostprog.match(url)
    if not m:
        # No '//' netloc prefix.
        return None, url
    host_port, path = m.group(1, 2)
    # A non-empty path (e.g. a bare query string) must start with '/'.
    if path and not path.startswith('/'):
        path = '/' + path
    return host_port, path
# Compiled lazily on first use, then cached at module level.
_userprog = None
def splituser(host):
    """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
    global _userprog
    if _userprog is None:
        import re
        # Greedy first group: splits on the LAST '@' in the string.
        _userprog = re.compile('^(.*)@(.*)$')
    m = _userprog.match(host)
    return m.group(1, 2) if m else (None, host)
# Compiled lazily on first use, then cached at module level.
_passwdprog = None
def splitpasswd(user):
    """splitpasswd('user:passwd') -> 'user', 'passwd'."""
    global _passwdprog
    if _passwdprog is None:
        import re
        # re.S lets the password part span newlines; split on FIRST ':'.
        _passwdprog = re.compile('^([^:]*):(.*)$', re.S)
    m = _passwdprog.match(user)
    return m.group(1, 2) if m else (user, None)
# splittag('/path#tag') --> '/path', 'tag'
# Compiled lazily on first use, then cached at module level.
_portprog = None
def splitport(host):
    """splitport('host:port') --> 'host', 'port'."""
    global _portprog
    if _portprog is None:
        import re
        # Only an all-digit suffix after ':' counts as a port.
        _portprog = re.compile('^(.*):([0-9]+)$')
    m = _portprog.match(host)
    return m.group(1, 2) if m else (host, None)
# Compiled lazily on first use, then cached at module level.
_nportprog = None
def splitnport(host, defport=-1):
    """Split host and port, returning numeric port.
    Return given default port if no ':' found; defaults to -1.
    Return numerical port if a valid number are found after ':'.
    Return None if ':' but not a valid number."""
    global _nportprog
    if _nportprog is None:
        import re
        _nportprog = re.compile('^(.*):(.*)$')
    m = _nportprog.match(host)
    if not m:
        # No ':' at all: keep the host, report the caller's default port.
        return host, defport
    name, port = m.group(1, 2)
    try:
        if not port:
            raise ValueError("no digits")
        nport = int(port)
    except ValueError:
        # ':' present but the suffix is not a valid integer.
        nport = None
    return name, nport
# Compiled lazily on first use, then cached at module level.
_queryprog = None
def splitquery(url):
    """splitquery('/path?query') --> '/path', 'query'."""
    global _queryprog
    if _queryprog is None:
        import re
        # Raw string: the previous non-raw '^(.*)\?([^?]*)$' relied on the
        # unknown escape '\?' passing through unchanged, which raises a
        # DeprecationWarning (and is slated to become an error) on modern
        # Python.  The compiled pattern is identical.
        _queryprog = re.compile(r'^(.*)\?([^?]*)$')
    match = _queryprog.match(url)
    if match: return match.group(1, 2)
    return url, None
# Compiled lazily on first use, then cached at module level.
_tagprog = None
def splittag(url):
    """splittag('/path#tag') --> '/path', 'tag'."""
    global _tagprog
    if _tagprog is None:
        import re
        # Greedy first group: splits on the LAST '#'.
        _tagprog = re.compile('^(.*)#([^#]*)$')
    m = _tagprog.match(url)
    return m.group(1, 2) if m else (url, None)
def splitattr(url):
    """splitattr('/path;attr1=value1;attr2=value2;...') ->
    '/path', ['attr1=value1', 'attr2=value2', ...]."""
    # First ';'-separated chunk is the path; the rest are attribute specs
    # (an empty list when no ';' is present).
    parts = url.split(';')
    return parts[0], parts[1:]
# Compiled lazily on first use, then cached at module level.
_valueprog = None
def splitvalue(attr):
    """splitvalue('attr=value') --> 'attr', 'value'."""
    global _valueprog
    if _valueprog is None:
        import re
        # Splits on the FIRST '='; the value may itself contain '='.
        _valueprog = re.compile('^([^=]*)=(.*)$')
    m = _valueprog.match(attr)
    return m.group(1, 2) if m else (attr, None)
| mit |
dagnir/servo | tests/wpt/web-platform-tests/webdriver/base_test.py | 58 | 1398 | import ConfigParser
import json
import os
import sys
import unittest
from network import get_lan_ip
repo_root = os.path.abspath(os.path.join(__file__, "../.."))
sys.path.insert(1, os.path.join(repo_root, "tools", "webdriver"))
sys.path.insert(1, os.path.join(repo_root, "tools", "wptserve"))
from wptserve import server
from webdriver.driver import WebDriver
class WebDriverBaseTest(unittest.TestCase):
    """Shared fixture for WebDriver tests: one browser session and one local
    wptserve HTTP server per test class."""

    @classmethod
    def setUpClass(cls):
        cls.driver = create_driver()
        # Bind the test HTTP server to the LAN IP so the (possibly remote)
        # browser under test can reach it.
        cls.webserver = server.WebTestHttpd(host=get_lan_ip())
        cls.webserver.start()
        # Alias used by the test cases to turn a relative path into a URL.
        cls.webserver.where_is = cls.webserver.get_url

    @classmethod
    def tearDownClass(cls):
        cls.webserver.stop()
        if cls.driver:
            cls.driver.quit()
def create_driver():
    """Build a WebDriver session from webdriver.cfg.

    The browser section is chosen by the WD_BROWSER environment variable
    (default 'firefox').  The section may override the hub 'url', provide a
    JSON 'capabilities' blob, and select a protocol 'mode'.
    """
    config = ConfigParser.ConfigParser()
    config.read('webdriver.cfg')
    section = os.environ.get("WD_BROWSER", 'firefox')
    url = 'http://127.0.0.1:4444/wd/hub'
    if config.has_option(section, 'url'):
        url = config.get(section, "url")
    capabilities = None
    if config.has_option(section, 'capabilities'):
        try:
            capabilities = json.loads(config.get(section, "capabilities"))
        except ValueError:
            # Malformed JSON in the config file: fall back to no extra
            # capabilities.  (Previously a bare 'except:' here swallowed
            # every exception type, including KeyboardInterrupt, hiding
            # real errors.)
            capabilities = None
    mode = 'compatibility'
    if config.has_option(section, 'mode'):
        mode = config.get(section, 'mode')
    return WebDriver(url, {}, capabilities, mode)
| mpl-2.0 |
TomHeatwole/osf.io | scripts/migrate_names_cleanup.py | 54 | 3659 | #!/usr/bin/env python
# encoding: utf-8
"""Migrate user name fields
Run dry run: python -m scripts.migrate_names_cleanup dry
Run migration: python -m scripts.migrate_names_cleanup
Log: Run by sloria on 2015-02-17. A log was saved to /opt/data/migration-logs.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from framework.auth import core
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# Name fields that may carry stray leading/trailing whitespace.
_NAME_FIELDS = ('fullname', 'given_name', 'middle_names', 'family_name',
                'suffix')

def migrate_user(user, dry_run):
    """Strip surrounding whitespace from a user's name fields.

    Logs every field that actually changes.  The log line format
    ('Updated User: <id>, <field>: "<old value>"') is identical to the
    previous per-field implementation.  The user is saved only when
    *dry_run* is falsy (the in-memory object is trimmed either way).
    """
    for field in _NAME_FIELDS:
        value = getattr(user, field)
        # Skip empty/None fields and fields that are already trimmed.
        if value and value != value.strip():
            logger.info(u'Updated User: {}, {}: "{}"'.format(
                user._id, field, value))
            setattr(user, field, value.strip())
    if not dry_run:
        user.save()
def get_targets():
    # Every user record; migrate_user() decides per user whether any field
    # actually needs trimming.
    return core.User.find()
def main(dry_run):
    # Walk every user and trim their name fields; saves are skipped when
    # dry_run is truthy.
    users = get_targets()
    for user in users:
        migrate_user(user, dry_run)
if __name__ == '__main__':
    # Needs database backends but no web routes for this migration.
    init_app(set_backends=True, routes=False)
    # 'python -m scripts.migrate_names_cleanup dry' performs a dry run.
    dry_run = 'dry' in sys.argv
    # Log to file
    if not dry_run:
        script_utils.add_file_logger(logger, __file__)
    main(dry_run=dry_run)
import mock
from nose.tools import * # noqa
from tests.base import OsfTestCase
from website.addons.osfstorage.tests.factories import AuthUserFactory
class TestMigrateUser(OsfTestCase):
    """Tests for the name-whitespace migration."""

    def tearDown(self):
        super(TestMigrateUser, self).tearDown()
        # Drop all users so each test starts from an empty collection.
        core.User.remove()

    def test_get_targets(self):
        [AuthUserFactory() for _ in range(5)]
        targets = get_targets()
        assert_equal(len(targets), 5)

    def test_migrate_user(self):
        user = AuthUserFactory()
        # NOTE(review): 'orig_user' aliases the same object as 'user', so
        # after reload() the assertions below compare the object with
        # itself and pass trivially -- they do not actually pin the
        # pre-padding values.  Confirm whether a copy was intended.
        orig_user = user
        user.fullname = ' {} '.format(user.fullname)
        user.given_name = ' {} '.format(user.given_name)
        user.middle_names = ' {} '.format(user.middle_names)
        user.family_name = ' {} '.format(user.family_name)
        user.suffix = ' {} '.format(user.suffix)
        user.save()
        migrate_user(user, dry_run=False)
        user.reload()
        assert_equal(user.fullname, orig_user.fullname)
        assert_equal(user.given_name, orig_user.given_name)
        assert_equal(user.middle_names, orig_user.middle_names)
        assert_equal(user.family_name, orig_user.family_name)
        assert_equal(user.suffix, orig_user.suffix)

    @mock.patch('scripts.migrate_names_cleanup.get_targets')
    def test_dry_run(self, mock_targets):
        # In a dry run no user may be saved, padded name or not.
        user1 = mock.Mock()
        user1.fullname = ' {} '.format('tea g. pot')
        user2 = mock.Mock()
        user2.fullname = 'normal name'
        users = [user1, user2]
        mock_targets.return_value = users
        main(dry_run=True)
        for user in users:
            assert not user.save.called
| apache-2.0 |
insiderr/insiderr-app | ios-patches/basemodules/twisted/protocols/wire.py | 63 | 2359 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""Implement standard (and unused) TCP protocols.
These protocols are either provided by inetd, or are not provided at all.
"""
# system imports
import time, struct
from zope.interface import implements
# twisted import
from twisted.internet import protocol, interfaces
class Echo(protocol.Protocol):
    """As soon as any data is received, write it back (RFC 862)"""
    def dataReceived(self, data):
        # Mirror every received chunk straight back to the peer.
        self.transport.write(data)
class Discard(protocol.Protocol):
    """Discard any received data (RFC 863)"""
    def dataReceived(self, data):
        # I'm ignoring you, nyah-nyah
        pass
class Chargen(protocol.Protocol):
    """Generate repeating noise (RFC 864)"""
    noise = r'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ !"#$%&?'

    # Old-style zope.interface declaration (pre-@implementer decorator).
    implements(interfaces.IProducer)

    def connectionMade(self):
        # Register as a push producer (streaming=0 => pull producer: the
        # transport will call resumeProducing() whenever it wants data).
        self.transport.registerProducer(self, 0)

    def resumeProducing(self):
        # Emit one chunk of the pattern each time the transport asks.
        self.transport.write(self.noise)

    def pauseProducing(self):
        pass

    def stopProducing(self):
        pass
class QOTD(protocol.Protocol):
    """Return a quote of the day (RFC 865)"""
    def connectionMade(self):
        # Send the quote and hang up immediately, per the protocol.
        self.transport.write(self.getQuote())
        self.transport.loseConnection()

    def getQuote(self):
        """Return a quote. May be overridden in subclasses."""
        return "An apple a day keeps the doctor away.\r\n"
class Who(protocol.Protocol):
    """Return list of active users (RFC 866)"""
    def connectionMade(self):
        # Send the user list and hang up immediately, per the protocol.
        self.transport.write(self.getUsers())
        self.transport.loseConnection()

    def getUsers(self):
        """Return active users. Override in subclasses."""
        return "root\r\n"
class Daytime(protocol.Protocol):
    """Send back the daytime in ASCII form (RFC 867)"""
    def connectionMade(self):
        # Human-readable UTC time, then close, per the protocol.
        self.transport.write(time.asctime(time.gmtime(time.time())) + '\r\n')
        self.transport.loseConnection()
class Time(protocol.Protocol):
    """Send back the time in machine readable form (RFC 868)"""
    def connectionMade(self):
        # is this correct only for 32-bit machines?
        # NOTE(review): this packs seconds since the Unix epoch (1970),
        # while RFC 868 specifies seconds since 1900-01-01 -- confirm
        # whether the 70-year offset is intentionally omitted here.
        result = struct.pack("!i", int(time.time()))
        self.transport.write(result)
        self.transport.loseConnection()
| gpl-3.0 |
marcsans/cnn-physics-perception | phy/lib/python2.7/site-packages/keras/constraints.py | 11 | 4002 | from __future__ import absolute_import
from . import backend as K
class Constraint(object):
    """Base class for weight constraints.

    The base implementation is the identity constraint: __call__ returns
    the parameter tensor unchanged.  Subclasses override __call__ to
    project weights onto their feasible set.
    """

    def __call__(self, p):
        # Identity projection by default.
        return p

    def get_config(self):
        # Serializable description; subclasses extend this dict.
        return {'name': self.__class__.__name__}
class MaxNorm(Constraint):
    '''Constrain the weights incident to each hidden unit to have a norm less than or equal to a desired value.

    # Arguments
        m: the maximum norm for the incoming weights.
        axis: integer, axis along which to calculate weight norms. For instance,
            in a `Dense` layer the weight matrix has shape (input_dim, output_dim),
            set `axis` to `0` to constrain each weight vector of length (input_dim).
            In a `MaxoutDense` layer the weight tensor has shape (nb_feature, input_dim, output_dim),
            set `axis` to `1` to constrain each weight vector of length (input_dim),
            i.e. constrain the filters incident to the `max` operation.
            In a `Convolution2D` layer with the Theano backend, the weight tensor
            has shape (nb_filter, stack_size, nb_row, nb_col), set `axis` to `[1,2,3]`
            to constrain the weights of each filter tensor of size (stack_size, nb_row, nb_col).
            In a `Convolution2D` layer with the TensorFlow backend, the weight tensor
            has shape (nb_row, nb_col, stack_size, nb_filter), set `axis` to `[0,1,2]`
            to constrain the weights of each filter tensor of size (nb_row, nb_col, stack_size).

    # References
        - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting Srivastava, Hinton, et al. 2014](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)
    '''
    def __init__(self, m=2, axis=0):
        self.m = m
        self.axis = axis

    def __call__(self, p):
        # L2 norm per unit along the configured axis (kept broadcastable).
        norms = K.sqrt(K.sum(K.square(p), axis=self.axis, keepdims=True))
        desired = K.clip(norms, 0, self.m)
        # Rescale each vector so its norm is at most m; K.epsilon() guards
        # against division by zero for all-zero vectors.
        p = p * (desired / (K.epsilon() + norms))
        return p

    def get_config(self):
        return {'name': self.__class__.__name__,
                'm': self.m,
                'axis': self.axis}
class NonNeg(Constraint):
    '''Constrain the weights to be non-negative.
    '''
    def __call__(self, p):
        # Multiply by a {0,1} mask: negative entries are zeroed in place.
        p *= K.cast(p >= 0., K.floatx())
        return p
class UnitNorm(Constraint):
    '''Constrain the weights incident to each hidden unit to have unit norm.

    # Arguments
        axis: integer, axis along which to calculate weight norms. For instance,
            in a `Dense` layer the weight matrix has shape (input_dim, output_dim),
            set `axis` to `0` to constrain each weight vector of length (input_dim).
            In a `MaxoutDense` layer the weight tensor has shape (nb_feature, input_dim, output_dim),
            set `axis` to `1` to constrain each weight vector of length (input_dim),
            i.e. constrain the filters incident to the `max` operation.
            In a `Convolution2D` layer with the Theano backend, the weight tensor
            has shape (nb_filter, stack_size, nb_row, nb_col), set `axis` to `[1,2,3]`
            to constrain the weights of each filter tensor of size (stack_size, nb_row, nb_col).
            In a `Convolution2D` layer with the TensorFlow backend, the weight tensor
            has shape (nb_row, nb_col, stack_size, nb_filter), set `axis` to `[0,1,2]`
            to constrain the weights of each filter tensor of size (nb_row, nb_col, stack_size).
    '''
    def __init__(self, axis=0):
        self.axis = axis

    def __call__(self, p):
        # Divide each vector by its L2 norm; K.epsilon() prevents a
        # division by zero for all-zero vectors.
        return p / (K.epsilon() + K.sqrt(K.sum(K.square(p), axis=self.axis, keepdims=True)))

    def get_config(self):
        return {'name': self.__class__.__name__,
                'axis': self.axis}
# Lowercase aliases kept for backwards compatibility with string
# identifiers like constraint='maxnorm'.
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm

from .utils.generic_utils import get_from_module

def get(identifier, kwargs=None):
    # Resolve a constraint given by name/config dict/instance, instantiating
    # it with the provided kwargs when needed.
    return get_from_module(identifier, globals(), 'constraint',
                           instantiate=True, kwargs=kwargs)
| mit |
DroidThug/kernel_delta_msm8916 | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0      # suppress progress output when set
test = 0       # syntax-check mode: print sysfs paths instead of touching them
comments = 0   # echo comment lines from the test file after the first command
# sysfs interface exposed by the kernel rt-mutex tester module.
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
# Maps test-script command names to the numeric opcodes the kernel expects.
cmd_opcodes = {
    "schedother"    : "1",
    "schedfifo"     : "2",
    "lock"          : "3",
    "locknowait"    : "4",
    "lockint"       : "5",
    "lockintnowait" : "6",
    "lockcont"      : "7",
    "unlock"        : "8",
    "signal"        : "11",
    "resetevent"    : "98",
    "reset"         : "99",
}
# Maps test assertions to [status-field letter, comparison, expected value];
# a None expected value means it is taken from the script line instead.
test_opcodes = {
    "prioeq"        : ["P" , "eq" , None],
    "priolt"        : ["P" , "lt" , None],
    "priogt"        : ["P" , "gt" , None],
    "nprioeq"       : ["N" , "eq" , None],
    "npriolt"       : ["N" , "lt" , None],
    "npriogt"       : ["N" , "gt" , None],
    "unlocked"      : ["M" , "eq" , 0],
    "trylock"       : ["M" , "eq" , 1],
    "blocked"       : ["M" , "eq" , 2],
    "blockedwake"   : ["M" , "eq" , 3],
    "locked"        : ["M" , "eq" , 4],
    "opcodeeq"      : ["O" , "eq" , None],
    "opcodelt"      : ["O" , "lt" , None],
    "opcodegt"      : ["O" , "gt" , None],
    "eventeq"       : ["E" , "eq" , None],
    "eventlt"       : ["E" , "lt" , None],
    "eventgt"       : ["E" , "gt" , None],
}
# Print usage information
def usage():
    # Python 2 print statements (this script targets python2).
    print "rt-tester.py <-c -h -q -t> <testfile>"
    print " -c display comments after first command"
    print " -h help"
    print " -q quiet mode"
    print " -t test mode (syntax check)"
    print " testfile: read test specification from testfile"
    print " otherwise from stdin"
    return
# Print progress when not in quiet mode
def progress(str):
    # NOTE: the parameter shadows the builtin 'str'; harmless here since
    # the builtin is not used inside this function.
    if not quiet:
        print str
# Analyse a status value
def analyse(val, top, arg):
    # val: status value read from sysfs; top: [field, op, expected] from
    # test_opcodes; arg: the argument column of the script line.
    # Returns 1 when the comparison holds, else 0.
    intval = int(val)
    if top[0] == "M":
        # Mutex state field: 'arg' selects which decimal digit to inspect.
        intval = intval / (10 ** int(arg))
        intval = intval % 10
        argval = top[2]
    elif top[0] == "O":
        # Opcode field: the argument may be a symbolic command name.
        argval = int(cmd_opcodes.get(arg, arg))
    else:
        argval = int(arg)
    # progress("%d %s %d" %(intval, top[1], argval))
    if top[1] == "eq" and intval == argval:
        return 1
    if top[1] == "lt" and intval < argval:
        return 1
    if top[1] == "gt" and intval > argval:
        return 1
    return 0
# Parse the commandline
try:
    (options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
    usage()
    sys.exit(1)

# Parse commandline options
for option, value in options:
    if option == "-c":
        comments = 1
    elif option == "-q":
        quiet = 1
    elif option == "-t":
        test = 1
    elif option == '-h':
        usage()
        sys.exit(0)

# Select the input source: the given test file, or stdin when none given.
if arguments:
    try:
        fd = open(arguments[0])
    except Exception,ex:
        sys.stderr.write("File not found %s\n" %(arguments[0]))
        sys.exit(1)
else:
    fd = sys.stdin
linenr = 0

# Read the test patterns: each line is "cmd:opcode:thread-id:data".
while 1:
    linenr = linenr + 1
    line = fd.readline()
    if not len(line):
        break

    line = line.strip()
    parts = line.split(":")

    if not parts or len(parts) < 1:
        continue

    if len(parts[0]) == 0:
        continue

    if parts[0].startswith("#"):
        # Comment line: echo it only after the first real command when -c.
        if comments > 1:
            progress(line)
        continue

    if comments == 1:
        comments = 2

    progress(line)

    cmd = parts[0].strip().lower()
    opc = parts[1].strip().lower()
    tid = parts[2].strip()
    dat = parts[3].strip()

    try:
        # Test or wait for a status value
        if cmd == "t" or cmd == "w":
            testop = test_opcodes[opc]

            fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
            if test:
                print fname
                continue

            while 1:
                query = 1
                fsta = open(fname, 'r')
                status = fsta.readline().strip()
                fsta.close()
                stat = status.split(",")
                for s in stat:
                    s = s.strip()
                    if s.startswith(testop[0]):
                        # Separate status value
                        val = s[2:].strip()
                        query = analyse(val, testop, dat)
                        break
                # 't' checks once; 'w' keeps polling until it matches.
                if query or cmd == "t":
                    break

                progress(" " + status)

            if not query:
                sys.stderr.write("Test failed in line %d\n" %(linenr))
                sys.exit(1)

        # Issue a command to the tester
        elif cmd == "c":
            cmdnr = cmd_opcodes[opc]
            # Build command string and sys filename
            cmdstr = "%s:%s" %(cmdnr, dat)
            fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
            if test:
                print fname
                continue
            fcmd = open(fname, 'w')
            fcmd.write(cmdstr)
            fcmd.close()

    except Exception,ex:
        sys.stderr.write(str(ex))
        sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
        if not test:
            fd.close()
            sys.exit(1)

# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
beihaiguaishou/HackRunningGo-SC | HaRunGo.py | 1 | 10445 | import fileinput
import requests
import json
import base64
import random
import datetime
import re
import time
import hashlib
from Phone import *
import uuid
import codecs
# Global variables: canned GPS routes and their "five point" checkpoint
# metadata, loaded once at import time from local data files.
file1 = open('route.data')
routes = file1.readlines()
file1.close()
file2 = codecs.open('tp.data', 'r', 'utf-8')
tps = file2.readlines()
file2.close()
#tots = []
#for route in routes:
#    times = re.findall(r'\\\"totalTime\\\"\:(\d+)', route)
#    t = times[len(times) - 1]
#    tots.append(int(t))
#    print tots
# Number of canned routes available for selection.
tot_cnt = len(routes)
def base16encode(username):
    # Hex (base16) encoding of the username, returned as a str.
    return str(base64.b16encode(username))
def base64encode(username, pwd):
    # HTTP Basic auth header value: "Basic " + base64("username:password").
    # NOTE: the local name shadows the builtin 'list'; harmless here.
    list = [username, pwd]
    sign = ':'
    strr = sign.join(list)
    return "Basic " + str(base64.b64encode(strr))
#def virtualDevicedId(username):
# fi = base16encode(username)
# la = username[1:]
# id = fi + la
# res = "%s-%s-%s-%s-%s" % (id[0:8], id[8:12], id[12:16], id[16:20], id[20:])
# return res
#def virtualCustomDeviceId(username):
# return virtualDevicedId(username) + "_iOS_sportsWorld_campus"
def selectRoute():
    """Pick a uniformly random route index in [0, tot_cnt).

    The previous ``int(random.uniform(0, tot_cnt - 1))`` both biased the
    distribution (truncation gives the endpoints half weight) and made the
    last route effectively unselectable; ``randrange`` draws each index
    with equal probability.
    """
    return random.randrange(tot_cnt)
def datetime_to_timestamp_in_milliseconds(d):
    """Convert a naive local-time datetime to milliseconds since the epoch.

    Sub-second precision of *d* is discarded (mktime works on whole
    seconds via timetuple()).
    """
    seconds = time.mktime(d.timetuple())
    return int(seconds * 1000)
#def format(data, totTime):
# data = str(data)
# res = re.findall(r'\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}', data)
# startTime = res[0]
# startDate = startTime[0:10]
# dateToday = datetime.date.today()
# newData = data.replace(startDate, str(dateToday))
# startTimeDtObj = datetime.datetime.now() + datetime.timedelta(seconds = -int(totTime))
# endTimeDtObj = startTimeDtObj + datetime.timedelta(seconds = int(totTime))
# startTimeDtObj = datetime.datetime.strptime(startTime, "%Y-%m-%d %H:%M:%S")
# startTimeTiObj = time.strptime(startTime, "%Y-%m-%d %H:%M:%S")
#st = datetime_to_timestamp_in_milliseconds(startTimeDtObj)
#et = datetime_to_timestamp_in_milliseconds(endTimeDtObj)
# newData = data.replace(str(dataDate), str(data_today))
#res = re.findall(r'\d{13}', newData)
#newData = newData.replace(res[0], str(st))
# print("new data: " + newData)
# print("totTime: " + str(totTime))
# print("start: " + str(st))
# print("end: " + str(et))
#return str(newData), int(st), int(et)
def login(username, pwd):
    """Log in to the running-app server and return the 'data' payload
    (expected to contain uid/token/unid used by the other requests --
    TODO confirm against the server's response schema)."""
    url = 'http://gxapp.iydsj.com/api/v9/login'
    headers = {
        "Host": "gxapp.iydsj.com",
        "Accept": "application/json",
        # HTTP Basic credentials built from username:password.
        "Authorization": base64encode(username, pwd),
        "osType": "0",
        "Content-Type": "application/json",
        # Device identifiers come from the Phone helper module.
        "DeviceId": getDeviceId(),
        "CustomDeviceId": getCustomDeviceId(),
        "User-Agent": "Dalvik/2.1.0 (Linux; U; Android 5.0; SM-N9002 Build/LRX21V)",
        "appVersion": "1.3.10",
        "timeStamp": str(int(time.time()*1000))
    }
    Session = requests.Session()
    data = {
        "device_model":getDeviceModel(),
        "imei":getImei(),
        "loginType":1,
        "mac_address":getMacAdress(),
        "os_version":"0"
    }
    Request = Session.post(url, headers = headers, data = json.dumps(data))
    reqData = Request.content
    print ('login response: ' + reqData)
    dicData = json.loads(reqData)
    return dicData['data']
def dataUpload(userInfo):
    """Fabricate and upload one running record for the logged-in user.

    Picks a random canned route, rebases all of its timestamps so the run
    appears to have just finished, synthesizes per-interval step/speed
    samples, signs the payload with digestDict(), and POSTs it.
    Field semantics are inferred from the captured app traffic -- confirm
    against the server API before relying on them.
    """
    url = 'http://gxapp.iydsj.com/api/v10/runnings/add_record'
    timeStamp = str(int(time.time()*1000))
    # Fields that participate in the token signature header.
    dic = {
        'uid':userInfo['uid'],
        'token':userInfo['token'],
        'timeStamp':timeStamp
    }
    headers = {
        "Host": "gxapp.iydsj.com",
        "Accept": "application/json",
        # NOTE: "osType" appears twice in this literal; the second
        # occurrence silently wins.
        "osType": "0",
        "Content-Type": "application/json",
        "deviceName": getDeviceModel(),
        "osType": "0",
        "osVersion": "1.3.10",
        "DeviceId": getDeviceId(),
        "CustomDeviceId": getCustomDeviceId(),
        "User-Agent": "Dalvik/2.1.0 (Linux; U; Android 5.0; SM-N9002 Build/LRX21V)",
        "appVersion":"1.3.10",
        "uid":str(dic['uid']),
        "token":dic['token'],
        "tokenSign":digestDict(dic),
        "timeStamp":dic['timeStamp']
    }
    #index = 0
    #while index == 0:
    index = selectRoute()
    print ("Use " + str(index) + " data")
    alllocjson = json.loads(routes[index])
    fivepointjson = json.loads(tps[index])
    allloc = json.loads(alllocjson['allLocJson'])
    fivepoint = json.loads(fivepointjson['fivePointJson'])
    # Rebase the route so it ends "now": shift every timestamp by the
    # difference between the new start flag and the recorded one.
    oldflag = allloc[0]['flag']
    totaltime = allloc[len(allloc)-1]['totalTime']
    newflag = int(time.time()*1000) - totaltime*1000
    delta = newflag-oldflag
    timedelta = datetime.timedelta(days = int(delta/86400000), seconds = int(delta/1000)%86400, microseconds = delta%1000)
    # Random starting ids make consecutive uploads look less uniform.
    speedid = int(random.uniform(0, 250))
    stepid = int(random.uniform(0, 250))
    currentdis = 0.0
    currenttime = newflag
    allstep = []
    allspeed = []
    for i in fivepoint:
        i['flag'] = newflag
        #i['pointName'] = 'gogogo'
    for i in allloc:
        i['flag'] = newflag
        oldtime = datetime.datetime.strptime(i['gainTime'],'%Y-%m-%d %H:%M:%S')
        newtime = oldtime + timedelta
        #print newtime
        endtime = datetime_to_timestamp_in_milliseconds(newtime)
        # Distance covered in this interval (totalDis is cumulative).
        distance = float(i['totalDis']) - currentdis
        currentdis = float(i['totalDis'])
        i['gainTime'] = newtime.strftime('%Y-%m-%d %H:%M:%S')
        # Synthetic cadence sample; 0.8 m is the assumed stride length here
        # (1.2 m is used for the totals below) -- TODO confirm intent.
        step = {
            "avgDiff": random.uniform(12, 14),
            "beginTime": currenttime,
            "endTime": endtime,
            "flag": newflag,
            "id": stepid,
            "maxDiff": random.uniform(15, 20),
            "minDiff": random.uniform(8, 10),
            "stepsNum": int(distance/0.8)
        }
        allstep.append(step)
        speed = {
            "beginTime": currenttime,
            "distance": distance,
            "endTime": endtime,
            "flag": newflag,
            "id": speedid
        }
        allspeed.append(speed)
        currenttime = endtime
        speedid += 1
        stepid += 1
    # thisdata, st, et = format(routes[index], tots[index])
    # print thisdata
    # totDisA = re.findall(r'\\\"totalDis\\\"\:\\\"(\d+.\d+)\\\"', thisdata)
    # totDis = float(totDisA[len(totDisA) - 1]) / 1000
    # print totDis, tots[index]
    # speed = random.uniform(5, 7)
    # print speed
    # speed_str = "%.2f" % (speed)
    # totDis_str = "%.2f" % (totDis)
    # print speed_str
    # print totDis_str
    alllocjson['allLocJson'] = json.dumps(allloc)
    fivepointjson['fivePointJson'] = json.dumps(fivepoint, ensure_ascii=False)
    # Record body; these fields (and only these) are signed below.
    postjson = {
        "allLocJson": json.dumps(alllocjson),
        "sportType": 1,
        "totalTime": totaltime,
        "totalDis": int(currentdis),
        # Pace in ms per km, derived from total distance/time.
        "speed": int(1000/(currentdis/totaltime)/60*1000),
        "startTime": newflag,
        "stopTime": currenttime,
        "fivePointJson": json.dumps(fivepointjson, ensure_ascii=False),
        "complete": True,
        "selDistance": 1,
        "unCompleteReason": 0,
        "getPrize": False,
        "status": 0,
        "uid": userInfo['uid'],
        "avgStepFreq": int(currentdis/1.2/totaltime*60),
        "totalSteps": int(currentdis/1.2),
        "selectedUnid": userInfo['unid'],
        "uuid": str(uuid.uuid1())
    }
    signature = digestDict(postjson)
    postjson['signature'] = signature
    # These fields are added after signing, i.e. excluded from the digest.
    postjson['isUpload'] = False
    postjson['more'] = True
    postjson['roomId'] = 0
    postjson['speedPerTenSec'] = allspeed
    postjson['stepsPerTenSec'] = allstep
    # print json.dumps(postjson)
    # print signature
    Session = requests.Session()
    Request = Session.post(url, headers = headers, data=json.dumps(postjson))
    print ('upload response: ' + Request.content)
def logout(userInfo):
    """End the server session identified by userInfo's uid/token."""
    url = 'http://gxapp.iydsj.com/api/v6/user/logout'
    timeStamp = str(int(time.time()*1000))
    # Fields that participate in the token signature header.
    dic = {
        'uid':userInfo['uid'],
        'token':userInfo['token'],
        'timeStamp':timeStamp
    }
    headers = {
        "Host": "gxapp.iydsj.com",
        "Accept": "application/json",
        "osType": "0",
        "Content-Type": "application/json",
        "DeviceId": getDeviceId(),
        "CustomDeviceId": getCustomDeviceId(),
        "User-Agent": "Dalvik/2.1.0 (Linux; U; Android 5.0; SM-N9002 Build/LRX21V)",
        "appVersion":"1.3.10",
        "uid":str(dic['uid']),
        "token":dic['token'],
        "tokenSign":digestDict(dic),
        "timeStamp":dic['timeStamp']
    }
    # print headers
    Session = requests.Session()
    Request = Session.post(url, headers = headers)
    print ('logout response: ' + Request.content)
def digestDict(dic):
    """Compute the request signature the server expects.

    Concatenates key=value pairs in sorted key order ('true'/'false' for
    booleans), appends the shared secret, then MD5-hashes the result.
    Non-ASCII characters are folded to their low byte before hashing,
    apparently to mimic the app's own (lossy) encoding -- confirm against
    the client implementation.  Python 2 only (unicode/chr semantics).
    """
    keys = dic.keys()
    keys.sort()
    digeststr = u''
    for key in keys:
        if not isinstance(dic[key],bool):
            digeststr = digeststr+unicode(key)+u'='+unicode(dic[key])+u'&'
        else:
            # Booleans must serialize as lowercase JSON-style literals.
            if dic[key]:
                digeststr = digeststr+unicode(key)+u'='+u'true'+u'&'
            else:
                digeststr = digeststr+unicode(key)+u'='+u'false'+u'&'
    # Shared application secret appended before hashing.
    digeststr+=u'wh2016_swcampus'
    md5 = hashlib.md5()
    #digeststr = digeststr.encode('utf-8')
    length = len(digeststr)
    count = 0
    while count<length:
        if not ord(digeststr[count])<=0x7F:
            #md5.update(digeststr[count+2])
            # Keep only the low 8 bits of the code point.
            codepoint = ord(digeststr[count])
            lowbyte = codepoint - ((codepoint >>8 ) << 8)
            md5.update(chr(lowbyte))
            count+=1
        else:
            md5.update(digeststr[count])
            count+=1
    return md5.hexdigest()
    # charArray.append
def writeByData():
    """Read credential lines from user.data in the working directory.

    Returns:
        A list of the file's non-blank lines with trailing/leading
        newlines stripped; each line is expected to be "username password".

    Fixes over the previous version: the file was opened twice (the second
    handle was never closed), the builtin name `file` was shadowed, and the
    read loop iterated one handle while calling readline() on the other.
    """
    lines = []
    with open('user.data', 'r') as fh:
        for raw in fh:
            # Skip blank separator lines, exactly as before.
            if raw != '\n':
                lines.append(raw.strip('\n'))
    print(lines)
    return lines
def main():
    """Run one upload/logout cycle for every account listed in user.data.

    Python 2 only (print statements). Each line of user.data must be a
    single "username password" pair separated by one space.
    """
    users = writeByData()
    # index = selectRoute()
    # format(routes[index], 100)
    for u in users:
        username, password = u.split(' ')
        print username, password
        print "Start : %s" % time.ctime()
        userInfo = login(username, password)
        try:
            dataUpload(userInfo)
        finally:
            # Always log the session out, even if the upload raised.
            logout(userInfo)
        # Random pause between accounts so the traffic looks less scripted.
        sleeptime = random.randint(20, 120)
        print "Sleep %d seconds" % sleeptime
        time.sleep(sleeptime)
# Script entry point: process every stored account once.
if __name__== '__main__':
    main()
| mit |
insomnia-lab/calibre | src/calibre/ebooks/tweak.py | 7 | 5025 | #!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import sys, os, shlex, subprocess, shutil
from calibre import prints, as_unicode, walk
from calibre.constants import iswindows, __appname__
from calibre.ptempfile import TemporaryDirectory, TemporaryFile
from calibre.libunzip import extract as zipextract
from calibre.utils.zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED
from calibre.utils.ipc.simple_worker import WorkerError
class Error(ValueError):
    """Raised for user-facing tweak failures (bad format, missing OPF)."""
    pass
def ask_cli_question(msg):
    """Print *msg* and return True iff the user answers with a single 'y'.

    Reads one raw keystroke: msvcrt.getch() on Windows, the tty switched to
    raw mode elsewhere. Any other keystroke (including Ctrl-C on POSIX,
    which is caught and mapped to b'') counts as "no".
    """
    prints(msg, end=' [y/N]: ')
    sys.stdout.flush()
    if iswindows:
        import msvcrt
        ans = msvcrt.getch()
    else:
        import tty, termios
        old_settings = termios.tcgetattr(sys.stdin.fileno())
        try:
            tty.setraw(sys.stdin.fileno())
            try:
                ans = sys.stdin.read(1)
            except KeyboardInterrupt:
                ans = b''
        finally:
            # Always restore the terminal mode, even on interrupt.
            termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, old_settings)
    print()
    return ans == b'y'
def mobi_exploder(path, tdir, question=lambda x:True):
    """Explode a MOBI/AZW-family book at *path* into *tdir*.

    Returns the value of explode() (the OPF path, or None if *question*
    was answered negatively). BadFormat is translated into this module's
    Error so the CLI reports it uniformly.
    """
    from calibre.ebooks.mobi.tweak import explode, BadFormat
    try:
        return explode(path, tdir, question=question)
    except BadFormat as e:
        raise Error(as_unicode(e))
def zip_exploder(path, tdir, question=lambda x:True):
    """Unpack a zip-based book (EPUB/HTMLZ) at *path* into *tdir*.

    Returns the path of the first file whose name ends in .opf; raises
    Error when the archive contains no OPF (i.e. it is not a valid book).
    The *question* callback is unused but kept for interface parity with
    mobi_exploder.
    """
    zipextract(path, tdir)
    opf = next((name for name in walk(tdir)
                if name.lower().endswith('.opf')), None)
    if opf is not None:
        return opf
    raise Error('Invalid book: Could not find .opf')
def zip_rebuilder(tdir, path):
    """Repack the exploded book under *tdir* into the zip file *path*.

    The 'mimetype' entry is written first and stored uncompressed (the
    EPUB spec requires this); OS cruft and iTunes metadata are skipped.
    """
    skip_names = {'.DS_Store', 'mimetype', 'iTunesMetadata.plist'}
    with ZipFile(path, 'w', compression=ZIP_DEFLATED) as archive:
        # Write mimetype
        mimetype_path = os.path.join(tdir, 'mimetype')
        if os.path.exists(mimetype_path):
            archive.write(mimetype_path, 'mimetype', compress_type=ZIP_STORED)
        # Write everything else
        for root, dirs, files in os.walk(tdir):
            for name in files:
                if name in skip_names:
                    continue
                abs_path = os.path.join(root, name)
                # Archive names are always '/'-separated, relative to tdir.
                arcname = os.path.relpath(abs_path, tdir).replace(os.sep, '/')
                archive.write(abs_path, arcname)
def get_tools(fmt):
    """Return the (exploder, rebuilder) callables for format *fmt*.

    *fmt* is matched case-insensitively; unsupported formats yield
    (None, None) so callers can detect them.
    """
    fmt = fmt.lower()
    if fmt in {'mobi', 'azw', 'azw3'}:
        from calibre.ebooks.mobi.tweak import rebuild
        return mobi_exploder, rebuild
    if fmt in {'epub', 'htmlz'}:
        return zip_exploder, zip_rebuilder
    return None, None
def tweak(ebook_file):
    ''' Command line interface to the Tweak Book tool.

    Explodes *ebook_file* into a temporary directory, lets the user edit it
    (interactively, or via $EDITOR when that editor is vim), then rebuilds
    the book in place. Exits the process with status 1 on any failure.
    '''
    fmt = ebook_file.rpartition('.')[-1].lower()
    exploder, rebuilder = get_tools(fmt)
    if exploder is None:
        prints('Cannot tweak %s files. Supported formats are: EPUB, HTMLZ, AZW3, MOBI'
                , file=sys.stderr)
        raise SystemExit(1)
    with TemporaryDirectory('_tweak_'+
            os.path.basename(ebook_file).rpartition('.')[0]) as tdir:
        try:
            opf = exploder(ebook_file, tdir, question=ask_cli_question)
        except WorkerError as e:
            prints('Failed to unpack', ebook_file)
            prints(e.orig_tb)
            raise SystemExit(1)
        except Error as e:
            prints(as_unicode(e), file=sys.stderr)
            raise SystemExit(1)
        if opf is None:
            # The question was answered with No
            return
        ed = os.environ.get('EDITOR', 'dummy')
        cmd = shlex.split(ed)
        # Heuristic: any component of the editor command ending in "vim"
        # triggers the zip-round-trip flow below.
        isvim = bool([x for x in cmd[0].split('/') if x.endswith('vim')])
        proceed = False
        prints('Book extracted to', tdir)
        if not isvim:
            prints('Make your tweaks and once you are done,', __appname__,
                    'will rebuild', ebook_file, 'from', tdir)
            print()
            proceed = ask_cli_question('Rebuild ' + ebook_file + '?')
        else:
            # vim can edit a zip archive in place, so hand it a zipped copy
            # of the exploded tree and unpack the result afterwards.
            base = os.path.basename(ebook_file)
            with TemporaryFile(base+'.zip') as zipf:
                with ZipFile(zipf, 'w') as zf:
                    zf.add_dir(tdir)
                try:
                    subprocess.check_call(cmd + [zipf])
                except:
                    prints(ed, 'failed, aborting...')
                    raise SystemExit(1)
                with ZipFile(zipf, 'r') as zf:
                    shutil.rmtree(tdir)
                    os.mkdir(tdir)
                    zf.extractall(path=tdir)
                proceed = True
        if proceed:
            prints('Rebuilding', ebook_file, 'please wait ...')
            try:
                rebuilder(tdir, ebook_file)
            except WorkerError as e:
                prints('Failed to rebuild', ebook_file)
                prints(e.orig_tb)
                raise SystemExit(1)
            prints(ebook_file, 'successfully tweaked')
| gpl-3.0 |
Bysmyyr/chromium-crosswalk | tools/telemetry/third_party/gsutilz/third_party/boto/tests/integration/route53/test_zone.py | 100 | 7729 | # Copyright (c) 2011 Blue Pines Technologies LLC, Brad Carleton
# www.bluepines.org
# Copyright (c) 2012 42 Lines Inc., Jim Browne
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import time
from tests.compat import unittest
from nose.plugins.attrib import attr
from boto.route53.connection import Route53Connection
from boto.exception import TooManyRecordsException
from boto.vpc import VPCConnection
@attr(route53=True)
class TestRoute53Zone(unittest.TestCase):
    """Live integration tests for boto's Route53 Zone wrapper.

    Creates a throwaway hosted zone named boto-test-<epoch>.com in
    setUpClass and removes it (plus all records the tests added) in
    tearDownClass. Requires AWS credentials and network access.
    """
    @classmethod
    def setUpClass(self):
        # NOTE(review): the first parameter of a classmethod is
        # conventionally named "cls"; here "self" is actually the class.
        route53 = Route53Connection()
        self.base_domain = 'boto-test-%s.com' % str(int(time.time()))
        zone = route53.get_zone(self.base_domain)
        if zone is not None:
            # Leftover zone from a previous aborted run; clean it up first.
            zone.delete()
        self.zone = route53.create_zone(self.base_domain)
    def test_nameservers(self):
        self.zone.get_nameservers()
    def test_a(self):
        # Create, read back, then update an A record; TTLs come back as
        # strings from the API.
        self.zone.add_a(self.base_domain, '102.11.23.1', 80)
        record = self.zone.get_a(self.base_domain)
        self.assertEquals(record.name, u'%s.' % self.base_domain)
        self.assertEquals(record.resource_records, [u'102.11.23.1'])
        self.assertEquals(record.ttl, u'80')
        self.zone.update_a(self.base_domain, '186.143.32.2', '800')
        record = self.zone.get_a(self.base_domain)
        self.assertEquals(record.name, u'%s.' % self.base_domain)
        self.assertEquals(record.resource_records, [u'186.143.32.2'])
        self.assertEquals(record.ttl, u'800')
    def test_cname(self):
        # Round-trip a CNAME; Route53 appends the trailing dot to targets.
        self.zone.add_cname(
            'www.%s' % self.base_domain,
            'webserver.%s' % self.base_domain,
            200
        )
        record = self.zone.get_cname('www.%s' % self.base_domain)
        self.assertEquals(record.name, u'www.%s.' % self.base_domain)
        self.assertEquals(record.resource_records, [
            u'webserver.%s.' % self.base_domain
        ])
        self.assertEquals(record.ttl, u'200')
        self.zone.update_cname(
            'www.%s' % self.base_domain,
            'web.%s' % self.base_domain,
            45
        )
        record = self.zone.get_cname('www.%s' % self.base_domain)
        self.assertEquals(record.name, u'www.%s.' % self.base_domain)
        self.assertEquals(record.resource_records, [
            u'web.%s.' % self.base_domain
        ])
        self.assertEquals(record.ttl, u'45')
    def test_mx(self):
        # MX records carry "<priority> <host>" strings; order is not
        # guaranteed, hence the set comparison.
        self.zone.add_mx(
            self.base_domain,
            [
                '10 mx1.%s' % self.base_domain,
                '20 mx2.%s' % self.base_domain,
            ],
            1000
        )
        record = self.zone.get_mx(self.base_domain)
        self.assertEquals(set(record.resource_records),
                          set([u'10 mx1.%s.' % self.base_domain,
                               u'20 mx2.%s.' % self.base_domain]))
        self.assertEquals(record.ttl, u'1000')
        self.zone.update_mx(
            self.base_domain,
            [
                '10 mail1.%s' % self.base_domain,
                '20 mail2.%s' % self.base_domain,
            ],
            50
        )
        record = self.zone.get_mx(self.base_domain)
        self.assertEquals(set(record.resource_records),
                          set([u'10 mail1.%s.' % self.base_domain,
                               '20 mail2.%s.' % self.base_domain]))
        self.assertEquals(record.ttl, u'50')
    def test_get_records(self):
        self.zone.get_records()
    def test_get_nameservers(self):
        self.zone.get_nameservers()
    def test_get_zones(self):
        route53 = Route53Connection()
        route53.get_zones()
    def test_identifiers_wrrs(self):
        # Weighted round-robin records: identifier is (set id, weight).
        self.zone.add_a('wrr.%s' % self.base_domain, '1.2.3.4',
                        identifier=('foo', '20'))
        self.zone.add_a('wrr.%s' % self.base_domain, '5.6.7.8',
                        identifier=('bar', '10'))
        wrrs = self.zone.find_records(
            'wrr.%s' % self.base_domain,
            'A',
            all=True
        )
        self.assertEquals(len(wrrs), 2)
        self.zone.delete_a('wrr.%s' % self.base_domain, all=True)
    def test_identifiers_lbrs(self):
        # Latency-based records: identifier is (set id, region).
        self.zone.add_a('lbr.%s' % self.base_domain, '4.3.2.1',
                        identifier=('baz', 'us-east-1'))
        self.zone.add_a('lbr.%s' % self.base_domain, '8.7.6.5',
                        identifier=('bam', 'us-west-1'))
        lbrs = self.zone.find_records(
            'lbr.%s' % self.base_domain,
            'A',
            all=True
        )
        self.assertEquals(len(lbrs), 2)
        self.zone.delete_a('lbr.%s' % self.base_domain,
                           identifier=('bam', 'us-west-1'))
        self.zone.delete_a('lbr.%s' % self.base_domain,
                           identifier=('baz', 'us-east-1'))
    def test_toomany_exception(self):
        # get_a on a multi-record set must raise rather than pick one.
        self.zone.add_a('exception.%s' % self.base_domain, '4.3.2.1',
                        identifier=('baz', 'us-east-1'))
        self.zone.add_a('exception.%s' % self.base_domain, '8.7.6.5',
                        identifier=('bam', 'us-west-1'))
        self.assertRaises(TooManyRecordsException,
                          lambda: self.zone.get_a('exception.%s' %
                                                  self.base_domain))
        self.zone.delete_a('exception.%s' % self.base_domain, all=True)
    @classmethod
    def tearDownClass(self):
        # A zone must be emptied of non-default records before deletion.
        self.zone.delete_a(self.base_domain)
        self.zone.delete_cname('www.%s' % self.base_domain)
        self.zone.delete_mx(self.base_domain)
        self.zone.delete()
@attr(route53=True)
class TestRoute53PrivateZone(unittest.TestCase):
    """Live integration test for creating a VPC-associated private zone.

    Creates a dedicated VPC in setUpClass and deletes both the zone and
    the VPC in tearDownClass. Requires AWS credentials and network access.
    """
    @classmethod
    def setUpClass(self):
        # NOTE(review): "self" here is actually the class (classmethod).
        time_str = str(int(time.time()))
        self.route53 = Route53Connection()
        self.base_domain = 'boto-private-zone-test-%s.com' % time_str
        self.vpc = VPCConnection()
        self.test_vpc = self.vpc.create_vpc(cidr_block='10.11.0.0/16')
        # tag the vpc to make it easily identifiable if things go spang
        self.test_vpc.add_tag("Name", self.base_domain)
        self.zone = self.route53.get_zone(self.base_domain)
        if self.zone is not None:
            self.zone.delete()
    def test_create_private_zone(self):
        self.zone = self.route53.create_hosted_zone(self.base_domain,
                                                    private_zone=True,
                                                    vpc_id=self.test_vpc.id,
                                                    vpc_region='us-east-1')
    @classmethod
    def tearDownClass(self):
        if self.zone is not None:
            self.zone.delete()
        self.test_vpc.delete()
# Allow running this test module directly, outside the test runner.
if __name__ == '__main__':
    unittest.main(verbosity=3)
| bsd-3-clause |
kstrauser/ansible | lib/ansible/utils/color.py | 167 | 3134 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from ansible import constants as C
# Decide once, at import time, whether output should be colorized.
ANSIBLE_COLOR=True
if C.ANSIBLE_NOCOLOR:
    # Explicitly disabled via configuration.
    ANSIBLE_COLOR=False
elif not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
    # stdout is not a real terminal (piped or redirected output).
    ANSIBLE_COLOR=False
else:
    try:
        import curses
        curses.setupterm()
        if curses.tigetnum('colors') < 0:
            ANSIBLE_COLOR=False
    except ImportError:
        # curses library was not found
        pass
    except curses.error:
        # curses returns an error (e.g. could not find terminal)
        ANSIBLE_COLOR=False
# Forcing color wins over every detection result above.
if C.ANSIBLE_FORCE_COLOR:
    ANSIBLE_COLOR=True
# --- begin "pretty"
#
# pretty - A miniature library that provides a Python print and stdout
# wrapper that makes colored terminal text easier to use (e.g. without
# having to mess around with ANSI escape sequences). This code is public
# domain - there is no license except that you must leave this header.
#
# Copyright (C) 2008 Brian Nez <thedude at bri1 dot com>
#
# http://nezzen.net/2008/06/23/colored-text-in-python-using-ansi-escape-sequences/
# ANSI SGR attribute codes, keyed by the color names accepted by stringc().
codeCodes = {
    'black': '0;30', 'bright gray': '0;37',
    'blue': '0;34', 'white': '1;37',
    'green': '0;32', 'bright blue': '1;34',
    'cyan': '0;36', 'bright green': '1;32',
    'red': '0;31', 'bright cyan': '1;36',
    'purple': '0;35', 'bright red': '1;31',
    'yellow': '0;33', 'bright purple': '1;35',
    'dark gray': '1;30', 'bright yellow': '1;33',
    'normal': '0'
}
def stringc(text, color):
    """Return *text* wrapped in the ANSI escape for *color*.

    When coloring is globally disabled, *text* is returned unchanged.
    *color* must be a key of codeCodes.
    """
    if not ANSIBLE_COLOR:
        return text
    return "\033[" + codeCodes[color] + "m" + text + "\033[0m"
# --- end "pretty"
def colorize(lead, num, color):
    """ Print 'lead' = 'num' in 'color' """
    # Zero counts, disabled color, or no color requested: plain "lead=num".
    if num == 0 or not ANSIBLE_COLOR or color is None:
        return "%s=%-4s" % (lead, str(num))
    return "%s%s%-15s" % (stringc(lead, color), stringc("=", color),
                          stringc(str(num), color))
def hostcolor(host, stats, color=True):
    """Return *host* padded for column output, colorized by its stats.

    red = failures/unreachable, yellow = changed, green = ok. The colored
    pad width (37) is wider than the plain one (26) because it must also
    cover the invisible ANSI escape characters.
    """
    if not (ANSIBLE_COLOR and color):
        return "%-26s" % host
    if stats['failures'] != 0 or stats['unreachable'] != 0:
        return "%-37s" % stringc(host, 'red')
    if stats['changed'] != 0:
        return "%-37s" % stringc(host, 'yellow')
    return "%-37s" % stringc(host, 'green')
| gpl-3.0 |
2ndQuadrant/ansible | lib/ansible/modules/windows/win_hostname.py | 52 | 1319 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
# Copyright: (c) 2018, Ripon Banik (@riponbanik)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
module: win_hostname
version_added: "2.6"
short_description: Manages local Windows computer name
description:
- Manages local Windows computer name.
- A reboot is required for the computer name to take effect.
options:
name:
description:
- The hostname to set for the computer.
type: str
required: true
seealso:
- module: win_dns_client
author:
- Ripon Banik (@riponbanik)
'''
EXAMPLES = r'''
- name: Change the hostname to sample-hostname
win_hostname:
name: sample-hostname
register: res
- name: Reboot
win_reboot:
when: res.reboot_required
'''
RETURN = r'''
old_name:
description: The original hostname that was set before it was changed.
returned: always
type: str
sample: old_hostname
reboot_required:
description: Whether a reboot is required to complete the hostname change.
returned: always
type: bool
sample: true
'''
| gpl-3.0 |
titasakgm/brc-stock | openerp/addons/report_geraldo/lib/geraldo/site/newsite/site-geraldo/django/contrib/auth/tokens.py | 17 | 2371 | from datetime import date
from django.conf import settings
from django.utils.http import int_to_base36, base36_to_int
class PasswordResetTokenGenerator(object):
    """
    Strategy object used to generate and check tokens for the password
    reset mechanism.

    Tokens have the form "<timestamp base36>-<truncated SHA1 hash>"; they
    invalidate themselves once the user's password or last_login changes.
    Python 2 code (uses unicode()).
    """
    def make_token(self, user):
        """
        Returns a token that can be used once to do a password reset
        for the given user.
        """
        return self._make_token_with_timestamp(user, self._num_days(self._today()))
    def check_token(self, user, token):
        """
        Check that a password reset token is correct for a given user.
        """
        # Parse the token
        try:
            ts_b36, hash = token.split("-")
        except ValueError:
            return False
        try:
            ts = base36_to_int(ts_b36)
        except ValueError:
            return False
        # Check that the timestamp/uid has not been tampered with.
        # NOTE(review): "!=" is not a constant-time comparison; a hardened
        # implementation would use a constant-time compare here to avoid
        # leaking information via timing.
        if self._make_token_with_timestamp(user, ts) != token:
            return False
        # Check the timestamp is within limit
        if (self._num_days(self._today()) - ts) > settings.PASSWORD_RESET_TIMEOUT_DAYS:
            return False
        return True
    def _make_token_with_timestamp(self, user, timestamp):
        # timestamp is number of days since 2001-1-1. Converted to
        # base 36, this gives us a 3 digit string until about 2121
        ts_b36 = int_to_base36(timestamp)
        # By hashing on the internal state of the user and using state
        # that is sure to change (the password salt will change as soon as
        # the password is set, at least for current Django auth, and
        # last_login will also change), we produce a hash that will be
        # invalid as soon as it is used.
        # We limit the hash to 20 chars to keep URL short
        from django.utils.hashcompat import sha_constructor
        hash = sha_constructor(settings.SECRET_KEY + unicode(user.id) +
                               user.password + unicode(user.last_login) +
                               unicode(timestamp)).hexdigest()[::2]
        return "%s-%s" % (ts_b36, hash)
    def _num_days(self, dt):
        # Days elapsed since the 2001-01-01 epoch used by the token format.
        return (dt - date(2001,1,1)).days
    def _today(self):
        # Used for mocking in tests
        return date.today()
# Module-level singleton used by django.contrib.auth views.
default_token_generator = PasswordResetTokenGenerator()
40123247/w17 | static/Brython3.1.0-20150301-090019/Lib/ui/dialog.py | 607 | 4994 | from . import widget
from browser import html, document
class Dialog(widget.DraggableWidget):
    """Draggable, resizable jQuery-UI-styled dialog rendered into the page.

    Builds the DOM shell (title bar with a close button, content area and
    eight resize handles) and appends it to the document immediately.
    Subclasses fill the content area via set_body() or self._div_dialog.
    """
    def __init__(self, id=None):
        # Outer shell: absolutely positioned so the widget can be dragged.
        self._div_shell=html.DIV(
            Class="ui-dialog ui-widget ui-widget-content ui-corner-all ui-front ui-draggable ui-resizable",
            style={'position': 'absolute', 'height': 'auto', 'width': '300px',
                   'top': '98px', 'left': '140px', 'display': 'block'})
        widget.DraggableWidget.__init__(self, self._div_shell, 'dialog', id)
        _div_titlebar=html.DIV(Id="titlebar",
            Class="ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix")
        self._div_shell <= _div_titlebar
        self._div_title=html.SPAN(Id="title", Class="ui-dialog-title")
        _div_titlebar <= self._div_title
        self._title_button=html.BUTTON(Title="close",
            Class="ui-button ui-widget ui-state-default ui-corner-all ui-button-icon-only ui-dialog-titlebar-close")
        def dialog_close(e):
            # Clicking the titlebar button removes the whole dialog.
            #del document[self._div_shell.id]
            del document[self._div_shell.id]
        self._title_button.bind('click', dialog_close)
        _span=html.SPAN(Class="ui-button-icon-primary ui-icon ui-icon-closethick")
        self._title_button <= _span
        _span=html.SPAN('close', Class="ui-button-text")
        self._title_button <= _span
        _div_titlebar <= self._title_button
        # Content area that subclasses populate.
        self._div_dialog=html.DIV(Class="ui-dialog-content ui-widget-content",
            style={'width': 'auto', 'min-height': '105px',
                   'max-height': 'none', 'height': 'auto'})
        self._div_shell <= self._div_dialog
        # One invisible handle per edge/corner; 'se' also gets the grip icon.
        for _i in ['n', 'e', 's', 'w', 'se', 'sw', 'ne', 'nw']:
            if _i == 'se':
                _class="ui-resizable-handle ui-resizable-%s ui-icon ui-icon-gripsmall-diagonal-%s" % (_i, _i)
            else:
                _class="ui-resizable-handle ui-resizable-%s" % _i
            self._div_shell <= html.DIV(Class=_class, style={'z-index': '90'})
        document <= self._div_shell
    def set_title(self, title):
        # Text shown in the title bar.
        self._div_title.set_text(title)
    def set_body(self, body):
        # Raw HTML for the content area.
        self._div_dialog.set_html(body)
class EntryDialog(Dialog):
    """Dialog prompting for a single line of text.

    *action* is invoked with the entered string when Ok is clicked; Cancel
    simply removes the dialog from the document.
    """

    def __init__(self, title, prompt, action, _id=None):
        Dialog.__init__(self, _id)
        self.set_title(title)
        self.action = action
        label = html.DIV(prompt, Class="ui-widget",
                         style=dict(float="left", paddingRight="10px"))
        self.entry = html.INPUT()
        content = html.DIV(label + self.entry, style={'padding': '15px'})
        ok_button = html.BUTTON("Ok")
        ok_button.bind('click', self.ok)
        cancel_button = html.BUTTON("Cancel")
        cancel_button.bind('click', self.cancel)
        content += html.DIV(ok_button + cancel_button, style={'padding': '15px'})
        self._div_dialog <= content

    def ok(self, ev):
        # Look the INPUT element up through the shell so the live value is read.
        self.result = self._div_shell.get(selector='INPUT')[0].value
        self.action(self.result)
        document.remove(self._div_shell)

    def cancel(self, ev):
        document.remove(self._div_shell)
class SelectDialog(Dialog):
    """Dialog offering a drop-down choice among *options*.

    *action* is invoked with the selected option (the original object from
    *options*, not its string form) when Ok is clicked.
    """
    def __init__(self, title, prompt, options, action, _id=None):
        Dialog.__init__(self, _id)
        self.set_title(title)
        self.options = options
        self.action = action
        d_prompt = html.DIV(prompt, Class="ui-widget",
            style=dict(float="left",paddingRight="10px"))
        self.select = html.SELECT()
        for option in options:
            self.select <= html.OPTION(option)
        body = html.DIV(d_prompt+self.select,
                        style={'padding':'15px'})
        b_ok = html.BUTTON("Ok")
        b_ok.bind('click', self.ok)
        b_cancel = html.BUTTON("Cancel")
        b_cancel.bind('click', self.cancel)
        body += html.DIV(b_ok+b_cancel, style={'padding':'15px'})
        self._div_dialog <= body
    def ok(self, ev):
        # selectedIndex maps straight back into self.options.
        ix = self._div_shell.get(selector='SELECT')[0].selectedIndex
        document.remove(self._div_shell)
        self.action(self.options[ix])
    def cancel(self, ev):
        document.remove(self._div_shell)
class YesNoDialog(Dialog):
    """Confirmation dialog with Yes/No buttons.

    *action_if_yes* is called with this dialog after Yes is clicked;
    *action_if_no* (which may be None) after No. The dialog is removed
    from the document before the callback runs, in both cases.
    """

    def __init__(self, title, prompt, action_if_yes, action_if_no, _id=None):
        Dialog.__init__(self, _id)
        self.set_title(title)
        self.action_if_yes = action_if_yes
        self.action_if_no = action_if_no
        label = html.DIV(prompt, Class="ui-widget",
                         style=dict(float="left", paddingRight="10px"))
        content = html.DIV(label, style={'padding': '15px'})
        yes_button = html.BUTTON("Yes")
        yes_button.bind('click', self.yes)
        no_button = html.BUTTON("No")
        no_button.bind('click', self.no)
        content += html.DIV(yes_button + no_button, style={'padding': '15px'})
        self._div_dialog <= content

    def yes(self, ev):
        document.remove(self._div_shell)
        self.action_if_yes(self)

    def no(self, ev):
        document.remove(self._div_shell)
        if self.action_if_no is not None:
            self.action_if_no(self)
collinjackson/mojo | tools/valgrind/browser_wrapper_win.py | 76 | 1632 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import os
import re
import sys
import subprocess
# TODO(timurrrr): we may use it on POSIX too to avoid code duplication once we
# support layout_tests, remove Dr. Memory specific code and verify it works
# on a "clean" Mac.
# Pull the test case name out of a --gtest_filter argument, if present.
# Exactly one such argument is expected (hence the assert).
testcase_name = None
for arg in sys.argv:
  m = re.match("\-\-gtest_filter=(.*)", arg)
  if m:
    assert testcase_name is None
    testcase_name = m.groups()[0]
# arg #0 is the path to this python script
cmd_to_run = sys.argv[1:]
# TODO(timurrrr): this is Dr. Memory-specific
# Usually, we pass "-logdir" "foo\bar\spam path" args to Dr. Memory.
# To group reports per UI test, we want to put the reports for each test into a
# separate directory. This code can be simplified when we have
# https://github.com/DynamoRIO/drmemory/issues/684 fixed.
logdir_idx = cmd_to_run.index("-logdir")
old_logdir = cmd_to_run[logdir_idx + 1]
wrapper_pid = str(os.getpid())
# On Windows, there is a chance of PID collision. We avoid it by appending the
# number of entries in the logdir at the end of wrapper_pid.
# This number is monotonic and we can't have two simultaneously running wrappers
# with the same PID.
wrapper_pid += "_%d" % len(glob.glob(old_logdir + "\\*"))
cmd_to_run[logdir_idx + 1] += "\\testcase.%s.logs" % wrapper_pid
os.makedirs(cmd_to_run[logdir_idx + 1])
# Record which test case this log directory belongs to, so the report
# post-processing can associate reports with test names.
if testcase_name:
  f = open(old_logdir + "\\testcase.%s.name" % wrapper_pid, "w")
  print >>f, testcase_name
  f.close()
# Run the wrapped command and propagate its exit status (Python 2 script).
exit(subprocess.call(cmd_to_run))
| bsd-3-clause |
medelin/bearcoin | contrib/testgen/base58.py | 4 | 2833 | '''
Nautiluscoin base58 encoding and decoding.
Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
# for compatibility with following code...
# Minimal stand-in for the old PyCrypto-style SHA256 interface:
# SHA256.new(data) simply delegates to hashlib.sha256.
class SHA256:
    new = hashlib.sha256
if str != bytes:
    # Python 3.x
    def ord(c):
        # Iterating bytes already yields ints, so ord() becomes a no-op.
        return c
    def chr(n):
        # Produce a single-byte bytes object, like Python 2's chr-on-str.
        return bytes( (n,) )
# Bitcoin-style base58 alphabet (no 0, O, I, l) and its size.
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars

def b58encode(v):
    """ encode v, which is a string of bytes, to base58.
    """
    # Interpret v as a big-endian integer.
    acc = 0
    for pos, ch in enumerate(v[::-1]):
        acc += (256 ** pos) * ord(ch)
    # Peel off base-58 digits, least significant first.
    digits = []
    while acc >= __b58base:
        acc, rem = divmod(acc, __b58base)
        digits.append(__b58chars[rem])
    digits.append(__b58chars[acc])
    digits.reverse()
    # Leading zero bytes are compressed into leading '1' characters.
    pad = 0
    for ch in v:
        if ch == '\0':
            pad += 1
        else:
            break
    return (__b58chars[0] * pad) + ''.join(digits)
def b58decode(v, length = None):
    """ decode v into a string of len bytes
    """
    # Fold the base-58 digits into one integer. Note that .find() returns
    # -1 for characters outside the alphabet, silently corrupting the
    # value -- preserved from the original implementation.
    acc = 0
    for pos, ch in enumerate(v[::-1]):
        acc += __b58chars.find(ch) * (__b58base ** pos)
    # Emit bytes most-significant first. Relies on the module-level chr()
    # shim returning bytes under Python 3.
    decoded = bytes()
    while acc >= 256:
        acc, rem = divmod(acc, 256)
        decoded = chr(rem) + decoded
    decoded = chr(acc) + decoded
    # Each leading '1' stands for one leading zero byte.
    pad = 0
    for ch in v:
        if ch == __b58chars[0]:
            pad += 1
        else:
            break
    decoded = chr(0) * pad + decoded
    if length is not None and len(decoded) != length:
        return None
    return decoded
def checksum(v):
    """Return 32-bit checksum based on SHA256.

    This is the Base58Check checksum: the first 4 bytes of a double
    SHA-256 of *v*. Uses hashlib directly (already imported at module
    top) instead of going through the SHA256 compatibility shim class.
    """
    return hashlib.sha256(hashlib.sha256(v).digest()).digest()[0:4]
def b58encode_chk(v):
    """b58encode a string, with 32-bit checksum"""
    # Base58Check: append the 4-byte double-SHA256 checksum, then encode.
    return b58encode(v + checksum(v))
def b58decode_chk(v):
    """decode a base58 string, check and remove checksum

    Returns the payload (without the trailing 4 checksum bytes), or None
    when decoding fails or the checksum does not match. Fixes the previous
    version's dead `h3` variable and duplicated checksum computation.
    """
    result = b58decode(v)
    if result is None:
        return None
    payload = result[:-4]
    # Valid only when the trailing 4 bytes equal the double-SHA256 checksum.
    if result[-4:] == checksum(payload):
        return payload
    return None
def get_bcaddress_version(strAddress):
    """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """
    addr = b58decode_chk(strAddress)
    # A valid payload is 21 bytes: 1 version byte + 20-byte hash.
    if addr is None or len(addr)!=21: return None
    version = addr[0]
    # Under Python 3 this relies on the module-level ord() shim, since
    # indexing bytes already yields an int.
    return ord(version)
if __name__ == '__main__':
    # Test case (from http://gitorious.org/nautiluscoin/python-base58.git)
    # Compare with == rather than "is": identity comparison against an int
    # literal is implementation-dependent (SyntaxWarning on Python 3.8+).
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
    _ohai = 'o hai'.encode('ascii')
    _tmp = b58encode(_ohai)
    assert _tmp == 'DYB3oMS'
    assert b58decode(_tmp, 5) == _ohai
    print("Tests passed")
wiltonlazary/arangodb | 3rdParty/V8/V8-5.0.71.39/tools/clang/scripts/run_tool.py | 10 | 11655 | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper script to help run clang tools across Chromium code.
How to use this tool:
If you want to run the tool across all Chromium code:
run_tool.py <tool> <path/to/compiledb>
If you want to include all files mentioned in the compilation database:
run_tool.py <tool> <path/to/compiledb> --all
If you only want to run the tool across just chrome/browser and content/browser:
run_tool.py <tool> <path/to/compiledb> chrome/browser content/browser
Please see https://code.google.com/p/chromium/wiki/ClangToolRefactoring for more
information, which documents the entire automated refactoring flow in Chromium.
Why use this tool:
The clang tool implementation doesn't take advantage of multiple cores, and if
it fails mysteriously in the middle, all the generated replacements will be
lost.
Unfortunately, if the work is simply sharded across multiple cores by running
multiple RefactoringTools, problems arise when they attempt to rewrite a file at
the same time. To work around that, clang tools that are run using this tool
should output edits to stdout in the following format:
==== BEGIN EDITS ====
r:<file path>:<offset>:<length>:<replacement text>
r:<file path>:<offset>:<length>:<replacement text>
...etc...
==== END EDITS ====
Any generated edits are applied once the clang tool has finished running
across Chromium, regardless of whether some instances failed or not.
"""
import argparse
import collections
import functools
import json
import multiprocessing
import os.path
import subprocess
import sys
# A single text replacement parsed from tool output: edit_type is the
# one-letter kind emitted by the tool (e.g. 'r'), offset/length locate the
# span in the file, replacement is the new text.
Edit = collections.namedtuple('Edit',
                              ('edit_type', 'offset', 'length', 'replacement'))
def _GenerateCompileDatabase(path):
  """Generates a compile database.

  Note: requires ninja. Writes compile_commands.json into *path*.

  Args:
    path: The build directory to generate a compile database for.
  """
  # TODO(dcheng): Incorporate Windows-specific compile DB munging from
  # https://codereview.chromium.org/718873004
  print 'Generating compile database in %s...' % path
  args = ['ninja', '-C', path, '-t', 'compdb', 'cc', 'cxx', 'objc', 'objcxx']
  output = subprocess.check_output(args)
  # NOTE(review): file() is the Python 2 builtin; open() is the portable
  # spelling (this script is Python 2 throughout).
  with file(os.path.join(path, 'compile_commands.json'), 'w') as f:
    f.write(output)
def _GetFilesFromGit(paths=None):
  """Gets the list of files in the git repository.

  Args:
    paths: Prefix filter for the returned paths. May contain multiple entries.

  Returns:
    Absolute, symlink-resolved paths of all files git ls-files reports.
  """
  # git is a batch file on Windows, a plain binary elsewhere.
  git_binary = 'git.bat' if sys.platform == 'win32' else 'git'
  args = [git_binary, 'ls-files']
  if paths:
    args.extend(paths)
  command = subprocess.Popen(args, stdout=subprocess.PIPE)
  output, _ = command.communicate()
  return [os.path.realpath(p) for p in output.splitlines()]
def _GetFilesFromCompileDB(build_directory):
""" Gets the list of files mentioned in the compilation database.
Args:
build_directory: Directory that contains the compile database.
"""
compiledb_path = os.path.join(build_directory, 'compile_commands.json')
with open(compiledb_path, 'rb') as compiledb_file:
json_commands = json.load(compiledb_file)
return [os.path.join(entry['directory'], entry['file'])
for entry in json_commands]
def _ExtractEditsFromStdout(build_directory, stdout):
  """Extracts generated list of edits from the tool's stdout.

  The expected format is documented at the top of this file.
  NOTE(review): fields are split on ':::' here, while the header comment
  shows single-':' separators -- presumably the header is out of date;
  confirm against the clang tool's actual output format.

  Args:
    build_directory: Directory that contains the compile database. Used to
      normalize the filenames.
    stdout: The stdout from running the clang tool.

  Returns:
    A dictionary mapping filenames to the associated edits.
  """
  lines = stdout.splitlines()
  start_index = lines.index('==== BEGIN EDITS ====')
  end_index = lines.index('==== END EDITS ====')
  edits = collections.defaultdict(list)
  for line in lines[start_index + 1:end_index]:
    try:
      edit_type, path, offset, length, replacement = line.split(':::', 4)
      # NUL is the tool's escape for newlines inside replacement text.
      replacement = replacement.replace('\0', '\n')
      # Normalize the file path emitted by the clang tool.
      path = os.path.realpath(os.path.join(build_directory, path))
      edits[path].append(Edit(edit_type, int(offset), int(length), replacement))
    except ValueError:
      print 'Unable to parse edit: %s' % line
  return edits
def _ExecuteTool(toolname, build_directory, filename):
  """Executes the tool.

  This is defined outside the class so it can be pickled for the
  multiprocessing module.

  Args:
    toolname: Path to the tool to execute.
    build_directory: Directory that contains the compile database.
    filename: The file to run the tool over.

  Returns:
    A dictionary that must contain the key "status" and a boolean value
    associated with it.

    If status is True, then the generated edits are stored with the key
    "edits" in the dictionary. Otherwise, the filename and the output from
    stderr are associated with the keys "filename" and "stderr"
    respectively.
  """
  proc = subprocess.Popen(
      (toolname, '-p', build_directory, filename),
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE)
  stdout, stderr = proc.communicate()
  if proc.returncode == 0:
    return {'status': True,
            'edits': _ExtractEditsFromStdout(build_directory, stdout)}
  return {'status': False, 'filename': filename, 'stderr': stderr}
class _CompilerDispatcher(object):
  """Multiprocessing controller for running clang tools in parallel.

  Fans _ExecuteTool out over a process pool, accumulates the edits from
  successful runs, and prints a progress line to stdout as results arrive.
  Python 2 code (uses dict.iteritems()).
  """
  def __init__(self, toolname, build_directory, filenames):
    """Initializer method.

    Args:
      toolname: Path to the tool to execute.
      build_directory: Directory that contains the compile database.
      filenames: The files to run the tool over.
    """
    self.__toolname = toolname
    self.__build_directory = build_directory
    self.__filenames = filenames
    self.__success_count = 0
    self.__failed_count = 0
    self.__edit_count = 0
    # filename -> list of Edit tuples, merged across worker results.
    self.__edits = collections.defaultdict(list)
  @property
  def edits(self):
    return self.__edits
  @property
  def failed_count(self):
    return self.__failed_count
  def Run(self):
    """Does the grunt work."""
    pool = multiprocessing.Pool()
    # imap_unordered: process results as soon as any worker finishes.
    result_iterator = pool.imap_unordered(
        functools.partial(_ExecuteTool, self.__toolname,
                          self.__build_directory), self.__filenames)
    for result in result_iterator:
      self.__ProcessResult(result)
    sys.stdout.write('\n')
    sys.stdout.flush()
  def __ProcessResult(self, result):
    """Handles result processing.

    Args:
      result: The result dictionary returned by _ExecuteTool.
    """
    if result['status']:
      self.__success_count += 1
      for k, v in result['edits'].iteritems():
        self.__edits[k].extend(v)
        self.__edit_count += len(v)
    else:
      self.__failed_count += 1
      sys.stdout.write('\nFailed to process %s\n' % result['filename'])
      sys.stdout.write(result['stderr'])
      sys.stdout.write('\n')
    # One-line progress indicator, rewritten in place via '\r'.
    percentage = (float(self.__success_count + self.__failed_count) /
                  len(self.__filenames)) * 100
    sys.stdout.write('Succeeded: %d, Failed: %d, Edits: %d [%.2f%%]\r' %
                     (self.__success_count, self.__failed_count,
                      self.__edit_count, percentage))
    sys.stdout.flush()
def _ApplyEdits(edits):
"""Apply the generated edits.
Args:
edits: A dict mapping filenames to Edit instances that apply to that file.
"""
edit_count = 0
for k, v in edits.iteritems():
# Sort the edits and iterate through them in reverse order. Sorting allows
# duplicate edits to be quickly skipped, while reversing means that
# subsequent edits don't need to have their offsets updated with each edit
# applied.
v.sort()
last_edit = None
with open(k, 'rb+') as f:
contents = bytearray(f.read())
for edit in reversed(v):
if edit == last_edit:
continue
last_edit = edit
contents[edit.offset:edit.offset + edit.length] = edit.replacement
if not edit.replacement:
_ExtendDeletionIfElementIsInList(contents, edit.offset)
edit_count += 1
f.seek(0)
f.truncate()
f.write(contents)
print 'Applied %d edits to %d files' % (edit_count, len(edits))
# Byte values of the ASCII whitespace characters that may separate list
# elements; consulted by _ExtendDeletionIfElementIsInList when deciding how
# far to extend a deletion.
_WHITESPACE_BYTES = frozenset((ord('\t'), ord('\n'), ord('\r'), ord(' ')))
def _ExtendDeletionIfElementIsInList(contents, offset):
  """Extends the range of a deletion if the deleted element was part of a list.

  This rewriter helper makes it easy for refactoring tools to remove elements
  from a list. Even if a matcher callback knows that it is removing an element
  from a list, it may not have enough information to accurately remove the list
  element; for example, another matcher callback may end up removing an adjacent
  list element, or all the list elements may end up being removed.

  With this helper, refactoring tools can simply remove the list element and
  not worry about having to include the comma in the replacement.

  Args:
    contents: A bytearray with the deletion already applied.
    offset: The offset in the bytearray where the deleted range used to be.
  """
  char_before = None
  char_after = None
  left_trim_count = 0
  right_trim_count = 0

  # Scan left over whitespace for the first significant byte; remember how
  # many bytes (whitespace plus that byte) the scan covered.
  i = offset - 1
  while i >= 0 and contents[i] in _WHITESPACE_BYTES:
    i -= 1
  if i >= 0:
    left_trim_count = offset - i
    if contents[i] in (ord(','), ord(':'), ord('('), ord('{')):
      char_before = chr(contents[i])

  # Likewise scan right over whitespace, looking for a trailing comma.
  j = offset
  end = len(contents)
  while j < end and contents[j] in _WHITESPACE_BYTES:
    j += 1
  if j < end:
    right_trim_count = j - offset + 1
    if contents[j] == ord(','):
      char_after = chr(contents[j])

  if char_before:
    if char_after:
      # Element had a following comma: delete it (and the whitespace before
      # it) so the neighbours join up cleanly.
      del contents[offset:offset + right_trim_count]
    elif char_before in (',', ':'):
      # Last element of the list: delete the separator that preceded it.
      del contents[offset - left_trim_count:offset]
def main():
  """Command-line entry point: run a clang tool and apply its edits.

  Returns:
    The negated number of files that failed, so the value doubles as a
    process exit status (0 on full success, nonzero otherwise).
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('tool', help='clang tool to run')
  parser.add_argument('--all', action='store_true')
  parser.add_argument(
      '--generate-compdb',
      action='store_true',
      help='regenerate the compile database before running the tool')
  parser.add_argument(
      'compile_database',
      help='path to the directory that contains the compile database')
  parser.add_argument(
      'path_filter',
      nargs='*',
      help='optional paths to filter what files the tool is run on')
  args = parser.parse_args()

  if args.generate_compdb:
    _GenerateCompileDatabase(args.compile_database)

  if args.all:
    # --all: run over every file listed in the compile database.
    filenames = set(_GetFilesFromCompileDB(args.compile_database))
    source_filenames = filenames
  else:
    # Default: run over git-tracked files matching the optional path filter.
    filenames = set(_GetFilesFromGit(args.path_filter))
    # Filter out files that aren't C/C++/Obj-C/Obj-C++.
    extensions = frozenset(('.c', '.cc', '.cpp', '.m', '.mm'))
    source_filenames = [f
                        for f in filenames
                        if os.path.splitext(f)[1] in extensions]

  dispatcher = _CompilerDispatcher(args.tool, args.compile_database,
                                   source_filenames)
  dispatcher.Run()

  # Filter out edits to files that aren't in the git repository, since it's not
  # useful to modify files that aren't under source control--typically, these
  # are generated files or files in a git submodule that's not part of Chromium.
  _ApplyEdits({k: v
               for k, v in dispatcher.edits.iteritems()
               if os.path.realpath(k) in filenames})
  return -dispatcher.failed_count
if __name__ == '__main__':
  # main() returns the negated failure count, which becomes the exit status.
  sys.exit(main())
| apache-2.0 |
leeseuljeong/leeseulstack_neutron | neutron/tests/unit/test_api_v2_resource.py | 2 | 14852 | # Copyright (c) 2012 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo import i18n
from webob import exc
import webtest
from neutron.api.v2 import resource as wsgi_resource
from neutron.common import exceptions as n_exc
from neutron import context
from neutron.tests import base
from neutron import wsgi
class RequestTestCase(base.BaseTestCase):
    """Tests for wsgi_resource.Request.

    Covers content-type negotiation (headers, query extensions, defaults),
    the neutron context attached to a request, and Accept-Language matching.
    """

    def setUp(self):
        super(RequestTestCase, self).setUp()
        self.req = wsgi_resource.Request({'foo': 'bar'})

    def test_content_type_missing(self):
        request = wsgi.Request.blank('/tests/123', method='POST')
        request.body = "<body />"
        # No Content-Type header at all: get_content_type() yields None.
        self.assertIsNone(request.get_content_type())

    def test_content_type_with_charset(self):
        request = wsgi.Request.blank('/tests/123')
        request.headers["Content-Type"] = "application/json; charset=UTF-8"
        result = request.get_content_type()
        # The charset parameter must be stripped from the returned type.
        self.assertEqual(result, "application/json")

    def test_content_type_from_accept(self):
        content_type = 'application/json'
        request = wsgi.Request.blank('/tests/123')
        request.headers["Accept"] = content_type
        result = request.best_match_content_type()
        self.assertEqual(result, content_type)

    def test_content_type_from_accept_best(self):
        request = wsgi.Request.blank('/tests/123')
        request.headers["Accept"] = "application/json"
        result = request.best_match_content_type()
        self.assertEqual(result, "application/json")

        # Even with a higher q-value on XML, JSON is the supported match.
        request = wsgi.Request.blank('/tests/123')
        request.headers["Accept"] = ("application/json; q=0.3, "
                                     "application/xml; q=0.9")
        result = request.best_match_content_type()
        self.assertEqual(result, "application/json")

    def test_content_type_from_query_extension(self):
        request = wsgi.Request.blank('/tests/123.json')
        result = request.best_match_content_type()
        self.assertEqual(result, "application/json")

        # Unknown extensions fall back to the JSON default.
        request = wsgi.Request.blank('/tests/123.invalid')
        result = request.best_match_content_type()
        self.assertEqual(result, "application/json")

    def test_content_type_accept_and_query_extension(self):
        # The query extension wins over the Accept header.
        request = wsgi.Request.blank('/tests/123.json')
        request.headers["Accept"] = "application/xml"
        result = request.best_match_content_type()
        self.assertEqual(result, "application/json")

    def test_content_type_accept_default(self):
        request = wsgi.Request.blank('/tests/123.unsupported')
        request.headers["Accept"] = "application/unsupported1"
        result = request.best_match_content_type()
        self.assertEqual(result, "application/json")

    def test_context_with_neutron_context(self):
        ctxt = context.Context('fake_user', 'fake_tenant')
        self.req.environ['neutron.context'] = ctxt
        self.assertEqual(self.req.context, ctxt)

    def test_context_without_neutron_context(self):
        # Without an explicit context in the environ, an admin one is used.
        self.assertTrue(self.req.context.is_admin)

    def test_request_context_elevated(self):
        user_context = context.Context(
            'fake_user', 'fake_project', admin=False)
        self.assertFalse(user_context.is_admin)
        admin_context = user_context.elevated()
        # elevated() must return a new admin context, not mutate the original.
        self.assertFalse(user_context.is_admin)
        self.assertTrue(admin_context.is_admin)
        self.assertNotIn('admin', user_context.roles)
        self.assertIn('admin', admin_context.roles)

    def test_best_match_language(self):
        # Test that we are actually invoking language negotiation by webob.
        request = wsgi.Request.blank('/')
        # Patch via a context manager instead of assigning a MagicMock to the
        # module attribute: the previous code never restored
        # i18n.get_available_languages, leaking the mock into every test that
        # ran after this one in the same process.
        with mock.patch.object(i18n, 'get_available_languages',
                               return_value=['known-language', 'es', 'zh']):
            request.headers['Accept-Language'] = 'known-language'
            language = request.best_match_language()
            self.assertEqual(language, 'known-language')

            # If the Accept-Language is an unknown language, missing or
            # empty, the best match locale should be None.
            request.headers['Accept-Language'] = 'unknown-language'
            language = request.best_match_language()
            self.assertIsNone(language)

            request.headers['Accept-Language'] = ''
            language = request.best_match_language()
            self.assertIsNone(language)

            request.headers.pop('Accept-Language')
            language = request.best_match_language()
            self.assertIsNone(language)
class ResourceTestCase(base.BaseTestCase):
    """Tests for wsgi_resource.Resource.

    Exercises how the Resource WSGI wrapper turns controller exceptions
    (mapped and unmapped NeutronExceptions, webob HTTP errors, generic
    exceptions) into HTTP responses: status codes, the NeutronError body
    structure expected by python-neutronclient, message localization, and
    the log level used for each error class.
    """

    @staticmethod
    def _get_deserializer(req_format):
        # req_format is accepted for symmetry with the request helpers; only
        # the JSON deserializer is ever returned here.
        return wsgi.JSONDeserializer()

    def test_unmapped_neutron_error_with_json(self):
        # Non-ASCII message also exercises unicode serialization.
        msg = u'\u7f51\u7edc'

        class TestException(n_exc.NeutronException):
            message = msg
        expected_res = {'body': {
            'NeutronError': {
                'type': 'TestException',
                'message': msg,
                'detail': ''}}}

        controller = mock.MagicMock()
        controller.test.side_effect = TestException()

        resource = webtest.TestApp(wsgi_resource.Resource(controller))

        environ = {'wsgiorg.routing_args': (None, {'action': 'test',
                                                   'format': 'json'})}
        res = resource.get('', extra_environ=environ, expect_errors=True)
        # An exception with no faults mapping surfaces as a 500.
        self.assertEqual(res.status_int, exc.HTTPInternalServerError.code)
        self.assertEqual(wsgi.JSONDeserializer().deserialize(res.body),
                         expected_res)

    @mock.patch('oslo.i18n.translate')
    def test_unmapped_neutron_error_localized(self, mock_translation):
        msg_translation = 'Translated error'
        mock_translation.return_value = msg_translation
        msg = _('Unmapped error')

        class TestException(n_exc.NeutronException):
            message = msg

        controller = mock.MagicMock()
        controller.test.side_effect = TestException()
        resource = webtest.TestApp(wsgi_resource.Resource(controller))

        environ = {'wsgiorg.routing_args': (None, {'action': 'test',
                                                   'format': 'json'})}

        res = resource.get('', extra_environ=environ, expect_errors=True)
        self.assertEqual(res.status_int, exc.HTTPInternalServerError.code)
        # The translated text, not the raw message, must reach the body.
        self.assertIn(msg_translation,
                      str(wsgi.JSONDeserializer().deserialize(res.body)))

    def test_mapped_neutron_error_with_json(self):
        msg = u'\u7f51\u7edc'

        class TestException(n_exc.NeutronException):
            message = msg
        expected_res = {'body': {
            'NeutronError': {
                'type': 'TestException',
                'message': msg,
                'detail': ''}}}

        controller = mock.MagicMock()
        controller.test.side_effect = TestException()

        faults = {TestException: exc.HTTPGatewayTimeout}
        resource = webtest.TestApp(wsgi_resource.Resource(controller,
                                                          faults=faults))

        environ = {'wsgiorg.routing_args': (None, {'action': 'test',
                                                   'format': 'json'})}
        res = resource.get('', extra_environ=environ, expect_errors=True)
        # With a faults mapping, the mapped webob status is returned instead
        # of a generic 500.
        self.assertEqual(res.status_int, exc.HTTPGatewayTimeout.code)
        self.assertEqual(wsgi.JSONDeserializer().deserialize(res.body),
                         expected_res)

    @mock.patch('oslo.i18n.translate')
    def test_mapped_neutron_error_localized(self, mock_translation):
        msg_translation = 'Translated error'
        mock_translation.return_value = msg_translation
        msg = _('Unmapped error')

        class TestException(n_exc.NeutronException):
            message = msg

        controller = mock.MagicMock()
        controller.test.side_effect = TestException()
        faults = {TestException: exc.HTTPGatewayTimeout}
        resource = webtest.TestApp(wsgi_resource.Resource(controller,
                                                          faults=faults))

        environ = {'wsgiorg.routing_args': (None, {'action': 'test',
                                                   'format': 'json'})}

        res = resource.get('', extra_environ=environ, expect_errors=True)
        self.assertEqual(res.status_int, exc.HTTPGatewayTimeout.code)
        self.assertIn(msg_translation,
                      str(wsgi.JSONDeserializer().deserialize(res.body)))

    @staticmethod
    def _make_request_with_side_effect(side_effect, req_format=None):
        """Send a GET through a Resource whose controller raises side_effect.

        Returns the webtest response, with errors expected.
        """
        controller = mock.MagicMock()
        controller.test.side_effect = side_effect

        resource = webtest.TestApp(wsgi_resource.Resource(controller))

        routing_args = {'action': 'test'}
        if req_format:
            routing_args.update({'format': req_format})
        environ = {'wsgiorg.routing_args': (None, routing_args)}
        res = resource.get('', extra_environ=environ, expect_errors=True)
        return res

    def test_http_error(self):
        res = self._make_request_with_side_effect(exc.HTTPGatewayTimeout())

        # verify that the exception structure is the one expected
        # by the python-neutronclient
        self.assertEqual(exc.HTTPGatewayTimeout().explanation,
                         res.json['NeutronError']['message'])
        self.assertEqual('HTTPGatewayTimeout',
                         res.json['NeutronError']['type'])
        self.assertEqual('', res.json['NeutronError']['detail'])
        self.assertEqual(exc.HTTPGatewayTimeout.code, res.status_int)

    def _test_unhandled_error(self, req_format='json'):
        # A bare Exception must be masked by the generic 500 message so no
        # internal detail leaks to the client.
        expected_res = {'body': {'NeutronError':
                                 {'detail': '',
                                  'message': _(
                                      'Request Failed: internal server '
                                      'error while processing your request.'),
                                  'type': 'HTTPInternalServerError'}}}
        res = self._make_request_with_side_effect(side_effect=Exception(),
                                                  req_format=req_format)
        self.assertEqual(exc.HTTPInternalServerError.code,
                         res.status_int)
        self.assertEqual(expected_res,
                         self._get_deserializer(
                             req_format).deserialize(res.body))

    def test_unhandled_error_with_json(self):
        self._test_unhandled_error()

    def _test_not_implemented_error(self, req_format='json'):
        expected_res = {'body': {'NeutronError':
                                 {'detail': '',
                                  'message': _(
                                      'The server has either erred or is '
                                      'incapable of performing the requested '
                                      'operation.'),
                                  'type': 'HTTPNotImplemented'}}}
        res = self._make_request_with_side_effect(exc.HTTPNotImplemented(),
                                                  req_format=req_format)
        self.assertEqual(exc.HTTPNotImplemented.code, res.status_int)
        self.assertEqual(expected_res,
                         self._get_deserializer(
                             req_format).deserialize(res.body))

    def test_not_implemented_error_with_json(self):
        self._test_not_implemented_error()

    def test_status_200(self):
        controller = mock.MagicMock()
        controller.test = lambda request: {'foo': 'bar'}

        resource = webtest.TestApp(wsgi_resource.Resource(controller))

        environ = {'wsgiorg.routing_args': (None, {'action': 'test'})}
        res = resource.get('', extra_environ=environ)
        self.assertEqual(res.status_int, 200)

    def test_status_204(self):
        controller = mock.MagicMock()
        controller.test = lambda request: {'foo': 'bar'}

        resource = webtest.TestApp(wsgi_resource.Resource(controller))

        # 'delete' actions return no content, hence 204.
        environ = {'wsgiorg.routing_args': (None, {'action': 'delete'})}
        res = resource.delete('', extra_environ=environ)
        self.assertEqual(res.status_int, 204)

    def _test_error_log_level(self, map_webob_exc, expect_log_info=False,
                              use_fault_map=True):
        """Check whether an error is logged via LOG.info or LOG.exception."""

        class TestException(n_exc.NeutronException):
            message = 'Test Exception'

        controller = mock.MagicMock()
        controller.test.side_effect = TestException()
        faults = {TestException: map_webob_exc} if use_fault_map else {}
        resource = webtest.TestApp(wsgi_resource.Resource(controller, faults))
        environ = {'wsgiorg.routing_args': (None, {'action': 'test'})}
        with mock.patch.object(wsgi_resource, 'LOG') as log:
            res = resource.get('', extra_environ=environ, expect_errors=True)
            self.assertEqual(res.status_int, map_webob_exc.code)
        # Exactly one of LOG.info / LOG.exception must have been called.
        self.assertEqual(expect_log_info, log.info.called)
        self.assertNotEqual(expect_log_info, log.exception.called)

    def test_4xx_error_logged_info_level(self):
        self._test_error_log_level(exc.HTTPNotFound, expect_log_info=True)

    def test_non_4xx_error_logged_exception_level(self):
        self._test_error_log_level(exc.HTTPServiceUnavailable,
                                   expect_log_info=False)

    def test_unmapped_error_logged_exception_level(self):
        self._test_error_log_level(exc.HTTPInternalServerError,
                                   expect_log_info=False, use_fault_map=False)

    def test_no_route_args(self):
        controller = mock.MagicMock()

        resource = webtest.TestApp(wsgi_resource.Resource(controller))

        # Missing 'wsgiorg.routing_args' entirely must surface as a 500.
        environ = {}
        res = resource.get('', extra_environ=environ, expect_errors=True)
        self.assertEqual(res.status_int, exc.HTTPInternalServerError.code)

    def test_post_with_body(self):
        controller = mock.MagicMock()
        controller.test = lambda request, body: {'foo': 'bar'}

        resource = webtest.TestApp(wsgi_resource.Resource(controller))

        environ = {'wsgiorg.routing_args': (None, {'action': 'test'})}
        res = resource.post('', params='{"key": "val"}',
                            extra_environ=environ)
        self.assertEqual(res.status_int, 200)
| apache-2.0 |
Joel-U/sparkle | sparkle/gui/main_control_form.py | 2 | 53245 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'c:/Users/Joel/Documents/sparkle/sparkle/gui/main_control.ui'
#
# Created: Fri Aug 14 15:04:02 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Standard pyuic4 compatibility shims: depending on the PyQt4 build/API
# version, QString.fromUtf8 and QApplication.UnicodeUTF8 may not exist.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API 2 (or Python 3) builds have no QString; strings pass through.
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Builds without UnicodeUTF8 use the 3-argument translate() overload.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_ControlWindow(object):
def setupUi(self, ControlWindow):
ControlWindow.setObjectName(_fromUtf8("ControlWindow"))
ControlWindow.resize(1223, 744)
self.centralwidget = QtGui.QWidget(ControlWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.label_3 = QtGui.QLabel(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(16)
self.label_3.setFont(font)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_4.addWidget(self.label_3)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.reviewLbl = QtGui.QLabel(self.centralwidget)
self.reviewLbl.setText(_fromUtf8(""))
self.reviewLbl.setObjectName(_fromUtf8("reviewLbl"))
self.horizontalLayout.addWidget(self.reviewLbl)
self.runningLabel = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.runningLabel.sizePolicy().hasHeightForWidth())
self.runningLabel.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(12)
self.runningLabel.setFont(font)
self.runningLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.runningLabel.setObjectName(_fromUtf8("runningLabel"))
self.horizontalLayout.addWidget(self.runningLabel)
self.horizontalLayout_4.addLayout(self.horizontalLayout)
self.verticalLayout_3.addLayout(self.horizontalLayout_4)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.verticalLayout_12 = QtGui.QVBoxLayout()
self.verticalLayout_12.setObjectName(_fromUtf8("verticalLayout_12"))
self.tabGroup = QtGui.QTabWidget(self.centralwidget)
self.tabGroup.setObjectName(_fromUtf8("tabGroup"))
self.tabExplore = QtGui.QWidget()
self.tabExplore.setObjectName(_fromUtf8("tabExplore"))
self.horizontalLayout_7 = QtGui.QHBoxLayout(self.tabExplore)
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.verticalLayout_9 = QtGui.QVBoxLayout()
self.verticalLayout_9.setObjectName(_fromUtf8("verticalLayout_9"))
self.exploreStimEditor = ExploreStimulusEditor(self.tabExplore)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.exploreStimEditor.sizePolicy().hasHeightForWidth())
self.exploreStimEditor.setSizePolicy(sizePolicy)
self.exploreStimEditor.setFrameShape(QtGui.QFrame.StyledPanel)
self.exploreStimEditor.setFrameShadow(QtGui.QFrame.Raised)
self.exploreStimEditor.setObjectName(_fromUtf8("exploreStimEditor"))
self.verticalLayout_9.addWidget(self.exploreStimEditor)
self.gridLayout_7 = QtGui.QGridLayout()
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
self.save_explore_ckbx = QtGui.QCheckBox(self.tabExplore)
self.save_explore_ckbx.setEnabled(False)
self.save_explore_ckbx.setObjectName(_fromUtf8("save_explore_ckbx"))
self.gridLayout_7.addWidget(self.save_explore_ckbx, 1, 0, 1, 2)
self.label_20 = QtGui.QLabel(self.tabExplore)
self.label_20.setObjectName(_fromUtf8("label_20"))
self.gridLayout_7.addWidget(self.label_20, 0, 0, 1, 1)
self.overAttenLbl = QtGui.QLabel(self.tabExplore)
self.overAttenLbl.setObjectName(_fromUtf8("overAttenLbl"))
self.gridLayout_7.addWidget(self.overAttenLbl, 0, 1, 1, 1)
self.label_23 = QtGui.QLabel(self.tabExplore)
self.label_23.setObjectName(_fromUtf8("label_23"))
self.gridLayout_7.addWidget(self.label_23, 0, 2, 1, 1)
self.verticalLayout_9.addLayout(self.gridLayout_7)
self.horizontalLayout_7.addLayout(self.verticalLayout_9)
self.tabGroup.addTab(self.tabExplore, _fromUtf8(""))
self.tabProtocol = QtGui.QWidget()
self.tabProtocol.setObjectName(_fromUtf8("tabProtocol"))
self.verticalLayout_5 = QtGui.QVBoxLayout(self.tabProtocol)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.label_2 = QtGui.QLabel(self.tabProtocol)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.verticalLayout_5.addWidget(self.label_2)
self.protocolView = ProtocolView(self.tabProtocol)
self.protocolView.setObjectName(_fromUtf8("protocolView"))
self.verticalLayout_5.addWidget(self.protocolView)
self.horizontalLayout_9 = QtGui.QHBoxLayout()
self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9"))
self.stimulusChoices = StimulusLabelTable(self.tabProtocol)
self.stimulusChoices.setObjectName(_fromUtf8("stimulusChoices"))
self.horizontalLayout_9.addWidget(self.stimulusChoices)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_9.addItem(spacerItem)
self.pushButton = QtGui.QPushButton(self.tabProtocol)
self.pushButton.setMaximumSize(QtCore.QSize(200, 16777215))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.horizontalLayout_9.addWidget(self.pushButton)
self.verticalLayout_5.addLayout(self.horizontalLayout_9)
self.tabGroup.addTab(self.tabProtocol, _fromUtf8(""))
self.tabCalibrate = QtGui.QWidget()
self.tabCalibrate.setObjectName(_fromUtf8("tabCalibrate"))
self.verticalLayout = QtGui.QVBoxLayout(self.tabCalibrate)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.calibrationWidget = CalibrationWidget(self.tabCalibrate)
self.calibrationWidget.setObjectName(_fromUtf8("calibrationWidget"))
self.verticalLayout.addWidget(self.calibrationWidget)
self.gridLayout_3 = QtGui.QGridLayout()
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.overAttenLbl_2 = QtGui.QLabel(self.tabCalibrate)
self.overAttenLbl_2.setObjectName(_fromUtf8("overAttenLbl_2"))
self.gridLayout_3.addWidget(self.overAttenLbl_2, 0, 1, 1, 1)
self.label_19 = QtGui.QLabel(self.tabCalibrate)
self.label_19.setObjectName(_fromUtf8("label_19"))
self.gridLayout_3.addWidget(self.label_19, 0, 0, 1, 1)
self.verticalLayout.addLayout(self.gridLayout_3)
self.horizontalLayout_10 = QtGui.QHBoxLayout()
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
self.refToneLbl = QtGui.QLabel(self.tabCalibrate)
self.refToneLbl.setObjectName(_fromUtf8("refToneLbl"))
self.horizontalLayout_10.addWidget(self.refToneLbl)
self.label_4 = QtGui.QLabel(self.tabCalibrate)
self.label_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.horizontalLayout_10.addWidget(self.label_4)
self.refDbSpnbx = SmartSpinBox(self.tabCalibrate)
self.refDbSpnbx.setEnabled(False)
self.refDbSpnbx.setAlignment(QtCore.Qt.AlignCenter)
self.refDbSpnbx.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.refDbSpnbx.setMaximum(120.0)
self.refDbSpnbx.setObjectName(_fromUtf8("refDbSpnbx"))
self.horizontalLayout_10.addWidget(self.refDbSpnbx)
self.label_15 = QtGui.QLabel(self.tabCalibrate)
self.label_15.setObjectName(_fromUtf8("label_15"))
self.horizontalLayout_10.addWidget(self.label_15)
self.verticalLayout.addLayout(self.horizontalLayout_10)
self.horizontalLayout_12 = QtGui.QHBoxLayout()
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
self.label_9 = QtGui.QLabel(self.tabCalibrate)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.horizontalLayout_12.addWidget(self.label_9)
self.mphoneSensSpnbx = QtGui.QDoubleSpinBox(self.tabCalibrate)
self.mphoneSensSpnbx.setEnabled(False)
self.mphoneSensSpnbx.setAlignment(QtCore.Qt.AlignCenter)
self.mphoneSensSpnbx.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.mphoneSensSpnbx.setDecimals(4)
self.mphoneSensSpnbx.setObjectName(_fromUtf8("mphoneSensSpnbx"))
self.horizontalLayout_12.addWidget(self.mphoneSensSpnbx)
self.label_18 = QtGui.QLabel(self.tabCalibrate)
self.label_18.setObjectName(_fromUtf8("label_18"))
self.horizontalLayout_12.addWidget(self.label_18)
self.mphoneDBSpnbx = QtGui.QDoubleSpinBox(self.tabCalibrate)
self.mphoneDBSpnbx.setEnabled(False)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mphoneDBSpnbx.sizePolicy().hasHeightForWidth())
self.mphoneDBSpnbx.setSizePolicy(sizePolicy)
self.mphoneDBSpnbx.setAlignment(QtCore.Qt.AlignCenter)
self.mphoneDBSpnbx.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.mphoneDBSpnbx.setDecimals(0)
self.mphoneDBSpnbx.setMaximum(120.0)
self.mphoneDBSpnbx.setObjectName(_fromUtf8("mphoneDBSpnbx"))
self.horizontalLayout_12.addWidget(self.mphoneDBSpnbx)
self.label_22 = QtGui.QLabel(self.tabCalibrate)
self.label_22.setObjectName(_fromUtf8("label_22"))
self.horizontalLayout_12.addWidget(self.label_22)
self.mphoneCalBtn = QtGui.QPushButton(self.tabCalibrate)
self.mphoneCalBtn.setObjectName(_fromUtf8("mphoneCalBtn"))
self.horizontalLayout_12.addWidget(self.mphoneCalBtn)
self.verticalLayout.addLayout(self.horizontalLayout_12)
self.tabGroup.addTab(self.tabCalibrate, _fromUtf8(""))
self.tabReview = QtGui.QWidget()
self.tabReview.setObjectName(_fromUtf8("tabReview"))
self.verticalLayout_13 = QtGui.QVBoxLayout(self.tabReview)
self.verticalLayout_13.setObjectName(_fromUtf8("verticalLayout_13"))
self.reviewer = QDataReviewer(self.tabReview)
self.reviewer.setObjectName(_fromUtf8("reviewer"))
self.verticalLayout_13.addWidget(self.reviewer)
self.tabGroup.addTab(self.tabReview, _fromUtf8(""))
self.verticalLayout_12.addWidget(self.tabGroup)
self.protocolProgressBar = QtGui.QProgressBar(self.centralwidget)
self.protocolProgressBar.setEnabled(True)
self.protocolProgressBar.setProperty("value", 0)
self.protocolProgressBar.setObjectName(_fromUtf8("protocolProgressBar"))
self.verticalLayout_12.addWidget(self.protocolProgressBar)
self.horizontalLayout_5.addLayout(self.verticalLayout_12)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.gridLayout_4 = QtGui.QGridLayout()
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.label_29 = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_29.sizePolicy().hasHeightForWidth())
self.label_29.setSizePolicy(sizePolicy)
self.label_29.setObjectName(_fromUtf8("label_29"))
self.gridLayout_4.addWidget(self.label_29, 0, 0, 1, 1)
self.chanNumLbl = QtGui.QLabel(self.centralwidget)
self.chanNumLbl.setObjectName(_fromUtf8("chanNumLbl"))
self.gridLayout_4.addWidget(self.chanNumLbl, 1, 1, 1, 1)
self.aochanBox = QtGui.QComboBox(self.centralwidget)
self.aochanBox.setObjectName(_fromUtf8("aochanBox"))
self.gridLayout_4.addWidget(self.aochanBox, 0, 1, 1, 2)
self.label_24 = QtGui.QLabel(self.centralwidget)
self.label_24.setObjectName(_fromUtf8("label_24"))
self.gridLayout_4.addWidget(self.label_24, 1, 0, 1, 1)
self.aichanBtn = QtGui.QPushButton(self.centralwidget)
self.aichanBtn.setObjectName(_fromUtf8("aichanBtn"))
self.gridLayout_4.addWidget(self.aichanBtn, 1, 2, 1, 1)
self.verticalLayout_2.addLayout(self.gridLayout_4)
self.gridLayout = QtGui.QGridLayout()
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.label = QtGui.QLabel(self.centralwidget)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.label_10 = QtGui.QLabel(self.centralwidget)
self.label_10.setObjectName(_fromUtf8("label_10"))
self.gridLayout.addWidget(self.label_10, 2, 0, 1, 1)
self.label_43 = QtGui.QLabel(self.centralwidget)
self.label_43.setObjectName(_fromUtf8("label_43"))
self.gridLayout.addWidget(self.label_43, 3, 0, 1, 1)
self.aifsSpnbx = SmartSpinBox(self.centralwidget)
self.aifsSpnbx.setDecimals(3)
self.aifsSpnbx.setMaximum(500000.0)
self.aifsSpnbx.setObjectName(_fromUtf8("aifsSpnbx"))
self.gridLayout.addWidget(self.aifsSpnbx, 0, 1, 1, 1)
self.binszSpnbx = SmartSpinBox(self.centralwidget)
self.binszSpnbx.setDecimals(3)
self.binszSpnbx.setMaximum(3.0)
self.binszSpnbx.setSingleStep(0.001)
self.binszSpnbx.setProperty("value", 0.001)
self.binszSpnbx.setObjectName(_fromUtf8("binszSpnbx"))
self.gridLayout.addWidget(self.binszSpnbx, 3, 1, 1, 1)
self.reprateSpnbx = QtGui.QDoubleSpinBox(self.centralwidget)
self.reprateSpnbx.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.reprateSpnbx.setPrefix(_fromUtf8(""))
self.reprateSpnbx.setMinimum(0.1)
self.reprateSpnbx.setMaximum(10.0)
self.reprateSpnbx.setProperty("value", 0.5)
self.reprateSpnbx.setObjectName(_fromUtf8("reprateSpnbx"))
self.gridLayout.addWidget(self.reprateSpnbx, 4, 1, 1, 1)
self.label_35 = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_35.sizePolicy().hasHeightForWidth())
self.label_35.setSizePolicy(sizePolicy)
self.label_35.setObjectName(_fromUtf8("label_35"))
self.gridLayout.addWidget(self.label_35, 4, 0, 1, 1)
self.windowszSpnbx = SmartSpinBox(self.centralwidget)
self.windowszSpnbx.setDecimals(3)
self.windowszSpnbx.setMaximum(3.0)
self.windowszSpnbx.setSingleStep(0.001)
self.windowszSpnbx.setProperty("value", 0.1)
self.windowszSpnbx.setObjectName(_fromUtf8("windowszSpnbx"))
self.gridLayout.addWidget(self.windowszSpnbx, 2, 1, 1, 1)
self.verticalLayout_2.addLayout(self.gridLayout)
self.gridLayout_5 = QtGui.QGridLayout()
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.averageChbx = QtGui.QCheckBox(self.centralwidget)
self.averageChbx.setObjectName(_fromUtf8("averageChbx"))
self.gridLayout_5.addWidget(self.averageChbx, 3, 0, 1, 1)
self.label_17 = QtGui.QLabel(self.centralwidget)
self.label_17.setObjectName(_fromUtf8("label_17"))
self.gridLayout_5.addWidget(self.label_17, 0, 0, 1, 1)
self.modeCmbx = QtGui.QComboBox(self.centralwidget)
self.modeCmbx.setObjectName(_fromUtf8("modeCmbx"))
self.modeCmbx.addItem(_fromUtf8(""))
self.modeCmbx.addItem(_fromUtf8(""))
self.gridLayout_5.addWidget(self.modeCmbx, 0, 1, 1, 1)
self.trigchanBox = QtGui.QComboBox(self.centralwidget)
self.trigchanBox.setObjectName(_fromUtf8("trigchanBox"))
self.gridLayout_5.addWidget(self.trigchanBox, 6, 1, 1, 1)
self.artifactRejectSpnbx = SmartSpinBox(self.centralwidget)
self.artifactRejectSpnbx.setEnabled(False)
self.artifactRejectSpnbx.setDecimals(3)
self.artifactRejectSpnbx.setMinimum(0.0)
self.artifactRejectSpnbx.setMaximum(100.0)
self.artifactRejectSpnbx.setSingleStep(0.001)
self.artifactRejectSpnbx.setProperty("value", 5.0)
self.artifactRejectSpnbx.setObjectName(_fromUtf8("artifactRejectSpnbx"))
self.gridLayout_5.addWidget(self.artifactRejectSpnbx, 5, 1, 1, 1)
self.artifactRejectChbx = QtGui.QCheckBox(self.centralwidget)
self.artifactRejectChbx.setEnabled(False)
self.artifactRejectChbx.setChecked(False)
self.artifactRejectChbx.setObjectName(_fromUtf8("artifactRejectChbx"))
self.gridLayout_5.addWidget(self.artifactRejectChbx, 5, 0, 1, 1)
self.trigCkbx = QtGui.QCheckBox(self.centralwidget)
self.trigCkbx.setObjectName(_fromUtf8("trigCkbx"))
self.gridLayout_5.addWidget(self.trigCkbx, 6, 0, 1, 1)
self.verticalLayout_2.addLayout(self.gridLayout_5)
self.frame = QtGui.QFrame(self.centralwidget)
self.frame.setFrameShape(QtGui.QFrame.HLine)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.verticalLayout_2.addWidget(self.frame)
self.gridLayout_2 = QtGui.QGridLayout()
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.label_8 = QtGui.QLabel(self.centralwidget)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.gridLayout_2.addWidget(self.label_8, 0, 0, 1, 1)
self.currentCalLbl = QtGui.QLabel(self.centralwidget)
self.currentCalLbl.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.currentCalLbl.setWordWrap(True)
self.currentCalLbl.setObjectName(_fromUtf8("currentCalLbl"))
self.gridLayout_2.addWidget(self.currentCalLbl, 0, 1, 1, 1)
self.dataFileLbl = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dataFileLbl.sizePolicy().hasHeightForWidth())
self.dataFileLbl.setSizePolicy(sizePolicy)
self.dataFileLbl.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.dataFileLbl.setObjectName(_fromUtf8("dataFileLbl"))
self.gridLayout_2.addWidget(self.dataFileLbl, 1, 1, 1, 1)
self.label_14 = QtGui.QLabel(self.centralwidget)
self.label_14.setObjectName(_fromUtf8("label_14"))
self.gridLayout_2.addWidget(self.label_14, 1, 0, 1, 1)
self.verticalLayout_2.addLayout(self.gridLayout_2)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem1)
self.horizontalLayout_8 = QtGui.QHBoxLayout()
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
self.startChartBtn = QtGui.QPushButton(self.centralwidget)
self.startChartBtn.setObjectName(_fromUtf8("startChartBtn"))
self.horizontalLayout_8.addWidget(self.startChartBtn)
self.stopChartBtn = QtGui.QPushButton(self.centralwidget)
self.stopChartBtn.setObjectName(_fromUtf8("stopChartBtn"))
self.horizontalLayout_8.addWidget(self.stopChartBtn)
self.verticalLayout_2.addLayout(self.horizontalLayout_8)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.startBtn = QtGui.QPushButton(self.centralwidget)
self.startBtn.setObjectName(_fromUtf8("startBtn"))
self.horizontalLayout_2.addWidget(self.startBtn)
self.stopBtn = QtGui.QPushButton(self.centralwidget)
self.stopBtn.setObjectName(_fromUtf8("stopBtn"))
self.horizontalLayout_2.addWidget(self.stopBtn)
self.verticalLayout_2.addLayout(self.horizontalLayout_2)
self.horizontalLayout_5.addLayout(self.verticalLayout_2)
self.verticalLayout_3.addLayout(self.horizontalLayout_5)
ControlWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(ControlWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1223, 21))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuOptions = QtGui.QMenu(self.menubar)
self.menuOptions.setObjectName(_fromUtf8("menuOptions"))
self.menuView = QtGui.QMenu(self.menubar)
self.menuView.setObjectName(_fromUtf8("menuView"))
self.menuShow = QtGui.QMenu(self.menuView)
self.menuShow.setObjectName(_fromUtf8("menuShow"))
ControlWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(ControlWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
ControlWindow.setStatusBar(self.statusbar)
self.plotDock = PlotDockWidget(ControlWindow)
self.plotDock.setObjectName(_fromUtf8("plotDock"))
self.dockWidgetContents = QtGui.QWidget()
self.dockWidgetContents.setObjectName(_fromUtf8("dockWidgetContents"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.dockWidgetContents)
self.verticalLayout_4.setMargin(0)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.plotDock.setWidget(self.dockWidgetContents)
ControlWindow.addDockWidget(QtCore.Qt.DockWidgetArea(8), self.plotDock)
self.psthDock = QtGui.QDockWidget(ControlWindow)
self.psthDock.setObjectName(_fromUtf8("psthDock"))
self.dockWidgetContents_2 = QtGui.QWidget()
self.dockWidgetContents_2.setObjectName(_fromUtf8("dockWidgetContents_2"))
self.verticalLayout_10 = QtGui.QVBoxLayout(self.dockWidgetContents_2)
self.verticalLayout_10.setObjectName(_fromUtf8("verticalLayout_10"))
self.PSTHContainer = QtGui.QWidget(self.dockWidgetContents_2)
self.PSTHContainer.setObjectName(_fromUtf8("PSTHContainer"))
self.verticalLayout_6 = QtGui.QVBoxLayout(self.PSTHContainer)
self.verticalLayout_6.setSpacing(0)
self.verticalLayout_6.setMargin(0)
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.psth = PSTHWidget(self.PSTHContainer)
self.psth.setObjectName(_fromUtf8("psth"))
self.verticalLayout_6.addWidget(self.psth)
self.gridLayout_9 = QtGui.QGridLayout()
self.gridLayout_9.setObjectName(_fromUtf8("gridLayout_9"))
self.label_11 = QtGui.QLabel(self.PSTHContainer)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_11.sizePolicy().hasHeightForWidth())
self.label_11.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(12)
self.label_11.setFont(font)
self.label_11.setObjectName(_fromUtf8("label_11"))
self.gridLayout_9.addWidget(self.label_11, 0, 0, 1, 1)
self.label_16 = QtGui.QLabel(self.PSTHContainer)
font = QtGui.QFont()
font.setPointSize(12)
self.label_16.setFont(font)
self.label_16.setObjectName(_fromUtf8("label_16"))
self.gridLayout_9.addWidget(self.label_16, 0, 3, 1, 1)
self.label_5 = QtGui.QLabel(self.PSTHContainer)
font = QtGui.QFont()
font.setPointSize(12)
self.label_5.setFont(font)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.gridLayout_9.addWidget(self.label_5, 1, 0, 1, 1)
self.spikeLatencyLbl = QtGui.QLabel(self.PSTHContainer)
self.spikeLatencyLbl.setObjectName(_fromUtf8("spikeLatencyLbl"))
self.gridLayout_9.addWidget(self.spikeLatencyLbl, 1, 1, 1, 1)
self.label_7 = QtGui.QLabel(self.PSTHContainer)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7.sizePolicy().hasHeightForWidth())
self.label_7.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(12)
self.label_7.setFont(font)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.gridLayout_9.addWidget(self.label_7, 1, 3, 1, 1)
self.spikeRateLbl = QtGui.QLabel(self.PSTHContainer)
self.spikeRateLbl.setObjectName(_fromUtf8("spikeRateLbl"))
self.gridLayout_9.addWidget(self.spikeRateLbl, 1, 4, 1, 1)
self.spikeAvgLbl = QtGui.QLabel(self.PSTHContainer)
self.spikeAvgLbl.setObjectName(_fromUtf8("spikeAvgLbl"))
self.gridLayout_9.addWidget(self.spikeAvgLbl, 0, 4, 1, 1)
self.spikeTotalLbl = QtGui.QLabel(self.PSTHContainer)
self.spikeTotalLbl.setObjectName(_fromUtf8("spikeTotalLbl"))
self.gridLayout_9.addWidget(self.spikeTotalLbl, 0, 1, 1, 1)
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.gridLayout_9.addItem(spacerItem2, 0, 2, 1, 1)
self.label_12 = QtGui.QLabel(self.PSTHContainer)
font = QtGui.QFont()
font.setPointSize(12)
self.label_12.setFont(font)
self.label_12.setObjectName(_fromUtf8("label_12"))
self.gridLayout_9.addWidget(self.label_12, 2, 0, 1, 1)
self.spikeStandardDeviationLbl = QtGui.QLabel(self.PSTHContainer)
self.spikeStandardDeviationLbl.setObjectName(_fromUtf8("spikeStandardDeviationLbl"))
self.gridLayout_9.addWidget(self.spikeStandardDeviationLbl, 2, 1, 1, 1)
self.spikeNanLbl = QtGui.QLabel(self.PSTHContainer)
self.spikeNanLbl.setObjectName(_fromUtf8("spikeNanLbl"))
self.gridLayout_9.addWidget(self.spikeNanLbl, 2, 4, 1, 1)
self.label_25 = QtGui.QLabel(self.PSTHContainer)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_25.sizePolicy().hasHeightForWidth())
self.label_25.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(12)
self.label_25.setFont(font)
self.label_25.setObjectName(_fromUtf8("label_25"))
self.gridLayout_9.addWidget(self.label_25, 2, 3, 1, 1)
self.verticalLayout_6.addLayout(self.gridLayout_9)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.groupBox = QtGui.QGroupBox(self.PSTHContainer)
self.groupBox.setFlat(False)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.horizontalLayout_13 = QtGui.QHBoxLayout(self.groupBox)
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
self.label_6 = QtGui.QLabel(self.groupBox)
self.label_6.setAlignment(QtCore.Qt.AlignCenter)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.horizontalLayout_13.addWidget(self.label_6)
self.psthStartField = SmartSpinBox(self.groupBox)
self.psthStartField.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.psthStartField.setObjectName(_fromUtf8("psthStartField"))
self.horizontalLayout_13.addWidget(self.psthStartField)
self.label_13 = QtGui.QLabel(self.groupBox)
self.label_13.setAlignment(QtCore.Qt.AlignCenter)
self.label_13.setObjectName(_fromUtf8("label_13"))
self.horizontalLayout_13.addWidget(self.label_13)
self.psthStopField = SmartSpinBox(self.groupBox)
self.psthStopField.setEnabled(False)
self.psthStopField.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.psthStopField.setObjectName(_fromUtf8("psthStopField"))
self.horizontalLayout_13.addWidget(self.psthStopField)
self.psthMaxBox = QtGui.QCheckBox(self.groupBox)
self.psthMaxBox.setChecked(True)
self.psthMaxBox.setObjectName(_fromUtf8("psthMaxBox"))
self.horizontalLayout_13.addWidget(self.psthMaxBox)
self.horizontalLayout_3.addWidget(self.groupBox)
self.verticalLayout_6.addLayout(self.horizontalLayout_3)
self.verticalLayout_10.addWidget(self.PSTHContainer)
self.psthDock.setWidget(self.dockWidgetContents_2)
ControlWindow.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.psthDock)
self.progressDock = QtGui.QDockWidget(ControlWindow)
self.progressDock.setObjectName(_fromUtf8("progressDock"))
self.dockWidgetContents_3 = QtGui.QWidget()
self.dockWidgetContents_3.setObjectName(_fromUtf8("dockWidgetContents_3"))
self.progressDock.setWidget(self.dockWidgetContents_3)
ControlWindow.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.progressDock)
self.log_dock = QtGui.QDockWidget(ControlWindow)
self.log_dock.setObjectName(_fromUtf8("log_dock"))
self.dockWidgetContents_4 = QtGui.QWidget()
self.dockWidgetContents_4.setObjectName(_fromUtf8("dockWidgetContents_4"))
self.verticalLayout_7 = QtGui.QVBoxLayout(self.dockWidgetContents_4)
self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
self.logTxedt = QtGui.QPlainTextEdit(self.dockWidgetContents_4)
self.logTxedt.setReadOnly(True)
self.logTxedt.setObjectName(_fromUtf8("logTxedt"))
self.verticalLayout_7.addWidget(self.logTxedt)
self.log_dock.setWidget(self.dockWidgetContents_4)
ControlWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.log_dock)
self.detail_dock = QtGui.QDockWidget(ControlWindow)
self.detail_dock.setObjectName(_fromUtf8("detail_dock"))
self.dockWidgetContents_5 = QtGui.QWidget()
self.dockWidgetContents_5.setObjectName(_fromUtf8("dockWidgetContents_5"))
self.verticalLayout_11 = QtGui.QVBoxLayout(self.dockWidgetContents_5)
self.verticalLayout_11.setObjectName(_fromUtf8("verticalLayout_11"))
self.stimDetails = StimDetailWidget(self.dockWidgetContents_5)
self.stimDetails.setObjectName(_fromUtf8("stimDetails"))
self.verticalLayout_11.addWidget(self.stimDetails)
self.detail_dock.setWidget(self.dockWidgetContents_5)
ControlWindow.addDockWidget(QtCore.Qt.DockWidgetArea(8), self.detail_dock)
self.runComment = QtGui.QDockWidget(ControlWindow)
self.runComment.setObjectName(_fromUtf8("runComment"))
self.dockWidgetContents_6 = QtGui.QWidget()
self.dockWidgetContents_6.setObjectName(_fromUtf8("dockWidgetContents_6"))
self.verticalLayout_8 = QtGui.QVBoxLayout(self.dockWidgetContents_6)
self.verticalLayout_8.setObjectName(_fromUtf8("verticalLayout_8"))
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.label_21 = QtGui.QLabel(self.dockWidgetContents_6)
self.label_21.setObjectName(_fromUtf8("label_21"))
self.horizontalLayout_11.addWidget(self.label_21)
self.cellIDLbl = QtGui.QLabel(self.dockWidgetContents_6)
self.cellIDLbl.setObjectName(_fromUtf8("cellIDLbl"))
self.horizontalLayout_11.addWidget(self.cellIDLbl)
self.verticalLayout_8.addLayout(self.horizontalLayout_11)
self.commentTxtEdt = QtGui.QPlainTextEdit(self.dockWidgetContents_6)
self.commentTxtEdt.setObjectName(_fromUtf8("commentTxtEdt"))
self.verticalLayout_8.addWidget(self.commentTxtEdt)
self.runComment.setWidget(self.dockWidgetContents_6)
ControlWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.runComment)
self.responseSpl = QtGui.QDockWidget(ControlWindow)
self.responseSpl.setObjectName(_fromUtf8("responseSpl"))
self.dockWidgetContents_7 = QtGui.QWidget()
self.dockWidgetContents_7.setObjectName(_fromUtf8("dockWidgetContents_7"))
self.verticalLayout_14 = QtGui.QVBoxLayout(self.dockWidgetContents_7)
self.verticalLayout_14.setObjectName(_fromUtf8("verticalLayout_14"))
self.splLayout = QtGui.QGridLayout()
self.splLayout.setObjectName(_fromUtf8("splLayout"))
self.widget = QtGui.QWidget(self.dockWidgetContents_7)
self.widget.setObjectName(_fromUtf8("widget"))
self.splLayout.addWidget(self.widget, 0, 0, 1, 1)
self.verticalLayout_14.addLayout(self.splLayout)
self.responseSpl.setWidget(self.dockWidgetContents_7)
ControlWindow.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.responseSpl)
self.timerDock = QtGui.QDockWidget(ControlWindow)
self.timerDock.setObjectName(_fromUtf8("timerDock"))
self.timerWidget = TimerWidget()
self.timerWidget.setObjectName(_fromUtf8("timerWidget"))
self.timerDock.setWidget(self.timerWidget)
ControlWindow.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.timerDock)
self.actionSave_Options = QtGui.QAction(ControlWindow)
self.actionSave_Options.setObjectName(_fromUtf8("actionSave_Options"))
self.actionSet_Calibration = QtGui.QAction(ControlWindow)
self.actionSet_Calibration.setObjectName(_fromUtf8("actionSet_Calibration"))
self.actionSet_Scale = QtGui.QAction(ControlWindow)
self.actionSet_Scale.setObjectName(_fromUtf8("actionSet_Scale"))
self.actionShow_Data_display = QtGui.QAction(ControlWindow)
self.actionShow_Data_display.setObjectName(_fromUtf8("actionShow_Data_display"))
self.actionShow_PSTH = QtGui.QAction(ControlWindow)
self.actionShow_PSTH.setObjectName(_fromUtf8("actionShow_PSTH"))
self.actionSpectrogram_Parameters = QtGui.QAction(ControlWindow)
self.actionSpectrogram_Parameters.setObjectName(_fromUtf8("actionSpectrogram_Parameters"))
self.actionShow_Progress = QtGui.QAction(ControlWindow)
self.actionShow_Progress.setObjectName(_fromUtf8("actionShow_Progress"))
self.actionShow_Log = QtGui.QAction(ControlWindow)
self.actionShow_Log.setObjectName(_fromUtf8("actionShow_Log"))
self.actionShow_Stimulus_Details = QtGui.QAction(ControlWindow)
self.actionShow_Stimulus_Details.setObjectName(_fromUtf8("actionShow_Stimulus_Details"))
self.actionView_Settings = QtGui.QAction(ControlWindow)
self.actionView_Settings.setObjectName(_fromUtf8("actionView_Settings"))
self.actionData_Display = QtGui.QAction(ControlWindow)
self.actionData_Display.setObjectName(_fromUtf8("actionData_Display"))
self.actionPSTH = QtGui.QAction(ControlWindow)
self.actionPSTH.setObjectName(_fromUtf8("actionPSTH"))
self.actionProgress = QtGui.QAction(ControlWindow)
self.actionProgress.setObjectName(_fromUtf8("actionProgress"))
self.actionLog = QtGui.QAction(ControlWindow)
self.actionLog.setObjectName(_fromUtf8("actionLog"))
self.actionStimulus_Details = QtGui.QAction(ControlWindow)
self.actionStimulus_Details.setObjectName(_fromUtf8("actionStimulus_Details"))
self.actionSet_Cell_ID = QtGui.QAction(ControlWindow)
self.actionSet_Cell_ID.setObjectName(_fromUtf8("actionSet_Cell_ID"))
self.actionComment = QtGui.QAction(ControlWindow)
self.actionComment.setObjectName(_fromUtf8("actionComment"))
self.actionTimer = QtGui.QAction(ControlWindow)
self.actionTimer.setObjectName(_fromUtf8("actionTimer"))
self.actionResponse_SPL = QtGui.QAction(ControlWindow)
self.actionResponse_SPL.setObjectName(_fromUtf8("actionResponse_SPL"))
self.actionVocalization_paths = QtGui.QAction(ControlWindow)
self.actionVocalization_paths.setObjectName(_fromUtf8("actionVocalization_paths"))
self.actionAdvanced = QtGui.QAction(ControlWindow)
self.actionAdvanced.setObjectName(_fromUtf8("actionAdvanced"))
self.menuOptions.addAction(self.actionSave_Options)
self.menuOptions.addAction(self.actionSet_Calibration)
self.menuOptions.addAction(self.actionSet_Scale)
self.menuOptions.addAction(self.actionSpectrogram_Parameters)
self.menuOptions.addAction(self.actionSet_Cell_ID)
self.menuOptions.addAction(self.actionVocalization_paths)
self.menuOptions.addAction(self.actionAdvanced)
self.menuShow.addAction(self.actionData_Display)
self.menuShow.addAction(self.actionPSTH)
self.menuShow.addAction(self.actionProgress)
self.menuShow.addAction(self.actionLog)
self.menuShow.addAction(self.actionStimulus_Details)
self.menuShow.addAction(self.actionComment)
self.menuShow.addAction(self.actionTimer)
self.menuShow.addAction(self.actionResponse_SPL)
self.menuView.addAction(self.actionView_Settings)
self.menuView.addAction(self.menuShow.menuAction())
self.menubar.addAction(self.menuOptions.menuAction())
self.menubar.addAction(self.menuView.menuAction())
self.retranslateUi(ControlWindow)
self.tabGroup.setCurrentIndex(0)
QtCore.QObject.connect(self.actionSave_Options, QtCore.SIGNAL(_fromUtf8("triggered()")), ControlWindow.launchSaveDlg)
QtCore.QObject.connect(self.actionSet_Calibration, QtCore.SIGNAL(_fromUtf8("triggered()")), ControlWindow.launchCalibrationDlg)
QtCore.QObject.connect(self.actionSet_Scale, QtCore.SIGNAL(_fromUtf8("triggered()")), ControlWindow.launchScaleDlg)
QtCore.QObject.connect(self.actionData_Display, QtCore.SIGNAL(_fromUtf8("triggered()")), self.plotDock.showNormal)
QtCore.QObject.connect(self.actionPSTH, QtCore.SIGNAL(_fromUtf8("triggered()")), self.psthDock.showNormal)
QtCore.QObject.connect(self.actionSpectrogram_Parameters, QtCore.SIGNAL(_fromUtf8("triggered()")), ControlWindow.launchSpecgramDlg)
QtCore.QObject.connect(self.actionProgress, QtCore.SIGNAL(_fromUtf8("triggered()")), self.progressDock.showNormal)
QtCore.QObject.connect(self.tabGroup, QtCore.SIGNAL(_fromUtf8("currentChanged(int)")), ControlWindow.tabChanged)
QtCore.QObject.connect(self.actionLog, QtCore.SIGNAL(_fromUtf8("triggered()")), self.log_dock.showNormal)
QtCore.QObject.connect(self.pushButton, QtCore.SIGNAL(_fromUtf8("clicked()")), ControlWindow.clearProtocol)
QtCore.QObject.connect(self.actionStimulus_Details, QtCore.SIGNAL(_fromUtf8("triggered()")), self.detail_dock.showNormal)
QtCore.QObject.connect(self.actionView_Settings, QtCore.SIGNAL(_fromUtf8("triggered()")), ControlWindow.launchViewDlg)
QtCore.QObject.connect(self.refDbSpnbx, QtCore.SIGNAL(_fromUtf8("valueChanged(double)")), ControlWindow.updateCalDb)
QtCore.QObject.connect(self.mphoneSensSpnbx, QtCore.SIGNAL(_fromUtf8("valueChanged(double)")), ControlWindow.updateMicrophoneCalibration)
QtCore.QObject.connect(self.mphoneCalBtn, QtCore.SIGNAL(_fromUtf8("clicked()")), ControlWindow.mphoneCalibrate)
QtCore.QObject.connect(self.mphoneDBSpnbx, QtCore.SIGNAL(_fromUtf8("valueChanged(double)")), ControlWindow.updateMicrophoneCalibration)
QtCore.QObject.connect(self.actionSet_Cell_ID, QtCore.SIGNAL(_fromUtf8("triggered()")), ControlWindow.launchCellDlg)
QtCore.QObject.connect(self.actionComment, QtCore.SIGNAL(_fromUtf8("triggered()")), self.runComment.showNormal)
QtCore.QObject.connect(self.actionTimer, QtCore.SIGNAL(_fromUtf8("triggered()")), self.timerDock.showNormal)
QtCore.QObject.connect(self.actionResponse_SPL, QtCore.SIGNAL(_fromUtf8("triggered()")), self.responseSpl.showNormal)
QtCore.QObject.connect(self.psthMaxBox, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), self.psthStopField.setDisabled)
QtCore.QObject.connect(self.actionVocalization_paths, QtCore.SIGNAL(_fromUtf8("triggered()")), ControlWindow.launchVocalPaths)
QtCore.QObject.connect(self.aichanBtn, QtCore.SIGNAL(_fromUtf8("clicked()")), ControlWindow.launchChannelDlg)
QtCore.QObject.connect(self.actionAdvanced, QtCore.SIGNAL(_fromUtf8("triggered()")), ControlWindow.launchAdvancedDlg)
QtCore.QObject.connect(self.modeCmbx, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(QString)")), ControlWindow.modeToggled)
QtCore.QObject.connect(self.modeCmbx, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(QString)")), ControlWindow.setTriggerEnable)
QtCore.QObject.connect(self.artifactRejectChbx, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), self.artifactRejectSpnbx.setEnabled)
QtCore.QObject.connect(self.averageChbx, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), self.artifactRejectChbx.setEnabled)
QtCore.QObject.connect(self.averageChbx, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), self.artifactRejectSpnbx.setEnabled)
QtCore.QMetaObject.connectSlotsByName(ControlWindow)
    def retranslateUi(self, ControlWindow):
        """Assign every user-visible string of the ControlWindow UI.

        Auto-generated (pyuic4 style): called once from ``setupUi`` and may be
        called again when the application language changes.  All text is routed
        through ``_translate`` so Qt's translation machinery can substitute
        localized strings at runtime.

        :param ControlWindow: the main-window instance; only its window title
            is set directly, everything else goes to ``self.*`` child widgets.
        """
        ControlWindow.setWindowTitle(_translate("ControlWindow", "SPARKLE", None))
        # --- header / explore tab ---
        self.label_3.setToolTip(_translate("ControlWindow", "SONIC SPARKLE HIGH FIVE!!!!!", None))
        self.label_3.setText(_translate("ControlWindow", "SPARKLE", None))
        self.runningLabel.setText(_translate("ControlWindow", "OFF", None))
        self.save_explore_ckbx.setText(_translate("ControlWindow", "Save Explore Recording", None))
        self.label_20.setText(_translate("ControlWindow", "Undesired Attenuation:", None))
        self.overAttenLbl.setToolTip(_translate("ControlWindow", "Stimulus had to be scaled down by this amount to be generated", None))
        self.overAttenLbl.setText(_translate("ControlWindow", "0", None))
        self.label_23.setText(_translate("ControlWindow", "dB", None))
        self.tabGroup.setTabText(self.tabGroup.indexOf(self.tabExplore), _translate("ControlWindow", "Explore", None))
        # --- experiment (protocol) tab ---
        self.label_2.setText(_translate("ControlWindow", "Experiment Protocol:", None))
        self.pushButton.setToolTip(_translate("ControlWindow", "Remove all tests", None))
        self.pushButton.setText(_translate("ControlWindow", "Clear Protocol", None))
        self.tabGroup.setTabText(self.tabGroup.indexOf(self.tabProtocol), _translate("ControlWindow", "Experiment", None))
        # --- calibration tab ---
        self.overAttenLbl_2.setText(_translate("ControlWindow", "0", None))
        self.label_19.setText(_translate("ControlWindow", "undesired attenuation", None))
        self.refToneLbl.setText(_translate("ControlWindow", "Intensity at ? kHz and ?V", None))
        self.label_4.setText(_translate("ControlWindow", "=", None))
        self.refDbSpnbx.setToolTip(_translate("ControlWindow", "intensity recorded at microphone", None))
        self.label_15.setText(_translate("ControlWindow", "dB SPL", None))
        self.label_9.setText(_translate("ControlWindow", "Microphone sensitivity", None))
        self.mphoneSensSpnbx.setToolTip(_translate("ControlWindow", "Resultant amplitude of microphone measurement", None))
        self.label_18.setText(_translate("ControlWindow", "V at", None))
        self.mphoneDBSpnbx.setToolTip(_translate("ControlWindow", "intensity of microphone calibration device", None))
        self.label_22.setText(_translate("ControlWindow", "dB SPL", None))
        self.mphoneCalBtn.setToolTip(_translate("ControlWindow", "Start recording microphone calibration tone", None))
        self.mphoneCalBtn.setText(_translate("ControlWindow", "Calibrate Mic", None))
        self.tabGroup.setTabText(self.tabGroup.indexOf(self.tabCalibrate), _translate("ControlWindow", "Calibration", None))
        self.tabGroup.setTabText(self.tabGroup.indexOf(self.tabReview), _translate("ControlWindow", "Review", None))
        # --- acquisition settings column ---
        self.label_29.setText(_translate("ControlWindow", "Stimulus channel", None))
        self.chanNumLbl.setText(_translate("ControlWindow", "0", None))
        self.aochanBox.setToolTip(_translate("ControlWindow", "Output channel (AO#)", None))
        self.label_24.setText(_translate("ControlWindow", "Recording channels :", None))
        self.aichanBtn.setText(_translate("ControlWindow", "choose...", None))
        self.label.setText(_translate("ControlWindow", "Acq. Sample rate", None))
        self.label_10.setText(_translate("ControlWindow", "Window size", None))
        self.label_43.setText(_translate("ControlWindow", "Spike bin size", None))
        self.aifsSpnbx.setToolTip(_translate("ControlWindow", "Recording sampling rate", None))
        self.binszSpnbx.setToolTip(_translate("ControlWindow", "Time bin duration for detected spikes for raster and PSTH", None))
        self.reprateSpnbx.setToolTip(_translate("ControlWindow", "Rate of recording window repetitions", None))
        self.reprateSpnbx.setSuffix(_translate("ControlWindow", " reps/s", None))
        self.label_35.setText(_translate("ControlWindow", "Rep rate", None))
        self.averageChbx.setText(_translate("ControlWindow", "Avg response", None))
        self.label_17.setText(_translate("ControlWindow", "Mode", None))
        self.modeCmbx.setToolTip(_translate("ControlWindow", "Mode of acquisition to use", None))
        self.modeCmbx.setItemText(0, _translate("ControlWindow", "Windowed", None))
        self.modeCmbx.setItemText(1, _translate("ControlWindow", "Chart", None))
        self.artifactRejectSpnbx.setToolTip(_translate("ControlWindow", "Artifact Rejection Value", None))
        self.artifactRejectSpnbx.setSuffix(_translate("ControlWindow", " V", None))
        self.artifactRejectChbx.setText(_translate("ControlWindow", "Artifact Rejection", None))
        self.trigCkbx.setText(_translate("ControlWindow", "Trigger", None))
        self.label_8.setText(_translate("ControlWindow", "Calibration:", None))
        self.currentCalLbl.setText(_translate("ControlWindow", "None", None))
        self.dataFileLbl.setText(_translate("ControlWindow", "None", None))
        self.label_14.setText(_translate("ControlWindow", "Data File:", None))
        self.startChartBtn.setText(_translate("ControlWindow", "Start Chart", None))
        self.stopChartBtn.setText(_translate("ControlWindow", "Stop Chart", None))
        self.startBtn.setToolTip(_translate("ControlWindow", "Begins acquisition with current settings", None))
        self.startBtn.setText(_translate("ControlWindow", "Start", None))
        self.stopBtn.setText(_translate("ControlWindow", "Stop", None))
        # --- menus ---
        self.menuOptions.setTitle(_translate("ControlWindow", "Options", None))
        self.menuView.setTitle(_translate("ControlWindow", "View", None))
        self.menuShow.setTitle(_translate("ControlWindow", "Show", None))
        # --- dock widgets (PSTH stats labels keep trailing spaces on purpose) ---
        self.plotDock.setWindowTitle(_translate("ControlWindow", "Data Display", None))
        self.psthDock.setWindowTitle(_translate("ControlWindow", "PSTH", None))
        self.label_11.setToolTip(_translate("ControlWindow", "Total no. of spikes over repetitions", None))
        self.label_11.setText(_translate("ControlWindow", "Total :", None))
        self.label_16.setToolTip(_translate("ControlWindow", "Mean no. of spikes per unique stimulus", None))
        self.label_16.setText(_translate("ControlWindow", "Average : ", None))
        self.label_5.setToolTip(_translate("ControlWindow", "Mean time of first spike (ignoring NaNs)", None))
        self.label_5.setText(_translate("ControlWindow", "Latency (ms) : ", None))
        self.spikeLatencyLbl.setText(_translate("ControlWindow", "0", None))
        self.label_7.setToolTip(_translate("ControlWindow", "Mean no. spikes per window", None))
        self.label_7.setText(_translate("ControlWindow", "Rate :", None))
        self.spikeRateLbl.setText(_translate("ControlWindow", "0", None))
        self.spikeAvgLbl.setText(_translate("ControlWindow", "0", None))
        self.spikeTotalLbl.setText(_translate("ControlWindow", "0", None))
        self.label_12.setToolTip(_translate("ControlWindow", "First Spike Standard Deviation", None))
        self.label_12.setText(_translate("ControlWindow", "Latency (SD) :", None))
        self.spikeStandardDeviationLbl.setText(_translate("ControlWindow", "0", None))
        self.spikeNanLbl.setText(_translate("ControlWindow", "0", None))
        self.label_25.setToolTip(_translate("ControlWindow", "Total number of NaNs", None))
        self.label_25.setText(_translate("ControlWindow", "NaNs :", None))
        self.groupBox.setTitle(_translate("ControlWindow", "subwindow", None))
        self.label_6.setText(_translate("ControlWindow", "start:", None))
        self.label_13.setText(_translate("ControlWindow", "end:", None))
        self.psthMaxBox.setText(_translate("ControlWindow", "max", None))
        self.progressDock.setWindowTitle(_translate("ControlWindow", "Progress", None))
        self.log_dock.setWindowTitle(_translate("ControlWindow", "Log", None))
        self.detail_dock.setWindowTitle(_translate("ControlWindow", "Stimulus Details", None))
        self.runComment.setWindowTitle(_translate("ControlWindow", "Comment", None))
        self.label_21.setText(_translate("ControlWindow", "Cell ID:", None))
        self.cellIDLbl.setText(_translate("ControlWindow", "0", None))
        self.responseSpl.setWindowTitle(_translate("ControlWindow", "Response dB SPL", None))
        self.timerDock.setWindowTitle(_translate("ControlWindow", "Timer", None))
        # --- QAction captions (menu entries) ---
        self.actionSave_Options.setText(_translate("ControlWindow", "Change Data File...", None))
        self.actionSet_Calibration.setText(_translate("ControlWindow", "Calibration Parameters...", None))
        self.actionSet_Scale.setText(_translate("ControlWindow", "Set Scale...", None))
        self.actionShow_Data_display.setText(_translate("ControlWindow", "Show Data Display", None))
        self.actionShow_PSTH.setText(_translate("ControlWindow", "Show PSTH", None))
        self.actionSpectrogram_Parameters.setText(_translate("ControlWindow", "Spectrogram Parameters...", None))
        self.actionShow_Progress.setText(_translate("ControlWindow", "Show Progress", None))
        self.actionShow_Log.setText(_translate("ControlWindow", "Show Log", None))
        self.actionShow_Stimulus_Details.setText(_translate("ControlWindow", "Show Stimulus Details", None))
        self.actionView_Settings.setText(_translate("ControlWindow", "View Settings...", None))
        self.actionData_Display.setText(_translate("ControlWindow", "Data Display", None))
        self.actionPSTH.setText(_translate("ControlWindow", "PSTH", None))
        self.actionProgress.setText(_translate("ControlWindow", "Progress", None))
        self.actionLog.setText(_translate("ControlWindow", "Log", None))
        self.actionStimulus_Details.setText(_translate("ControlWindow", "Stimulus Details", None))
        self.actionSet_Cell_ID.setText(_translate("ControlWindow", "Set Cell ID", None))
        self.actionComment.setText(_translate("ControlWindow", "Comment", None))
        self.actionTimer.setText(_translate("ControlWindow", "Timer", None))
        self.actionResponse_SPL.setText(_translate("ControlWindow", "response SPL", None))
        self.actionVocalization_paths.setText(_translate("ControlWindow", "Vocalization paths...", None))
        self.actionAdvanced.setText(_translate("ControlWindow", "Advanced...", None))
from sparkle.gui.calibration_widget import CalibrationWidget
from sparkle.gui.plotdock import PlotDockWidget
from sparkle.gui.qprotocol import ProtocolView
from sparkle.gui.stim.stim_detail import StimDetailWidget
from sparkle.gui.timer_widget import TimerWidget
from sparkle.gui.data_review import QDataReviewer
from sparkle.gui.stim.explore_stim_editor import ExploreStimulusEditor
from sparkle.gui.stim.stimulus_label import StimulusLabelTable
from sparkle.gui.stim.smart_spinbox import SmartSpinBox
from sparkle.gui.plotting.pyqtgraph_widgets import PSTHWidget
| gpl-3.0 |
wangjiaxi/django-dynamic-forms | dynamic_forms/models.py | 2 | 8474 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from collections import OrderedDict
from django.core.urlresolvers import reverse
from django.db import models
from django.db.transaction import atomic
from django.template.defaultfilters import slugify
from django.utils.crypto import get_random_string
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import format_html, format_html_join
from django.utils.translation import ugettext_lazy as _
from dynamic_forms.actions import action_registry
from dynamic_forms.conf import settings
from dynamic_forms.fields import TextMultiSelectField
from dynamic_forms.formfields import formfield_registry
@python_2_unicode_compatible
class FormModel(models.Model):
    """A user-definable form.

    Stores where the form is served (``submit_url``), where the user lands
    after a successful submission (``success_url``), which registered actions
    run on submit, and which templates render the form and the success page.
    """

    name = models.CharField(_('Name'), max_length=50, unique=True)
    submit_url = models.CharField(_('Submit URL'), max_length=100, unique=True,
        help_text=_('The full URL path to the form. It should start '
            'and end with a forward slash (<code>/</code>).'))
    success_url = models.CharField(_('Success URL'), max_length=100,
        help_text=_('The full URL path where the user will be '
            'redirected after successfully sending the form. It should start '
            'and end with a forward slash (<code>/</code>). If empty, the '
            'success URL is generated by appending <code>done/</code> to the '
            '“Submit URL”.'), blank=True, default='')
    # Multiple action names from the action registry may be selected.
    actions = TextMultiSelectField(_('Actions'), default='',
        choices=action_registry.get_as_choices())
    form_template = models.CharField(_('Form template path'), max_length=100,
        default='dynamic_forms/form.html',
        choices=settings.DYNAMIC_FORMS_FORM_TEMPLATES)
    success_template = models.CharField(_('Success template path'),
        max_length=100, default='dynamic_forms/form_success.html',
        choices=settings.DYNAMIC_FORMS_SUCCESS_TEMPLATES)
    allow_display = models.BooleanField(_('Allow display'), default=False,
        help_text=_('Allow a user to view the input at a later time. This '
        'requires the “Store in database” action to be active. The sender '
        'will be given a unique URL to recall the data.'))
    recipient_email = models.EmailField(_('Recipient email'), blank=True,
        null=True, help_text=_('Email address to send form data.'))

    class Meta:
        ordering = ['name']
        verbose_name = _('Dynamic form')
        verbose_name_plural = _('Dynamic forms')

    def __str__(self):
        return self.name

    def get_fields_as_dict(self):
        """
        Returns an ``OrderedDict`` (``SortedDict`` when ``OrderedDict is not
        available) with all fields associated with this form where their name
        is the key and their label is the value.
        """
        return OrderedDict(self.fields.values_list('name', 'label').all())

    def save(self, *args, **kwargs):
        """
        Makes sure that the ``submit_url`` and -- if defined the
        ``success_url`` -- end with a forward slash (``'/'``).
        """
        if not self.submit_url.endswith('/'):
            self.submit_url = self.submit_url + '/'
        if self.success_url:
            if not self.success_url.endswith('/'):
                self.success_url = self.success_url + '/'
        else:
            # No explicit success URL: default to "<submit_url>done/".
            self.success_url = self.submit_url + 'done/'
        super(FormModel, self).save(*args, **kwargs)
@python_2_unicode_compatible
class FormFieldModel(models.Model):
    """A single field belonging to a :class:`FormModel`.

    The concrete form-field class is looked up by ``field_type`` in the
    form-field registry; per-field options are stored as JSON in ``_options``.
    """

    parent_form = models.ForeignKey(FormModel, on_delete=models.CASCADE,
        related_name='fields')
    field_type = models.CharField(_('Type'), max_length=255,
        choices=formfield_registry.get_as_choices())
    label = models.CharField(_('Label'), max_length=255)
    # Auto-filled from the label on save() when left blank.
    name = models.SlugField(_('Name'), max_length=50, blank=True)
    # JSON-serialized option dict; access through the ``options`` property.
    _options = models.TextField(_('Options'), blank=True, null=True)
    position = models.SmallIntegerField(_('Position'), blank=True, default=0)

    class Meta:
        ordering = ['parent_form', 'position']
        unique_together = ("parent_form", "name",)
        verbose_name = _('Form field')
        verbose_name_plural = _('Form fields')

    def __str__(self):
        return _('Field “%(field_name)s” in form “%(form_name)s”') % {
            'field_name': self.label,
            'form_name': self.parent_form.name,
        }

    def generate_form_field(self, form):
        """Instantiate the registered field type and attach it to *form*."""
        field_type_cls = formfield_registry.get(self.field_type)
        field = field_type_cls(**self.get_form_field_kwargs())
        field.contribute_to_form(form)
        return field

    def get_form_field_kwargs(self):
        """Return the stored options merged with this field's name/label."""
        kwargs = self.options
        kwargs.update({
            'name': self.name,
            'label': self.label,
        })
        return kwargs

    @property
    def options(self):
        """Options passed to the form field during construction."""
        # Lazily decode ``_options`` once and cache the result; invalid JSON
        # silently degrades to an empty option dict.
        if not hasattr(self, '_options_cached'):
            self._options_cached = {}
            if self._options:
                try:
                    self._options_cached = json.loads(self._options)
                except ValueError:
                    pass
        return self._options_cached

    @options.setter
    def options(self, opts):
        # Drop the cache so the next read reflects the new value.
        if hasattr(self, '_options_cached'):
            del self._options_cached
        self._options = json.dumps(opts)

    def save(self, *args, **kwargs):
        """Slugify a missing name and strip options the field type rejects."""
        if not self.name:
            self.name = slugify(self.label)
        given_options = self.options
        field_type_cls = formfield_registry.get(self.field_type)
        # Discard any stored option keys the field type does not declare.
        invalid = set(self.options.keys()) - set(field_type_cls._meta.keys())
        if invalid:
            for key in invalid:
                del given_options[key]
            self.options = given_options
        super(FormFieldModel, self).save(*args, **kwargs)
@python_2_unicode_compatible
class FormModelData(models.Model):
    """One submitted data set for a form, stored as JSON in ``value``."""

    form = models.ForeignKey(FormModel, on_delete=models.SET_NULL,
        related_name='data', null=True)
    value = models.TextField(_('Form data'), blank=True, default='')
    submitted = models.DateTimeField(_('Submitted on'), auto_now_add=True)
    display_key = models.CharField(_('Display key'), max_length=24, null=True,
        blank=True, db_index=True, default=None, unique=True,
        help_text=_('A unique identifier that is used to allow users to view '
        'their sent data. Unique over all stored data sets.'))

    class Meta:
        verbose_name = _('Form data')
        verbose_name_plural = _('Form data')

    def __str__(self):
        return _('Form: “%(form)s” on %(date)s') % {
            'form': self.form,
            'date': self.submitted,
        }

    def save(self, *args, **kwargs):
        """Assign a unique display key (when displaying is allowed) and save."""
        with atomic():
            if self.form.allow_display and not self.display_key:
                # Re-draw until the random key is globally unique.
                dk = get_random_string(24)
                while FormModelData.objects.filter(display_key=dk).exists():
                    dk = get_random_string(24)
                self.display_key = dk
            super(FormModelData, self).save(*args, **kwargs)

    @property
    def json_value(self):
        # Decoded submission, with keys in deterministic (sorted) order.
        return OrderedDict(sorted(json.loads(self.value).items()))

    def pretty_value(self):
        """Render the stored data as an HTML definition list; fall back to the
        raw value when it is not valid JSON."""
        try:
            value = format_html_join('',
                '<dt>{0}</dt><dd>{1}</dd>',
                (
                    (force_text(k), force_text(v))
                    for k, v in self.json_value.items()
                )
            )
            return format_html('<dl>{0}</dl>', value)
        except ValueError:
            return self.value
    pretty_value.allow_tags = True

    @property
    def show_url(self):
        """
        If the form this data set belongs to has
        :attr:`~FormModel.allow_display` ``== True``, return the permanent URL.
        If displaying is not allowed, return an empty string.
        """
        if self.form.allow_display:
            return reverse('dynamic_forms:data-set-detail',
                           kwargs={'display_key': self.display_key})
        return ''

    @property
    def show_url_link(self):
        """
        Similar to :attr:`show_url` but wraps the display key in an `<a>`-tag
        linking to the permanent URL.
        """
        if self.form.allow_display:
            return format_html('<a href="{0}">{1}</a>', self.show_url, self.display_key)
        return ''
| bsd-3-clause |
raisul2010/samurai-kernel | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Require exactly one argument: the object file to check (Python 2 script).
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# Allow overriding the readelf binary (e.g. a cross toolchain) via $READELF.
readelf = os.getenv("READELF", "readelf")

# Matches "readelf -u" function headers: "<name>: [0xSTART-0xEND]".
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches unwind region descriptor lines, capturing the region length.
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    """Report an error when the unwind regions of *func* do not add up to
    *slots* instruction slots.

    Relies on module-level state: increments the global ``num_errors`` and,
    when no function name is available, uses the globals ``start``/``end``
    for the error message.
    """
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
# Walk the unwind info emitted by "readelf -u": each function header starts a
# new accumulation; each "rlen=" line adds that region's length to the total.
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # New function header: first validate the one we just finished.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # ia64 packs three instruction slots into each 16-byte bundle.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Validate the final function in the file.
check_func(func, slots, rlen_sum)

if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
nfvs/flask-security | flask_security/datastore.py | 4 | 15322 | # -*- coding: utf-8 -*-
"""
flask_security.datastore
~~~~~~~~~~~~~~~~~~~~~~~~
This module contains an user datastore classes.
:copyright: (c) 2012 by Matt Wright.
:license: MIT, see LICENSE for more details.
"""
from .utils import get_identity_attributes, string_types
class Datastore(object):
    """Lowest-level persistence adapter.

    Concrete backends override :meth:`put` and :meth:`delete` (and usually
    :meth:`commit`) to talk to their particular ORM.
    """

    def __init__(self, db):
        """Remember the backing database handle/connection object."""
        self.db = db

    def commit(self):
        """Flush pending changes to storage; a no-op unless overridden."""
        pass

    def put(self, model):
        """Persist *model*; must be implemented by a concrete backend."""
        raise NotImplementedError

    def delete(self, model):
        """Remove *model* from storage; must be implemented by a backend."""
        raise NotImplementedError
class SQLAlchemyDatastore(Datastore):
    """Datastore that delegates persistence to a Flask-SQLAlchemy session."""

    def commit(self):
        self.db.session.commit()

    def put(self, model):
        # add() only stages the object; commit() actually persists it.
        self.db.session.add(model)
        return model

    def delete(self, model):
        self.db.session.delete(model)
class MongoEngineDatastore(Datastore):
    """Datastore for MongoEngine documents, which save/delete themselves."""

    def put(self, model):
        model.save()
        return model

    def delete(self, model):
        model.delete()
class PeeweeDatastore(Datastore):
    """Datastore for peewee models, which save/delete themselves."""

    def put(self, model):
        model.save()
        return model

    def delete(self, model):
        # recursive=True also removes dependent rows (e.g. role links).
        model.delete_instance(recursive=True)
def with_pony_session(f):
    """Decorator ensuring *f* runs inside a Pony ORM ``db_session``.

    A session is entered only when none is active yet (outermost call); it is
    then exited when the current Flask request finishes or, failing that, when
    the application context pops — so all datastore calls within one request
    share a single session.
    """
    from functools import wraps

    @wraps(f)
    def decorator(*args, **kwargs):
        from pony.orm import db_session
        from pony.orm.core import local
        from flask import after_this_request, current_app, has_app_context, \
            has_request_context
        from flask.signals import appcontext_popped

        # Only the outermost call (no session open yet) manages the session.
        register = local.db_context_counter == 0
        if register and (has_app_context() or has_request_context()):
            db_session.__enter__()

        result = f(*args, **kwargs)

        if register:
            if has_request_context():
                @after_this_request
                def pop(request):
                    # Close the session once the response is ready.
                    db_session.__exit__()
                    return request
            elif has_app_context():
                @appcontext_popped.connect_via(
                    current_app._get_current_object()
                )
                def pop(sender, *args, **kwargs):
                    # App-context teardown: unwind any remaining sessions.
                    while local.db_context_counter:
                        db_session.__exit__()
            else:
                raise RuntimeError('Needs app or request context')
        return result
    return decorator
class PonyDatastore(Datastore):
    """Datastore backed by Pony ORM; writes happen inside a ``db_session``."""

    def commit(self):
        self.db.commit()

    @with_pony_session
    def put(self, model):
        # Pony tracks entity changes automatically; being inside the session
        # is all that is required to persist them on commit.
        return model

    @with_pony_session
    def delete(self, model):
        model.delete()
class UserDatastore(object):
    """Abstracted user datastore.

    Subclasses combine this with a :class:`Datastore` backend and implement
    :meth:`get_user`, :meth:`find_user` and :meth:`find_role`.

    :param user_model: A user model class definition
    :param role_model: A role model class definition
    """

    def __init__(self, user_model, role_model):
        self.user_model = user_model
        self.role_model = role_model

    def _prepare_role_modify_args(self, user, role):
        # Accept either model instances or identifying strings (user email /
        # role name) and resolve both to instances.
        if isinstance(user, string_types):
            user = self.find_user(email=user)
        if isinstance(role, string_types):
            role = self.find_role(role)
        return user, role

    def _prepare_create_user_args(self, **kwargs):
        kwargs.setdefault('active', True)
        # Resolve role names/instances to role records.  Build a *new* list
        # instead of rewriting entries in place so that a list object passed
        # in by the caller is never mutated as a side effect (the previous
        # implementation overwrote the caller's list items).
        resolved = []
        for role in kwargs.get('roles', []):
            rn = role.name if isinstance(role, self.role_model) else role
            # see if the role exists
            resolved.append(self.find_role(rn))
        kwargs['roles'] = resolved
        return kwargs

    def get_user(self, id_or_email):
        """Returns a user matching the specified ID or email address."""
        raise NotImplementedError

    def find_user(self, *args, **kwargs):
        """Returns a user matching the provided parameters."""
        raise NotImplementedError

    def find_role(self, *args, **kwargs):
        """Returns a role matching the provided name."""
        raise NotImplementedError

    def add_role_to_user(self, user, role):
        """Adds a role to a user. Returns ``True`` if the role was added,
        ``False`` if the user already had it.

        :param user: The user to manipulate
        :param role: The role to add to the user
        """
        user, role = self._prepare_role_modify_args(user, role)
        if role not in user.roles:
            user.roles.append(role)
            self.put(user)
            return True
        return False

    def remove_role_from_user(self, user, role):
        """Removes a role from a user. Returns ``True`` if a change was made.

        :param user: The user to manipulate
        :param role: The role to remove from the user
        """
        rv = False
        user, role = self._prepare_role_modify_args(user, role)
        if role in user.roles:
            rv = True
            user.roles.remove(role)
            self.put(user)
        return rv

    def toggle_active(self, user):
        """Toggles a user's active status. Always returns True."""
        user.active = not user.active
        return True

    def deactivate_user(self, user):
        """Deactivates a specified user. Returns `True` if a change was made.

        :param user: The user to deactivate
        """
        if user.active:
            user.active = False
            return True
        return False

    def activate_user(self, user):
        """Activates a specified user. Returns `True` if a change was made.

        :param user: The user to activate
        """
        if not user.active:
            user.active = True
            return True
        return False

    def create_role(self, **kwargs):
        """Creates and returns a new role from the given parameters."""
        role = self.role_model(**kwargs)
        return self.put(role)

    def find_or_create_role(self, name, **kwargs):
        """Returns a role matching the given name or creates it with any
        additionally provided parameters.
        """
        kwargs["name"] = name
        return self.find_role(name) or self.create_role(**kwargs)

    def create_user(self, **kwargs):
        """Creates and returns a new user from the given parameters."""
        kwargs = self._prepare_create_user_args(**kwargs)
        user = self.user_model(**kwargs)
        return self.put(user)

    def delete_user(self, user):
        """Deletes the specified user.

        :param user: The user to delete
        """
        self.delete(user)
class SQLAlchemyUserDatastore(SQLAlchemyDatastore, UserDatastore):
    """A SQLAlchemy datastore implementation for Flask-Security that assumes the
    use of the Flask-SQLAlchemy extension.
    """

    def __init__(self, db, user_model, role_model):
        SQLAlchemyDatastore.__init__(self, db)
        UserDatastore.__init__(self, user_model, role_model)

    def get_user(self, identifier):
        """Return the user matching *identifier* — a primary key or one of the
        configured identity attributes (e.g. email) — or ``None``."""
        from sqlalchemy import func as alchemyFn
        user_model_query = self.user_model.query
        if hasattr(self.user_model, 'roles'):
            from sqlalchemy.orm import joinedload
            # Eager-load roles to avoid a second query per lookup.
            user_model_query = user_model_query.options(joinedload('roles'))
        if self._is_numeric(identifier):
            return user_model_query.get(identifier)
        for attr in get_identity_attributes():
            # Case-insensitive match against each identity attribute in turn.
            query = alchemyFn.lower(getattr(self.user_model, attr)) \
                == alchemyFn.lower(identifier)
            rv = user_model_query.filter(query).first()
            if rv is not None:
                return rv

    def _is_numeric(self, value):
        # True when value can be interpreted as an integer primary key.
        try:
            int(value)
        except (TypeError, ValueError):
            return False
        return True

    def find_user(self, **kwargs):
        """Return the first user matching the given column filters, or None."""
        query = self.user_model.query
        if hasattr(self.user_model, 'roles'):
            from sqlalchemy.orm import joinedload
            query = query.options(joinedload('roles'))
        return query.filter_by(**kwargs).first()

    def find_role(self, role):
        """Return the role with the given name, or ``None``."""
        return self.role_model.query.filter_by(name=role).first()
class SQLAlchemySessionUserDatastore(SQLAlchemyUserDatastore,
                                     SQLAlchemyDatastore):
    """A SQLAlchemy datastore implementation for Flask-Security that assumes the
    use of the flask_sqlalchemy_session extension.
    """

    def __init__(self, session, user_model, role_model):
        class PretendFlaskSQLAlchemyDb(object):
            """ This is a pretend db object, so we can just pass in a session.
            """
            def __init__(self, session):
                self.session = session
        # Wrap the bare session so the parent class can keep using db.session.
        SQLAlchemyUserDatastore.__init__(self,
                                         PretendFlaskSQLAlchemyDb(session),
                                         user_model,
                                         role_model)

    def commit(self):
        # Old flask-sqlalchemy adds this weird attribute for tracking
        # to Session. flask-sqlalchemy 2.0 does things more nicely.
        try:
            super(SQLAlchemySessionUserDatastore, self).commit()
        except AttributeError:
            import sqlalchemy
            # Patch in the attribute old flask-sqlalchemy expects, then retry.
            sqlalchemy.orm.Session._model_changes = {}
            super(SQLAlchemySessionUserDatastore, self).commit()
class MongoEngineUserDatastore(MongoEngineDatastore, UserDatastore):
    """A MongoEngine datastore implementation for Flask-Security that assumes
    the use of the Flask-MongoEngine extension.
    """

    def __init__(self, db, user_model, role_model):
        MongoEngineDatastore.__init__(self, db)
        UserDatastore.__init__(self, user_model, role_model)

    def get_user(self, identifier):
        """Return the user matching *identifier* (document id or identity
        attribute such as the email address), or ``None``."""
        from mongoengine import ValidationError
        try:
            return self.user_model.objects(id=identifier).first()
        except (ValidationError, ValueError):
            # identifier is not a valid document id; fall through to the
            # identity attributes below.
            pass
        for attr in get_identity_attributes():
            query_key = '%s__iexact' % attr  # case-insensitive exact match
            query = {query_key: identifier}
            rv = self.user_model.objects(**query).first()
            if rv is not None:
                return rv

    def find_user(self, **kwargs):
        """Return the first user matching all given field filters, or None."""
        try:
            from mongoengine.queryset import Q, QCombination
        except ImportError:
            # Import location moved in newer mongoengine releases.
            from mongoengine.queryset.visitor import Q, QCombination
        from mongoengine.errors import ValidationError
        # AND together one Q per keyword filter.
        queries = map(lambda i: Q(**{i[0]: i[1]}), kwargs.items())
        query = QCombination(QCombination.AND, queries)
        try:
            return self.user_model.objects(query).first()
        except ValidationError:  # pragma: no cover
            return None

    def find_role(self, role):
        """Return the role with the given name, or ``None``."""
        return self.role_model.objects(name=role).first()

    # TODO: Not sure why this was added but tests pass without it
    # def add_role_to_user(self, user, role):
    #     rv = super(MongoEngineUserDatastore, self).add_role_to_user(
    #         user, role)
    #     if rv:
    #         self.put(user)
    #     return rv
class PeeweeUserDatastore(PeeweeDatastore, UserDatastore):
    """A PeeweeD datastore implementation for Flask-Security that assumes
    the use of the Flask-Peewee extension.

    :param user_model: A user model class definition
    :param role_model: A role model class definition
    :param role_link: A model implementing the many-to-many user-role relation
    """

    def __init__(self, db, user_model, role_model, role_link):
        PeeweeDatastore.__init__(self, db)
        UserDatastore.__init__(self, user_model, role_model)
        # Association model for the user <-> role many-to-many relation.
        self.UserRole = role_link

    def get_user(self, identifier):
        """Return the user matching *identifier* (primary key or identity
        attribute such as the email address), or ``None``."""
        from peewee import fn as peeweeFn
        try:
            return self.user_model.get(self.user_model.id == identifier)
        except ValueError:
            # identifier is not a valid primary-key value; try the identity
            # attributes below instead.
            pass
        for attr in get_identity_attributes():
            column = getattr(self.user_model, attr)
            try:
                # Case-insensitive comparison against the identity column.
                return self.user_model.get(
                    peeweeFn.Lower(column) == peeweeFn.Lower(identifier))
            except self.user_model.DoesNotExist:
                pass

    def find_user(self, **kwargs):
        """Return the user matching the given filters, or ``None``."""
        try:
            return self.user_model.filter(**kwargs).get()
        except self.user_model.DoesNotExist:
            return None

    def find_role(self, role):
        """Return the role with the given name, or ``None``."""
        try:
            return self.role_model.filter(name=role).get()
        except self.role_model.DoesNotExist:
            return None

    def create_user(self, **kwargs):
        """Creates and returns a new user from the given parameters."""
        # Roles are attached through the link table, so they must not be
        # passed to the model constructor directly.
        roles = kwargs.pop('roles', [])
        user = self.user_model(**self._prepare_create_user_args(**kwargs))
        user = self.put(user)
        for role in roles:
            self.add_role_to_user(user, role)
        self.put(user)
        return user

    def add_role_to_user(self, user, role):
        """Adds a role to a user.

        :param user: The user to manipulate
        :param role: The role to add to the user
        """
        user, role = self._prepare_role_modify_args(user, role)
        result = self.UserRole.select().where(
            self.UserRole.user == user.id,
            self.UserRole.role == role.id,
        )
        if result.count():
            # Link row already exists; nothing to do.
            return False
        else:
            self.put(self.UserRole.create(user=user.id, role=role.id))
            return True

    def remove_role_from_user(self, user, role):
        """Removes a role from a user.

        :param user: The user to manipulate
        :param role: The role to remove from the user
        """
        user, role = self._prepare_role_modify_args(user, role)
        result = self.UserRole.select().where(
            self.UserRole.user == user,
            self.UserRole.role == role,
        )
        if result.count():
            query = self.UserRole.delete().where(
                self.UserRole.user == user, self.UserRole.role == role)
            query.execute()
            return True
        else:
            return False
class PonyUserDatastore(PonyDatastore, UserDatastore):
    """A Pony ORM datastore implementation for Flask-Security.

    Code primarily from https://github.com/ET-CS but taken over after
    being abandoned.
    """

    def __init__(self, db, user_model, role_model):
        PonyDatastore.__init__(self, db)
        UserDatastore.__init__(self, user_model, role_model)

    @with_pony_session
    def get_user(self, identifier):
        """Return the user matching *identifier* (primary key or identity
        attribute such as the email address), or ``None``."""
        if self._is_numeric(identifier):
            return self.user_model[identifier]
        for attr in get_identity_attributes():
            # this is a nightmare, tl;dr we need to get the thing that
            # corresponds to email (usually)
            user = self.user_model.get(**{attr: identifier})
            if user is not None:
                return user

    def _is_numeric(self, value):
        # True when value can be interpreted as an integer primary key.
        # Also catch TypeError (e.g. ``int(None)``) so a non-numeric,
        # non-string identifier returns False instead of crashing get_user;
        # this matches SQLAlchemyUserDatastore._is_numeric.
        try:
            int(value)
        except (TypeError, ValueError):
            return False
        return True

    @with_pony_session
    def find_user(self, **kwargs):
        """Return the user matching the given filters, or ``None``."""
        return self.user_model.get(**kwargs)

    @with_pony_session
    def find_role(self, role):
        """Return the role with the given name, or ``None``."""
        return self.role_model.get(name=role)

    @with_pony_session
    def add_role_to_user(self, *args, **kwargs):
        return super(PonyUserDatastore, self).add_role_to_user(*args, **kwargs)

    @with_pony_session
    def create_user(self, **kwargs):
        return super(PonyUserDatastore, self).create_user(**kwargs)

    @with_pony_session
    def create_role(self, **kwargs):
        return super(PonyUserDatastore, self).create_role(**kwargs)
| mit |
godfather1103/WeiboRobot | python27/1.0/lib/encodings/cp1256.py | 593 | 13070 | """ Python Character Mapping Codec cp1256 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1256.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp1256 codec; both directions are plain charmap lookups."""

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; charmap encoding keeps no state between calls."""

    def encode(self, input, final=False):
        # charmap_encode returns (output, length); only the output is needed.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; charmap decoding keeps no state between calls."""

    def decode(self, input, final=False):
        # charmap_decode returns (output, length); only the output is needed.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer for cp1256; behaviour comes entirely from the bases."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader for cp1256; behaviour comes entirely from the bases."""
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used to register this module as codec 'cp1256'."""
    return codecs.CodecInfo(
        name='cp1256',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u20ac' # 0x80 -> EURO SIGN
u'\u067e' # 0x81 -> ARABIC LETTER PEH
u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
u'\u2020' # 0x86 -> DAGGER
u'\u2021' # 0x87 -> DOUBLE DAGGER
u'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u2030' # 0x89 -> PER MILLE SIGN
u'\u0679' # 0x8A -> ARABIC LETTER TTEH
u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE
u'\u0686' # 0x8D -> ARABIC LETTER TCHEH
u'\u0698' # 0x8E -> ARABIC LETTER JEH
u'\u0688' # 0x8F -> ARABIC LETTER DDAL
u'\u06af' # 0x90 -> ARABIC LETTER GAF
u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
u'\u2022' # 0x95 -> BULLET
u'\u2013' # 0x96 -> EN DASH
u'\u2014' # 0x97 -> EM DASH
u'\u06a9' # 0x98 -> ARABIC LETTER KEHEH
u'\u2122' # 0x99 -> TRADE MARK SIGN
u'\u0691' # 0x9A -> ARABIC LETTER RREH
u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE
u'\u200c' # 0x9D -> ZERO WIDTH NON-JOINER
u'\u200d' # 0x9E -> ZERO WIDTH JOINER
u'\u06ba' # 0x9F -> ARABIC LETTER NOON GHUNNA
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u060c' # 0xA1 -> ARABIC COMMA
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u06be' # 0xAA -> ARABIC LETTER HEH DOACHASHMEE
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xb8' # 0xB8 -> CEDILLA
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\u061b' # 0xBA -> ARABIC SEMICOLON
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\u061f' # 0xBF -> ARABIC QUESTION MARK
u'\u06c1' # 0xC0 -> ARABIC LETTER HEH GOAL
u'\u0621' # 0xC1 -> ARABIC LETTER HAMZA
u'\u0622' # 0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
u'\u0623' # 0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
u'\u0624' # 0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
u'\u0625' # 0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
u'\u0626' # 0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
u'\u0627' # 0xC7 -> ARABIC LETTER ALEF
u'\u0628' # 0xC8 -> ARABIC LETTER BEH
u'\u0629' # 0xC9 -> ARABIC LETTER TEH MARBUTA
u'\u062a' # 0xCA -> ARABIC LETTER TEH
u'\u062b' # 0xCB -> ARABIC LETTER THEH
u'\u062c' # 0xCC -> ARABIC LETTER JEEM
u'\u062d' # 0xCD -> ARABIC LETTER HAH
u'\u062e' # 0xCE -> ARABIC LETTER KHAH
u'\u062f' # 0xCF -> ARABIC LETTER DAL
u'\u0630' # 0xD0 -> ARABIC LETTER THAL
u'\u0631' # 0xD1 -> ARABIC LETTER REH
u'\u0632' # 0xD2 -> ARABIC LETTER ZAIN
u'\u0633' # 0xD3 -> ARABIC LETTER SEEN
u'\u0634' # 0xD4 -> ARABIC LETTER SHEEN
u'\u0635' # 0xD5 -> ARABIC LETTER SAD
u'\u0636' # 0xD6 -> ARABIC LETTER DAD
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\u0637' # 0xD8 -> ARABIC LETTER TAH
u'\u0638' # 0xD9 -> ARABIC LETTER ZAH
u'\u0639' # 0xDA -> ARABIC LETTER AIN
u'\u063a' # 0xDB -> ARABIC LETTER GHAIN
u'\u0640' # 0xDC -> ARABIC TATWEEL
u'\u0641' # 0xDD -> ARABIC LETTER FEH
u'\u0642' # 0xDE -> ARABIC LETTER QAF
u'\u0643' # 0xDF -> ARABIC LETTER KAF
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\u0644' # 0xE1 -> ARABIC LETTER LAM
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\u0645' # 0xE3 -> ARABIC LETTER MEEM
u'\u0646' # 0xE4 -> ARABIC LETTER NOON
u'\u0647' # 0xE5 -> ARABIC LETTER HEH
u'\u0648' # 0xE6 -> ARABIC LETTER WAW
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\u0649' # 0xEC -> ARABIC LETTER ALEF MAKSURA
u'\u064a' # 0xED -> ARABIC LETTER YEH
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\u064b' # 0xF0 -> ARABIC FATHATAN
u'\u064c' # 0xF1 -> ARABIC DAMMATAN
u'\u064d' # 0xF2 -> ARABIC KASRATAN
u'\u064e' # 0xF3 -> ARABIC FATHA
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\u064f' # 0xF5 -> ARABIC DAMMA
u'\u0650' # 0xF6 -> ARABIC KASRA
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\u0651' # 0xF8 -> ARABIC SHADDA
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\u0652' # 0xFA -> ARABIC SUKUN
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u200e' # 0xFD -> LEFT-TO-RIGHT MARK
u'\u200f' # 0xFE -> RIGHT-TO-LEFT MARK
u'\u06d2' # 0xFF -> ARABIC LETTER YEH BARREE
)
### Encoding table
# Inverse mapping derived from decoding_table (unicode -> cp1256 byte).
encoding_table=codecs.charmap_build(decoding_table)
| gpl-3.0 |
google/makani | config/m600/tether.py | 1 | 1922 | # Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tether parameters."""
from makani.config import mconfig
from makani.control import system_types
@mconfig.Config(deps={
    'flight_plan': 'common.flight_plan',
    'gs_model': 'base_station.gs_model',
})
def MakeParams(params):
  """Builds the tether parameter dictionary for the given configuration."""
  short_tether = (
      params['gs_model'] == system_types.kGroundStationModelGSv2
      and params['flight_plan'] == system_types.kFlightPlanHoverInPlace)
  # Use a short tether for hover-in-place on GSv2; otherwise the full-length
  # flight tether.
  length = 80.0 if short_tether else 425.8

  # The properties below describe tether FC1-02, installed for RPX-08; see
  # b/70513834 for the underlying references.
  return {
      # Unloaded tether length [m].
      'length': length,
      # Mass per unit length [kg/m].
      'linear_density': 0.917,
      # Outer diameter [m] of the tether.
      'outer_diameter': 0.0294,
      # Tensile stiffness EA [N] of the tether core.
      'tensile_stiffness': 18e6,
      # Bending stiffness EI [N*m**2].
      'bending_stiffness': 35.0,
      # Cross-sectional drag coefficient [#].
      'section_drag_coeff': 0.7,
      # Distance [m] from the GSG elevation pin to the tether termination pin
      # on the top hat.
      #
      # Eli notes that the distance from the elevation pin to the nose of the
      # GSG is more like 1.2m, which is roughly the point at which the tether
      # can start bending.
      'gsg_ele_to_termination': 0.712,
  }
| apache-2.0 |
benbox69/pyload | module/plugins/hoster/SmoozedCom.py | 12 | 2392 | # -*- coding: utf-8 -*-
from module.common.json_layer import json_loads
from module.plugins.internal.MultiHoster import MultiHoster
class SmoozedCom(MultiHoster):
    """Multi-hoster plugin for the smoozed.com premium link service."""

    __name__    = "SmoozedCom"
    __type__    = "hoster"
    __version__ = "0.08"
    __status__  = "testing"

    #: Matches nothing on purpose: the user configures the hoster list and
    #: MultiHoster.activate decides which links this plugin handles.
    __pattern__ = r'^unmatchable$'
    __config__  = [("use_premium" , "bool", "Use premium account if available" , True),
                   ("revertfailed", "bool", "Revert to standard download if fails", True)]

    __description__ = """Smoozed.com hoster plugin"""
    __license__     = "GPLv3"
    __authors__     = [("", "")]

    FILE_ERRORS = [("Error", r'{"state":"error"}'),
                   ("Retry", r'{"state":"retry"}')]

    def handle_free(self, pyfile):
        """Check the link against the smoozed API and start the download.

        Fails the download on API errors; marks the file offline when the
        API reports it as such.
        """
        #: In some cases hosters do not supply us with a filename at download,
        #: so set a fall-back filename (e.g. for freakshare or xfileshare).
        pyfile.name = pyfile.name.split('/').pop()  #: Remove everything before last slash

        #: Correction for automatically assigned filename: strip a trailing
        #: html-like suffix if present.
        suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
        temp = pyfile.name.split('.')

        if temp.pop() in suffix_to_remove:
            pyfile.name = ".".join(temp)

        #: Validate the link via the API.
        get_data = {'session_key': self.account.get_data(self.user)['session'],
                    'url'        : pyfile.url}

        data = json_loads(self.load("http://www2.smoozed.com/api/check", get=get_data))

        if data['state'] != "ok":
            self.fail(data['message'])

        if data['data'].get("state", "ok") != "ok":
            if data['data'] == "Offline":
                self.offline()
            else:
                self.fail(data['data']['message'])

        pyfile.name = data['data']['name']
        pyfile.size = int(data['data']['size'])

        #: Start the download; the redirect target is in the Location header.
        header = self.load("http://www2.smoozed.com/api/download", get=get_data, just_header=True)

        # Fixed idiom: `"location" not in header` instead of `not "location" in header`.
        if "location" not in header:
            self.fail(_("Unable to initialize download"))
        else:
            self.link = header['location'][-1] if isinstance(header['location'], list) else header['location']
| gpl-3.0 |
fenceFoil/canopto | tween_test_pygame.py | 1 | 4093 | #!/usr/bin/python
# M.E.Farmer 2013
# demo for tween library
# showing integration with PyGame
# moves text from random points using various tweens
# changes from random color to random color using the same tween
# Mouse click rotates through tweens and ESC closes demo
import sys
import pygame
import random
import tween
# higher number equal slower transitions
# stall/fps = seconds per transition
stall=offset = 60
FPS = 60
BACKGROUND_COLOR = (0,0,0)
size = width, height = (800,600)
text_pos = (0,0)
text_color = (0,128,0)
tweens = [
(tween.easeLinear,"easeLinear"),
(tween.easeInQuad,"easeInQuad"),
(tween.easeInOutQuad,"easeInOutQuad"),
(tween.easeOutQuad,"easeOutQuad"),
(tween.easeInCubic,"easeInCubic"),
(tween.easeInOutCubic,"easeInOutCubic"),
(tween.easeOutCubic,"easeOutCubic"),
(tween.easeInQuartic,"easeInQuartic"),
(tween.easeInOutQuartic,"easeInOutQuartic"),
(tween.easeOutQuartic,"easeOutQuartic"),
(tween.easeInQuintic,"easeInQuintic"),
(tween.easeInOutQuintic,"easeInOutQuintic"),
(tween.easeOutQuintic,"easeOutQuintic"),
(tween.easeInSine,"easeInSine"),
(tween.easeInOutSine,"easeInOutSine"),
(tween.easeOutSine,"easeOutSine"),
(tween.easeInExpo,"easeInExpo"),
(tween.easeInOutExpo,"easeInOutExpo"),
(tween.easeOutExpo,"easeOutExpo"),
(tween.easeInCirc,"easeInCirc"),
(tween.easeInOutCirc,"easeInOutCirc"),
(tween.easeOutCirc,"easeOutCirc"),
(tween.easeInElasticBig,"easeInElasticBig"),
(tween.easeOutElasticBig,"easeOutElasticBig"),
(tween.easeInElasticSmall,"easeInElasticSmall"),
(tween.easeOutElasticSmall,"easeOutElasticSmall"),
(tween.easeLoop,"easeLoop"),
(tween.easeInchWorm,"easeInchWorm"),
(tween.customTween(
"b+c*(26.65*tc*ts + -91.5925*ts*ts + 115.285*tc + -62.89*ts + 13.5475*t)"),
"customTween")
]
# setup the intial tween
tween_index = 0
ease_func,text_displayed = tweens[tween_index]
pygame.init()
screen = pygame.display.set_mode(size,pygame.FULLSCREEN)
FPSTICKER = pygame.time.Clock()
font = pygame.font.SysFont("comicsansms",65)
text = font.render(text_displayed, True, text_color)
while True:
for event in pygame.event.get():
if event.type == pygame.MOUSEBUTTONDOWN:
if tween_index == len(tweens)-1:
tween_index=0
else:
tween_index+=1
ease_func,text_displayed = tweens[tween_index]
# set our stall counter to change the tween on next check
stall = offset
elif event.type == pygame.QUIT or (event.type == pygame.KEYDOWN
and event.key == pygame.K_ESCAPE):
sys.exit()
screen.fill(BACKGROUND_COLOR)
# the pygame clock runs faster than we want to update
# our tweens so we just stall for a few cycles then
# update and reset our counter
stall+=1
if stall >= offset:
stall=0
old_pos = text_pos
text_pos = (random.randint(1,width),random.randint(1,height))
# set a new tween function for the coordinates
xy_out = tween.xyTween(ease_func,old_pos,text_pos,offset,False,True)
##x_out = tween.tween(tween.easeLoop,old_pos[0],text_pos[0],offset,False,True)
##y_out = tween.tween(tween.easeInElasticSmall,old_pos[1],text_pos[1],offset,False,True)
old_color = text_color
text_color = (random.randint(1,255),random.randint(1,255),random.randint(1,255))
# set a new tween function for the text colors
color_out = tween.colorTween(ease_func,old_color,text_color,offset,False,True)
# every frame we just call .next() and the tween does the work
text = font.render(text_displayed, True, (color_out.next()))
screen.blit(text, xy_out.next())
##screen.blit(text, (x_out.next(),y_out.next()))
pygame.display.flip()
FPSTICKER.tick(FPS)
| bsd-3-clause |
scottcunningham/ansible | test/units/parsing/test_splitter.py | 204 | 4425 | # coding: utf-8
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from nose import tools
from ansible.compat.tests import unittest
from ansible.parsing.splitter import split_args, parse_kv
# Tests using nose's test generators cannot use unittest base class.
# http://nose.readthedocs.org/en/latest/writing_tests.html#test-generators
class TestSplitter_Gen:
    """Nose generator tests for split_args()/parse_kv().

    Each SPLIT_DATA entry is (input string, expected split_args() result,
    expected parse_kv() result).
    """
    SPLIT_DATA = (
        (u'a',
            [u'a'],
            {u'_raw_params': u'a'}),
        (u'a=b',
            [u'a=b'],
            {u'a': u'b'}),
        (u'a="foo bar"',
            [u'a="foo bar"'],
            {u'a': u'foo bar'}),
        (u'"foo bar baz"',
            [u'"foo bar baz"'],
            {u'_raw_params': '"foo bar baz"'}),
        (u'foo bar baz',
            [u'foo', u'bar', u'baz'],
            {u'_raw_params': u'foo bar baz'}),
        (u'a=b c="foo bar"',
            [u'a=b', u'c="foo bar"'],
            {u'a': u'b', u'c': u'foo bar'}),
        (u'a="echo \\"hello world\\"" b=bar',
            [u'a="echo \\"hello world\\""', u'b=bar'],
            {u'a': u'echo "hello world"', u'b': u'bar'}),
        (u'a="multi\nline"',
            [u'a="multi\nline"'],
            {u'a': u'multi\nline'}),
        (u'a="blank\n\nline"',
            [u'a="blank\n\nline"'],
            {u'a': u'blank\n\nline'}),
        (u'a="blank\n\n\nlines"',
            [u'a="blank\n\n\nlines"'],
            {u'a': u'blank\n\n\nlines'}),
        (u'a="a long\nmessage\\\nabout a thing\n"',
            [u'a="a long\nmessage\\\nabout a thing\n"'],
            {u'a': u'a long\nmessage\\\nabout a thing\n'}),
        (u'a="multiline\nmessage1\\\n" b="multiline\nmessage2\\\n"',
            [u'a="multiline\nmessage1\\\n"', u'b="multiline\nmessage2\\\n"'],
            {u'a': 'multiline\nmessage1\\\n', u'b': u'multiline\nmessage2\\\n'}),
        (u'a={{jinja}}',
            [u'a={{jinja}}'],
            {u'a': u'{{jinja}}'}),
        (u'a={{ jinja }}',
            [u'a={{ jinja }}'],
            {u'a': u'{{ jinja }}'}),
        (u'a="{{jinja}}"',
            [u'a="{{jinja}}"'],
            {u'a': u'{{jinja}}'}),
        (u'a={{ jinja }}{{jinja2}}',
            [u'a={{ jinja }}{{jinja2}}'],
            {u'a': u'{{ jinja }}{{jinja2}}'}),
        (u'a="{{ jinja }}{{jinja2}}"',
            [u'a="{{ jinja }}{{jinja2}}"'],
            {u'a': u'{{ jinja }}{{jinja2}}'}),
        (u'a={{jinja}} b={{jinja2}}',
            [u'a={{jinja}}', u'b={{jinja2}}'],
            {u'a': u'{{jinja}}', u'b': u'{{jinja2}}'}),
        (u'a="{{jinja}}\n" b="{{jinja2}}\n"',
            [u'a="{{jinja}}\n"', u'b="{{jinja2}}\n"'],
            {u'a': u'{{jinja}}\n', u'b': u'{{jinja2}}\n'}),
        (u'a="café eñyei"',
            [u'a="café eñyei"'],
            {u'a': u'café eñyei'}),
        (u'a=café b=eñyei',
            [u'a=café', u'b=eñyei'],
            {u'a': u'café', u'b': u'eñyei'}),
        )

    def check_split_args(self, args, expected):
        tools.eq_(split_args(args), expected)

    def test_split_args(self):
        # Generator test: nose calls check_split_args once per datapoint.
        for datapoint in self.SPLIT_DATA:
            yield self.check_split_args, datapoint[0], datapoint[1]

    def check_parse_kv(self, args, expected):
        tools.eq_(parse_kv(args), expected)

    def test_parse_kv(self):
        # Fixed: the previous bare `try/except: pass` around the yield
        # silently swallowed any exception (including GeneratorExit thrown
        # into a closing generator), which could hide test failures and
        # trigger a RuntimeError on generator close. Mirror test_split_args.
        for datapoint in self.SPLIT_DATA:
            yield self.check_parse_kv, datapoint[0], datapoint[2]
| gpl-3.0 |
F4k/android_kernel_samsung_msm8930-common | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more
human-readable view of the call stack by drawing a textual but hierarchical
tree of calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
	Wait some time, but not too long; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
	""" This class provides a tree representation of the functions
	call stack. If a function has no parent in the kernel (interrupt,
	syscall, kernel thread...) then it is attached to a virtual parent
	called ROOT.
	"""
	# Shared virtual root node; set up once in main().
	ROOT = None

	def __init__(self, func, time = None, parent = None):
		# func: function name; time: call timestamp string (None for
		# synthetic nodes such as ROOT); parent: caller node, or None to
		# attach this node directly under ROOT.
		self._func = func
		self._time = time
		if parent is None:
			self._parent = CallTree.ROOT
		else:
			self._parent = parent
		self._children = []

	def calls(self, func, calltime):
		""" If a function calls another one, call this method to insert it
		into the tree at the appropriate place.
		@return: A reference to the newly created child node.
		"""
		child = CallTree(func, calltime, self)
		self._children.append(child)
		return child

	def getParent(self, func):
		""" Retrieve the last parent of the current node that
		has the name given by func. If this function is not
		on a parent, then create it as new child of root
		@return: A reference to the parent.
		"""
		tree = self
		# Walk up through the ancestors until a node named func is found.
		while tree != CallTree.ROOT and tree._func != func:
			tree = tree._parent
		if tree == CallTree.ROOT:
			child = CallTree.ROOT.calls(func, None)
			return child
		return tree

	def __repr__(self):
		return self.__toString("", True)

	def __toString(self, branch, lastChild):
		# Recursively render this node and its subtree as an ASCII tree.
		# branch accumulates the indentation prefix; lastChild controls
		# whether the vertical connector continues below this node.
		if self._time is not None:
			s = "%s----%s (%s)\n" % (branch, self._func, self._time)
		else:
			s = "%s----%s\n" % (branch, self._func)
		i = 0
		if lastChild:
			branch = branch[:-1] + " "
		while i < len(self._children):
			if i != len(self._children) - 1:
				s += "%s" % self._children[i].__toString(branch +\
								" |", False)
			else:
				s += "%s" % self._children[i].__toString(branch +\
								" |", True)
			i += 1
		return s
class BrokenLineException(Exception):
	"""Raised for a trace line truncated by the pipe breakage; processing
	stops and the incomplete line is ignored."""
class CommentLineException(Exception):
	"""Raised for comment lines (such as the trace file header); the line
	is simply skipped."""
def parseLine(line):
	"""Split one function-tracer line into (timestamp, callee, caller).

	Raises CommentLineException for comment lines and BrokenLineException
	for lines that do not match the expected format.
	"""
	stripped = line.strip()
	if stripped.startswith("#"):
		raise CommentLineException
	match = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", stripped)
	if match is None:
		raise BrokenLineException
	return match.group(1), match.group(2), match.group(3)
def main():
	"""Read a raw function trace from stdin, build the call tree and print
	it. (Python 2 only: uses the print statement.)"""
	CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
	tree = CallTree.ROOT
	for line in sys.stdin:
		try:
			calltime, callee, caller = parseLine(line)
		except BrokenLineException:
			# Truncated last line (broken pipe): stop processing.
			break
		except CommentLineException:
			continue
		# Attach the callee under the nearest ancestor named caller.
		tree = tree.getParent(caller)
		tree = tree.calls(callee, calltime)
	print CallTree.ROOT
if __name__ == "__main__":
	main()
| gpl-2.0 |
crobby/sahara | sahara/tests/unit/utils/test_neutron.py | 5 | 3171 | # Copyright (c) 2013 Hortonworks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.tests.unit import base
from sahara.utils.openstack import neutron as neutron_client
class NeutronClientTest(base.SaharaTestCase):
    # Verifies that NeutronClient.get_router() returns the id of the router
    # attached to the given network, using the canned FakeNeutronClient below.
    @mock.patch("sahara.utils.openstack.keystone.token_auth")
    @mock.patch("neutronclient.neutron.client.Client")
    def test_get_router(self, patched, token_auth):
        # Replace the real neutron client factory with the fake one.
        patched.side_effect = _test_get_neutron_client
        neutron = neutron_client.NeutronClient(
            '33b47310-b7a8-4559-bf95-45ba669a448e', None, None)
        self.assertEqual('6c4d4e32-3667-4cd4-84ea-4cc1e98d18be',
                         neutron.get_router())
def _test_get_neutron_client(api_version, *args, **kwargs):
    # Factory used as a mock side_effect; ignores all arguments and returns
    # the canned fake client below.
    return FakeNeutronClient()
class FakeNeutronClient(object):
    """Canned neutron client returning fixed router and port listings."""

    def list_routers(self):
        """Return a single ACTIVE router, mimicking the real client."""
        router = {
            "status": "ACTIVE",
            "external_gateway_info": {
                "network_id": "61f95d3f-495e-4409-8c29-0b806283c81e"},
            "name": "router1",
            "admin_state_up": True,
            "tenant_id": "903809ded3434f8d89948ee71ca9f5bb",
            "routes": [],
            "id": "6c4d4e32-3667-4cd4-84ea-4cc1e98d18be",
        }
        return {"routers": [router]}

    def list_ports(self, device_id=None):
        """Return two ACTIVE ports: a VM port and a router interface."""
        vm_port = {
            "status": "ACTIVE",
            "name": "",
            "admin_state_up": True,
            "network_id": "33b47310-b7a8-4559-bf95-45ba669a448e",
            "tenant_id": "903809ded3434f8d89948ee71ca9f5bb",
            "binding:vif_type": "ovs",
            "device_owner": "compute:None",
            "binding:capabilities": {"port_filter": True},
            "mac_address": "fa:16:3e:69:25:1c",
            "fixed_ips": [
                {"subnet_id": "bfa9d0a1-9efb-4bff-bd2b-c103c053560f",
                 "ip_address": "10.0.0.8"}],
            "id": "0f3df685-bc55-4314-9b76-835e1767b78f",
            "security_groups": ["f9fee2a2-bb0b-44e4-8092-93a43dc45cda"],
            "device_id": "c2129c18-6707-4f07-94cf-00b2fef8eea7",
        }
        router_port = {
            "status": "ACTIVE",
            "name": "",
            "admin_state_up": True,
            "network_id": "33b47310-b7a8-4559-bf95-45ba669a448e",
            "tenant_id": "903809ded3434f8d89948ee71ca9f5bb",
            "binding:vif_type": "ovs",
            "device_owner": "network:router_interface",
            "binding:capabilities": {"port_filter": True},
            "mac_address": "fa:16:3e:c5:b0:cb",
            "fixed_ips": [
                {"subnet_id": "bfa9d0a1-9efb-4bff-bd2b-c103c053560f",
                 "ip_address": "10.0.0.1"}],
            "id": "27193ae1-142a-436c-ab41-c77b1df032a1",
            "security_groups": [],
            "device_id": "6c4d4e32-3667-4cd4-84ea-4cc1e98d18be",
        }
        return {"ports": [vm_port, router_port]}
| apache-2.0 |
haddocking/haddocking.github.io | _utilities/update-publications.py | 5 | 3075 | #!/usr/bin/env python
"""
Downloads Alexandre's pub list from his website. Converts the HTML to markdown.
"""
import urllib2
import re
year_re = re.compile('\(\d\d\d\d\)')
BOSS_URL = 'http://www.nmr.chem.uu.nl/~abonvin/publications.html'
# boss_page = urllib2.urlopen(BOSS_URL)
boss_page = open('bonvin-pubs.html')
boss_info = boss_page.readlines()
citations = []
parse = False
while boss_info:
line = boss_info.pop(0).strip().replace('<BR>', '<br>')
if line.strip().lower().startswith('<ol'):
parse = True
elif line.strip().lower().startswith('</ol>'):
break
elif parse:
# Read multiple lines until <P> is found.
# Effectively, whole citation
while '</li>' not in line.lower():
line += " " + boss_info.pop(0).strip().replace('<BR>', '<br>')
# Do not trust HTML.. messy
# Follow boss conventions
# Titles are always between double quotes
# everything before is author list
# everything after is journal / volume / etc
# journal names might come between EM, volumes in <strong>.
record = line.split('<br>')
# 1st line, authors
authors = ''
for line in record:
if line.strip()[0] == '"' or 'href' in line.lower():
break
authors += line.strip() + ' '
ori_authors = authors
authors = authors[4:].strip()
authors = authors.replace('*', '\*').replace('<font color="#333333"><b>', '**')
authors = authors.replace('</b></font>', '**')
# 2nd line, title/url
title_url = record[1].strip()
if title_url.lower().startswith('"<a href='): # has url
sta_pos = title_url.lower().rindex('"<a href="') + 10
sto_pos = title_url.rindex('">')
url = title_url[sta_pos:sto_pos]
title = title_url[sto_pos+2:-5]
else:
url = None
title = title_url[1:-1]
# 3rd line, Journal, etc
citation = ' '.join(record[2:]).replace('</li>', '').strip()
citation = citation.replace('<em>', '_').replace('</em>', '_')
citation = citation.replace('*', '\*').replace('<strong>', '*').replace('</strong>', '*')
# Year is in citation, between parenthesis
year = year_re.search(citation)
if year:
year = year.group(0)[1:-1]
citations.append({})
citations[-1]['authors'] = authors
citations[-1]['title'] = title
citations[-1]['url'] = url
citations[-1]['citation'] = citation
citations[-1]['year'] = year
# Output markdown style
prev_y = None
for i, entry in enumerate(citations):
if entry['year']:
if prev_y != entry['year']:
print "\n## {0}\n<hr />\n".format(entry['year'])
prev_y = entry['year']
print "* {1}".format(str(i+1), entry['authors'])
if entry['url']:
print "[{0}]({1})".format(entry['title'], entry['url'])
else:
print entry['title']
print entry['citation']
print
| mit |
gangadharkadam/saloon_erp | erpnext/setup/doctype/company/company.py | 3 | 9857 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
from frappe.utils import cint
import frappe.defaults
from frappe.model.document import Document
class Company(Document):
	"""DocType controller for Company.

	Validates abbreviation/default accounts/currency on save, and bootstraps
	the chart of accounts, default warehouses and cost centers the first
	time the company is saved.
	"""

	def onload(self):
		# Tell the client whether submitted transactions exist, so fields
		# such as the default currency can be locked in the UI.
		self.get("__onload").transactions_exist = self.check_if_transactions_exist()

	def check_if_transactions_exist(self):
		# True if any submitted (docstatus=1) document of the listed
		# doctypes references this company.
		exists = False
		for doctype in ["Sales Invoice", "Delivery Note", "Sales Order", "Quotation",
			"Purchase Invoice", "Purchase Receipt", "Purchase Order", "Supplier Quotation"]:
				if frappe.db.sql("""select name from `tab%s` where company=%s and docstatus=1
					limit 1""" % (doctype, "%s"), self.name):
						exists = True
						break

		return exists

	def validate(self):
		# Abbreviation: mandatory, at most 5 characters for new companies.
		self.abbr = self.abbr.strip()
		if self.get('__islocal') and len(self.abbr) > 5:
			frappe.throw(_("Abbreviation cannot have more than 5 characters"))

		if not self.abbr.strip():
			frappe.throw(_("Abbreviation is mandatory"))

		self.validate_default_accounts()
		self.validate_currency()

	def validate_default_accounts(self):
		# Every configured default account must belong to this company.
		for field in ["default_bank_account", "default_cash_account", "default_receivable_account", "default_payable_account",
			"default_expense_account", "default_income_account", "stock_received_but_not_billed",
			"stock_adjustment_account", "expenses_included_in_valuation"]:
				if self.get(field):
					for_company = frappe.db.get_value("Account", self.get(field), "company")
					if for_company != self.name:
						frappe.throw(_("Account {0} does not belong to company: {1}")
							.format(self.get(field), self.name))

	def validate_currency(self):
		# Default currency cannot change once submitted transactions exist.
		self.previous_default_currency = frappe.db.get_value("Company", self.name, "default_currency")
		if self.default_currency and self.previous_default_currency and \
			self.default_currency != self.previous_default_currency and \
			self.check_if_transactions_exist():
				frappe.throw(_("Cannot change company's default currency, because there are existing transactions. Transactions must be cancelled to change the default currency."))

	def on_update(self):
		# First save: bootstrap accounts, warehouses and country fixtures.
		if not frappe.db.sql("""select name from tabAccount
				where company=%s and docstatus<2 limit 1""", self.name):
			self.create_default_accounts()
			self.create_default_warehouses()
			self.install_country_fixtures()

		if not frappe.db.get_value("Cost Center", {"is_group": 0, "company": self.name}):
			self.create_default_cost_center()

		self.set_default_accounts()
		if self.default_currency:
			frappe.db.set_value("Currency", self.default_currency, "enabled", 1)

		frappe.clear_cache()
		# frappe.db.sql("update `tabCompany` set company = '%s' where company_name = '%s' "%(self.name, self.name))

	def install_country_fixtures(self):
		# Run the country-specific install hook if a fixtures module exists
		# for this company's country.
		path = os.path.join(os.path.dirname(__file__), "fixtures", self.country.lower())
		if os.path.exists(path.encode("utf-8")):
			frappe.get_attr("erpnext.setup.doctype.company.fixtures.{0}.install".format(self.country.lower()))(self)

	def create_default_warehouses(self):
		# Create the three standard warehouses under the Stock account group.
		for whname in (_("Stores"), _("Work In Progress"), _("Finished Goods")):
			if not frappe.db.exists("Warehouse", whname + " - " + self.abbr):
				stock_group = frappe.db.get_value("Account", {"account_type": "Stock",
					"is_group": 1, "company": self.name})
				if stock_group:
					frappe.get_doc({
						"doctype":"Warehouse",
						"warehouse_name": whname,
						"company": self.name,
						"create_account_under": stock_group
					}).insert()

	def create_default_accounts(self):
		# Build the chart of accounts and pick default receivable/payable.
		if not self.chart_of_accounts:
			self.chart_of_accounts = "Standard"

		from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import create_charts
		create_charts(self.chart_of_accounts, self.name)

		frappe.db.set(self, "default_receivable_account", frappe.db.get_value("Account",
			{"company": self.name, "account_type": "Receivable"}))
		frappe.db.set(self, "default_payable_account", frappe.db.get_value("Account",
			{"company": self.name, "account_type": "Payable"}))

	def add_acc(self, lst):
		# Insert one Account row from a list of field values; fld_dict maps
		# fieldname -> index into lst.
		# NOTE(review): self.fld_dict is not set anywhere in this class --
		# presumably assigned by a caller before invoking add_acc; verify.
		account = frappe.get_doc({
			"doctype": "Account",
			"freeze_account": "No",
			"company": self.name
		})

		for d in self.fld_dict.keys():
			account.set(d, (d == 'parent_account' and lst[self.fld_dict[d]]) and lst[self.fld_dict[d]] +' - '+ self.abbr or lst[self.fld_dict[d]])

		if not account.parent_account:
			account.flags.ignore_mandatory = True

		account.insert()

	def set_default_accounts(self):
		# Fill any unset default accounts from accounts of matching type.
		self._set_default_account("default_cash_account", "Cash")
		self._set_default_account("default_bank_account", "Bank")
		self._set_default_account("round_off_account", "Round Off")

		if cint(frappe.db.get_single_value("Accounts Settings", "auto_accounting_for_stock")):
			self._set_default_account("stock_received_but_not_billed", "Stock Received But Not Billed")
			self._set_default_account("stock_adjustment_account", "Stock Adjustment")
			self._set_default_account("expenses_included_in_valuation", "Expenses Included In Valuation")
			self._set_default_account("default_expense_account", "Cost of Goods Sold")

		if not self.default_income_account:
			self.db_set("default_income_account", frappe.db.get_value("Account",
				{"account_name": _("Sales"), "company": self.name}))

	def _set_default_account(self, fieldname, account_type):
		# Set fieldname to the first leaf account of account_type, unless
		# the field already has a value.
		if self.get(fieldname):
			return

		account = frappe.db.get_value("Account", {"account_type": account_type,
			"is_group": 0, "company": self.name})

		if account:
			self.db_set(fieldname, account)

	def create_default_cost_center(self):
		# Create a root cost center named after the company with one child
		# ("Main"), and set the company defaults to "Main".
		cc_list = [
			{
				'cost_center_name': self.name,
				'company':self.name,
				'is_group': 1,
				'parent_cost_center':None
			},
			{
				'cost_center_name':_('Main'),
				'company':self.name,
				'is_group':0,
				'parent_cost_center':self.name + ' - ' + self.abbr
			},
		]
		for cc in cc_list:
			cc.update({"doctype": "Cost Center"})
			cc_doc = frappe.get_doc(cc)
			cc_doc.flags.ignore_permissions = True

			if cc.get("cost_center_name") == self.name:
				cc_doc.flags.ignore_mandatory = True
			cc_doc.insert()

		frappe.db.set(self, "cost_center", _("Main") + " - " + self.abbr)
		frappe.db.set(self, "round_off_cost_center", _("Main") + " - " + self.abbr)

	def before_rename(self, olddn, newdn, merge=False):
		if merge:
			frappe.throw(_("Sorry, companies cannot be merged"))

	def after_rename(self, olddn, newdn, merge=False):
		# Keep company_name and user defaults in sync with the new name.
		frappe.db.set(self, "company_name", newdn)

		frappe.db.sql("""update `tabDefaultValue` set defvalue=%s
			where defkey='Company' and defvalue=%s""", (newdn, olddn))

		frappe.defaults.clear_cache()

	def on_trash(self):
		"""
		Trash accounts and cost centers for this company if no gl entry exists
		"""
		accounts = frappe.db.sql_list("select name from tabAccount where company=%s", self.name)
		cost_centers = frappe.db.sql_list("select name from `tabCost Center` where company=%s", self.name)
		warehouses = frappe.db.sql_list("select name from tabWarehouse where company=%s", self.name)

		rec = frappe.db.sql("SELECT name from `tabGL Entry` where company = %s", self.name)
		if not rec:
			# delete Account
			frappe.db.sql("delete from `tabAccount` where company = %s", self.name)

			# delete cost center child table - budget detail
			frappe.db.sql("""delete bd.* from `tabBudget Detail` bd, `tabCost Center` cc
				where bd.parent = cc.name and cc.company = %s""", self.name)
			#delete cost center
			frappe.db.sql("delete from `tabCost Center` WHERE company = %s", self.name)

			# delete account from customer and supplier
			frappe.db.sql("delete from `tabParty Account` where company=%s", self.name)

		if not frappe.db.get_value("Stock Ledger Entry", {"company": self.name}):
			frappe.db.sql("""delete from `tabWarehouse` where company=%s""", self.name)

		frappe.defaults.clear_default("company", value=self.name)

		# clear default accounts, warehouses from item
		if warehouses:
			for f in ["default_warehouse", "website_warehouse"]:
				frappe.db.sql("""update tabItem set %s=NULL where %s in (%s)"""
					% (f, f, ', '.join(['%s']*len(warehouses))), tuple(warehouses))

			frappe.db.sql("""delete from `tabItem Reorder` where warehouse in (%s)"""
				% ', '.join(['%s']*len(warehouses)), tuple(warehouses))

		for f in ["income_account", "expense_account"]:
			frappe.db.sql("""update tabItem set %s=NULL where %s in (%s)"""
				% (f, f, ', '.join(['%s']*len(accounts))), tuple(accounts))

		for f in ["selling_cost_center", "buying_cost_center"]:
			frappe.db.sql("""update tabItem set %s=NULL where %s in (%s)"""
				% (f, f, ', '.join(['%s']*len(cost_centers))), tuple(cost_centers))

		# reset default company
		frappe.db.sql("""update `tabSingles` set value=""
			where doctype='Global Defaults' and field='default_company'
			and value=%s""", self.name)
@frappe.whitelist()
def replace_abbr(company, old, new):
	# Change a company's abbreviation and rename every Account, Cost Center
	# and Warehouse whose name ends with the old abbreviation.
	new = new.strip()
	if not new:
		frappe.throw(_("Abbr can not be blank or space"))

	frappe.only_for("System Manager")

	frappe.db.set_value("Company", company, "abbr", new)

	def _rename_record(dt):
		# Rename "<name> - <old abbr>" to "<name> - <new abbr>"; records
		# without a " - " suffix are also renamed to gain the new suffix.
		for d in frappe.db.sql("select name from `tab%s` where company=%s" % (dt, '%s'), company):
			parts = d[0].rsplit(" - ", 1)
			if len(parts) == 1 or parts[1].lower() == old.lower():
				frappe.rename_doc(dt, d[0], parts[0] + " - " + new)

	for dt in ["Account", "Cost Center", "Warehouse"]:
		_rename_record(dt)

	frappe.db.commit()
def get_name_with_abbr(name, company):
	"""Return *name* suffixed with the company abbreviation (" - ABBR"),
	unless it already ends with it (case-insensitive)."""
	abbr = frappe.db.get_value("Company", company, "abbr")
	segments = name.split(" - ")
	already_suffixed = segments[-1].lower() == abbr.lower()
	if not already_suffixed:
		segments.append(abbr)
	return " - ".join(segments)
def get_company_currency(company):
	# Cached per-company lookup of the default currency; the lambda is only
	# evaluated on a cache miss.
	return frappe.local_cache("company_currency", company,
		lambda: frappe.db.get_value("Company", company, "default_currency"))
| agpl-3.0 |
alexakarpov/python-practice | primes.py | 1 | 1815 | #!/usr/bin/env python3
from math import sqrt, ceil
import unittest
DEFAULT_NUM_OF_PRIMES = 10


def get_n_primes(n=DEFAULT_NUM_OF_PRIMES):
    """Return the first *n* prime numbers in ascending order.

    Args:
        n: How many primes to generate; returns [] for n <= 0.
    """
    def is_prime(num):
        # Trial division by 2..ceil(sqrt(num)).
        # Fix: the original returned True for 1, which is not prime (the
        # bug was latent because candidates start at 2, but the helper
        # answered wrongly if called directly).
        if num < 2:
            return False
        if num == 2:
            return True
        for i in range(2, ceil(sqrt(num)) + 1):
            if num % i == 0:
                return False
        return True

    result = []
    candidate = 2
    while len(result) < n:
        if is_prime(candidate):
            result.append(candidate)
        candidate += 1
    return result
def print_multiplication_table(top, side):
    """Print a multiplication table with *top* as column headers and *side*
    as row headers; every product is right-aligned in a fixed-width cell."""
    # The widest product (last of each list) decides the cell width.
    cell = len(str(top[-1] * side[-1])) + 1
    label_width = len(str(side[-1]))

    # Header row: blank corner followed by the column labels.
    header = " " * (label_width + 1)
    header += "".join(str(value).rjust(cell) for value in top)
    print(header)
    print(" " * label_width + "_" * len(header))

    # One line per side entry: "label|" followed by the row of products.
    for row_value in side:
        cells = "".join(str(col_value * row_value).rjust(cell) for col_value in top)
        print(str(row_value).rjust(label_width) + "|" + cells)
class InterviewProblemsTest(unittest.TestCase):
    """Sanity checks for get_n_primes() and print_multiplication_table()."""

    def test_get_n_primes(self):
        # Fixed: use unittest assertions instead of bare `assert`, which is
        # stripped under `python -O` and gives no diff on failure.
        self.assertEqual([2, 3, 5, 7, 11, 13, 17, 19, 23, 29], get_n_primes())

    # Not really proper tests, other than making sure we handle the edge
    # case (single-entry table) and a rectangular table without crashing.
    def test_print_table_single(self):
        col = row = get_n_primes(1)
        print_multiplication_table(row, col)

    def test_print_table(self):
        col = [1, 2, 3, 4, 5]
        row = [6, 7, 8]
        print_multiplication_table(row, col)


if __name__ == '__main__':
    unittest.main()
| mit |
ltilve/ChromiumGStreamerBackend | tools/telemetry/telemetry/web_perf/metrics/rendering_stats_unittest.py | 9 | 21628 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import random
import unittest
from telemetry.timeline import async_slice
from telemetry.timeline import bounds
from telemetry.timeline import model
from telemetry.util import perf_tests_helper
from telemetry.util import statistics
from telemetry.web_perf.metrics import rendering_stats
class MockTimer(object):
  """Fake global clock that produces random millisecond durations.

  All mock trace events pull their timestamps from one shared instance so
  the generated stats and event timestamps stay mutually consistent.
  The unit of time is milliseconds.
  """

  def __init__(self):
    self.milliseconds = 0

  def Advance(self, low=0.1, high=1):
    """Move the clock forward by a random delta in [low, high]; return it."""
    step = random.uniform(low, high)
    self.milliseconds = self.milliseconds + step
    return step

  def AdvanceAndGet(self, low=0.1, high=1):
    """Advance the clock, then return the new absolute time."""
    self.Advance(low, high)
    return self.milliseconds
class ReferenceRenderingStats(object):
  """Expected per-range rendering stats, mirroring RenderingStats' lists."""

  _FIELDS = ('frame_timestamps', 'frame_times',
             'approximated_pixel_percentages',
             'checkerboarded_pixel_percentages')

  def __init__(self):
    for field in self._FIELDS:
      setattr(self, field, [])

  def AppendNewRange(self):
    """Open a fresh (empty) interaction range in every stat list."""
    for field in self._FIELDS:
      getattr(self, field).append([])
class ReferenceInputLatencyStats(object):
  """Expected input-latency samples and the events that produced them."""

  def __init__(self):
    self.input_event_latency, self.input_event = [], []
def AddSurfaceFlingerStats(mock_timer, thread, first_frame,
                           ref_stats=None):
  """ Adds a random surface flinger stats event.

  thread: The timeline model thread to which the event will be added.
  first_frame: Is this the first frame within the bounds of an action?
  ref_stats: A ReferenceRenderingStats object to record expected values.
  """
  # Create random data and timestamp for impl thread rendering stats.
  data = {'frame_count': 1,
          'refresh_period': 16.6666}
  timestamp = mock_timer.AdvanceAndGet()
  # Add a slice with the event data to the given thread.
  thread.PushCompleteSlice(
      'SurfaceFlinger', 'vsync_before',
      timestamp, duration=0.0, thread_timestamp=None, thread_duration=None,
      args={'data': data})
  if not ref_stats:
    return
  # Add timestamp only if a frame was output.
  # NOTE(review): frame_count is hard-coded to 1 above, so this branch
  # always runs - confirm whether variable frame counts were ever intended.
  if data['frame_count'] == 1:
    if not first_frame:
      # Add frame_time if this is not the first frame within the bounds of
      # an action.
      prev_timestamp = ref_stats.frame_timestamps[-1][-1]
      ref_stats.frame_times[-1].append(timestamp - prev_timestamp)
    ref_stats.frame_timestamps[-1].append(timestamp)
def AddDisplayRenderingStats(mock_timer, thread, first_frame,
                             ref_stats=None):
  """ Adds a random display rendering stats event.

  thread: The timeline model thread to which the event will be added.
  first_frame: Is this the first frame within the bounds of an action?
  ref_stats: A ReferenceRenderingStats object to record expected values.
  """
  # Create random data and timestamp for main thread rendering stats.
  data = {'frame_count': 1}
  timestamp = mock_timer.AdvanceAndGet()
  # Add a slice with the event data to the given thread.
  thread.PushCompleteSlice(
      'benchmark', 'BenchmarkInstrumentation::DisplayRenderingStats',
      timestamp, duration=0.0, thread_timestamp=None, thread_duration=None,
      args={'data': data})
  if not ref_stats:
    return
  # Record the expected timestamp (and frame time after the first frame).
  if not first_frame:
    # Add frame_time if this is not the first frame within the bounds of an
    # action.
    prev_timestamp = ref_stats.frame_timestamps[-1][-1]
    ref_stats.frame_times[-1].append(timestamp - prev_timestamp)
  ref_stats.frame_timestamps[-1].append(timestamp)
def AddImplThreadRenderingStats(mock_timer, thread, first_frame,
                                ref_stats=None):
  """ Adds a random impl thread rendering stats event.

  thread: The timeline model thread to which the event will be added.
  first_frame: Is this the first frame within the bounds of an action?
  ref_stats: A ReferenceRenderingStats object to record expected values.
  """
  # Create random data and timestamp for impl thread rendering stats.
  data = {'frame_count': 1,
          'visible_content_area': random.uniform(0, 100),
          'approximated_visible_content_area': random.uniform(0, 5),
          'checkerboarded_visible_content_area': random.uniform(0, 5)}
  timestamp = mock_timer.AdvanceAndGet()
  # Add a slice with the event data to the given thread.
  thread.PushCompleteSlice(
      'benchmark', 'BenchmarkInstrumentation::ImplThreadRenderingStats',
      timestamp, duration=0.0, thread_timestamp=None, thread_duration=None,
      args={'data': data})
  if not ref_stats:
    return
  # Add timestamp only if a frame was output.
  # NOTE(review): frame_count is hard-coded to 1 above, so this branch
  # always runs.
  if data['frame_count'] == 1:
    if not first_frame:
      # Add frame_time if this is not the first frame within the bounds of
      # an action.
      prev_timestamp = ref_stats.frame_timestamps[-1][-1]
      ref_stats.frame_times[-1].append(timestamp - prev_timestamp)
    ref_stats.frame_timestamps[-1].append(timestamp)
    # Record the expected percentage values, rounded to 3 decimal places.
    ref_stats.approximated_pixel_percentages[-1].append(
        round(statistics.DivideIfPossibleOrZero(
            data['approximated_visible_content_area'],
            data['visible_content_area']) * 100.0, 3))
    ref_stats.checkerboarded_pixel_percentages[-1].append(
        round(statistics.DivideIfPossibleOrZero(
            data['checkerboarded_visible_content_area'],
            data['visible_content_area']) * 100.0, 3))
def AddInputLatencyStats(mock_timer, start_thread, end_thread,
                         ref_latency_stats=None):
  """ Adds a random input latency stats event.

  start_thread: The start thread on which the async slice is added.
  end_thread: The end thread on which the async slice is ended.
  ref_latency_stats: A ReferenceInputLatencyStats object for expected values.
  """
  # Component times scaled by 1000.0 relative to the mock timer's unit
  # (presumably ms -> microseconds for the trace format - TODO confirm).
  original_comp_time = mock_timer.AdvanceAndGet(2, 4) * 1000.0
  ui_comp_time = mock_timer.AdvanceAndGet(2, 4) * 1000.0
  begin_comp_time = mock_timer.AdvanceAndGet(2, 4) * 1000.0
  forward_comp_time = mock_timer.AdvanceAndGet(2, 4) * 1000.0
  end_comp_time = mock_timer.AdvanceAndGet(10, 20) * 1000.0
  data = {rendering_stats.ORIGINAL_COMP_NAME: {'time': original_comp_time},
          rendering_stats.UI_COMP_NAME: {'time': ui_comp_time},
          rendering_stats.BEGIN_COMP_NAME: {'time': begin_comp_time},
          rendering_stats.END_COMP_NAME: {'time': end_comp_time}}
  timestamp = mock_timer.AdvanceAndGet(2, 4)
  # Gesture scroll update event: a parent 'InputLatency' async slice with a
  # sub-slice carrying the component timing data.
  tracing_async_slice = async_slice.AsyncSlice(
      'benchmark', 'InputLatency', timestamp)
  async_sub_slice = async_slice.AsyncSlice(
      'benchmark', rendering_stats.GESTURE_SCROLL_UPDATE_EVENT_NAME, timestamp)
  async_sub_slice.args = {'data': data}
  async_sub_slice.parent_slice = tracing_async_slice
  async_sub_slice.start_thread = start_thread
  async_sub_slice.end_thread = end_thread
  tracing_async_slice.sub_slices.append(async_sub_slice)
  tracing_async_slice.start_thread = start_thread
  tracing_async_slice.end_thread = end_thread
  start_thread.AddAsyncSlice(tracing_async_slice)
  # Add scroll update latency info.
  scroll_update_data = {
      rendering_stats.BEGIN_SCROLL_UPDATE_COMP_NAME: {'time': begin_comp_time},
      rendering_stats.FORWARD_SCROLL_UPDATE_COMP_NAME:
          {'time': forward_comp_time},
      rendering_stats.END_COMP_NAME: {'time': end_comp_time}
  }
  scroll_async_slice = async_slice.AsyncSlice(
      'benchmark', 'InputLatency', timestamp)
  scroll_async_sub_slice = async_slice.AsyncSlice(
      'benchmark', rendering_stats.SCROLL_UPDATE_EVENT_NAME, timestamp)
  scroll_async_sub_slice.args = {'data': scroll_update_data}
  scroll_async_sub_slice.parent_slice = scroll_async_slice
  scroll_async_sub_slice.start_thread = start_thread
  scroll_async_sub_slice.end_thread = end_thread
  scroll_async_slice.sub_slices.append(scroll_async_sub_slice)
  scroll_async_slice.start_thread = start_thread
  scroll_async_slice.end_thread = end_thread
  start_thread.AddAsyncSlice(scroll_async_slice)
  # Also add some dummy frame statistics so we can feed the resulting timeline
  # to RenderingStats.
  AddImplThreadRenderingStats(mock_timer, end_thread, False)
  if not ref_latency_stats:
    return
  # Record both sub-slices and their expected latencies (end - start,
  # divided back by 1000.0 into the timer's unit).
  ref_latency_stats.input_event.append(async_sub_slice)
  ref_latency_stats.input_event.append(scroll_async_sub_slice)
  ref_latency_stats.input_event_latency.append((
      rendering_stats.GESTURE_SCROLL_UPDATE_EVENT_NAME,
      (data[rendering_stats.END_COMP_NAME]['time'] -
       data[rendering_stats.ORIGINAL_COMP_NAME]['time']) / 1000.0))
  scroll_update_time = (
      scroll_update_data[rendering_stats.END_COMP_NAME]['time'] -
      scroll_update_data[rendering_stats.BEGIN_SCROLL_UPDATE_COMP_NAME]['time'])
  ref_latency_stats.input_event_latency.append((
      rendering_stats.SCROLL_UPDATE_EVENT_NAME,
      scroll_update_time / 1000.0))
class RenderingStatsUnitTest(unittest.TestCase):

  def testHasRenderingStats(self):
    # HasRenderingStats() should be true only for threads carrying
    # impl-thread rendering stats events.
    timeline = model.TimelineModel()
    timer = MockTimer()
    # A process without rendering stats
    process_without_stats = timeline.GetOrCreateProcess(pid=1)
    thread_without_stats = process_without_stats.GetOrCreateThread(tid=11)
    process_without_stats.FinalizeImport()
    self.assertFalse(rendering_stats.HasRenderingStats(thread_without_stats))
    # A process with rendering stats, but no frames in them
    # NOTE(review): no stats event is actually added to this thread either -
    # confirm whether an AddImplThreadRenderingStats call is missing here.
    process_without_frames = timeline.GetOrCreateProcess(pid=2)
    thread_without_frames = process_without_frames.GetOrCreateThread(tid=21)
    process_without_frames.FinalizeImport()
    self.assertFalse(rendering_stats.HasRenderingStats(thread_without_frames))
    # A process with rendering stats and frames in them
    process_with_frames = timeline.GetOrCreateProcess(pid=3)
    thread_with_frames = process_with_frames.GetOrCreateThread(tid=31)
    AddImplThreadRenderingStats(timer, thread_with_frames, True, None)
    process_with_frames.FinalizeImport()
    self.assertTrue(rendering_stats.HasRenderingStats(thread_with_frames))
def testBothSurfaceFlingerAndDisplayStats(self):
  # When both SurfaceFlinger stats and display rendering stats exist,
  # only the SurfaceFlinger stats should be reflected in RenderingStats.
  timeline = model.TimelineModel()
  timer = MockTimer()
  ref_stats = ReferenceRenderingStats()
  ref_stats.AppendNewRange()
  surface_flinger = timeline.GetOrCreateProcess(pid=4)
  surface_flinger.name = 'SurfaceFlinger'
  surface_flinger_thread = surface_flinger.GetOrCreateThread(tid=41)
  renderer = timeline.GetOrCreateProcess(pid=2)
  browser = timeline.GetOrCreateProcess(pid=3)
  browser_main = browser.GetOrCreateThread(tid=31)
  browser_main.BeginSlice('webkit.console', 'ActionA',
                          timer.AdvanceAndGet(2, 4), '')
  # Create SurfaceFlinger stats and display rendering stats.
  for i in xrange(0, 10):
    first = (i == 0)
    AddSurfaceFlingerStats(timer, surface_flinger_thread, first, ref_stats)
    timer.Advance(2, 4)
  for i in xrange(0, 10):
    first = (i == 0)
    AddDisplayRenderingStats(timer, browser_main, first, None)
    timer.Advance(5, 10)
  browser_main.EndSlice(timer.AdvanceAndGet())
  timer.Advance(2, 4)
  browser.FinalizeImport()
  renderer.FinalizeImport()
  timeline_markers = timeline.FindTimelineMarkers(['ActionA'])
  timeline_ranges = [bounds.Bounds.CreateFromEvent(marker)
                     for marker in timeline_markers]
  stats = rendering_stats.RenderingStats(
      renderer, browser, surface_flinger, timeline_ranges)
  # Compare rendering stats to reference - Only SurfaceFlinger stats should
  # count
  self.assertEquals(stats.frame_timestamps, ref_stats.frame_timestamps)
  self.assertEquals(stats.frame_times, ref_stats.frame_times)
def testBothDisplayAndImplStats(self):
  # When display rendering stats exist alongside impl-thread stats (and no
  # SurfaceFlinger process is given), only display stats should count.
  timeline = model.TimelineModel()
  timer = MockTimer()
  ref_stats = ReferenceRenderingStats()
  ref_stats.AppendNewRange()
  renderer = timeline.GetOrCreateProcess(pid=2)
  browser = timeline.GetOrCreateProcess(pid=3)
  browser_main = browser.GetOrCreateThread(tid=31)
  browser_main.BeginSlice('webkit.console', 'ActionA',
                          timer.AdvanceAndGet(2, 4), '')
  # Create main, impl, and display rendering stats.
  for i in xrange(0, 10):
    first = (i == 0)
    AddImplThreadRenderingStats(timer, browser_main, first, None)
    timer.Advance(2, 4)
  for i in xrange(0, 10):
    first = (i == 0)
    AddDisplayRenderingStats(timer, browser_main, first, ref_stats)
    timer.Advance(5, 10)
  browser_main.EndSlice(timer.AdvanceAndGet())
  timer.Advance(2, 4)
  browser.FinalizeImport()
  renderer.FinalizeImport()
  timeline_markers = timeline.FindTimelineMarkers(['ActionA'])
  timeline_ranges = [bounds.Bounds.CreateFromEvent(marker)
                     for marker in timeline_markers]
  stats = rendering_stats.RenderingStats(
      renderer, browser, None, timeline_ranges)
  # Compare rendering stats to reference - Only display stats should count
  self.assertEquals(stats.frame_timestamps, ref_stats.frame_timestamps)
  self.assertEquals(stats.frame_times, ref_stats.frame_times)
def testRangeWithoutFrames(self):
  # An action range containing no frames should yield an empty timestamp
  # list rather than breaking RenderingStats construction.
  timer = MockTimer()
  timeline = model.TimelineModel()
  # Create a renderer process, with a main thread and impl thread.
  renderer = timeline.GetOrCreateProcess(pid=2)
  renderer_main = renderer.GetOrCreateThread(tid=21)
  renderer_compositor = renderer.GetOrCreateThread(tid=22)
  # Create 10 main and impl rendering stats events for Action A.
  renderer_main.BeginSlice('webkit.console', 'ActionA',
                           timer.AdvanceAndGet(2, 4), '')
  for i in xrange(0, 10):
    first = (i == 0)
    AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  timer.Advance(2, 4)
  # Create 5 main and impl rendering stats events not within any action.
  for i in xrange(0, 5):
    first = (i == 0)
    AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
  # Create Action B without any frames. This should trigger
  # NotEnoughFramesError when the RenderingStats object is created.
  # NOTE(review): the assertion below checks for an empty range instead of
  # an exception - confirm whether the comment above is stale.
  renderer_main.BeginSlice('webkit.console', 'ActionB',
                           timer.AdvanceAndGet(2, 4), '')
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  renderer.FinalizeImport()
  timeline_markers = timeline.FindTimelineMarkers(['ActionA', 'ActionB'])
  timeline_ranges = [bounds.Bounds.CreateFromEvent(marker)
                     for marker in timeline_markers]
  stats = rendering_stats.RenderingStats(
      renderer, None, None, timeline_ranges)
  self.assertEquals(0, len(stats.frame_timestamps[1]))
def testFromTimeline(self):
  # End-to-end check: stats computed from a synthetic timeline must match
  # the reference values recorded while the timeline was built. Frame data
  # comes from the browser process; pixel percentages from the renderer.
  timeline = model.TimelineModel()
  # Create a browser process and a renderer process, and a main thread and
  # impl thread for each.
  browser = timeline.GetOrCreateProcess(pid=1)
  browser_compositor = browser.GetOrCreateThread(tid=12)
  renderer = timeline.GetOrCreateProcess(pid=2)
  renderer_main = renderer.GetOrCreateThread(tid=21)
  renderer_compositor = renderer.GetOrCreateThread(tid=22)
  timer = MockTimer()
  renderer_ref_stats = ReferenceRenderingStats()
  browser_ref_stats = ReferenceRenderingStats()
  # Create 10 main and impl rendering stats events for Action A.
  renderer_main.BeginSlice('webkit.console', 'ActionA',
                           timer.AdvanceAndGet(2, 4), '')
  renderer_ref_stats.AppendNewRange()
  browser_ref_stats.AppendNewRange()
  for i in xrange(0, 10):
    first = (i == 0)
    AddImplThreadRenderingStats(
        timer, renderer_compositor, first, renderer_ref_stats)
    AddImplThreadRenderingStats(
        timer, browser_compositor, first, browser_ref_stats)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  # Create 5 main and impl rendering stats events not within any action.
  for i in xrange(0, 5):
    first = (i == 0)
    AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
    AddImplThreadRenderingStats(timer, browser_compositor, first, None)
  # Create 10 main and impl rendering stats events for Action B.
  renderer_main.BeginSlice('webkit.console', 'ActionB',
                           timer.AdvanceAndGet(2, 4), '')
  renderer_ref_stats.AppendNewRange()
  browser_ref_stats.AppendNewRange()
  for i in xrange(0, 10):
    first = (i == 0)
    AddImplThreadRenderingStats(
        timer, renderer_compositor, first, renderer_ref_stats)
    AddImplThreadRenderingStats(
        timer, browser_compositor, first, browser_ref_stats)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  # Create 10 main and impl rendering stats events for Action A.
  renderer_main.BeginSlice('webkit.console', 'ActionA',
                           timer.AdvanceAndGet(2, 4), '')
  renderer_ref_stats.AppendNewRange()
  browser_ref_stats.AppendNewRange()
  for i in xrange(0, 10):
    first = (i == 0)
    AddImplThreadRenderingStats(
        timer, renderer_compositor, first, renderer_ref_stats)
    AddImplThreadRenderingStats(
        timer, browser_compositor, first, browser_ref_stats)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  timer.Advance(2, 4)
  browser.FinalizeImport()
  renderer.FinalizeImport()
  timeline_markers = timeline.FindTimelineMarkers(
      ['ActionA', 'ActionB', 'ActionA'])
  timeline_ranges = [bounds.Bounds.CreateFromEvent(marker)
                     for marker in timeline_markers]
  stats = rendering_stats.RenderingStats(
      renderer, browser, None, timeline_ranges)
  # Compare rendering stats to reference.
  self.assertEquals(stats.frame_timestamps,
                    browser_ref_stats.frame_timestamps)
  self.assertEquals(stats.frame_times, browser_ref_stats.frame_times)
  self.assertEquals(stats.approximated_pixel_percentages,
                    renderer_ref_stats.approximated_pixel_percentages)
  self.assertEquals(stats.checkerboarded_pixel_percentages,
                    renderer_ref_stats.checkerboarded_pixel_percentages)
def testInputLatencyFromTimeline(self):
  # Input latency events inside action ranges must round-trip through
  # GetLatencyEvents/ComputeEventLatencies and match the reference values.
  timeline = model.TimelineModel()
  # Create a browser process and a renderer process.
  browser = timeline.GetOrCreateProcess(pid=1)
  browser_main = browser.GetOrCreateThread(tid=11)
  renderer = timeline.GetOrCreateProcess(pid=2)
  renderer_main = renderer.GetOrCreateThread(tid=21)
  timer = MockTimer()
  ref_latency = ReferenceInputLatencyStats()
  # Create 10 input latency stats events for Action A.
  renderer_main.BeginSlice('webkit.console', 'ActionA',
                           timer.AdvanceAndGet(2, 4), '')
  for _ in xrange(0, 10):
    AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  # Create 5 input latency stats events not within any action.
  timer.Advance(2, 4)
  for _ in xrange(0, 5):
    AddInputLatencyStats(timer, browser_main, renderer_main, None)
  # Create 10 input latency stats events for Action B.
  renderer_main.BeginSlice('webkit.console', 'ActionB',
                           timer.AdvanceAndGet(2, 4), '')
  for _ in xrange(0, 10):
    AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  # Create 10 input latency stats events for Action A.
  renderer_main.BeginSlice('webkit.console', 'ActionA',
                           timer.AdvanceAndGet(2, 4), '')
  for _ in xrange(0, 10):
    AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  browser.FinalizeImport()
  renderer.FinalizeImport()
  latency_events = []
  timeline_markers = timeline.FindTimelineMarkers(
      ['ActionA', 'ActionB', 'ActionA'])
  timeline_ranges = [bounds.Bounds.CreateFromEvent(marker)
                     for marker in timeline_markers]
  for timeline_range in timeline_ranges:
    if timeline_range.is_empty:
      continue
    latency_events.extend(rendering_stats.GetLatencyEvents(
        browser, timeline_range))
  self.assertEquals(latency_events, ref_latency.input_event)
  event_latency_result = rendering_stats.ComputeEventLatencies(latency_events)
  self.assertEquals(event_latency_result,
                    ref_latency.input_event_latency)
  # Stats split the latencies by event name; compare against the reference
  # list filtered the same way.
  stats = rendering_stats.RenderingStats(
      renderer, browser, None, timeline_ranges)
  self.assertEquals(
      perf_tests_helper.FlattenList(stats.input_event_latency),
      [latency for name, latency in ref_latency.input_event_latency
       if name != rendering_stats.SCROLL_UPDATE_EVENT_NAME])
  self.assertEquals(
      perf_tests_helper.FlattenList(stats.scroll_update_latency),
      [latency for name, latency in ref_latency.input_event_latency
       if name == rendering_stats.SCROLL_UPDATE_EVENT_NAME])
  self.assertEquals(
      perf_tests_helper.FlattenList(stats.gesture_scroll_update_latency),
      [latency for name, latency in ref_latency.input_event_latency
       if name == rendering_stats.GESTURE_SCROLL_UPDATE_EVENT_NAME])
| bsd-3-clause |
bingosummer/azure-linux-extensions | CustomScript/azure/storage/sharedaccesssignature.py | 46 | 8809 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import _sign_string, url_quote
from azure.storage import X_MS_VERSION
#-------------------------------------------------------------------------
# Query-string parameter names used by the shared access signature.
SIGNED_START = 'st'
SIGNED_EXPIRY = 'se'
SIGNED_RESOURCE = 'sr'
SIGNED_PERMISSION = 'sp'
SIGNED_IDENTIFIER = 'si'
SIGNED_SIGNATURE = 'sig'
SIGNED_VERSION = 'sv'
# Resource type codes for the 'sr' parameter.
RESOURCE_BLOB = 'b'
RESOURCE_CONTAINER = 'c'
# Keys used in WebResource.properties.
SIGNED_RESOURCE_TYPE = 'resource'
SHARED_ACCESS_PERMISSION = 'permission'
#--------------------------------------------------------------------------
class WebResource(object):
    '''
    Describes the web resource a shared access signature applies to.

    path: the resource path.
    properties: dict of names/values; holds two items: the resource type
        and the required permission.
    request_url: the url of the web resource, including all queries.
    '''

    def __init__(self, path=None, request_url=None, properties=None):
        self.request_url = request_url
        self.path = path
        self.properties = properties or {}
class Permission(object):
    '''
    Couples a resource path with the signed query-string values generated
    for it.

    path: the resource path
    query_string: dict of name/values holding the SIGNED_START,
        SIGNED_EXPIRY, SIGNED_RESOURCE, SIGNED_PERMISSION,
        SIGNED_IDENTIFIER and SIGNED_SIGNATURE entries.
    '''

    def __init__(self, path=None, query_string=None):
        self.query_string = query_string
        self.path = path
class SharedAccessPolicy(object):
    '''Pairs an access policy with an optional signed identifier.'''

    def __init__(self, access_policy, signed_identifier=None):
        self.access_policy = access_policy
        self.id = signed_identifier
class SharedAccessSignature(object):
    '''
    The main class used to do the signing and generating the signature.

    account_name:
        the storage account name used to generate the shared access signature
    account_key: the access key to generate the shared access signature
    permission_set: the permission cache used to sign the request url.
    '''

    def __init__(self, account_name, account_key, permission_set=None):
        self.account_name = account_name
        self.account_key = account_key
        self.permission_set = permission_set

    def generate_signed_query_string(self, path, resource_type,
                                     shared_access_policy,
                                     version=X_MS_VERSION):
        '''
        Generates the query string for path, resource type and shared access
        policy.

        path: the resource
        resource_type: could be blob or container
        shared_access_policy: shared access policy
        version:
            x-ms-version for storage service, or None to get a signed query
            string compatible with pre 2012-02-12 clients, where the version
            is not included in the query string.
        '''
        query_string = {}
        # Optional parameters are only emitted when set.
        if shared_access_policy.access_policy.start:
            query_string[
                SIGNED_START] = shared_access_policy.access_policy.start
        if version:
            query_string[SIGNED_VERSION] = version
        query_string[SIGNED_EXPIRY] = shared_access_policy.access_policy.expiry
        query_string[SIGNED_RESOURCE] = resource_type
        query_string[
            SIGNED_PERMISSION] = shared_access_policy.access_policy.permission
        if shared_access_policy.id:
            query_string[SIGNED_IDENTIFIER] = shared_access_policy.id
        # The signature is computed over the policy and canonicalized path.
        query_string[SIGNED_SIGNATURE] = self._generate_signature(
            path, shared_access_policy, version)
        return query_string

    def sign_request(self, web_resource):
        ''' sign request to generate request_url with sharedaccesssignature
        info for web_resource.

        Appends the first cached permission's query string that matches the
        resource's type and required permission; leaves the url untouched if
        none match.'''
        if self.permission_set:
            for shared_access_signature in self.permission_set:
                if self._permission_matches_request(
                        shared_access_signature, web_resource,
                        web_resource.properties[
                            SIGNED_RESOURCE_TYPE],
                        web_resource.properties[SHARED_ACCESS_PERMISSION]):
                    # Join with '?' or '&' depending on existing queries.
                    if web_resource.request_url.find('?') == -1:
                        web_resource.request_url += '?'
                    else:
                        web_resource.request_url += '&'
                    web_resource.request_url += self._convert_query_string(
                        shared_access_signature.query_string)
                    break
        return web_resource

    def _convert_query_string(self, query_string):
        ''' Converts query string to str. The order of name, values is very
        important and can't be wrong.

        NOTE(review): the result keeps a trailing '&' - confirm downstream
        consumers tolerate it.'''
        convert_str = ''
        if SIGNED_START in query_string:
            convert_str += SIGNED_START + '=' + \
                url_quote(query_string[SIGNED_START]) + '&'
        convert_str += SIGNED_EXPIRY + '=' + \
            url_quote(query_string[SIGNED_EXPIRY]) + '&'
        convert_str += SIGNED_PERMISSION + '=' + \
            query_string[SIGNED_PERMISSION] + '&'
        convert_str += SIGNED_RESOURCE + '=' + \
            query_string[SIGNED_RESOURCE] + '&'
        if SIGNED_IDENTIFIER in query_string:
            convert_str += SIGNED_IDENTIFIER + '=' + \
                query_string[SIGNED_IDENTIFIER] + '&'
        if SIGNED_VERSION in query_string:
            convert_str += SIGNED_VERSION + '=' + \
                query_string[SIGNED_VERSION] + '&'
        convert_str += SIGNED_SIGNATURE + '=' + \
            url_quote(query_string[SIGNED_SIGNATURE]) + '&'
        return convert_str

    def _generate_signature(self, path, shared_access_policy, version):
        ''' Generates signature for a given path and shared access policy. '''

        def get_value_to_append(value, no_new_line=False):
            # Empty/None values contribute just the separator newline.
            return_value = ''
            if value:
                return_value = value
            if not no_new_line:
                return_value += '\n'
            return return_value

        if path[0] != '/':
            path = '/' + path
        canonicalized_resource = '/' + self.account_name + path
        # Form the string to sign from shared_access_policy and canonicalized
        # resource. The order of values is important.
        string_to_sign = \
            (get_value_to_append(shared_access_policy.access_policy.permission) +
             get_value_to_append(shared_access_policy.access_policy.start) +
             get_value_to_append(shared_access_policy.access_policy.expiry) +
             get_value_to_append(canonicalized_resource))
        # Post 2012-02-12 versions include the version in the signed string;
        # the final field never gets a trailing newline.
        if version:
            string_to_sign += get_value_to_append(shared_access_policy.id)
            string_to_sign += get_value_to_append(version, True)
        else:
            string_to_sign += get_value_to_append(shared_access_policy.id, True)
        return self._sign(string_to_sign)

    def _permission_matches_request(self, shared_access_signature,
                                    web_resource, resource_type,
                                    required_permission):
        ''' Check whether requested permission matches given
        shared_access_signature, web_resource and resource type. '''
        required_resource_type = resource_type
        # A blob request may be satisfied by a container-level signature.
        if required_resource_type == RESOURCE_BLOB:
            required_resource_type += RESOURCE_CONTAINER
        for name, value in shared_access_signature.query_string.items():
            if name == SIGNED_RESOURCE and \
                    required_resource_type.find(value) == -1:
                return False
            elif name == SIGNED_PERMISSION and \
                    required_permission.find(value) == -1:
                return False
        return web_resource.path.find(shared_access_signature.path) != -1

    def _sign(self, string_to_sign):
        ''' use HMAC-SHA256 to sign the string and convert it as base64
        encoded string. '''
        return _sign_string(self.account_key, string_to_sign)
| apache-2.0 |
sanjeevtripurari/hue | desktop/core/ext-py/PyYAML-3.09/lib/yaml/nodes.py | 985 | 1440 |
class Node(object):
    """Base class for YAML representation-graph nodes.

    A node carries its resolved tag, its value (a scalar string or a nested
    collection), and the start/end marks delimiting where it was parsed.
    """

    def __init__(self, tag, value, start_mark, end_mark):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        # The value is shown in full; a long-disabled, commented-out
        # truncation scheme for large values was removed as dead code.
        value = repr(self.value)
        return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
class ScalarNode(Node):
    """Leaf node carrying a single scalar value and an optional quoting style."""

    id = 'scalar'

    def __init__(self, tag, value,
                 start_mark=None, end_mark=None, style=None):
        # Marks default to None so scalar nodes can be built synthetically.
        Node.__init__(self, tag, value, start_mark, end_mark)
        self.style = style
class CollectionNode(Node):
    """Base for sequence and mapping nodes; tracks flow vs. block style."""

    def __init__(self, tag, value,
                 start_mark=None, end_mark=None, flow_style=None):
        Node.__init__(self, tag, value, start_mark, end_mark)
        self.flow_style = flow_style
class SequenceNode(CollectionNode):
    # Collection node whose value is a list of child nodes.
    id = 'sequence'
class MappingNode(CollectionNode):
    # Collection node whose value is a list of (key node, value node) pairs.
    id = 'mapping'
| apache-2.0 |
LifeDJIK/S.H.I.V.A. | containers/shiva/hazelcast/protocol/codec/map_execute_with_predicate_codec.py | 2 | 1578 | from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.custom_codec import *
from hazelcast.util import ImmutableLazyDataList
from hazelcast.protocol.codec.map_message_type import *
# Wire-protocol constants for this codec: message type ids and whether the
# request may be retried by the client.
REQUEST_TYPE = MAP_EXECUTEWITHPREDICATE
RESPONSE_TYPE = 117
RETRYABLE = False
def calculate_size(name, entry_processor, predicate):
    """ Calculates the request payload size"""
    # Sum of the serialized sizes of the three request fields.
    return (calculate_size_str(name)
            + calculate_size_data(entry_processor)
            + calculate_size_data(predicate))
def encode_request(name, entry_processor, predicate):
    """ Encode request into client_message"""
    # Field append order must match the server-side decoder for this
    # message type.
    client_message = ClientMessage(payload_size=calculate_size(name, entry_processor, predicate))
    client_message.set_message_type(REQUEST_TYPE)
    client_message.set_retryable(RETRYABLE)
    client_message.append_str(name)
    client_message.append_data(entry_processor)
    client_message.append_data(predicate)
    client_message.update_frame_length()
    return client_message
def decode_response(client_message, to_object=None):
    """ Decode response from client message"""
    parameters = dict(response=None)
    # Payload is a length-prefixed list of (key, value) data pairs.
    response_size = client_message.read_int()
    response = []
    for response_index in xrange(0, response_size):
        response_item = (client_message.read_data(), client_message.read_data())
        response.append(response_item)
    # Entries are deserialized lazily via to_object on access.
    parameters['response'] = ImmutableLazyDataList(response, to_object)
    return parameters
| mit |
MerlinZhang/osf.io | framework/auth/exceptions.py | 35 | 1985 | from framework.exceptions import FrameworkError
from website import language
class AuthError(FrameworkError):
    """Base class for all authentication/authorization-related errors."""
    pass
class ChangePasswordError(AuthError):
    """Raised if a change password is called with invalid data."""

    def __init__(self, message):
        # Normalize to a list so callers can always iterate self.messages.
        if isinstance(message, (list, tuple)):
            self.messages = message
        else:
            self.messages = [message]
        super(ChangePasswordError, self).__init__(message)
class DuplicateEmailError(AuthError):
    """Raised if a user tries to register an email that is already in the
    database.
    """
    pass
class EmailConfirmTokenError(FrameworkError):
    """Base class for errors arising from the use of an email confirm token."""
    pass
class InvalidTokenError(EmailConfirmTokenError):
    """Raised if an email confirmation token is not found."""
    # Short/long messages shown to the end user.
    message_short = "Invalid Token"
    message_long = language.INVALID_EMAIL_CONFIRM_TOKEN
class ExpiredTokenError(EmailConfirmTokenError):
    """Raised if an email confirmation token is expired."""
    # Short/long messages shown to the end user.
    message_short = "Expired Token"
    message_long = language.EXPIRED_EMAIL_CONFIRM_TOKEN
class MergeConfirmedRequiredError(EmailConfirmTokenError):
    """Raised if a merge is possible, but requires user confirmation"""

    message_short = language.MERGE_CONFIRMATION_REQUIRED_SHORT

    def __init__(self, message, user, user_to_merge, *args, **kwargs):
        super(MergeConfirmedRequiredError, self).__init__(
            message, *args, **kwargs)
        self.user = user
        self.user_to_merge = user_to_merge

    @property
    def message_long(self):
        # Rendered lazily so both accounts can be interpolated.
        return language.MERGE_CONFIRMATION_REQUIRED_LONG.format(
            user=self.user,
            user_to_merge=self.user_to_merge,
        )
class MergeConflictError(EmailConfirmTokenError):
    """Raised if a merge is not possible due to a conflict"""
    # Short/long messages shown to the end user.
    message_short = language.CANNOT_MERGE_ACCOUNTS_SHORT
    message_long = language.CANNOT_MERGE_ACCOUNTS_LONG
| apache-2.0 |
mfazliazran/raft | widgets/MiniResponseRenderWidget.py | 11 | 5721 | #
# Author: Gregory Fleischer (gfleischer@gmail.com)
# Nathan Hamiel
#
# Copyright (c) 2011 RAFT Team
#
# This file is part of RAFT.
#
# RAFT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RAFT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RAFT. If not, see <http://www.gnu.org/licenses/>.
#
from PyQt4.QtCore import Qt, QObject, SIGNAL, QUrl
from PyQt4.QtGui import *
from PyQt4 import Qsci
from core.web.StandardPageFactory import StandardPageFactory
from core.web.RenderingWebView import RenderingWebView
from utility import ContentHelper
class MiniResponseRenderWidget(QObject):
def __init__(self, framework, tabWidget, showRequest, parent = None):
QObject.__init__(self, parent)
self.framework = framework
QObject.connect(self, SIGNAL('destroyed(QObject*)'), self._destroyed)
self.tabWidget = tabWidget
self.showRequest = showRequest
if self.showRequest:
self.reqReqEdit_Tab = QWidget(self.tabWidget)
self.tabWidget.addTab(self.reqReqEdit_Tab, 'Request')
# TODO: must this hard-coded ?
self.render_tab_index = 2
else:
self.render_tab_index = 1
self.reqResEdit_Tab = QWidget(self.tabWidget)
self.tabWidget.addTab(self.reqResEdit_Tab, 'Response')
self.reqRenderView_Tab = QWidget(self.tabWidget)
self.tabWidget.addTab(self.reqRenderView_Tab, 'Render')
# TODO: a common utility method should be used to all scintilla stuff
if self.showRequest:
self.reqReqEdit_Layout = QVBoxLayout(self.reqReqEdit_Tab)
self.reqReqEdit = Qsci.QsciScintilla(self.reqReqEdit_Tab)
self.reqReqEdit.zoomTo(self.framework.get_zoom_size())
self.reqReqEdit.setMarginLineNumbers(1, True)
self.reqReqEdit.setMarginWidth(1, '1000')
self.reqReqEdit.setWrapMode(1)
self.reqReqEdit.setWrapVisualFlags(2, 1, 0)
self.reqReqEdit_Layout.addWidget(self.reqReqEdit)
self.reqResEdit_Layout = QVBoxLayout(self.reqResEdit_Tab)
self.reqResEdit = Qsci.QsciScintilla(self.reqResEdit_Tab)
self.reqResEdit.zoomTo(self.framework.get_zoom_size())
self.reqResEdit.setMarginLineNumbers(1, True)
self.reqResEdit.setMarginWidth(1, '1000')
self.reqResEdit.setWrapMode(1)
self.reqResEdit.setWrapVisualFlags(2, 1, 0)
self.reqResEdit_Layout.addWidget(self.reqResEdit)
self.reqRenderView_Layout = QVBoxLayout(self.reqRenderView_Tab)
self.requesterPageFactory = StandardPageFactory(self.framework, None, self)
self.reqRenderView = RenderingWebView(self.framework, self.requesterPageFactory, self.tabWidget)
self.reqRenderView_Layout.addWidget(self.reqRenderView)
self.request_url = None
self.tabWidget.currentChanged.connect(self.do_render_apply)
self.framework.subscribe_zoom_in(self.zoom_in_scintilla)
self.framework.subscribe_zoom_out(self.zoom_out_scintilla)
def _destroyed(self):
self.framework.unsubscribe_zoom_in(self.zoom_in_scintilla)
self.framework.unsubscribe_zoom_out(self.zoom_out_scintilla)
def fill_from_response(self, url, headers, body, content_type = ''):
self.reqRenderView.fill_from_response(url, headers, body, content_type)
    def populate_response_content(self, url, req_headers, req_body, res_headers, res_body, res_content_type = ''):
        """Cache a request/response pair, fill the text editors, and render
        immediately if the Render tab is the one currently shown."""
        self.request_url = url
        self.request_headers = req_headers
        self.request_body = req_body
        self.response_headers = res_headers
        self.response_body = res_body
        self.response_content_type = res_content_type
        if self.showRequest:
            self.reqReqEdit.setText(ContentHelper.getCombinedText(self.request_headers, self.request_body, ''))
        # TODO: should support different lexers based on content type
        lexerInstance = Qsci.QsciLexerHTML(self.reqResEdit)
        lexerInstance.setFont(self.framework.get_font())
        self.reqResEdit.setLexer(lexerInstance)
        # TODO: should verify trailing newlines?
        self.reqResEdit.setText(ContentHelper.getCombinedText(self.response_headers, self.response_body, self.response_content_type))
        # Only actually renders when the current tab is the Render tab.
        self.do_render_apply(self.tabWidget.currentIndex())
def do_render_apply(self, index):
if self.render_tab_index == index:
if self.request_url:
self.fill_from_response(self.request_url, self.response_headers, self.response_body, self.response_content_type)
def clear_response_render(self):
if self.showRequest:
self.reqReqEdit.setText('')
self.reqResEdit.setText('')
self.reqRenderView.setHtml('', QUrl('about:blank'))
self.request_url = ''
self.request_headers = b''
self.request_body = b''
self.response_headers = b''
self.response_body = b''
self.response_content_type = ''
def zoom_in_scintilla(self):
if self.showRequest:
self.reqReqEdit.zoomIn()
self.reqResEdit.zoomIn()
def zoom_out_scintilla(self):
if self.showRequest:
self.reqReqEdit.zoomOut()
self.reqResEdit.zoomOut()
| gpl-3.0 |
morelab/weblabdeusto | server/src/weblab/db/upgrade/regular/versions/4c23f9943036_first.py | 3 | 2534 | from __future__ import print_function, unicode_literals
"""First version
Revision ID: 4c23f9943036
Revises: None
Create Date: 2013-04-18 12:37:52.535777
Apply all the changes added since 3.9.0, if they were not already applied.
"""
# revision identifiers, used by Alembic.
revision = '4c23f9943036'
down_revision = None
# TODO: add here all the missing migrators from:
# ../admin/cli/migrate_db_40m1_to_50/migrate.py
####################################################################
#
# Take into account that we are actively supporting SQLite. SQLite
# does not support operations such as ALTER TABLE Foo DROP COLUMN
# or so, so many methods, such as "op.drop_column", will not work, and
# a work arounds (locating all the data in a new table, drop the other
# and rename) will be required.
#
# The migrator stubs below correspond one-to-one to steps in
# ../admin/cli/migrate_db_40m1_to_50/migrate.py (see TODO above); each is a
# placeholder until the corresponding change is ported to Alembic.  *m* is
# expected to be a reflected SQLAlchemy MetaData -- see the commented-out
# sketch in remove_applicable_permission_types().
def add_priority_to_permission_parameter(m):
    pass


def add_initialization_in_accounting_to_permission_parameter(m):
    pass


def add_access_forward_to_permission(m):
    pass


def add_federation_role(m):
    pass


def add_access_forward_to_federated(m):
    pass


def add_admin_panel_to_administrators(m):
    pass


def add_reservation_id_to_user_used_experiment(m):
    pass


def add_finish_reason_to_user_experiment(m):
    pass


def add_max_error_in_millis_to_user_used_experiment(m):
    pass


def add_permission_id_to_user_used_experiment(m):
    pass


def remove_external_entity_from_permission_type(m):
    pass


def remove_applicable_permission_types(m):
    # Sketch of the intended implementation, kept for reference:
    # if 'UserApplicablePermissionType' in m.tables:
    #     op.add_column('UserPermission', 'permission_type_id', sa.Integer)
    #
    #     results = op.execute(
    #         op.tables['UserPermission'].
    #     )
    #
    #     op.drop_table('UserApplicablePermissionType')
    pass


def remove_external_entity_is_member_of(m):
    pass


def remove_external_entity_permission_parameter(m):
    pass


def remove_external_entity_permission(m):
    pass


def remove_external_entity_command(m):
    pass


def remove_external_entity_file(m):
    pass


def remove_external_entity_user_used_experiment(m):
    pass


def remove_external_entity_aplicable_permission_type(m):
    pass


def migrate_user_permissions(m):
    pass


def migrate_group_permissions(m):
    pass


def migrate_role_permissions(m):
    pass


def remove_permission_type_parameter(m):
    pass


def remove_permission_type(m):
    pass
def upgrade():
    """Alembic upgrade entry point (currently a no-op placeholder)."""
    # m = MetaData()
    # m.reflect(op.get_bind())
    pass
    # remove_applicable_permission_types(m)


def downgrade():
    """Alembic downgrade entry point (not implemented)."""
    # TODO
    pass
| bsd-2-clause |
pyqt/maya2015-qt4 | PyQt4/uic/Compiler/qtproxies.py | 11 | 15010 | #############################################################################
##
## Copyright (C) 2012 Riverbank Computing Limited.
## Copyright (C) 2006 Thorsten Marek.
## All right reserved.
##
## This file is part of PyQt.
##
## You may use this file under the terms of the GPL v2 or the revised BSD
## license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of the Riverbank Computing Limited nor the names
## of its contributors may be used to endorse or promote products
## derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
#############################################################################
import sys
import re
from PyQt4.uic.Compiler.indenter import write_code
from PyQt4.uic.Compiler.misc import Literal, moduleMember
if sys.hexversion >= 0x03000000:
from PyQt4.uic.port_v3.proxy_base import ProxyBase
from PyQt4.uic.port_v3.as_string import as_string
else:
from PyQt4.uic.port_v2.proxy_base import ProxyBase
from PyQt4.uic.port_v2.as_string import as_string
# Translation statements collected during code generation, plus the current
# translation context (the top-level widget's object name).
i18n_strings = []
i18n_context = ""


def i18n_print(string):
    """Queue *string* for emission in the retranslateUi (i18n) section."""
    i18n_strings.append(string)
def i18n_void_func(name):
    """Return a proxy method that records its own call (no return value)
    in the i18n section instead of the normal generated code."""
    def _printer(self, *args):
        rendered = ", ".join(map(as_string, args))
        i18n_print("%s.%s(%s)" % (self, name, rendered))
    return _printer
def i18n_func(name):
    """Return a proxy method that records an assignment of its result to
    *rname* in the i18n section and yields a Literal for that name."""
    def _printer(self, rname, *args):
        rendered = ", ".join(map(as_string, args))
        i18n_print("%s = %s.%s(%s)" % (rname, self, name, rendered))
        return Literal(rname)
    return _printer
def strict_getattr(module, clsname):
    """getattr() that refuses literal proxy classes.

    Raises AttributeError when the attribute resolves to a
    LiteralProxyClass subclass, so callers fall back to other lookups.
    """
    cls = getattr(module, clsname)
    if issubclass(cls, LiteralProxyClass):
        raise AttributeError(cls)
    return cls
class i18n_string(object):
    """A translatable string; str() renders the _translate(...) call using
    the module-level i18n_context as the translation context."""

    def __init__(self, string, disambig):
        self.string = string
        # Disambiguation comment, or None when not given in the .ui file.
        self.disambig = disambig

    def __str__(self):
        if self.disambig is None:
            disambig = "None"
        else:
            disambig = as_string(self.disambig, encode=False)
        return '_translate("%s", %s, %s)' % (i18n_context, as_string(self.string, encode=False), disambig)
# Classes with this flag will be handled as literal values. If functions are
# called on these classes, the literal value changes.
# Example:
# the code
#   >>> QSize(9,10).expandedTo(...)
# will print just that code.
# (Bit flag tested against ProxyClassMember.flags / ProxyClass.flags.)
AS_ARGUMENT = 2

# ATTENTION: currently, classes can either be literal or normal. If a class
# should need both kinds of behaviour, the code has to be changed.
class ProxyClassMember(object):
    """A bound-method stand-in on a proxy object.

    Calling it emits the corresponding line of generated code (or, for
    translatable arguments, queues it for the i18n section).  With the
    AS_ARGUMENT flag set, the call instead mutates the proxy's literal
    name so it can be embedded in a larger expression.
    """

    def __init__(self, proxy, function_name, flags):
        self.proxy = proxy
        self.function_name = function_name
        self.flags = flags

    def __str__(self):
        return "%s.%s" % (self.proxy, self.function_name)

    def __call__(self, *args):
        if self.function_name == 'setProperty':
            # Property names must stay unquoted-by-encoding; values are
            # rendered normally.
            str_args = (as_string(args[0], encode=False), as_string(args[1]))
        else:
            str_args = map(as_string, args)
        func_call = "%s.%s(%s)" % (self.proxy,
                                   self.function_name,
                                   ", ".join(str_args))
        if self.flags & AS_ARGUMENT:
            # Literal mode: fold the call into the proxy's name instead of
            # emitting a statement.
            self.proxy._uic_name = func_call
            return self.proxy
        else:
            needs_translation = False
            for arg in args:
                if isinstance(arg, i18n_string):
                    needs_translation = True
            if needs_translation:
                # Goes into retranslateUi() rather than setupUi().
                i18n_print(func_call)
            else:
                write_code(func_call)
class ProxyClass(ProxyBase):
    """Base proxy: instantiating one writes the constructor call to the
    generated code; unknown attribute access yields a ProxyClassMember so
    arbitrary method calls can be transcribed."""

    flags = 0

    def __init__(self, objectname, is_attribute, args=(), noInstantiation=False):
        if objectname:
            if is_attribute:
                # Generated widgets are attributes of the Ui_* object.
                objectname = "self." + objectname
            self._uic_name = objectname
        else:
            self._uic_name = "Unnamed"
        if not noInstantiation:
            funcall = "%s(%s)" % \
                      (moduleMember(self.module, self.__class__.__name__),
                       ", ".join(map(str, args)))
            if objectname:
                funcall = "%s = %s" % (objectname, funcall)
            write_code(funcall)

    def __str__(self):
        return self._uic_name

    def __getattribute__(self, attribute):
        # Real attributes win; anything unknown becomes a recordable member.
        try:
            return object.__getattribute__(self, attribute)
        except AttributeError:
            return ProxyClassMember(self, attribute, self.flags)
class LiteralProxyClass(ProxyClass):
    """LiteralObject(*args) -> new literal class

    a literal class can be used as argument in a function call

    >>> class Foo(LiteralProxyClass): pass
    >>> str(Foo(1,2,3)) == "Foo(1,2,3)"
    """
    flags = AS_ARGUMENT

    def __init__(self, *args):
        # No code is written here; the instance only carries its own
        # source-text representation.
        self._uic_name = "%s(%s)" % \
                         (moduleMember(self.module, self.__class__.__name__),
                          ", ".join(map(as_string, args)))
class ProxyNamespace(ProxyBase):
    """Marker base for namespace proxies (QtCore, QtGui below)."""
    pass


# These are all the Qt classes used by pyuic4 in their namespaces. If a class
# is missing, the compiler will fail, normally with an AttributeError.
#
# For adding new classes:
#     - utility classes used as literal values do not need to be listed
#       because they are created on the fly as subclasses of LiteralProxyClass
#     - classes which are *not* QWidgets inherit from ProxyClass and they
#       have to be listed explicitly in the correct namespace. These classes
#       are created via a ProxyQObjectCreator
#     - new QWidget-derived classes have to inherit from qtproxies.QWidget
#       If the widget does not need any special methods, it can be listed
#       in _qwidgets
class QtCore(ProxyNamespace):
    """Proxy namespace mirroring the PyQt4.QtCore classes pyuic4 emits."""

    class Qt(ProxyNamespace):
        pass

    ## connectSlotsByName and connect have to be handled as class methods,
    ## otherwise they would be created as LiteralProxyClasses and never be
    ## printed
    class QMetaObject(ProxyClass):
        def connectSlotsByName(cls, *args):
            ProxyClassMember(cls, "connectSlotsByName", 0)(*args)
        connectSlotsByName = classmethod(connectSlotsByName)

    class QObject(ProxyClass):
        def metaObject(self):
            # Minimal stand-in so generated className() calls resolve.
            class _FakeMetaObject(object):
                def className(*args):
                    return self.__class__.__name__
            return _FakeMetaObject()

        def objectName(self):
            # The object name is the last component of the generated name.
            return self._uic_name.split(".")[-1]

        def connect(cls, *args):
            # Handle slots that have names corresponding to Python keywords.
            slot_name = str(args[-1])
            if slot_name.endswith('.raise'):
                args = list(args[:-1])
                args.append(Literal(slot_name + '_'))
            ProxyClassMember(cls, "connect", 0)(*args)
        connect = classmethod(connect)
# These sub-class QWidget but aren't themselves sub-classed.
# (They are generated as plain QWidget proxies by the loop at the bottom of
# the QtGui class body.)
_qwidgets = ("QCalendarWidget", "QDialogButtonBox", "QDockWidget", "QGroupBox",
             "QLineEdit", "QMainWindow", "QMenuBar", "QProgressBar", "QStatusBar",
             "QToolBar", "QWizardPage")
class QtGui(ProxyNamespace):
    """Proxy namespace mirroring the PyQt4.QtGui widget hierarchy.

    The inheritance structure matters: the ui parser does isinstance()
    checks (e.g. against QtGui.QLayout), and several widgets override
    methods whose generated output must be special-cased (i18n, literal
    sub-object access).
    """

    class QApplication(QtCore.QObject):
        def translate(uiname, text, disambig, encoding):
            # Wrap as i18n_string so the call lands in retranslateUi().
            return i18n_string(text or "", disambig)
        translate = staticmethod(translate)

    class QIcon(ProxyClass):
        class fromTheme(ProxyClass): pass

    class QConicalGradient(ProxyClass): pass
    class QLinearGradient(ProxyClass): pass
    class QRadialGradient(ProxyClass): pass
    class QBrush(ProxyClass): pass
    class QPainter(ProxyClass): pass
    class QPalette(ProxyClass): pass
    class QFont(ProxyClass): pass
    class QSpacerItem(ProxyClass): pass
    class QSizePolicy(ProxyClass): pass
    ## QActions inherit from QObject for the metaobject stuff
    ## and the hierarchy has to be correct since we have a
    ## isinstance(x, QtGui.QLayout) call in the ui parser
    class QAction(QtCore.QObject): pass
    class QActionGroup(QtCore.QObject): pass
    class QButtonGroup(QtCore.QObject): pass
    class QLayout(QtCore.QObject): pass
    class QGridLayout(QLayout): pass
    class QBoxLayout(QLayout): pass
    class QHBoxLayout(QBoxLayout): pass
    class QVBoxLayout(QBoxLayout): pass
    class QFormLayout(QLayout): pass

    class QWidget(QtCore.QObject):
        def font(self):
            return Literal("%s.font()" % self)

        def minimumSizeHint(self):
            return Literal("%s.minimumSizeHint()" % self)

        def sizePolicy(self):
            # Literal proxy so method calls fold into one expression.
            sp = LiteralProxyClass()
            sp._uic_name = "%s.sizePolicy()" % self
            return sp

    class QDialog(QWidget): pass
    class QAbstractPrintDialog(QDialog): pass
    class QColorDialog(QDialog): pass
    class QFileDialog(QDialog): pass
    class QFontDialog(QDialog): pass
    class QInputDialog(QDialog): pass
    class QMessageBox(QDialog): pass
    class QPageSetupDialog(QDialog): pass
    class QWizard(QDialog): pass

    class QAbstractSlider(QWidget): pass
    class QDial(QAbstractSlider): pass
    class QScrollBar(QAbstractSlider): pass
    class QSlider(QAbstractSlider): pass

    class QMenu(QWidget):
        def menuAction(self):
            return Literal("%s.menuAction()" % self)

    class QTabWidget(QWidget):
        def addTab(self, *args):
            # Tab titles are translatable: emit setTabText() in the i18n
            # section and add the tab with an empty title instead.
            text = args[-1]
            if isinstance(text, i18n_string):
                i18n_print("%s.setTabText(%s.indexOf(%s), %s)" % \
                           (self._uic_name, self._uic_name, args[0], text))
                args = args[:-1] + ("", )
            ProxyClassMember(self, "addTab", 0)(*args)

        def indexOf(self, page):
            return Literal("%s.indexOf(%s)" % (self, page))

    class QComboBox(QWidget): pass
    class QFontComboBox(QComboBox): pass

    class QAbstractSpinBox(QWidget): pass
    class QDoubleSpinBox(QAbstractSpinBox): pass
    class QSpinBox(QAbstractSpinBox): pass

    class QDateTimeEdit(QAbstractSpinBox): pass
    class QDateEdit(QDateTimeEdit): pass
    class QTimeEdit(QDateTimeEdit): pass

    class QFrame(QWidget): pass
    class QLabel(QFrame): pass
    class QLCDNumber(QFrame): pass
    class QSplitter(QFrame): pass
    class QStackedWidget(QFrame): pass

    class QToolBox(QFrame):
        def addItem(self, *args):
            # Same translatable-title treatment as QTabWidget.addTab().
            text = args[-1]
            if isinstance(text, i18n_string):
                i18n_print("%s.setItemText(%s.indexOf(%s), %s)" % \
                           (self._uic_name, self._uic_name, args[0], text))
                args = args[:-1] + ("", )
            ProxyClassMember(self, "addItem", 0)(*args)

        def indexOf(self, page):
            return Literal("%s.indexOf(%s)" % (self, page))

        def layout(self):
            return QtGui.QLayout("%s.layout()" % self,
                                 False, (), noInstantiation=True)

    class QAbstractScrollArea(QFrame):
        def viewport(self):
            return QtGui.QWidget("%s.viewport()" % self, False, (),
                                 noInstantiation=True)

    class QGraphicsView(QAbstractScrollArea): pass
    class QMdiArea(QAbstractScrollArea): pass
    class QPlainTextEdit(QAbstractScrollArea): pass
    class QScrollArea(QAbstractScrollArea): pass

    class QTextEdit(QAbstractScrollArea): pass
    class QTextBrowser(QTextEdit): pass

    class QAbstractItemView(QAbstractScrollArea): pass
    class QColumnView(QAbstractItemView): pass
    class QHeaderView(QAbstractItemView): pass
    class QListView(QAbstractItemView): pass

    class QTableView(QAbstractItemView):
        def horizontalHeader(self):
            return QtGui.QHeaderView("%s.horizontalHeader()" % self,
                                     False, (), noInstantiation=True)

        def verticalHeader(self):
            return QtGui.QHeaderView("%s.verticalHeader()" % self,
                                     False, (), noInstantiation=True)

    class QTreeView(QAbstractItemView):
        def header(self):
            return QtGui.QHeaderView("%s.header()" % self,
                                     False, (), noInstantiation=True)

    class QListWidgetItem(ProxyClass): pass

    class QListWidget(QListView):
        # Sorting is toggled off while items are filled in retranslateUi().
        setSortingEnabled = i18n_void_func("setSortingEnabled")
        isSortingEnabled = i18n_func("isSortingEnabled")
        item = i18n_func("item")

    class QTableWidgetItem(ProxyClass): pass

    class QTableWidget(QTableView):
        setSortingEnabled = i18n_void_func("setSortingEnabled")
        isSortingEnabled = i18n_func("isSortingEnabled")
        item = i18n_func("item")
        horizontalHeaderItem = i18n_func("horizontalHeaderItem")
        verticalHeaderItem = i18n_func("verticalHeaderItem")

    class QTreeWidgetItem(ProxyClass):
        def child(self, index):
            return QtGui.QTreeWidgetItem("%s.child(%i)" % (self, index),
                                         False, (), noInstantiation=True)

    class QTreeWidget(QTreeView):
        setSortingEnabled = i18n_void_func("setSortingEnabled")
        isSortingEnabled = i18n_func("isSortingEnabled")

        def headerItem(self):
            return QtGui.QWidget("%s.headerItem()" % self, False, (),
                                 noInstantiation=True)

        def topLevelItem(self, index):
            return QtGui.QTreeWidgetItem("%s.topLevelItem(%i)" % (self, index),
                                         False, (), noInstantiation=True)

    class QAbstractButton(QWidget): pass
    class QCheckBox(QAbstractButton): pass
    class QRadioButton(QAbstractButton): pass
    class QToolButton(QAbstractButton): pass

    class QPushButton(QAbstractButton): pass
    class QCommandLinkButton(QPushButton): pass

    # Add all remaining classes.
    for _class in _qwidgets:
        if _class not in locals():
            locals()[_class] = type(_class, (QWidget, ), {})
| gpl-3.0 |
tiborsimko/zenodo | zenodo/factory.py | 2 | 4550 | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo application factories."""
from __future__ import absolute_import
import os
import sys
from invenio_base.app import create_app_factory
from invenio_base.wsgi import create_wsgi_factory, wsgi_proxyfix
from invenio_config import create_conf_loader
from invenio_files_rest.app import Flask
from statsd import StatsClient
from werkzeug.contrib.fixers import HeaderRewriterFix
from wsgi_statsd import StatsdTimingMiddleware
from zenodo.modules.cache.bccache import RedisBytecodeCache
from . import config
env_prefix = 'APP'
invenio_conf_loader = create_conf_loader(config=config, env_prefix=env_prefix)
instance_path = os.getenv(env_prefix + '_INSTANCE_PATH') or \
os.path.join(sys.prefix, 'var', 'instance')
"""Path to instance folder.
Defaults to ``<virtualenv>/var/instance/``. Can be overwritten using the
environment variable ``APP_INSTANCE_PATH``.
"""
static_folder = os.getenv(env_prefix + '_STATIC_FOLDER') or \
os.path.join(instance_path, 'static')
"""Path to static folder.
Defaults to ``<virtualenv>/var/instance/static/``. Can be overwritten
using the environment variable ``APP_STATIC_FOLDER``
"""
def conf_loader(app, **kwargs_config):
    """Zenodo conf loader: legacy slash handling and a tuned Jinja2 setup,
    then the standard Invenio configuration loading."""
    app.url_map.strict_slashes = False  # Legacy support
    jinja_opts = dict(app.jinja_options)
    jinja_opts['cache_size'] = 1000
    jinja_opts['bytecode_cache'] = RedisBytecodeCache(app)
    app.jinja_options = jinja_opts
    invenio_conf_loader(app, **kwargs_config)
def create_wsgi_statsd_factory(mounts_factories):
    """Create WSGI statsd factory.

    Wraps the standard WSGI mount factory so that (a) X-Forwarded-For is
    stripped when behind proxies and (b) request timings are reported to
    statsd when STATSD_* config values are present.
    """
    wsgi_factory = create_wsgi_factory(mounts_factories)

    def create_wsgi(app, **kwargs):
        application = wsgi_factory(app, **kwargs)

        # Remove X-Forwarded-For headers because Flask-Security doesn't know
        # how to deal with them properly. Note REMOTE_ADDR has already been
        # set correctly at this point by the ``wsgi_proxyfix`` factory.
        if app.config.get('WSGI_PROXIES'):
            application = HeaderRewriterFix(
                application,
                remove_headers=['X-Forwarded-For']
            )

        host = app.config.get('STATSD_HOST')
        port = app.config.get('STATSD_PORT', 8125)
        prefix = app.config.get('STATSD_PREFIX')

        # Timing middleware only when statsd is fully configured.
        if host and port and prefix:
            client = StatsClient(prefix=prefix, host=host, port=port)
            return StatsdTimingMiddleware(application, client)
        return application
    return create_wsgi
# Three application factories built from the same entry-point groups:
# CLI/Celery (no WSGI), the REST API, and the UI app (which mounts the API
# under /api and adds proxy/statsd WSGI wrappers).
create_celery = create_app_factory(
    'zenodo',
    config_loader=conf_loader,
    extension_entry_points=['invenio_base.apps'],
    blueprint_entry_points=['invenio_base.blueprints'],
    converter_entry_points=['invenio_base.converters'],
    instance_path=instance_path,
    static_folder=static_folder,
)
"""Create CLI/Celery application."""

create_api = create_app_factory(
    'zenodo',
    config_loader=conf_loader,
    extension_entry_points=['invenio_base.api_apps'],
    blueprint_entry_points=['invenio_base.api_blueprints'],
    converter_entry_points=['invenio_base.api_converters'],
    instance_path=instance_path,
    app_class=Flask,
)
"""Create Flask API application."""

create_app = create_app_factory(
    'zenodo',
    config_loader=conf_loader,
    extension_entry_points=['invenio_base.apps'],
    blueprint_entry_points=['invenio_base.blueprints'],
    converter_entry_points=['invenio_base.converters'],
    wsgi_factory=wsgi_proxyfix(
        create_wsgi_statsd_factory({'/api': create_api})),
    instance_path=instance_path,
    static_folder=static_folder,
)
"""Create Flask UI application."""
| gpl-2.0 |
ianjuma/ubuntu-git-notify | app/github-notif.py | 1 | 2513 | #!/usr/bin/python
import sys
import pynotify
# Capability flags from the Desktop Notifications spec, all assumed
# unsupported until initCaps() queries the running notification server.
capabilities = {'actions': False,
                'body': False,
                'body-hyperlinks': False,
                'body-images': False,
                'body-markup': False,
                'icon-multi': False,
                'icon-static': False,
                'sound': False,
                'image/svg+xml': False,
                'private-synchronous': False,
                'append': False,
                'private-icon-only': False}
def mainWindow():
    """Placeholder for the main window UI (not implemented)."""
    pass


def notificWin():
    """Placeholder for the notification window UI (not implemented)."""
    pass
def initCaps():
caps = pynotify.get_server_caps()
if caps is None:
print "Failed to receive server caps."
sys.exit(1)
for cap in caps:
capabilities[cap] = True
def printCaps ():
info = pynotify.get_server_info ()
print "Name: " + info["name"]
print "Vendor: " + info["vendor"]
print "Version: " + info["version"]
print "Spec. Version: " + info["spec-version"]
caps = pynotify.get_server_caps ()
if caps is None:
print "Failed to receive server caps."
sys.exit(1)
print "Supported capabilities/hints:"
if capabilities['actions']:
print "tactions"
if capabilities['body']:
print "tbody"
if capabilities['body-hyperlinks']:
print "tbody-hyperlinks"
if capabilities['body-images']:
print "tbody-images"
if capabilities['body-markup']:
print "tbody-markup"
if capabilities['icon-multi']:
print "ticon-multi"
if capabilities['icon-static']:
print "ticon-static"
if capabilities['sound']:
print "tsound"
if capabilities['image/svg+xml']:
print "timage/svg+xml"
if capabilities['private-synchronous']:
print "tprivate-synchronous"
if capabilities['append']:
print "tappend"
if capabilities['private-icon-only']:
print "tprivate-icon-only"
print "Notes:"
if info["name"] == "notify-osd":
print "tx- and y-coordinates hints are ignored"
print "texpire-timeout is ignored"
print "tbody-markup is accepted but filtered"
else:
print "tnone"
if __name__ == '__main__':
    # pynotify must be initialised with an app name before any other call.
    if not pynotify.init ("icon-summary"):
        sys.exit(1)

    # call this so we can savely use capabilities dictionary later
    initCaps ()

    # show what's supported
    printCaps ()

    # try the icon-summary case
    n = pynotify.Notification ("WiFi connection lost",
                               "",
                               "notification-network-wireless-disconnected")
    n.show ()
| mit |
blaiseli/p4-phylogenetics | p4/tree_optsim.py | 1 | 19918 | import sys
import string
import types
import cStringIO
import math
import copy
import os
import func
import time
import glob
from var import var
from p4exceptions import P4Error
from node import Node, NodeBranch, NodePart, NodeBranchPart
import nexustoken
from distancematrix import DistanceMatrix
import numpy
import pf
from model import Model
from data import Data
from alignment import Part
import random
if True:
    # The default arguments bind the C free functions and sys at definition
    # time so they remain reachable even during interpreter shutdown, when
    # module globals may already have been cleared.
    def __del__(self, freeTree=pf.p4_freeTree, freeNode=pf.p4_freeNode, mysys=sys):
        #mysys.stdout.write('Tree.__del__() here.\n')
        # mysys.stdout.flush()
        # Refers to nodes, which causes grief.
        if hasattr(self, "splitKeyHash"):
            del(self.splitKeyHash)
        self._data = None
        # self._model = None # model is needed for freeNode()
        # If this is not here, then nodes tend to hang around forever ...
        if 1:
            for n in self.nodes:
                n.wipe()
            # Free the per-node C structs before the tree itself.
            for n in self.nodes:
                if n.cNode:
                    #mysys.stdout.write('  Tree.__del__(), freeing node %i\n' % n.nodeNum)
                    # mysys.stdout.flush()
                    freeNode(n.cNode)
                    n.cNode = None
            for n in self.nodes:
                del(n)
        self.root = None
        self.nodes = None
        if self.cTree:
            if self.doDataPart:
                # NOTE(review): dp_freeTree is not imported in this module's
                # visible header -- confirm where it comes from.
                dp_freeTree(self.cTree)
            else:
                freeTree(self.cTree)
            self.cTree = None
        #mysys.stdout.write('Tree.__del__() finished.\n')
        # mysys.stdout.flush()
    def deleteCStuff(self):
        """Deletes c-pointers from nodes, self, and model, but not the data."""
        # print 'Tree.deleteCStuff() here.'
        for n in self.nodes:
            if n.cNode:
                # print '  about to free node %i, cNode %s' % (n.nodeNum,
                # n.cNode)
                pf.p4_freeNode(n.cNode)
                n.cNode = 0
        if self.cTree:
            # print '  about to free cTree'
            pf.p4_freeTree(self.cTree)
            self.cTree = 0
        # I need to delay deleting the cModel until after deleting the
        # self.cStuff, because free-ing self.cStuff (eg nodes)
        # requires the cModel.
        if self.model and self.model.cModel:
            # print '  about to free cModel'
            pf.p4_freeModel(self.model.cModel)
            self.model.cModel = 0
    def _allocCStuff(self, resetEmpiricalComps=True):
        """Allocate c-memory for self and its nodes."""

        gm = ['Tree._allocCStuff()']

        # Make sure the nodeNums go from zero to N-1
        for i in range(len(self.nodes)):
            if self.nodes[i].nodeNum != i:
                gm.append(
                    "Programming error: Problem with node number %i." % i)
                gm.append("Nodes should be numbered consecutively from zero.")
                raise P4Error(gm)

        self.modelSanityCheck(resetEmpiricalComps=resetEmpiricalComps)
        if not self.data.cData:
            self.data._setCStuff()
        if not self.model.cModel:
            self.model.allocCStuff()

        if var.doDataPart:
            # print 'about to dp_newTree'
            self.cTree = pf.dp_newTree(len(self.nodes), self.preOrder,
                                       self.postOrder, self.data.cData, self.model.cModel)
            self.doDataPart = 1
            if not self.cTree:
                gm.append("Unable to allocate a cTree")
                raise P4Error(gm)
            for n in self.nodes:
                n.doDataPart = 1
                # print 'about to dp_newNode (%i)' % n.nodeNum
                cNode = pf.dp_newNode(
                    n.nodeNum, self.cTree, n.seqNum, n.isLeaf)
                if not cNode:
                    gm.append("Unable to allocate a cNode.")
                    raise P4Error(gm)
                n.cNode = cNode
        else:
            nLeaves = 0
            for n in self.nodes:
                if n.isLeaf:
                    nLeaves += 1
            # NOTE(review): numpy.float is removed in NumPy >= 1.24; this
            # would need plain float there -- confirm the targeted version.
            self.partLikes = numpy.zeros(self.model.nParts, numpy.float)
            self.cTree = pf.p4_newTree(len(self.nodes), nLeaves, self.preOrder,
                                       self.postOrder, self.partLikes, self.data.cData, self.model.cModel)
            if not self.cTree:
                gm.append("Unable to allocate a cTree")
                raise P4Error(gm)
            for i in range(len(self.nodes)):
                n = self.nodes[i]
                if i in self.preOrder:
                    inTree = 1
                else:
                    inTree = 0
                # We include the inTree as a flag for whether the node
                # is in the tree or not.  If the inTree flag is 0,
                # then the node is not actually part of the tree, and so
                # clNeedsUpdating is turned off.
                n.cNode = pf.p4_newNode(
                    n.nodeNum, self.cTree, n.seqNum, n.isLeaf, inTree)
                if not n.cNode:
                    gm.append("Unable to allocate a cNode")
                    raise P4Error(gm)

        # print "finished Tree._allocCStuff()"
    def setCStuff(self):
        """Transfer info about self to c-language stuff.

        Transfer relationships among nodes, the root position, branch
        lengths, model usage info (ie what model attributes apply to what
        nodes), and pre- and post-order."""

        #gm = ['Tree.setCStuff()']

        # Set node relations, br.len, root, node modelNums, preOrder?,
        # postOrder

        # Set relations- parent, leftChild, sibling.  Here's the code for
        # pf.p4_setRelative(int theCNode, int relation, int relNum)
        # parent- relation = 0, leftChild- relation = 1, sibling- relation
        # = 2
        for n in self.nodes:
            if n.parent:
                pf.p4_setNodeRelation(n.cNode, 0, n.parent.nodeNum)
            else:
                pf.p4_setNodeRelation(n.cNode, 0, -1)  # "-1" gives NULL

            if n.leftChild:
                pf.p4_setNodeRelation(n.cNode, 1, n.leftChild.nodeNum)
            else:
                pf.p4_setNodeRelation(n.cNode, 1, -1)

            if n.sibling:
                pf.p4_setNodeRelation(n.cNode, 2, n.sibling.nodeNum)
            else:
                pf.p4_setNodeRelation(n.cNode, 2, -1)

        # Root
        pf.p4_setTreeRoot(self.cTree, self.root.cNode)

        # br.lens (the root has no branch, hence iterNodesNoRoot)
        for n in self.iterNodesNoRoot():
            #pf.p4_setBrLen(n.cNode, n.br.len, n.br.lenChanged)
            pf.p4_setBrLen(n.cNode, n.br.len)

        # Model usage info -- only needed for heterogeneous models.
        if self.model.isHet:
            for pNum in range(self.model.nParts):
                if self.model.parts[pNum].isHet:
                    # print "setCStuff().  about to setCompNum"
                    for n in self.nodes:
                        pf.p4_setCompNum(n.cNode, pNum, n.parts[pNum].compNum)
                        if n != self.root:
                            pf.p4_setRMatrixNum(
                                n.cNode, pNum, n.br.parts[pNum].rMatrixNum)
                            pf.p4_setGdasrvNum(
                                n.cNode, pNum, n.br.parts[pNum].gdasrvNum)

        # pre- and postOrder
        if not self.preAndPostOrderAreValid:
            self.setPreAndPostOrder()

        # for i in range(len(self.nodes)):
        #    pf.p4_setPreAndPostOrder(self.cTree, i, self.preOrder[i],
        #    self.postOrder[i]) # no longer needed
        # print "finished Tree.setCStuff()"
    def _commonCStuff(self, resetEmpiricalComps=True):
        """Allocate and set c-stuff, and setPrams.

        Shared preamble for likelihood calculation, optimization and
        simulation; raises P4Error when no data is attached.
        """
        if not self.data:
            if self.name:
                gm = ["Tree %s (_commonCStuff)" % self.name]
            else:
                gm = ["Tree (_commonCStuff)"]
            gm.append(
                "This tree has no data attached.  Before doing an optimization, likelihood")
            gm.append(
                "calculation, or simulation, you need to do something like this:")
            gm.append("        theTree.data = theData")
            raise P4Error(gm)

        # print "self.cTree = %s" % self.cTree
        if not self.cTree:
            # This calls self.modelSanityCheck(), which calls
            # self.setEmpiricalComps()
            self._allocCStuff(resetEmpiricalComps=resetEmpiricalComps)
        # print "About to self.model.setCStuff()"
        self.model.setCStuff()
        # print "About to self.setCStuff()"
        self.setCStuff()
        # print "about to p4_setPrams()..."
        pf.p4_setPrams(self.cTree, -1)  # "-1" means do all parts
    def calcLogLike(self, verbose=1, resetEmpiricalComps=True):
        """Calculate the likelihood of the tree, without optimization.

        The result is stored in self.logLike (and printed when verbose).
        """

        self._commonCStuff(resetEmpiricalComps=resetEmpiricalComps)
        # print "about to p4_treeLogLike()..."
        # second arg is getSiteLikes
        self.logLike = pf.p4_treeLogLike(self.cTree, 0)
        if verbose:
            print "Tree.calcLogLike(). %f" % self.logLike
    def optLogLike(self, verbose=1, newtAndBrentPowell=1, allBrentPowell=0, simplex=0):
        """Calculate the likelihood of the tree, with optimization.

        There are 3 optimization methods-- choose one.  I've made
        'newtAndBrentPowell' the default, as it is fast and seems to be
        working.  The 'allBrentPowell' optimizer used to be the default,
        as it seems to be the most robust, although it is slow.  It would
        be good for checking important calculations.  The simplex
        optimizer is the slowest, and will sometimes find better optima
        for difficult data, but often fails to optimize (with no
        warning)."""

        if verbose:
            # NOTE(review): time.clock() measures different things per
            # platform and is removed in Python 3.8 -- fine for this
            # Python 2 codebase, but worth confirming on a port.
            theStartTime = time.clock()
        self._commonCStuff()

        # We want only one opt method.
        if newtAndBrentPowell:
            newtAndBrentPowell = 1
        if allBrentPowell:
            allBrentPowell = 1
        if simplex:
            simplex = 1
        if (newtAndBrentPowell + allBrentPowell + simplex) != 1:
            gm = ['Tree.optLogLike()']
            gm.append("Choose 1 opt method.")
            raise P4Error(gm)

        # Do the opt.
        if allBrentPowell:
            pf.p4_allBrentPowellOptimize(self.cTree)
        elif simplex:
            pf.p4_simplexOptimize(self.cTree, self, Tree.simplexDump)
        else:
            pf.p4_newtSetup(self.cTree)
            pf.p4_newtAndBrentPowellOpt(self.cTree)

        # second arg is getSiteLikes
        self.logLike = pf.p4_treeLogLike(self.cTree, 0)

        # get the brLens back from C into the Python nodes
        brLens = pf.p4_getBrLens(self.cTree)
        for n in self.iterNodesNoRoot():
            n.br.len = brLens[n.nodeNum]

        # get the other free prams
        prams = pf.p4_getFreePrams(self.cTree)
        self.model.restoreFreePrams(prams)

        if verbose:
            print "optLogLike = %f" % self.logLike
            theEndTime = time.clock()
            print "cpu time %s seconds." % (theEndTime - theStartTime)
    def optTest(self):
        """Ad-hoc timing harness: repeatedly recompute the log likelihood,
        optionally re-transferring state to/from C each round (doXfer)."""
        self._commonCStuff()
        theStartTime = time.clock()
        doXfer = 0
        for i in range(1):
            if doXfer:
                self.model.setCStuff()
                self.setCStuff()
            pf.p4_setPrams(self.cTree, -1)
            self.logLike = pf.p4_treeLogLike(self.cTree, 0)

            if doXfer:
                # get the brLens
                brLens = pf.p4_getBrLens(self.cTree)
                for i in range(len(self.nodes)):
                    n = self.nodes[i]
                    if n != self.root:
                        n.br.len = brLens[i]

                # get the other free prams
                prams = pf.p4_getFreePrams(self.cTree)
                self.model.restoreFreePrams(prams)

        print "time %s seconds." % (time.clock() - theStartTime)
    def simulate(self, calculatePatterns=True, resetSequences=True, resetNexusSetsConstantMask=True, refTree=None):
        """Simulate into the attached data.
        The tree self needs to have a data and model attached.
        This week, generation of random numbers uses the C language random
        function, which is in stdlib on Linux. It will use the same
        series of random numbers over and over, unless you tell it
        otherwise. That means that (unless you tell it otherwise) it will
        generate the same simulated data if you run it twice. To reset
        the randomizer, you can use func.reseedCRandomizer(), eg
        func.reseedCRandomizer(os.getpid())
        The usual way to simulate does not use reference data. An unsual way to
        simulate comes from (inspired by?) PhyloBayes, where the simulation is
        conditional on the original data. It uses conditional likelihoods of
        that reference data at the root. To turn that on, set refTree to the
        tree+model+data that you would like to use. Calculate a likelihood with
        that refTree before using it, so that conditional likelihoods are set.
        The tree and model for refTree should be identical to the tree and model
        for self.
        Args:
            calculatePatterns (bool): True by default. Whether to "compress" the
                newly simulated data to facilitate a faster likelihood
                calculation.
            resetSequences (bool): True by default. whether to bring the
                simulated sequences in C back into Python
            resetNexusSetsConstantMask (bool): True by default. When
                simulations are made, the constant mask in any associated nexus
                sets will get out of sync. Setting this to True makes a new
                mask and sets it.
            refTree (Tree): None by default. If supplied, a tree+model+data
                which has had its likelihood calculated, where the tree+model is
                identical to self.
        """
        if refTree:
            # Validate the reference tree up front, and make sure its C-side
            # structures exist (a likelihood calc allocates them).
            from tree import Tree
            assert isinstance(refTree, Tree)
            assert refTree.model
            assert refTree.data
            if not refTree.cTree:
                refTree.calcLogLike(verbose=False)
            assert refTree.model.cModel
            assert refTree.data.cData
        self._commonCStuff()
        if refTree:
            # The refTree must describe the same taxa/partition layout as
            # self, but must be a distinct data object.
            assert refTree.data.cData != self.data.cData
            assert refTree.data.nParts == self.data.nParts
            assert refTree.data.nTax == self.data.nTax
            for i in range(self.data.nTax):
                assert refTree.data.taxNames[i] == self.data.taxNames[i]
            assert len(refTree.data.alignments) == len(self.data.alignments)
            assert refTree.logLike, "Do a likelihood calculation with the refTree before using it here."
            # could have some more checks ...
        # If there is a NexusSets object attached to any of the alignments
        # in the Data, the constant sites mask at least will become out of sync, but we can't just
        # delete the whole nexusSets object, as they define what the parts are.
        # for a in self.data.alignments:
        #
        #     if a.nexusSets:
        #         a.nexusSets = None
        # Probably better to do something like this
        #     a.nexusSets.constant.mask = self.constantMask()
        # at the end.
        # print "About to pf.p4_simulate(self.cTree)"
        if refTree:
            pf.p4_simulate(self.cTree, refTree.cTree)
        else:
            pf.p4_simulate(self.cTree, 0)
        if calculatePatterns:
            # Compress the freshly simulated data into site patterns.
            for p in self.data.parts:
                pf.makePatterns(p.cPart)
                pf.setGlobalInvarSitesVec(p.cPart)
        if resetSequences:
            self.data.resetSequencesFromParts()
            if resetNexusSetsConstantMask:
                # Re-derive the constant-sites mask now that sequences changed.
                for a in self.data.alignments:
                    if a.nexusSets:
                        a.nexusSets.constant.mask = a.constantMask()
        else:
            if resetNexusSetsConstantMask:
                gm = ['Tree.simulate().']
                gm.append(
                    "resetSequences is not set, but resetNexusSetsConstantMask is set,")
                gm.append("which is probably not going to work as you want.")
                raise P4Error(gm)
    def getSiteLikes(self):
        """Likelihoods, not log likes. Placed in self.siteLikes, a list.

        Computes the tree log likelihood with per-site likelihood collection
        enabled on the C side, then gathers the per-part site likelihoods
        into one flat Python list.
        """
        self._commonCStuff()
        # second arg is getSiteLikes
        self.logLike = pf.p4_treeLogLike(self.cTree, 1)
        self.siteLikes = []
        for p in self.data.parts:
            self.siteLikes += pf.getSiteLikes(p.cPart)
# def getWinningGammaCats(self):
    def getSiteRates(self):
        """Get posterior mean site rate, and gamma category.
        This says two things --
        1. The posterior mean site rate, calculated like PAML
        2. Which GDASRV category contributes most to the likelihood.
        The posterior mean site rate calculation requires that there be
        only one gdasrv over the tree, which will usually be the case.
        For placement in categories, if its a tie score, then it is placed
        in the first one.
        The list of site rates, and the list of categories, both with one
        value for each site, are put into separate numpy arrays, returned
        as a list, ie [siteRatesArray, categoriesArray]
        There is one of these lists for each data partition, and the results as a
        whole are returned as a list. So if you only have one data
        partition, then you get a 1-item list, and that single item is a list with 2
        numpy arrays. Ie [[siteRatesArray, categoriesArray]]
        If nGammaCat for a partition is 1, it will give that partition an
        array of ones for the site rates and zeros for the categories.
        """
        self._commonCStuff()
        # second arg is getSiteLikes
        self.logLike = pf.p4_treeLogLike(self.cTree, 0)
        #self.winningGammaCats = []
        # for p in self.data.parts:
        #     self.winningGammaCats += pf.getWinningGammaCats(p.cPart)
        results = []
        # First pass: validate -- at most one gdasrv per partition.
        for partNum in range(len(self.data.parts)):
            if len(self.model.parts[partNum].gdasrvs) > 1:
                gm = ['Tree.getSiteRates()']
                gm.append("Part %i has %i gdasrvs.  Maximum 1 allowed." % (
                    partNum, len(self.model.parts[partNum].gdasrvs)))
                raise P4Error(gm)
        # Second pass: compute rates and categories per partition.
        for partNum in range(len(self.data.parts)):
            p = self.data.parts[partNum]
            if self.model.parts[partNum].nGammaCat == 1:
                # Degenerate case: a single rate category means unit rates.
                # NOTE(review): numpy.float is a deprecated alias of the
                # builtin float in modern NumPy -- TODO confirm/modernize.
                siteRates = numpy.ones(p.nChar, numpy.float)
                gammaCats = numpy.zeros(p.nChar, numpy.int32)
            elif self.model.parts[partNum].nGammaCat > 1:
                siteRates = numpy.zeros(p.nChar, numpy.float)
                gammaCats = numpy.zeros(p.nChar, numpy.int32)
                work = numpy.zeros(
                    self.model.parts[partNum].nGammaCat, numpy.float)
                # Pre-fill with -1 so unset categories are detectable.
                for charNum in range(p.nChar):
                    gammaCats[charNum] = -1
                #pf.getWinningGammaCats(self.cTree, p.cPart, i, gammaCats, work)
                # Fills siteRates and gammaCats in place on the C side.
                pf.getSiteRates(
                    self.cTree, p.cPart, partNum, siteRates, gammaCats, work)
                # print siteRates
                # print gammaCats
                # print work
                # NOTE(review): dead debug code -- `winningGammaCats` below is
                # undefined; this `if 0:` block would crash if enabled.
                if 0:
                    counts = numpy.zeros(
                        self.model.parts[partNum].nGammaCat, numpy.int32)
                    for charNum in range(p.nChar):
                        counts[winningGammaCats[charNum]] += 1
                    print counts
            else:
                raise P4Error("This should not happen.")
            results.append([siteRates, gammaCats])
        return results
| gpl-2.0 |
Distrotech/intellij-community | python/lib/Lib/site-packages/django/contrib/localflavor/fr/forms.py | 309 | 1747 | """
FR-specific Form helpers
"""
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _
import re
# Matches a local French phone number: a leading '0', then digit pairs that
# may be separated by a single space or dot, e.g. '0X XX XX XX XX'.
phone_digits_re = re.compile(r'^0\d(\s|\.)?(\d{2}(\s|\.)?){3}\d{2}$')
class FRZipCodeField(RegexField):
    """A form field validating a French postal code: exactly five digits."""
    default_error_messages = {
        'invalid': _('Enter a zip code in the format XXXXX.'),
    }
    def __init__(self, *args, **kwargs):
        # The 5-digit pattern fully determines the length, so no
        # min/max length constraints are needed.
        super(FRZipCodeField, self).__init__(r'^\d{5}$',
            max_length=None, min_length=None, *args, **kwargs)
class FRPhoneNumberField(Field):
    """
    Validate local French phone number (not international ones)
    The correct format is '0X XX XX XX XX'.
    '0X.XX.XX.XX.XX' and '0XXXXXXXXX' validate but are corrected to
    '0X XX XX XX XX'.
    """
    default_error_messages = {
        'invalid': _('Phone numbers must be in 0X XX XX XX XX format.'),
    }
    def clean(self, value):
        """Normalise ``value`` to '0X XX XX XX XX'.

        Returns an empty unicode string for empty values; raises
        ``ValidationError`` when the digits do not form a valid local
        French phone number.
        """
        super(FRPhoneNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        # Strip dot/whitespace separators before matching.  Raw string
        # fixes the invalid escape sequences ('\.', '\s') that are
        # deprecated in non-raw string literals.
        value = re.sub(r'(\.|\s)', '', smart_unicode(value))
        m = phone_digits_re.search(value)
        if m:
            # Re-insert a single space between each pair of digits.
            return u'%s %s %s %s %s' % (value[0:2], value[2:4], value[4:6], value[6:8], value[8:10])
        raise ValidationError(self.error_messages['invalid'])
class FRDepartmentSelect(Select):
    """
    A Select widget that uses a list of FR departments as its choices.
    """
    def __init__(self, attrs=None):
        # Imported lazily so the (large) department list is only loaded
        # when the widget is actually instantiated.
        from fr_department import DEPARTMENT_ASCII_CHOICES
        super(FRDepartmentSelect, self).__init__(attrs, choices=DEPARTMENT_ASCII_CHOICES)
| apache-2.0 |
mihailignatenko/erp | openerp/report/print_fnc.py | 458 | 1318 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
# Registry of named formatting helpers available to reports.
functions = {
    'today': lambda x: time.strftime('%d/%m/%Y', time.localtime()).decode('latin1')
}
#
# TODO: call an object internal function too
#
def print_fnc(fnc, arg):
    """Invoke the registered helper named ``fnc`` with ``arg``.

    Unknown helper names yield an empty string rather than an error.
    """
    handler = functions.get(fnc)
    if handler is None:
        return ''
    return handler(arg)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
tracierenea/gnuradio | docs/sphinx/hieroglyph/hieroglyph.py | 72 | 12634 | from __future__ import print_function
import re
from errors import HieroglyphError
from nodes import (Node, Raises, Except, Note, Warning, Returns, Arg,
ensure_terminal_blank)
__author__ = 'Robert Smallshire'
def parse_hieroglyph_text(lines):
    '''Parse text in hieroglyph format and return a reStructuredText equivalent
    Args:
        lines: A sequence of strings representing the lines of a single
            docstring as read from the source by Sphinx. This string should be
            in a format that can be parsed by hieroglyph.
    Returns:
        A list of lines containing the transformed docstring as
        reStructuredText as produced by hieroglyph.
    Raises:
        RuntimeError: If the docstring cannot be parsed.
    '''
    # Normalise the raw lines: measure indents, propagate indent onto blank
    # lines, and fix up the (usually unindented) opening paragraph.
    prepared = first_paragraph_indent(pad_blank_lines(unindent(lines)))
    # Group into paragraphs, build the parse tree, then the semantic tree.
    semantic_tree = extract_structure(group_paragraphs(gather_lines(prepared)))
    rst_lines = semantic_tree.render_rst()
    ensure_terminal_blank(rst_lines)
    return rst_lines
def unindent(lines):
    '''Split each line into its indent size and its unindented text.
    Args:
        lines: A sequence of strings representing the lines of text in a
            docstring.
    Returns:
        A list of 2-tuples, one per input line: the first element is the
        number of leading whitespace characters, the second is the line
        with that leading whitespace removed.
    '''
    stripped = [line.lstrip() for line in lines]
    return [(len(raw) - len(text), text)
            for raw, text in zip(lines, stripped)]
def pad_blank_lines(indent_texts):
    '''Give blank (empty) lines the same indent level as the preceding line.
    Args:
        indent_texts: An iterable of (indent, text) tuples.
    Returns:
        A list of (indent, text) tuples where every empty-text entry has
        inherited the indent of the nearest preceding non-empty line
        (or 0 if there is none).
    '''
    padded = []
    last_indent = 0
    for indent, text in indent_texts:
        if text:
            last_indent = indent
        padded.append((last_indent, text))
    return padded
def extract_structure(parse_tree):
    '''Create an Abstract Syntax Tree representing the semantics of a parse tree.
    Args:
        parse_tree: The root Node of a parse tree as produced by
            group_paragraphs().
    Returns:
        A Node which is the root of an Abstract Syntax Tree representing the
        docstring.
    Raises:
        HieroglyphError: In the event that the parse tree cannot be understood.
    '''
    # The conversion is a straight recursive walk from the root.
    return convert_node(parse_tree)
def convert_node(node):
    '''Recursively convert one parse-tree node into its AST equivalent.

    The first line of the node selects the specialised converter; nodes
    that match no known section header are converted generically.
    '''
    # The root node (no indent, no lines) only has children to convert.
    if node.indent == 0 and len(node.lines) == 0:
        return convert_children(node)
    dispatch = (
        ('Args:', convert_args),
        ('Returns:', convert_returns),
        ('Raises:', convert_raises),
        ('Note:', convert_note),
        ('Warning:', convert_warning),
    )
    first_line = node.lines[0]
    for prefix, converter in dispatch:
        if first_line.startswith(prefix):
            return converter(node)
    # Generic paragraph: convert children and carry over text and indent.
    converted = convert_children(node)
    converted.lines = node.lines
    converted.indent = node.indent
    return converted
def convert_children(node):
    '''Return a fresh Node whose children are the converted children of ``node``.'''
    container = Node()
    container.children = [convert_node(child) for child in node.children]
    return container
# Matches an argument line such as "name (type): text" or "**kwargs: text";
# groups: 1 = name (with optional */**), 3 = optional type, 4 = trailing text.
ARG_REGEX = re.compile(r'(\*{0,2}\w+)(\s+\((\w+)\))?\s*:\s*(.*)')
def append_child_to_args_group_node(child, group_node, indent):
    '''Parse one argument paragraph and append the resulting Arg node(s).

    Each non-empty line of ``child`` must match ARG_REGEX; an Arg node is
    appended to ``group_node`` per matching line.  Any sub-paragraphs of
    ``child`` are re-parented under the last Arg created.

    Raises:
        HieroglyphError: If a non-empty line is not valid argument syntax.
    '''
    arg = None
    non_empty_lines = (line for line in child.lines if line)
    for line in non_empty_lines:
        m = ARG_REGEX.match(line)
        if m is None:
            raise HieroglyphError("Invalid hieroglyph argument syntax: {0}".format(line))
        param_name = m.group(1)
        param_type = m.group(3)
        param_text = m.group(4)
        arg = Arg(indent, child.indent, param_name)
        group_node.children.append(arg)
        arg.type = param_type
        # Text following the colon becomes the Arg's first description node.
        if param_text is not None:
            arg.children.append(Node(indent, [param_text], arg))
    if arg is not None:
        # Attach continuation paragraphs beneath the last description node
        # (or directly beneath the Arg when it had no description text).
        last_child = arg.children[-1] if len(arg.children) != 0 else arg
        for grandchild in child.children:
            last_child.children.append(grandchild)
def convert_args(node):
    '''Convert an "Args:" parse-tree node into a group Node of Arg children.'''
    assert node.lines[0].startswith('Args:')
    group_node = Node()
    for child in node.children:
        append_child_to_args_group_node(child, group_node, node.indent)
    return group_node
def convert_returns(node):
    '''Convert a "Returns:" parse-tree node into a Returns AST node.'''
    assert node.lines[0].startswith('Returns:')
    returns = Returns(node.indent)
    # Keep any text that follows "Returns:" on the same line.
    returns.line = node.lines[0][8:].strip()
    returns.children = node.children
    return returns
def convert_note(node):
    '''Convert a "Note:" parse-tree node into a Note AST node.'''
    assert node.lines[0].startswith('Note:')
    note = Note(node.indent)
    # Keep any text that follows "Note:" on the same line.
    note.line = node.lines[0][5:].strip()
    note.children = node.children
    return note
def convert_warning(node):
    '''Convert a "Warning:" parse-tree node into a Warning AST node.'''
    assert node.lines[0].startswith('Warning:')
    warning = Warning(node.indent)
    # Keep any text that follows "Warning:" on the same line.
    warning.line = node.lines[0][8:].strip()
    warning.children = node.children
    return warning
def convert_raises(node):
    '''Convert a "Raises:" parse-tree node into a Raises AST node.'''
    assert node.lines[0].startswith('Raises:')
    group_node = Raises(node.indent)
    for child in node.children:
        append_child_to_raise_node(child, group_node)
    return group_node
# Matches an exception line "ExceptionType: description";
# group 1 is the type name, group 2 the description text.
RAISE_REGEX = re.compile(r'(\w+)\s*:\s*(.*)')
def extract_exception_type_and_text(line):
    '''Split a "Type: text" exception line into a (text, type) pair.

    Raises:
        HieroglyphError: If the line is not valid exception syntax.
    '''
    match = RAISE_REGEX.match(line)
    if match is None:
        raise HieroglyphError("Invalid hieroglyph exception syntax: {0}".format(line))
    description, exception_type = match.group(2), match.group(1)
    return (description, exception_type)
def append_child_to_raise_node(child, group_node):
    '''Parse one exception paragraph and append Except node(s) to ``group_node``.

    Each non-empty line of ``child`` is split into an exception type and
    description.  Sub-paragraphs of ``child`` are re-parented under the
    last Except created.
    '''
    exception = None
    non_empty_lines = (line for line in child.lines if line)
    for line in non_empty_lines:
        exception_text, exception_type = extract_exception_type_and_text(line)
        exception = Except(child.indent, exception_type)
        group_node.children.append(exception)  # TODO: Could use parent here.
        # Text after the colon becomes the Except's first description node.
        if exception_text is not None:
            exception.children.append( Node(child.indent,
                                      [exception_text], exception))
    if exception is not None:
        # Attach continuation paragraphs beneath the last description node
        # (or directly beneath the Except when it had no description text).
        last_child = exception.children[-1] if len(exception.children) != 0 else exception
        for grandchild in child.children:
            last_child.children.append(grandchild)
def group_paragraphs(indent_paragraphs):
    '''
    Group paragraphs so that more indented paragraphs become children of less
    indented paragraphs.

    Args:
        indent_paragraphs: An iterable of (indent, lines) tuples in
            document order.
    Returns:
        The root Node of the resulting parse tree.
    '''
    # The tree consists of tuples of the form (indent, [children]) where the
    # children may be strings or other tuples
    root = Node(0, [], None)
    current_node = root
    previous_indent = -1
    # Compare each paragraph's indent with its predecessor to decide whether
    # it nests deeper (child), stays level (sibling), or dedents (uncle).
    for indent, lines in indent_paragraphs:
        if indent > previous_indent:
            current_node = create_child_node(current_node, indent, lines)
        elif indent == previous_indent:
            current_node = create_sibling_node(current_node, indent, lines)
        elif indent < previous_indent:
            current_node = create_uncle_node(current_node, indent, lines)
        previous_indent = indent
    return root
def create_sibling_node(current_node, indent, lines):
    '''Attach a new Node as a sibling of ``current_node`` and return it.'''
    sibling = Node(indent, lines, current_node.parent)
    current_node.parent.add_child(sibling)
    current_node = sibling
    return current_node
def create_child_node(current_node, indent, lines):
    '''Attach a new Node as a child of ``current_node`` and return it.'''
    child = Node(indent, lines, current_node)
    current_node.add_child(child)
    current_node = child
    return current_node
def create_uncle_node(current_node, indent, lines):
    '''Attach a new Node at a shallower indent level and return it.

    Walks up from ``current_node`` to the closest ancestor with an indent
    smaller than ``indent`` (stopping at the root), and adds the new node
    as that ancestor's child.
    '''
    ancestor = current_node
    while ancestor.indent >= indent:
        if ancestor.parent is None:
            break
        ancestor = ancestor.parent
    uncle = Node(indent, lines, ancestor)
    ancestor.add_child(uncle)
    current_node = uncle
    return current_node
def gather_lines(indent_lines):
    '''Split the list of (int, str) tuples into a list of (int, [str]) tuples
    to group the lines into paragraphs of consistent indent.
    '''
    by_indent = gather_lines_by_indent(indent_lines)
    separated = split_separated_lines(by_indent)
    return remove_empty_paragraphs(separated)
def gather_lines_by_indent(indent_lines):
    '''Group consecutive lines sharing the same indent into paragraphs.

    Args:
        indent_lines: An iterable of (indent, line) tuples.
    Returns:
        A list of (indent, [line, ...]) tuples; a new paragraph starts
        whenever the indent differs from the previous line's indent.
    '''
    paragraphs = []
    last_indent = None
    for indent, line in indent_lines:
        if paragraphs and indent == last_indent:
            paragraphs[-1][1].append(line)
        else:
            paragraphs.append((indent, [line]))
        last_indent = indent
    return paragraphs
def split_separated_lines(indent_paragraphs):
    '''Split paragraphs at interior blank lines.

    A blank line in the middle of a paragraph terminates it (the blank
    line itself stays at the end of the terminated paragraph) and a new
    paragraph at the same indent is started.  Leading and trailing lines
    are never treated as separators.
    '''
    output = []
    for indent, lines in indent_paragraphs:
        output.append((indent, []))
        count = len(lines)
        if count > 0:
            output[-1][1].append(lines[0])
        if count > 2:
            for line in lines[1:-1]:
                output[-1][1].append(line)
                if len(line) == 0:
                    output.append((indent, []))
        if count > 1:
            output[-1][1].append(lines[-1])
    return output
def remove_empty_paragraphs(indent_paragraphs):
    '''Drop paragraphs whose line list is empty.'''
    return [pair for pair in indent_paragraphs if pair[1]]
def first_paragraph_indent(indent_texts):
    '''Fix the indentation on the first paragraph.
    This occurs because the first line of a multi-line docstring following the
    opening quote usually has no indent.
    Args:
        indent_texts: The lines of the docstring as an iterable over 2-tuples
            each containing an integer indent level as the first element and
            the text as the second element.
    Return:
        A list of 2-tuples, each containing an integer indent level as the
        first element and the text as the second element.
    '''
    opening_indent = determine_opening_indent(indent_texts)
    fixed = []
    remaining = iter(indent_texts)
    # Re-indent leading zero-indent lines to the opening indent; stop at
    # the first line that already carries an indent of its own.
    for indent, text in remaining:
        if indent != 0:
            fixed.append((indent, text))
            break
        fixed.append((opening_indent, text))
    # Everything after the first indented line passes through unchanged.
    fixed.extend(remaining)
    return fixed
def determine_opening_indent(indent_texts):
    '''Determine the opening indent level for a docstring.
    The opening indent level is the indent level is the first non-zero indent
    level of a non-empty line in the docstring.
    Args:
        indent_texts: The lines of the docstring as an iterable over 2-tuples
            each containing an integer indent level as the first element and
            the text as the second element.
    Returns:
        The opening indent level as an integer.
    '''
    if not indent_texts:
        return 0
    first_indent = indent_texts[0][0]
    if len(indent_texts) == 1:
        return first_indent
    # With two or more lines, the second line decides -- unless it is
    # blank, in which case fall back to the first line's indent.
    second_indent, second_text = indent_texts[1]
    if not second_text:
        return first_indent
    return second_indent
def rewrite_autodoc(app, what, name, obj, options, lines):
    '''Convert lines from Hieroglyph to Sphinx format.
    The function to be called by the Sphinx autodoc extension when autodoc
    has read and processed a docstring. This function modifies its
    ``lines`` argument *in place* replacing Hieroglyph syntax input into
    Sphinx reStructuredText output.
    Args:
        app: The Sphinx application object.
        what: The type of object which the docstring belongs to. One of
            'module', 'class', 'exception', 'function', 'method', 'attribute'
        name: The fully qualified name of the object.
        obj: The object itself.
        options: The options given to the directive. An object with attributes
            ``inherited_members``, ``undoc_members``, ``show_inheritance`` and
            ``noindex`` that are ``True`` if the flag option of the same name
            was given to the auto directive.
        lines: The lines of the docstring. Will be modified *in place*.
    '''
    # Slice-assign so the caller's list object is updated, as autodoc requires.
    lines[:] = parse_hieroglyph_text(lines)
def setup(app):
    # Sphinx extension entry point: hook the docstring rewriter into autodoc.
    app.connect('autodoc-process-docstring', rewrite_autodoc)
| gpl-3.0 |
jkettleb/iris | lib/iris/aux_factory.py | 1 | 70417 | # (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Definitions of derived coordinates.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
from abc import ABCMeta, abstractmethod, abstractproperty
import warnings
import zlib
import numpy as np
from iris._cube_coord_common import CFVariableMixin
import iris.coords
import iris.unit
import iris.util
class LazyArray(object):
    """
    A minimal stand-in for a NumPy array whose contents are computed
    only on first use.

    It exposes the :meth:`view()` and :meth:`reshape()` methods so it can
    substitute for a real NumPy array in some circumstances.  Whichever
    of those methods is invoked first triggers the computation; the
    resulting array is then cached for all later accesses.
    """
    def __init__(self, shape, func, dtype=None):
        """
        Args:

        * shape (tuple):
            The shape of the array which will be created.
        * func:
            The function which will be called to supply the real array.

        Kwargs:

        * dtype (np.dtype):
            The numpy dtype of the array which will be created.
            Defaults to None to signify the dtype is unknown.
        """
        self.shape = tuple(shape)
        self._func = func
        self.dtype = dtype
        self._array = None

    def __repr__(self):
        return '<LazyArray(shape={}, dtype={!r})>'.format(self.shape,
                                                          self.dtype)

    def _cached_array(self):
        # Compute the real array exactly once, then drop the factory
        # function so anything it captured can be garbage collected.
        array = self._array
        if array is None:
            array = np.asarray(self._func())
            self._array = array
            del self._func
        return array

    def reshape(self, *args, **kwargs):
        """
        Returns a view of this array with the given shape.

        See :meth:`numpy.ndarray.reshape()` for argument details.
        """
        return self._cached_array().reshape(*args, **kwargs)

    def to_xml_attr(self):
        """
        Returns a string describing this array, suitable for use in CML.
        """
        # A C-contiguous copy gives a deterministic byte order for the CRC.
        crc = zlib.crc32(np.array(self._cached_array(), order='C'))
        crc &= 0xffffffff
        return 'LazyArray(shape={}, checksum=0x{:08x})'.format(self.shape, crc)

    def view(self, *args, **kwargs):
        """
        Returns a view of this array.

        See :meth:`numpy.ndarray.view()` for argument details.
        """
        return self._cached_array().view(*args, **kwargs)
class AuxCoordFactory(CFVariableMixin):
    """
    Represents a "factory" which can manufacture an additional auxiliary
    coordinate on demand, by combining the values of other coordinates.
    Each concrete subclass represents a specific formula for deriving
    values from other coordinates.
    The `standard_name`, `long_name`, `var_name`, `units`, `attributes` and
    `coord_system` of the factory are used to set the corresponding
    properties of the resulting auxiliary coordinates.
    """
    # NOTE(review): Python 2 style abstract-class declaration; under
    # Python 3 this attribute is ignored (metaclass= would be needed).
    __metaclass__ = ABCMeta
    def __init__(self):
        #: Descriptive name of the coordinate made by the factory
        self.long_name = None
        #: CF variable name of the coordinate made by the factory
        self.var_name = None
        #: Coordinate system (if any) of the coordinate made by the factory
        self.coord_system = None
    @abstractproperty
    def dependencies(self):
        """
        Returns a dictionary mapping from constructor argument names to
        the corresponding coordinates.
        """
    def _as_defn(self):
        # Summarise this factory's metadata as a CoordDefn, matching the
        # definition of the coordinate it would manufacture.
        defn = iris.coords.CoordDefn(self.standard_name, self.long_name,
                                     self.var_name, self.units,
                                     self.attributes, self.coord_system)
        return defn
    @abstractmethod
    def make_coord(self, coord_dims_func):
        """
        Returns a new :class:`iris.coords.AuxCoord` as defined by this
        factory.
        Args:
        * coord_dims_func:
            A callable which can return the list of dimensions relevant
            to a given coordinate.
            See :meth:`iris.cube.Cube.coord_dims()`.
        """
    @abstractmethod
    def update(self, old_coord, new_coord=None):
        """
        Notifies the factory of a removal/replacement of a dependency.
        Args:
        * old_coord:
            The dependency coordinate to be removed/replaced.
        * new_coord:
            If None, the dependency using old_coord is removed, otherwise
            the dependency is updated to use new_coord.
        """
    def __repr__(self):
        # Render as e.g. <SubclassName(delta='name', sigma=None, ...)>,
        # with dependency keys in sorted order for stable output.
        def arg_text(item):
            key, coord = item
            return '{}={}'.format(key, str(coord and repr(coord.name())))
        items = sorted(self.dependencies.items(), key=lambda item: item[0])
        args = map(arg_text, items)
        return '<{}({})>'.format(type(self).__name__, ', '.join(args))
    def derived_dims(self, coord_dims_func):
        """
        Returns the virtual dim-mapping for the derived coordinate.
        Args:
        * coord_dims_func:
            A callable which can return the list of dimensions relevant
            to a given coordinate.
            See :meth:`iris.cube.Cube.coord_dims()`.
        """
        # Which dimensions are relevant?
        # e.g. If sigma -> [1] and orog -> [2, 3] then result = [1, 2, 3]
        derived_dims = set()
        for coord in self.dependencies.itervalues():
            if coord:
                derived_dims.update(coord_dims_func(coord))
        # Apply a fixed order so we know how to map dependency dims to
        # our own dims (and so the Cube can map them to Cube dims).
        derived_dims = tuple(sorted(derived_dims))
        return derived_dims
    def updated(self, new_coord_mapping):
        """
        Creates a new instance of this factory where the dependencies
        are replaced according to the given mapping.
        Args:
        * new_coord_mapping:
            A dictionary mapping from the object IDs potentially used
            by this factory, to the coordinate objects that should be
            used instead.
        """
        new_dependencies = {}
        for key, coord in self.dependencies.iteritems():
            if coord:
                coord = new_coord_mapping[id(coord)]
            new_dependencies[key] = coord
        return type(self)(**new_dependencies)
    def xml_element(self, doc):
        """
        Returns a DOM element describing this coordinate factory.
        """
        element = doc.createElement('coordFactory')
        for key, coord in self.dependencies.iteritems():
            element.setAttribute(key, coord._xml_id())
        element.appendChild(self.make_coord().xml_element(doc))
        return element
    def _dependency_dims(self, coord_dims_func):
        # Map each present dependency key to its list of cube dimensions.
        dependency_dims = {}
        for key, coord in self.dependencies.iteritems():
            if coord:
                dependency_dims[key] = coord_dims_func(coord)
        return dependency_dims
    def _nd_bounds(self, coord, dims, ndim):
        """
        Returns the coord's bounds in Cube-orientation and
        broadcastable to N dimensions.
        Example:
            coord.shape == (70,)
            coord.nbounds = 2
            dims == [3]
            ndim == 5
        results in:
            nd_bounds.shape == (1, 1, 1, 70, 1, 2)
        """
        # Transpose to be consistent with the Cube.
        sorted_pairs = sorted(enumerate(dims), key=lambda pair: pair[1])
        transpose_order = [pair[0] for pair in sorted_pairs] + [len(dims)]
        bounds = coord.bounds
        if dims:
            bounds = bounds.transpose(transpose_order)
        # Figure out the n-dimensional shape.
        nd_shape = [1] * ndim + [coord.nbounds]
        for dim, size in zip(dims, coord.shape):
            nd_shape[dim] = size
        bounds.shape = tuple(nd_shape)
        return bounds
    @staticmethod
    def _nd_points(coord, dims, ndim):
        """
        Returns the coord's points in Cube-orientation and
        broadcastable to N dimensions.
        Example:
            coord.shape == (4, 3)
            dims == [3, 2]
            ndim == 5
        results in:
            nd_points.shape == (1, 1, 3, 4, 1)
        """
        # Transpose to be consistent with the Cube.
        sorted_pairs = sorted(enumerate(dims), key=lambda pair: pair[1])
        transpose_order = [pair[0] for pair in sorted_pairs]
        points = coord._points
        if dims and transpose_order != list(range(len(dims))):
            points = points.transpose(transpose_order)
        # Expand dimensionality to be consistent with the Cube.
        if dims:
            # Indexing with None inserts broadcast (length-1) axes.
            keys = [None] * ndim
            for dim, size in zip(dims, coord.shape):
                keys[dim] = slice(None)
            points = points[tuple(keys)]
        else:
            # Scalar coordinates have one dimensional points despite
            # mapping to zero dimensions, so we only need to add N-1
            # new dimensions.
            keys = (None,) * (ndim - 1)
            points = points[keys]
        return points
    def _remap(self, dependency_dims, derived_dims):
        # Build {dependency key: broadcastable points array}, restricted
        # to the dimensions spanned by the derived coordinate.
        if derived_dims:
            ndim = max(derived_dims) + 1
        else:
            ndim = 1
        nd_points_by_key = {}
        for key, coord in self.dependencies.iteritems():
            if coord:
                # Get the points as consistent with the Cube.
                nd_points = self._nd_points(coord, dependency_dims[key], ndim)
                # Restrict to just the dimensions relevant to the
                # derived coord. NB. These are always in Cube-order, so
                # no transpose is needed.
                if derived_dims:
                    keys = tuple(slice(None) if dim in derived_dims else 0 for
                                 dim in range(ndim))
                    nd_points = nd_points[keys]
            else:
                # If no coord, treat value as zero.
                # Use a float16 to provide `shape` attribute and avoid
                # promoting other arguments to a higher precision.
                nd_points = np.float16(0)
            nd_points_by_key[key] = nd_points
        return nd_points_by_key
    def _remap_with_bounds(self, dependency_dims, derived_dims):
        # As _remap(), but uses bounds where a dependency has them, and
        # always appends a trailing (possibly length-1) bounds dimension.
        if derived_dims:
            ndim = max(derived_dims) + 1
        else:
            ndim = 1
        nd_values_by_key = {}
        for key, coord in self.dependencies.iteritems():
            if coord:
                # Get the bounds or points as consistent with the Cube.
                if coord.nbounds:
                    nd_values = self._nd_bounds(coord, dependency_dims[key],
                                                ndim)
                else:
                    nd_values = self._nd_points(coord, dependency_dims[key],
                                                ndim)
                # Restrict to just the dimensions relevant to the
                # derived coord. NB. These are always in Cube-order, so
                # no transpose is needed.
                shape = []
                for dim in derived_dims:
                    shape.append(nd_values.shape[dim])
                # Ensure the array always has at least one dimension to be
                # compatible with normal coordinates.
                if not derived_dims:
                    shape.append(1)
                # Add on the N-bounds dimension
                if coord.nbounds:
                    shape.append(nd_values.shape[-1])
                else:
                    # NB. For a non-bounded coordinate we still need an
                    # extra dimension to make the shape compatible, so
                    # we just add an extra 1.
                    shape.append(1)
                nd_values = np.array(nd_values)
                nd_values.shape = shape
            else:
                # If no coord, treat value as zero.
                # Use a float16 to provide `shape` attribute and avoid
                # promoting other arguments to a higher precision.
                nd_values = np.float16(0)
            nd_values_by_key[key] = nd_values
        return nd_values_by_key
    def _shape(self, nd_values_by_key):
        # Determine the broadcast result shape of the dependency arrays,
        # starting from the highest-dimensional one.
        nd_values = sorted(nd_values_by_key.values(),
                           key=lambda value: value.ndim)
        shape = list(nd_values.pop().shape)
        for array in nd_values:
            for i, size in enumerate(array.shape):
                if size > 1:
                    # NB. If there's an inconsistency it can only come
                    # from a mismatch in the number of bounds (the Cube
                    # ensures the other dimensions must match).
                    # But we can't afford to raise an error now - it'd
                    # break Cube.derived_coords. Instead, we let the
                    # error happen when the derived coordinate's bounds
                    # are accessed.
                    shape[i] = size
        return shape
    def _dtype(self, arrays_by_key, **other_args):
        # Determine the result dtype by running the derivation formula on
        # tiny (single-element) stand-ins for each dependency array.
        dummy_args = {}
        for key, array in arrays_by_key.iteritems():
            dummy_args[key] = np.zeros(1, dtype=array.dtype)
        dummy_args.update(other_args)
        dummy_data = self._derive(**dummy_args)
        return dummy_data.dtype
class HybridHeightFactory(AuxCoordFactory):
    """
    Defines a hybrid-height coordinate factory with the formula:

    z = a + b * orog

    """
    def __init__(self, delta=None, sigma=None, orography=None):
        """
        Creates a hybrid-height coordinate factory with the formula:

        z = a + b * orog

        At least one of `delta` or `orography` must be provided.

        Args:

        * delta: Coord
            The coordinate providing the `a` term.
        * sigma: Coord
            The coordinate providing the `b` term.
        * orography: Coord
            The coordinate providing the `orog` term.

        Raises ValueError if the dependencies are insufficient, incompatibly
        bounded, or have unsuitable units.

        """
        super(HybridHeightFactory, self).__init__()
        # Bounded terms must have exactly 2 bounds, or none at all.
        if delta and delta.nbounds not in (0, 2):
            raise ValueError('Invalid delta coordinate: must have either 0 or'
                             ' 2 bounds.')
        if sigma and sigma.nbounds not in (0, 2):
            raise ValueError('Invalid sigma coordinate: must have either 0 or'
                             ' 2 bounds.')
        # Orography bounds carry no meaning in the formula - warn, not fail.
        if orography and orography.nbounds:
            msg = 'Orography coordinate {!r} has bounds.' \
                  ' These will be disregarded.'.format(orography.name())
            warnings.warn(msg, UserWarning, stacklevel=2)
        self.delta = delta
        self.sigma = sigma
        self.orography = orography
        self.standard_name = 'altitude'
        # Units come from delta if present, otherwise orography; the two
        # must agree since they are summed.
        if delta is None and orography is None:
            raise ValueError('Unable to determine units: no delta or orography'
                             ' available.')
        if delta and orography and delta.units != orography.units:
            raise ValueError('Incompatible units: delta and orography must'
                             ' have the same units.')
        self.units = (delta and delta.units) or orography.units
        if not self.units.is_convertible('m'):
            raise ValueError('Invalid units: delta and/or orography'
                             ' must be expressed in length units.')
        self.attributes = {'positive': 'up'}

    @property
    def dependencies(self):
        """
        Returns a dictionary mapping from constructor argument names to
        the corresponding coordinates.

        """
        return {'delta': self.delta, 'sigma': self.sigma,
                'orography': self.orography}

    def _derive(self, delta, sigma, orography):
        # z = a + b * orog
        temp = delta + sigma * orography
        return temp

    def make_coord(self, coord_dims_func):
        """
        Returns a new :class:`iris.coords.AuxCoord` as defined by this
        factory.

        Args:

        * coord_dims_func:
            A callable which can return the list of dimensions relevant
            to a given coordinate.
            See :meth:`iris.cube.Cube.coord_dims()`.

        """
        # Which dimensions are relevant?
        derived_dims = self.derived_dims(coord_dims_func)
        dependency_dims = self._dependency_dims(coord_dims_func)
        # Build a "lazy" points array.
        nd_points_by_key = self._remap(dependency_dims, derived_dims)

        # Define the function here to obtain a closure.
        def calc_points():
            return self._derive(nd_points_by_key['delta'],
                                nd_points_by_key['sigma'],
                                nd_points_by_key['orography'])

        shape = self._shape(nd_points_by_key)
        dtype = self._dtype(nd_points_by_key)
        points = LazyArray(shape, calc_points, dtype)

        bounds = None
        # Bounds only exist if at least one bounded term contributes.
        if ((self.delta and self.delta.nbounds) or
                (self.sigma and self.sigma.nbounds)):
            # Build a "lazy" bounds array.
            nd_values_by_key = self._remap_with_bounds(dependency_dims,
                                                       derived_dims)

            # Define the function here to obtain a closure.
            def calc_bounds():
                delta = nd_values_by_key['delta']
                sigma = nd_values_by_key['sigma']
                orography = nd_values_by_key['orography']
                ok_bound_shapes = [(), (1,), (2,)]
                if delta.shape[-1:] not in ok_bound_shapes:
                    raise ValueError('Invalid delta coordinate bounds.')
                if sigma.shape[-1:] not in ok_bound_shapes:
                    raise ValueError('Invalid sigma coordinate bounds.')
                if orography.shape[-1:] not in [(), (1,)]:
                    warnings.warn('Orography coordinate has bounds. '
                                  'These are being disregarded.',
                                  UserWarning, stacklevel=2)
                    orography_pts = nd_points_by_key['orography']
                    # Add a trailing length-1 "bounds" dimension so the
                    # points broadcast against the bounded delta/sigma.
                    # NB. list.append() returns None, so build the target
                    # shape explicitly rather than passing append()'s
                    # result straight to reshape().
                    bds_shape = list(orography_pts.shape) + [1]
                    orography = orography_pts.reshape(bds_shape)
                return self._derive(delta, sigma, orography)

            b_shape = self._shape(nd_values_by_key)
            b_dtype = self._dtype(nd_values_by_key)
            bounds = LazyArray(b_shape, calc_bounds, b_dtype)

        hybrid_height = iris.coords.AuxCoord(points,
                                             standard_name=self.standard_name,
                                             long_name=self.long_name,
                                             var_name=self.var_name,
                                             units=self.units,
                                             bounds=bounds,
                                             attributes=self.attributes,
                                             coord_system=self.coord_system)
        return hybrid_height

    def update(self, old_coord, new_coord=None):
        """
        Notifies the factory of the removal/replacement of a coordinate
        which might be a dependency.

        Args:

        * old_coord:
            The coordinate to be removed/replaced.
        * new_coord:
            If None, any dependency using old_coord is removed, otherwise
            any dependency using old_coord is updated to use new_coord.

        """
        if self.delta is old_coord:
            if new_coord and new_coord.nbounds not in (0, 2):
                raise ValueError('Invalid delta coordinate:'
                                 ' must have either 0 or 2 bounds.')
            self.delta = new_coord
        elif self.sigma is old_coord:
            if new_coord and new_coord.nbounds not in (0, 2):
                raise ValueError('Invalid sigma coordinate:'
                                 ' must have either 0 or 2 bounds.')
            self.sigma = new_coord
        elif self.orography is old_coord:
            if new_coord and new_coord.nbounds:
                msg = 'Orography coordinate {!r} has bounds.' \
                      ' These will be disregarded.'.format(new_coord.name())
                warnings.warn(msg, UserWarning, stacklevel=2)
            self.orography = new_coord
class HybridPressureFactory(AuxCoordFactory):
    """
    Defines a hybrid-pressure coordinate factory with the formula:

    p = ap + b * ps

    """
    def __init__(self, delta=None, sigma=None, surface_air_pressure=None):
        """
        Creates a hybrid-pressure coordinate factory with the formula:

        p = ap + b * ps

        At least one of `delta` or `surface_air_pressure` must be provided.

        Args:

        * delta: Coord
            The coordinate providing the `ap` term.
        * sigma: Coord
            The coordinate providing the `b` term.
        * surface_air_pressure: Coord
            The coordinate providing the `ps` term.

        """
        super(HybridPressureFactory, self).__init__()
        # Check that provided coords meet necessary conditions.
        self._check_dependencies(delta, sigma, surface_air_pressure)
        self.delta = delta
        self.sigma = sigma
        self.surface_air_pressure = surface_air_pressure
        self.standard_name = 'air_pressure'
        self.attributes = {}

    @property
    def units(self):
        # Units follow delta when present, otherwise surface pressure;
        # _check_dependencies guarantees at least one of them exists.
        if self.delta is not None:
            units = self.delta.units
        else:
            units = self.surface_air_pressure.units
        return units

    @staticmethod
    def _check_dependencies(delta, sigma,
                            surface_air_pressure):
        # Validate that a usable set of source coordinates was supplied,
        # raising ValueError (or warning) otherwise.
        # Check for sufficient coordinates.
        if (delta is None and (sigma is None or
                               surface_air_pressure is None)):
            msg = 'Unable to construct hybrid pressure coordinate factory ' \
                  'due to insufficient source coordinates.'
            raise ValueError(msg)
        # Check bounds.
        if delta and delta.nbounds not in (0, 2):
            raise ValueError('Invalid delta coordinate: must have either 0 or'
                             ' 2 bounds.')
        if sigma and sigma.nbounds not in (0, 2):
            raise ValueError('Invalid sigma coordinate: must have either 0 or'
                             ' 2 bounds.')
        if surface_air_pressure and surface_air_pressure.nbounds:
            msg = 'Surface pressure coordinate {!r} has bounds. These will' \
                  ' be disregarded.'.format(surface_air_pressure.name())
            warnings.warn(msg, UserWarning, stacklevel=2)
        # Check units.
        if sigma is not None and not sigma.units.is_dimensionless():
            raise ValueError('Invalid units: sigma must be dimensionless.')
        if delta is not None and surface_air_pressure is not None and \
                delta.units != surface_air_pressure.units:
            msg = 'Incompatible units: delta and ' \
                  'surface_air_pressure must have the same units.'
            raise ValueError(msg)
        if delta is not None:
            units = delta.units
        else:
            units = surface_air_pressure.units
        if not units.is_convertible('Pa'):
            msg = 'Invalid units: delta and ' \
                  'surface_air_pressure must have units of pressure.'
            raise ValueError(msg)

    @property
    def dependencies(self):
        """
        Returns a dictionary mapping from constructor argument names to
        the corresponding coordinates.

        """
        return {'delta': self.delta, 'sigma': self.sigma,
                'surface_air_pressure': self.surface_air_pressure}

    def _derive(self, delta, sigma, surface_air_pressure):
        # p = ap + b * ps
        temp = delta + sigma * surface_air_pressure
        return temp

    def make_coord(self, coord_dims_func):
        """
        Returns a new :class:`iris.coords.AuxCoord` as defined by this
        factory.

        Args:

        * coord_dims_func:
            A callable which can return the list of dimensions relevant
            to a given coordinate.
            See :meth:`iris.cube.Cube.coord_dims()`.

        """
        # Which dimensions are relevant?
        derived_dims = self.derived_dims(coord_dims_func)
        dependency_dims = self._dependency_dims(coord_dims_func)
        # Build a "lazy" points array.
        nd_points_by_key = self._remap(dependency_dims, derived_dims)

        # Define the function here to obtain a closure.
        def calc_points():
            return self._derive(nd_points_by_key['delta'],
                                nd_points_by_key['sigma'],
                                nd_points_by_key['surface_air_pressure'])

        shape = self._shape(nd_points_by_key)
        dtype = self._dtype(nd_points_by_key)
        points = LazyArray(shape, calc_points, dtype)

        bounds = None
        # Bounds only exist if at least one bounded term contributes.
        if ((self.delta and self.delta.nbounds) or
                (self.sigma and self.sigma.nbounds)):
            # Build a "lazy" bounds array.
            nd_values_by_key = self._remap_with_bounds(dependency_dims,
                                                       derived_dims)

            # Define the function here to obtain a closure.
            def calc_bounds():
                delta = nd_values_by_key['delta']
                sigma = nd_values_by_key['sigma']
                surface_air_pressure = nd_values_by_key['surface_air_pressure']
                ok_bound_shapes = [(), (1,), (2,)]
                if delta.shape[-1:] not in ok_bound_shapes:
                    raise ValueError('Invalid delta coordinate bounds.')
                if sigma.shape[-1:] not in ok_bound_shapes:
                    raise ValueError('Invalid sigma coordinate bounds.')
                if surface_air_pressure.shape[-1:] not in [(), (1,)]:
                    warnings.warn('Surface pressure coordinate has bounds. '
                                  'These are being disregarded.')
                    surface_air_pressure_pts = nd_points_by_key[
                        'surface_air_pressure']
                    # Add a trailing length-1 "bounds" dimension so the
                    # points broadcast against the bounded delta/sigma.
                    # NB. list.append() returns None, so build the target
                    # shape explicitly rather than passing append()'s
                    # result straight to reshape().
                    bds_shape = list(surface_air_pressure_pts.shape) + [1]
                    surface_air_pressure = surface_air_pressure_pts.reshape(
                        bds_shape)
                return self._derive(delta, sigma, surface_air_pressure)

            b_shape = self._shape(nd_values_by_key)
            b_dtype = self._dtype(nd_values_by_key)
            bounds = LazyArray(b_shape, calc_bounds, b_dtype)

        hybrid_pressure = iris.coords.AuxCoord(
            points, standard_name=self.standard_name, long_name=self.long_name,
            var_name=self.var_name, units=self.units, bounds=bounds,
            attributes=self.attributes, coord_system=self.coord_system)
        return hybrid_pressure

    def update(self, old_coord, new_coord=None):
        """
        Notifies the factory of the removal/replacement of a coordinate
        which might be a dependency.

        Args:

        * old_coord:
            The coordinate to be removed/replaced.
        * new_coord:
            If None, any dependency using old_coord is removed, otherwise
            any dependency using old_coord is updated to use new_coord.

        """
        # 'dependencies' returns a fresh dict each call, so mutating this
        # copy does not touch the factory until setattr() below.
        new_dependencies = self.dependencies
        for name, coord in self.dependencies.items():
            if old_coord is coord:
                new_dependencies[name] = new_coord
                try:
                    self._check_dependencies(**new_dependencies)
                except ValueError as e:
                    # NB. 'e.message' does not exist on Python 3; str(e)
                    # is equivalent and portable.
                    msg = 'Failed to update dependencies. ' + str(e)
                    raise ValueError(msg)
                else:
                    setattr(self, name, new_coord)
                break
class OceanSigmaZFactory(AuxCoordFactory):
    """Defines an ocean sigma over z coordinate factory."""

    def __init__(self, sigma=None, eta=None, depth=None,
                 depth_c=None, nsigma=None, zlev=None):
        """
        Creates a ocean sigma over z coordinate factory with the formula:

        if k < nsigma:
            z(n, k, j, i) = eta(n, j, i) + sigma(k) *
                            (min(depth_c, depth(j, i)) + eta(n, j, i))

        if k >= nsigma:
            z(n, k, j, i) = zlev(k)

        The `zlev` and 'nsigma' coordinates must be provided, and at least
        either `eta`, or 'sigma' and `depth` and `depth_c` coordinates.

        """
        super(OceanSigmaZFactory, self).__init__()
        # Check that provided coordinates meet necessary conditions.
        self._check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev)
        self.sigma = sigma
        self.eta = eta
        self.depth = depth
        self.depth_c = depth_c
        self.nsigma = nsigma
        self.zlev = zlev
        self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
        self.attributes = {'positive': 'up'}

    @property
    def units(self):
        # The result is in the units of the zlev levels.
        return self.zlev.units

    @staticmethod
    def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev):
        # Validate that a usable set of source coordinates was supplied,
        # raising ValueError (or warning) otherwise.
        # Check for sufficient factory coordinates.
        if zlev is None:
            raise ValueError('Unable to determine units: '
                             'no zlev coordinate available.')
        if nsigma is None:
            raise ValueError('Missing nsigma coordinate.')
        if eta is None and (sigma is None or depth_c is None or
                            depth is None):
            msg = 'Unable to construct ocean sigma over z coordinate ' \
                  'factory due to insufficient source coordinates.'
            raise ValueError(msg)
        # Check bounds and shape.
        for coord, term in ((sigma, 'sigma'), (zlev, 'zlev')):
            if coord is not None and coord.nbounds not in (0, 2):
                msg = 'Invalid {} coordinate {!r}: must have either ' \
                      '0 or 2 bounds.'.format(term, coord.name())
                raise ValueError(msg)
        if sigma and sigma.nbounds != zlev.nbounds:
            msg = 'The sigma coordinate {!r} and zlev coordinate {!r} ' \
                  'must be equally bounded.'.format(sigma.name(), zlev.name())
            raise ValueError(msg)
        coords = ((eta, 'eta'), (depth, 'depth'),
                  (depth_c, 'depth_c'), (nsigma, 'nsigma'))
        for coord, term in coords:
            if coord is not None and coord.nbounds:
                msg = 'The {} coordinate {!r} has bounds. ' \
                      'These are being disregarded.'.format(term, coord.name())
                warnings.warn(msg, UserWarning, stacklevel=2)
        for coord, term in ((depth_c, 'depth_c'), (nsigma, 'nsigma')):
            if coord is not None and coord.shape != (1,):
                msg = 'Expected scalar {} coordinate {!r}: ' \
                      'got shape {!r}.'.format(term, coord.name(), coord.shape)
                raise ValueError(msg)
        # Check units.
        if not zlev.units.is_convertible('m'):
            msg = 'Invalid units: zlev coordinate {!r} ' \
                  'must have units of distance.'.format(zlev.name())
            raise ValueError(msg)
        if sigma is not None and not sigma.units.is_dimensionless():
            msg = 'Invalid units: sigma coordinate {!r} ' \
                  'must be dimensionless.'.format(sigma.name())
            raise ValueError(msg)
        coords = ((eta, 'eta'), (depth_c, 'depth_c'), (depth, 'depth'))
        for coord, term in coords:
            if coord is not None and coord.units != zlev.units:
                msg = 'Incompatible units: {} coordinate {!r} and zlev ' \
                      'coordinate {!r} must have ' \
                      'the same units.'.format(term, coord.name(), zlev.name())
                raise ValueError(msg)

    @property
    def dependencies(self):
        """
        Returns a dictionary mapping from constructor argument names to
        the corresponding coordinates.

        """
        return dict(sigma=self.sigma, eta=self.eta, depth=self.depth,
                    depth_c=self.depth_c, nsigma=self.nsigma, zlev=self.zlev)

    def _derive(self, sigma, eta, depth, depth_c,
                nsigma, zlev, shape, nsigma_slice):
        # Perform the ocean sigma over z coordinate nsigma slice.
        if eta.ndim:
            eta = eta[nsigma_slice]
        if sigma.ndim:
            sigma = sigma[nsigma_slice]
        if depth.ndim:
            depth = depth[nsigma_slice]
        # Note that, this performs a point-wise minimum.
        temp = eta + sigma * (np.minimum(depth_c, depth) + eta)
        # Calculate the final derived result: zlev everywhere, overwritten
        # by the sigma formula within the nsigma slice.
        result = np.ones(shape, dtype=temp.dtype) * zlev
        result[nsigma_slice] = temp
        return result

    def make_coord(self, coord_dims_func):
        """
        Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.

        Args:

        * coord_dims_func:
            A callable which can return the list of dimesions relevant
            to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.

        """
        # Determine the relevant dimensions.
        derived_dims = self.derived_dims(coord_dims_func)
        dependency_dims = self._dependency_dims(coord_dims_func)
        # Build a "lazy" points array.
        nd_points_by_key = self._remap(dependency_dims, derived_dims)
        points_shape = self._shape(nd_points_by_key)
        points_dtype = self._dtype(nd_points_by_key, shape=(), nsigma_slice=())
        # Calculate the nsigma slice: full extent on every derived
        # dimension except the (single) zlev dimension, which is limited
        # to the first 'nsigma' levels.
        nsigma_slice = [slice(None)] * len(derived_dims)
        dim, = dependency_dims['zlev']
        index = derived_dims.index(dim)
        nsigma_slice[index] = slice(0, int(nd_points_by_key['nsigma']))
        nsigma_slice = tuple(nsigma_slice)

        # Define the function here to obtain a closure.
        def calc_points():
            return self._derive(nd_points_by_key['sigma'],
                                nd_points_by_key['eta'],
                                nd_points_by_key['depth'],
                                nd_points_by_key['depth_c'],
                                nd_points_by_key['nsigma'],
                                nd_points_by_key['zlev'],
                                points_shape,
                                nsigma_slice)

        points = LazyArray(points_shape, calc_points, points_dtype)

        bounds = None
        if self.zlev.nbounds or (self.sigma and self.sigma.nbounds):
            # Build a "lazy" bounds array.
            nd_values_by_key = self._remap_with_bounds(dependency_dims,
                                                       derived_dims)
            bounds_shape = self._shape(nd_values_by_key)
            bounds_dtype = self._dtype(nd_values_by_key, shape=(),
                                       nsigma_slice=())
            nsigma_slice_bounds = nsigma_slice + (slice(None),)

            # Define the function here to obtain a closure.
            def calc_bounds():
                valid_shapes = [(), (1,), (2,)]
                for key in ('sigma', 'zlev'):
                    if nd_values_by_key[key].shape[-1:] not in valid_shapes:
                        name = self.dependencies[key].name()
                        msg = 'Invalid bounds for {} ' \
                              'coordinate {!r}.'.format(key, name)
                        raise ValueError(msg)
                # The remaining terms must not be (meaningfully) bounded.
                valid_shapes.pop()
                for key in ('eta', 'depth', 'depth_c', 'nsigma'):
                    if nd_values_by_key[key].shape[-1:] not in valid_shapes:
                        name = self.dependencies[key].name()
                        msg = 'The {} coordinate {!r} has bounds. ' \
                              'These are being disregarded.'.format(key, name)
                        warnings.warn(msg, UserWarning, stacklevel=2)
                        # Swap bounds with points, adding a trailing
                        # length-1 "bounds" dimension for broadcasting.
                        # NB. list.append() returns None, so build the
                        # target shape explicitly rather than passing
                        # append()'s result straight to reshape().
                        bds_shape = list(nd_points_by_key[key].shape) + [1]
                        bounds = nd_points_by_key[key].reshape(bds_shape)
                        nd_values_by_key[key] = bounds
                return self._derive(nd_values_by_key['sigma'],
                                    nd_values_by_key['eta'],
                                    nd_values_by_key['depth'],
                                    nd_values_by_key['depth_c'],
                                    nd_values_by_key['nsigma'],
                                    nd_values_by_key['zlev'],
                                    bounds_shape,
                                    nsigma_slice_bounds)

            bounds = LazyArray(bounds_shape, calc_bounds, bounds_dtype)

        coord = iris.coords.AuxCoord(points,
                                     standard_name=self.standard_name,
                                     long_name=self.long_name,
                                     var_name=self.var_name,
                                     units=self.units,
                                     bounds=bounds,
                                     attributes=self.attributes,
                                     coord_system=self.coord_system)
        return coord

    def update(self, old_coord, new_coord=None):
        """
        Notifies the factory of the removal/replacement of a coordinate
        which might be a dependency.

        Args:

        * old_coord:
            The coordinate to be removed/replaced.
        * new_coord:
            If None, any dependency using old_coord is removed, otherwise
            any dependency using old_coord is updated to use new_coord.

        """
        # 'dependencies' returns a fresh dict each call, so mutating this
        # copy does not touch the factory until setattr() below.
        new_dependencies = self.dependencies
        for name, coord in self.dependencies.items():
            if old_coord is coord:
                new_dependencies[name] = new_coord
                try:
                    self._check_dependencies(**new_dependencies)
                except ValueError as e:
                    # NB. 'e.message' does not exist on Python 3; str(e)
                    # is equivalent and portable.
                    msg = 'Failed to update dependencies. ' + str(e)
                    raise ValueError(msg)
                else:
                    setattr(self, name, new_coord)
                break
class OceanSigmaFactory(AuxCoordFactory):
    """Defines an ocean sigma coordinate factory."""

    def __init__(self, sigma=None, eta=None, depth=None):
        """
        Creates an ocean sigma coordinate factory with the formula:

        z(n, k, j, i) = eta(n, j, i) + sigma(k) *
                        (depth(j, i) + eta(n, j, i))

        """
        super(OceanSigmaFactory, self).__init__()
        # Check that provided coordinates meet necessary conditions.
        self._check_dependencies(sigma, eta, depth)
        self.sigma = sigma
        self.eta = eta
        self.depth = depth
        self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
        self.attributes = {'positive': 'up'}

    @property
    def units(self):
        # The result is in the units of the depth coordinate.
        return self.depth.units

    @staticmethod
    def _check_dependencies(sigma, eta, depth):
        # Validate that a usable set of source coordinates was supplied,
        # raising ValueError (or warning) otherwise.
        # Check for sufficient factory coordinates.
        if eta is None or sigma is None or depth is None:
            msg = 'Unable to construct ocean sigma coordinate ' \
                  'factory due to insufficient source coordinates.'
            raise ValueError(msg)
        # Check bounds and shape.
        coord, term = (sigma, 'sigma')
        if coord is not None and coord.nbounds not in (0, 2):
            msg = 'Invalid {} coordinate {!r}: must have either ' \
                  '0 or 2 bounds.'.format(term, coord.name())
            raise ValueError(msg)
        coords = ((eta, 'eta'), (depth, 'depth'))
        for coord, term in coords:
            if coord is not None and coord.nbounds:
                msg = 'The {} coordinate {!r} has bounds. ' \
                      'These are being disregarded.'.format(term, coord.name())
                warnings.warn(msg, UserWarning, stacklevel=2)
        # Check units.
        if sigma is not None and not sigma.units.is_dimensionless():
            msg = 'Invalid units: sigma coordinate {!r} ' \
                  'must be dimensionless.'.format(sigma.name())
            raise ValueError(msg)
        coords = ((eta, 'eta'), (depth, 'depth'))
        for coord, term in coords:
            if coord is not None and coord.units != depth.units:
                msg = 'Incompatible units: {} coordinate {!r} and depth ' \
                      'coordinate {!r} must have ' \
                      'the same units.'.format(term, coord.name(), depth.name())
                raise ValueError(msg)

    @property
    def dependencies(self):
        """
        Returns a dictionary mapping from constructor argument names to
        the corresponding coordinates.

        """
        return dict(sigma=self.sigma, eta=self.eta, depth=self.depth)

    def _derive(self, sigma, eta, depth):
        # z = eta + sigma * (depth + eta)
        result = eta + sigma * (depth + eta)
        return result

    def make_coord(self, coord_dims_func):
        """
        Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.

        Args:

        * coord_dims_func:
            A callable which can return the list of dimensions relevant
            to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.

        """
        # Determine the relevant dimensions.
        derived_dims = self.derived_dims(coord_dims_func)
        dependency_dims = self._dependency_dims(coord_dims_func)
        # Build a "lazy" points array.
        nd_points_by_key = self._remap(dependency_dims, derived_dims)
        points_shape = self._shape(nd_points_by_key)

        # Define the function here to obtain a closure.
        def calc_points():
            return self._derive(nd_points_by_key['sigma'],
                                nd_points_by_key['eta'],
                                nd_points_by_key['depth'])

        points = LazyArray(points_shape, calc_points)

        bounds = None
        if self.sigma and self.sigma.nbounds:
            # Build a "lazy" bounds array.
            nd_values_by_key = self._remap_with_bounds(dependency_dims,
                                                       derived_dims)
            bounds_shape = self._shape(nd_values_by_key)

            # Define the function here to obtain a closure.
            def calc_bounds():
                valid_shapes = [(), (1,), (2,)]
                key = 'sigma'
                if nd_values_by_key[key].shape[-1:] not in valid_shapes:
                    name = self.dependencies[key].name()
                    msg = 'Invalid bounds for {} ' \
                          'coordinate {!r}.'.format(key, name)
                    raise ValueError(msg)
                # The remaining terms must not be (meaningfully) bounded.
                valid_shapes.pop()
                for key in ('eta', 'depth'):
                    if nd_values_by_key[key].shape[-1:] not in valid_shapes:
                        name = self.dependencies[key].name()
                        msg = 'The {} coordinate {!r} has bounds. ' \
                              'These are being disregarded.'.format(key, name)
                        warnings.warn(msg, UserWarning, stacklevel=2)
                        # Swap bounds with points, adding a trailing
                        # length-1 "bounds" dimension for broadcasting.
                        # NB. list.append() returns None, so build the
                        # target shape explicitly rather than passing
                        # append()'s result straight to reshape().
                        bds_shape = list(nd_points_by_key[key].shape) + [1]
                        bounds = nd_points_by_key[key].reshape(bds_shape)
                        nd_values_by_key[key] = bounds
                # NB. _derive() takes exactly (sigma, eta, depth); do not
                # pass the bounds shape as an extra argument.
                return self._derive(nd_values_by_key['sigma'],
                                    nd_values_by_key['eta'],
                                    nd_values_by_key['depth'])

            bounds = LazyArray(bounds_shape, calc_bounds)

        coord = iris.coords.AuxCoord(points,
                                     standard_name=self.standard_name,
                                     long_name=self.long_name,
                                     var_name=self.var_name,
                                     units=self.units,
                                     bounds=bounds,
                                     attributes=self.attributes,
                                     coord_system=self.coord_system)
        return coord

    def update(self, old_coord, new_coord=None):
        """
        Notifies the factory of the removal/replacement of a coordinate
        which might be a dependency.

        Args:

        * old_coord:
            The coordinate to be removed/replaced.
        * new_coord:
            If None, any dependency using old_coord is removed, otherwise
            any dependency using old_coord is updated to use new_coord.

        """
        # 'dependencies' returns a fresh dict each call, so mutating this
        # copy does not touch the factory until setattr() below.
        new_dependencies = self.dependencies
        for name, coord in self.dependencies.items():
            if old_coord is coord:
                new_dependencies[name] = new_coord
                try:
                    self._check_dependencies(**new_dependencies)
                except ValueError as e:
                    # NB. 'e.message' does not exist on Python 3; str(e)
                    # is equivalent and portable.
                    msg = 'Failed to update dependencies. ' + str(e)
                    raise ValueError(msg)
                else:
                    setattr(self, name, new_coord)
                break
class OceanSg1Factory(AuxCoordFactory):
    """Defines an Ocean s-coordinate, generic form 1 factory."""

    def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None):
        """
        Creates an Ocean s-coordinate, generic form 1 factory with the formula:

        z(n,k,j,i) = S(k,j,i) + eta(n,j,i) * (1 + S(k,j,i) / depth(j,i))

        where:
            S(k,j,i) = depth_c * s(k) + (depth(j,i) - depth_c) * C(k)

        """
        super(OceanSg1Factory, self).__init__()
        # Check that provided coordinates meet necessary conditions.
        self._check_dependencies(s, c, eta, depth, depth_c)
        self.s = s
        self.c = c
        self.eta = eta
        self.depth = depth
        self.depth_c = depth_c
        self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
        self.attributes = {'positive': 'up'}

    @property
    def units(self):
        # The result is in the units of the depth coordinate.
        return self.depth.units

    @staticmethod
    def _check_dependencies(s, c, eta, depth, depth_c):
        # Validate that a usable set of source coordinates was supplied,
        # raising ValueError (or warning) otherwise.
        # Check for sufficient factory coordinates.
        if (eta is None or s is None or c is None or
                depth is None or depth_c is None):
            msg = 'Unable to construct Ocean s-coordinate, generic form 1 ' \
                  'factory due to insufficient source coordinates.'
            raise ValueError(msg)
        # Check bounds and shape.
        coords = ((s, 's'), (c, 'c'))
        for coord, term in coords:
            if coord is not None and coord.nbounds not in (0, 2):
                msg = 'Invalid {} coordinate {!r}: must have either ' \
                      '0 or 2 bounds.'.format(term, coord.name())
                raise ValueError(msg)
        if s and s.nbounds != c.nbounds:
            msg = 'The s coordinate {!r} and c coordinate {!r} ' \
                  'must be equally bounded.'.format(s.name(), c.name())
            raise ValueError(msg)
        coords = ((eta, 'eta'), (depth, 'depth'))
        for coord, term in coords:
            if coord is not None and coord.nbounds:
                msg = 'The {} coordinate {!r} has bounds. ' \
                      'These are being disregarded.'.format(term, coord.name())
                warnings.warn(msg, UserWarning, stacklevel=2)
        if depth_c is not None and depth_c.shape != (1,):
            # NB. report depth_c itself - previously this message
            # interpolated the stale loop variables from the preceding
            # 'for coord, term' loop.
            msg = 'Expected scalar depth_c coordinate {!r}: ' \
                  'got shape {!r}.'.format(depth_c.name(), depth_c.shape)
            raise ValueError(msg)
        # Check units.
        coords = ((s, 's'), (c, 'c'))
        for coord, term in coords:
            if coord is not None and not coord.units.is_dimensionless():
                msg = 'Invalid units: {} coordinate {!r} ' \
                      'must be dimensionless.'.format(term, coord.name())
                raise ValueError(msg)
        coords = ((eta, 'eta'), (depth, 'depth'), (depth_c, 'depth_c'))
        for coord, term in coords:
            if coord is not None and coord.units != depth.units:
                msg = 'Incompatible units: {} coordinate {!r} and depth ' \
                      'coordinate {!r} must have ' \
                      'the same units.'.format(term, coord.name(), depth.name())
                raise ValueError(msg)

    @property
    def dependencies(self):
        """
        Returns a dictionary mapping from constructor argument names to
        the corresponding coordinates.

        """
        return dict(s=self.s, c=self.c, eta=self.eta, depth=self.depth,
                    depth_c=self.depth_c)

    def _derive(self, s, c, eta, depth, depth_c):
        # S(k,j,i) = depth_c * s(k) + (depth(j,i) - depth_c) * C(k)
        S = depth_c * s + (depth - depth_c) * c
        result = S + eta * (1 + S / depth)
        return result

    def make_coord(self, coord_dims_func):
        """
        Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.

        Args:

        * coord_dims_func:
            A callable which can return the list of dimensions relevant
            to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.

        """
        # Determine the relevant dimensions.
        derived_dims = self.derived_dims(coord_dims_func)
        dependency_dims = self._dependency_dims(coord_dims_func)
        # Build a "lazy" points array.
        nd_points_by_key = self._remap(dependency_dims, derived_dims)
        points_shape = self._shape(nd_points_by_key)

        # Define the function here to obtain a closure.
        def calc_points():
            return self._derive(nd_points_by_key['s'],
                                nd_points_by_key['c'],
                                nd_points_by_key['eta'],
                                nd_points_by_key['depth'],
                                nd_points_by_key['depth_c'])

        points = LazyArray(points_shape, calc_points)

        bounds = None
        if self.s.nbounds or (self.c and self.c.nbounds):
            # Build a "lazy" bounds array.
            nd_values_by_key = self._remap_with_bounds(dependency_dims,
                                                       derived_dims)
            bounds_shape = self._shape(nd_values_by_key)

            # Define the function here to obtain a closure.
            def calc_bounds():
                valid_shapes = [(), (1,), (2,)]
                key = 's'
                if nd_values_by_key[key].shape[-1:] not in valid_shapes:
                    name = self.dependencies[key].name()
                    msg = 'Invalid bounds for {} ' \
                          'coordinate {!r}.'.format(key, name)
                    raise ValueError(msg)
                # The remaining terms must not be (meaningfully) bounded.
                valid_shapes.pop()
                for key in ('eta', 'depth', 'depth_c'):
                    if nd_values_by_key[key].shape[-1:] not in valid_shapes:
                        name = self.dependencies[key].name()
                        msg = 'The {} coordinate {!r} has bounds. ' \
                              'These are being disregarded.'.format(key, name)
                        warnings.warn(msg, UserWarning, stacklevel=2)
                        # Swap bounds with points, adding a trailing
                        # length-1 "bounds" dimension for broadcasting.
                        # NB. list.append() returns None, so build the
                        # target shape explicitly rather than passing
                        # append()'s result straight to reshape().
                        bds_shape = list(nd_points_by_key[key].shape) + [1]
                        bounds = nd_points_by_key[key].reshape(bds_shape)
                        nd_values_by_key[key] = bounds
                # NB. _derive() takes exactly (s, c, eta, depth, depth_c);
                # do not pass the bounds shape as an extra argument.
                return self._derive(nd_values_by_key['s'],
                                    nd_values_by_key['c'],
                                    nd_values_by_key['eta'],
                                    nd_values_by_key['depth'],
                                    nd_values_by_key['depth_c'])

            bounds = LazyArray(bounds_shape, calc_bounds)

        coord = iris.coords.AuxCoord(points,
                                     standard_name=self.standard_name,
                                     long_name=self.long_name,
                                     var_name=self.var_name,
                                     units=self.units,
                                     bounds=bounds,
                                     attributes=self.attributes,
                                     coord_system=self.coord_system)
        return coord

    def update(self, old_coord, new_coord=None):
        """
        Notifies the factory of the removal/replacement of a coordinate
        which might be a dependency.

        Args:

        * old_coord:
            The coordinate to be removed/replaced.
        * new_coord:
            If None, any dependency using old_coord is removed, otherwise
            any dependency using old_coord is updated to use new_coord.

        """
        # 'dependencies' returns a fresh dict each call, so mutating this
        # copy does not touch the factory until setattr() below.
        new_dependencies = self.dependencies
        for name, coord in self.dependencies.items():
            if old_coord is coord:
                new_dependencies[name] = new_coord
                try:
                    self._check_dependencies(**new_dependencies)
                except ValueError as e:
                    # NB. 'e.message' does not exist on Python 3; str(e)
                    # is equivalent and portable.
                    msg = 'Failed to update dependencies. ' + str(e)
                    raise ValueError(msg)
                else:
                    setattr(self, name, new_coord)
                break
class OceanSFactory(AuxCoordFactory):
"""Defines an Ocean s-coordinate factory."""
def __init__(self, s=None, eta=None, depth=None, a=None, b=None,
depth_c=None):
"""
Creates an Ocean s-coordinate factory with the formula:
z(n,k,j,i) = eta(n,j,i)*(1+s(k)) + depth_c*s(k) +
(depth(j,i)-depth_c)*C(k)
where:
C(k) = (1-b) * sinh(a*s(k)) / sinh(a) +
b * [tanh(a * (s(k) + 0.5)) / (2 * tanh(0.5*a)) - 0.5]
"""
super(OceanSFactory, self).__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(s, eta, depth, a, b, depth_c)
self.s = s
self.eta = eta
self.depth = depth
self.a = a
self.b = b
self.depth_c = depth_c
self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
self.attributes = {'positive': 'up'}
@property
def units(self):
return self.depth.units
@staticmethod
def _check_dependencies(s, eta, depth, a, b, depth_c):
# Check for sufficient factory coordinates.
if (eta is None or s is None or depth is None or
a is None or b is None or depth_c is None):
msg = 'Unable to construct Ocean s-coordinate ' \
'factory due to insufficient source coordinates.'
raise ValueError(msg)
# Check bounds and shape.
if s is not None and s.nbounds not in (0, 2):
msg = 'Invalid s coordinate {!r}: must have either ' \
'0 or 2 bounds.'.format(s.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'))
for coord, term in coords:
if coord is not None and coord.nbounds:
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(term, coord.name())
warnings.warn(msg, UserWarning, stacklevel=2)
coords = ((a, 'a'), (b, 'b'), (depth_c, 'depth_c'))
for coord, term in coords:
if coord is not None and coord.shape != (1,):
msg = 'Expected scalar {} coordinate {!r}: ' \
'got shape {!r}.'.format(term, coord.name(), coord.shape)
raise ValueError(msg)
# Check units.
if s is not None and not s.units.is_dimensionless():
msg = 'Invalid units: s coordinate {!r} ' \
'must be dimensionless.'.format(s.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'), (depth_c, 'depth_c'))
for coord, term in coords:
if coord is not None and coord.units != depth.units:
msg = 'Incompatible units: {} coordinate {!r} and depth ' \
'coordinate {!r} must have ' \
'the same units.'.format(term, coord.name(), depth.name())
raise ValueError(msg)
@property
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
return dict(s=self.s, eta=self.eta, depth=self.depth, a=self.a,
b=self.b, depth_c=self.depth_c)
def _derive(self, s, eta, depth, a, b, depth_c):
c = ((1 - b) * np.sinh(a * s) / np.sinh(a) + b *
(np.tanh(a * (s + 0.5)) / (2 * np.tanh(0.5 * a)) - 0.5))
result = eta * (1 + s) + depth_c * s + (depth - depth_c) * c
return result
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Determine the relevant dimensions.
derived_dims = self.derived_dims(coord_dims_func)
dependency_dims = self._dependency_dims(coord_dims_func)
# Build a "lazy" points array.
nd_points_by_key = self._remap(dependency_dims, derived_dims)
points_shape = self._shape(nd_points_by_key)
# Define the function here to obtain a closure.
def calc_points():
return self._derive(nd_points_by_key['s'],
nd_points_by_key['eta'],
nd_points_by_key['depth'],
nd_points_by_key['a'],
nd_points_by_key['b'],
nd_points_by_key['depth_c'])
points = LazyArray(points_shape, calc_points)
bounds = None
if self.s.nbounds:
# Build a "lazy" bounds array.
nd_values_by_key = self._remap_with_bounds(dependency_dims,
derived_dims)
bounds_shape = self._shape(nd_values_by_key)
# Define the function here to obtain a closure.
def calc_bounds():
valid_shapes = [(), (1,), (2,)]
key = 's'
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'Invalid bounds for {} ' \
'coordinate {!r}.'.format(key, name)
raise ValueError(msg)
valid_shapes.pop()
for key in ('eta', 'depth', 'a', 'b', 'depth_c'):
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(key, name)
warnings.warn(msg, UserWarning, stacklevel=2)
# Swap bounds with points.
shape = list(nd_points_by_key[key].shape)
bounds = nd_points_by_key[key].reshape(shape.append(1))
nd_values_by_key[key] = bounds
return self._derive(nd_values_by_key['s'],
nd_values_by_key['eta'],
nd_values_by_key['depth'],
nd_values_by_key['a'],
nd_values_by_key['b'],
nd_values_by_key['depth_c'],
bounds_shape)
bounds = LazyArray(bounds_shape, calc_bounds)
coord = iris.coords.AuxCoord(points,
standard_name=self.standard_name,
long_name=self.long_name,
var_name=self.var_name,
units=self.units,
bounds=bounds,
attributes=self.attributes,
coord_system=self.coord_system)
return coord
def update(self, old_coord, new_coord=None):
    """
    Notifies the factory of the removal/replacement of a coordinate
    which might be a dependency.

    Args:

    * old_coord:
        The coordinate to be removed/replaced.
    * new_coord:
        If None, any dependency using old_coord is removed, otherwise
        any dependency using old_coord is updated to use new_coord.

    """
    # `self.dependencies` builds a fresh dict on each access, so mutating
    # the copy does not change factory state until setattr() below.
    new_dependencies = self.dependencies
    for name, coord in self.dependencies.items():
        if old_coord is coord:
            new_dependencies[name] = new_coord
            try:
                self._check_dependencies(**new_dependencies)
            except ValueError as e:
                # BUGFIX: use str(e) rather than e.message -- the
                # `.message` attribute was removed in Python 3 and
                # would raise AttributeError here.
                msg = 'Failed to update dependencies. ' + str(e)
                raise ValueError(msg)
            else:
                setattr(self, name, new_coord)
            break
class OceanSg2Factory(AuxCoordFactory):
    """Defines an Ocean s-coordinate, generic form 2 factory."""

    def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None):
        """
        Creates an Ocean s-coordinate, generic form 2 factory with the formula:

        z(n,k,j,i) = eta(n,j,i) + (eta(n,j,i) + depth(j,i)) * S(k,j,i)

        where:
            S(k,j,i) = (depth_c * s(k) + depth(j,i) * C(k)) /
                       (depth_c + depth(j,i))

        """
        super(OceanSg2Factory, self).__init__()

        # Check that provided coordinates meet necessary conditions.
        self._check_dependencies(s, c, eta, depth, depth_c)

        self.s = s
        self.c = c
        self.eta = eta
        self.depth = depth
        self.depth_c = depth_c

        self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
        self.attributes = {'positive': 'up'}

    @property
    def units(self):
        # The derived coordinate shares the units of the depth coordinate.
        return self.depth.units

    @staticmethod
    def _check_dependencies(s, c, eta, depth, depth_c):
        """Validate presence, bounds, shape and units of the dependencies."""
        # Check for sufficient factory coordinates.
        if (eta is None or s is None or c is None or
                depth is None or depth_c is None):
            msg = 'Unable to construct Ocean s-coordinate, generic form 2 ' \
                'factory due to insufficient source coordinates.'
            raise ValueError(msg)

        # Check bounds and shape: s and c may carry 0 or 2 bounds, and
        # must agree with each other.
        coords = ((s, 's'), (c, 'c'))
        for coord, term in coords:
            if coord is not None and coord.nbounds not in (0, 2):
                msg = 'Invalid {} coordinate {!r}: must have either ' \
                    '0 or 2 bounds.'.format(term, coord.name())
                raise ValueError(msg)

        if s and s.nbounds != c.nbounds:
            msg = 'The s coordinate {!r} and c coordinate {!r} ' \
                'must be equally bounded.'.format(s.name(), c.name())
            raise ValueError(msg)

        # Bounds on eta/depth are not used by the formula -- warn only.
        coords = ((eta, 'eta'), (depth, 'depth'))
        for coord, term in coords:
            if coord is not None and coord.nbounds:
                msg = 'The {} coordinate {!r} has bounds. ' \
                    'These are being disregarded.'.format(term, coord.name())
                warnings.warn(msg, UserWarning, stacklevel=2)

        if depth_c is not None and depth_c.shape != (1,):
            msg = 'Expected scalar depth_c coordinate {!r}: ' \
                'got shape {!r}.'.format(depth_c.name(), depth_c.shape)
            raise ValueError(msg)

        # Check units: s and c must be dimensionless ...
        coords = ((s, 's'), (c, 'c'))
        for coord, term in coords:
            if coord is not None and not coord.units.is_dimensionless():
                msg = 'Invalid units: {} coordinate {!r} ' \
                    'must be dimensionless.'.format(term, coord.name())
                raise ValueError(msg)

        # ... and eta/depth/depth_c must share the depth units.
        coords = ((eta, 'eta'), (depth, 'depth'), (depth_c, 'depth_c'))
        for coord, term in coords:
            if coord is not None and coord.units != depth.units:
                msg = 'Incompatible units: {} coordinate {!r} and depth ' \
                    'coordinate {!r} must have ' \
                    'the same units.'.format(term, coord.name(), depth.name())
                raise ValueError(msg)

    @property
    def dependencies(self):
        """
        Returns a dictionary mapping from constructor argument names to
        the corresponding coordinates.

        """
        return dict(s=self.s, c=self.c, eta=self.eta, depth=self.depth,
                    depth_c=self.depth_c)

    def _derive(self, s, c, eta, depth, depth_c):
        # Apply the generic form 2 formula (see class docstring).
        S = (depth_c * s + depth * c) / (depth_c + depth)
        result = eta + (eta + depth) * S
        return result

    def make_coord(self, coord_dims_func):
        """
        Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.

        Args:

        * coord_dims_func:
            A callable which can return the list of dimensions relevant
            to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.

        """
        # Determine the relevant dimensions.
        derived_dims = self.derived_dims(coord_dims_func)
        dependency_dims = self._dependency_dims(coord_dims_func)

        # Build a "lazy" points array.
        nd_points_by_key = self._remap(dependency_dims, derived_dims)
        points_shape = self._shape(nd_points_by_key)

        # Define the function here to obtain a closure.
        def calc_points():
            return self._derive(nd_points_by_key['s'],
                                nd_points_by_key['c'],
                                nd_points_by_key['eta'],
                                nd_points_by_key['depth'],
                                nd_points_by_key['depth_c'])

        points = LazyArray(points_shape, calc_points)

        bounds = None
        if self.s.nbounds or (self.c and self.c.nbounds):
            # Build a "lazy" bounds array.
            nd_values_by_key = self._remap_with_bounds(dependency_dims,
                                                       derived_dims)
            bounds_shape = self._shape(nd_values_by_key)

            # Define the function here to obtain a closure.
            def calc_bounds():
                valid_shapes = [(), (1,), (2,)]
                key = 's'
                if nd_values_by_key[key].shape[-1:] not in valid_shapes:
                    name = self.dependencies[key].name()
                    msg = 'Invalid bounds for {} ' \
                        'coordinate {!r}.'.format(key, name)
                    raise ValueError(msg)
                valid_shapes.pop()
                for key in ('eta', 'depth', 'depth_c'):
                    if nd_values_by_key[key].shape[-1:] not in valid_shapes:
                        name = self.dependencies[key].name()
                        msg = 'The {} coordinate {!r} has bounds. ' \
                            'These are being disregarded.'.format(key, name)
                        warnings.warn(msg, UserWarning, stacklevel=2)
                        # Swap bounds with points.
                        shape = list(nd_points_by_key[key].shape)
                        # BUGFIX: list.append() returns None, so it must be
                        # called *before* reshape() -- the previous code
                        # passed its result (None) straight into reshape().
                        shape.append(1)
                        bounds = nd_points_by_key[key].reshape(shape)
                        nd_values_by_key[key] = bounds
                return self._derive(nd_values_by_key['s'],
                                    nd_values_by_key['c'],
                                    nd_values_by_key['eta'],
                                    nd_values_by_key['depth'],
                                    nd_values_by_key['depth_c'],
                                    bounds_shape)

            bounds = LazyArray(bounds_shape, calc_bounds)

        coord = iris.coords.AuxCoord(points,
                                     standard_name=self.standard_name,
                                     long_name=self.long_name,
                                     var_name=self.var_name,
                                     units=self.units,
                                     bounds=bounds,
                                     attributes=self.attributes,
                                     coord_system=self.coord_system)
        return coord

    def update(self, old_coord, new_coord=None):
        """
        Notifies the factory of the removal/replacement of a coordinate
        which might be a dependency.

        Args:

        * old_coord:
            The coordinate to be removed/replaced.
        * new_coord:
            If None, any dependency using old_coord is removed, otherwise
            any dependency using old_coord is updated to use new_coord.

        """
        new_dependencies = self.dependencies
        for name, coord in self.dependencies.items():
            if old_coord is coord:
                new_dependencies[name] = new_coord
                try:
                    self._check_dependencies(**new_dependencies)
                except ValueError as e:
                    # BUGFIX: str(e) instead of the Python-2-only e.message.
                    msg = 'Failed to update dependencies. ' + str(e)
                    raise ValueError(msg)
                else:
                    setattr(self, name, new_coord)
                break
| lgpl-3.0 |
hynnet/openwrt-mt7620 | staging_dir/host/lib/python2.7/test/test_linecache.py | 96 | 4079 | """ Tests for the linecache module """
import linecache
import unittest
import os.path
from test import test_support as support
FILENAME = linecache.__file__
INVALID_NAME = '!@$)(!@#_1'
EMPTY = ''
TESTS = 'inspect_fodder inspect_fodder2 mapping_tests'
TESTS = TESTS.split()
TEST_PATH = os.path.dirname(support.__file__)
MODULES = "linecache abc".split()
MODULE_PATH = os.path.dirname(FILENAME)
SOURCE_1 = '''
" Docstring "
def function():
return result
'''
SOURCE_2 = '''
def f():
return 1 + 1
a = f()
'''
SOURCE_3 = '''
def f():
return 3''' # No ending newline
class LineCacheTests(unittest.TestCase):
    """Behavioural tests for linecache.getline/getlines/clearcache/checkcache."""

    def test_getline(self):
        getline = linecache.getline

        # Bad values for line number should return an empty string
        self.assertEqual(getline(FILENAME, 2**15), EMPTY)
        self.assertEqual(getline(FILENAME, -1), EMPTY)

        # Float values currently raise TypeError, should it?
        self.assertRaises(TypeError, getline, FILENAME, 1.1)

        # Bad filenames should return an empty string
        self.assertEqual(getline(EMPTY, 1), EMPTY)
        self.assertEqual(getline(INVALID_NAME, 1), EMPTY)

        # Check whether lines correspond to those from file iteration
        for entry in TESTS:
            filename = os.path.join(TEST_PATH, entry) + '.py'
            # BUGFIX: iterate via `with` -- the previous bare open() in the
            # for-statement leaked the file handle (ResourceWarning).
            with open(filename) as fp:
                for index, line in enumerate(fp):
                    self.assertEqual(line, getline(filename, index + 1))

        # Check module loading
        for entry in MODULES:
            filename = os.path.join(MODULE_PATH, entry) + '.py'
            with open(filename) as fp:
                for index, line in enumerate(fp):
                    self.assertEqual(line, getline(filename, index + 1))

        # Check that bogus data isn't returned (issue #1309567)
        empty = linecache.getlines('a/b/c/__init__.py')
        self.assertEqual(empty, [])

    def test_no_ending_newline(self):
        # getlines() must synthesise the trailing newline on the last line.
        self.addCleanup(support.unlink, support.TESTFN)
        with open(support.TESTFN, "w") as fp:
            fp.write(SOURCE_3)
        lines = linecache.getlines(support.TESTFN)
        self.assertEqual(lines, ["\n", "def f():\n", "    return 3\n"])

    def test_clearcache(self):
        cached = []
        for entry in TESTS:
            filename = os.path.join(TEST_PATH, entry) + '.py'
            cached.append(filename)
            linecache.getline(filename, 1)

        # Are all files cached?
        cached_empty = [fn for fn in cached if fn not in linecache.cache]
        self.assertEqual(cached_empty, [])

        # Can we clear the cache?
        linecache.clearcache()
        cached_empty = [fn for fn in cached if fn in linecache.cache]
        self.assertEqual(cached_empty, [])

    def test_checkcache(self):
        getline = linecache.getline
        # Create a source file and cache its contents
        source_name = support.TESTFN + '.py'
        self.addCleanup(support.unlink, source_name)
        with open(source_name, 'w') as source:
            source.write(SOURCE_1)
        getline(source_name, 1)

        # Keep a copy of the old contents
        source_list = []
        with open(source_name) as source:
            for index, line in enumerate(source):
                self.assertEqual(line, getline(source_name, index + 1))
                source_list.append(line)

        with open(source_name, 'w') as source:
            source.write(SOURCE_2)

        # Try to update a bogus cache entry
        linecache.checkcache('dummy')

        # Check that the cache matches the old contents
        for index, line in enumerate(source_list):
            self.assertEqual(line, getline(source_name, index + 1))

        # Update the cache and check whether it matches the new source file
        linecache.checkcache(source_name)
        with open(source_name) as source:
            for index, line in enumerate(source):
                self.assertEqual(line, getline(source_name, index + 1))
                source_list.append(line)
def test_main():
    # Standard CPython regrtest entry point for this test module.
    support.run_unittest(LineCacheTests)


if __name__ == "__main__":
    test_main()
| gpl-2.0 |
hirobert/svgwrite | svgwrite/utils.py | 1 | 6106 | #!/usr/bin/env python
#coding:utf-8
# Author: mozman
# Purpose: svg util functions and classes
# Created: 08.09.2010
# Copyright (C) 2010, Manfred Moitzi
# License: GPLv3
"""
.. autofunction:: rgb
.. autofunction:: iterflatlist
.. autofunction:: strlist
.. autofunction:: get_unit
.. autofunction:: split_coordinate
.. autofunction:: split_angle
.. autofunction:: rect_top_left_corner
"""
import sys
# True when running under Python 3.
PYTHON3 = sys.version_info[0] > 2

# Python 3 adaption: Python 3 has no `unicode`/`basestring`, so alias
# them to `str` so the rest of the module stays version agnostic.
if PYTHON3:
    to_unicode = str
    basestring = str
else:
    to_unicode = unicode


def is_string(value):
    # True for any text type: str/unicode on Python 2, str on Python 3.
    return isinstance(value, basestring)
from svgwrite.data import pattern
def rgb(r=0, g=0, b=0, mode='RGB'):
    """
    Convert **r**, **g**, **b** values to a `string`.

    :param r: red part
    :param g: green part
    :param b: blue part
    :param string mode: ``'RGB | %'`` -- ``'RGB'`` yields ``'rgb(r, g, b)'``
        with each component masked to 0..255; ``'%'`` yields
        ``'rgb(r%, g%, b%)'`` with each component clamped to 0..100
        (percentage is an 'integer' value, see
        http://www.w3.org/TR/SVG11/types.html#DataTypeColor).
    :rtype: string

    """
    if mode.upper() == 'RGB':
        return "rgb(%d,%d,%d)" % (int(r) & 255, int(g) & 255, int(b) & 255)
    if mode == "%":
        def clamp(value):
            # Restrict an integer percentage to the closed range [0, 100].
            return min(100, max(0, int(value)))
        return "rgb(%d%%,%d%%,%d%%)" % (clamp(r), clamp(g), clamp(b))
    raise ValueError("Invalid mode '%s'" % mode)
def iterflatlist(values):
    """
    Flatten nested *values*, returns an `iterator`.

    Strings are treated as leaves, not as iterables of characters.
    """
    for element in values:
        if is_string(element) or not hasattr(element, "__iter__"):
            # Leaf value: yield as-is.
            yield element
        else:
            # Recurse into nested iterables.
            for leaf in iterflatlist(element):
                yield leaf
def strlist(values, seperator=","):
"""
Concatenate **values** with **sepertator**, `None` values will be excluded.
:param values: `iterable` object
:returns: `string`
"""
if is_string(values):
return values
else:
return seperator.join([str(value) for value in iterflatlist(values) if value is not None])
def get_unit(coordinate):
    """
    Get the `unit` identifier of **coordinate**, if **coordinate** has a valid
    `unit` identifier appended, else returns `None`.

    Raises :class:`ValueError` if **coordinate** is a string that does not
    match the coordinate pattern.
    """
    # Plain numbers never carry a unit.
    if isinstance(coordinate, (int, float)):
        return None
    match = pattern.coordinate.match(coordinate)
    if not match:
        raise ValueError("Invalid format: '%s'" % coordinate)
    return match.group(3)
def split_coordinate(coordinate):
    """
    Split coordinate into `<number>` and `unit` identifier.

    :returns: <2-tuple> (number, unit-identifier) or (number, None) if no
        unit-identifier is present or coordinate is an int or float.
    """
    # Plain numbers have no unit part.
    if isinstance(coordinate, (int, float)):
        return (float(coordinate), None)
    match = pattern.coordinate.match(coordinate)
    if not match:
        raise ValueError("Invalid format: '%s'" % coordinate)
    return (float(match.group(1)), match.group(3))
def split_angle(angle):
    """
    Split angle into `<number>` and `<angle>` identifier.

    :returns: <2-tuple> (number, angle-identifier) or (number, None) if no
        angle-identifier is present or angle is an int or float.
    """
    # Plain numbers have no angle-unit part.
    if isinstance(angle, (int, float)):
        return (float(angle), None)
    match = pattern.angle.match(angle)
    if not match:
        raise ValueError("Invalid format: '%s'" % angle)
    return (float(match.group(1)), match.group(3))
def rect_top_left_corner(insert, size, pos='top-left'):
    """
    Calculate top-left corner of a rectangle.

    **insert** and **size** must have the same units.

    :param 2-tuple insert: insert point
    :param 2-tuple size: (width, height)
    :param string pos: insert position ``'vert-horiz'`` where *vert* is one
        of ``'top' | 'middle' | 'bottom'`` and *horiz* is one of
        ``'left' | 'center' | 'right'``
    :return: ``'top-left'`` corner of the rect
    :rtype: 2-tuple

    """
    vert, horiz = pos.lower().split('-')
    x, xunit = split_coordinate(insert[0])
    y, yunit = split_coordinate(insert[1])
    width, wunit = split_coordinate(size[0])
    height, hunit = split_coordinate(size[1])

    # Mixed units cannot be combined arithmetically.
    if xunit != wunit:
        raise ValueError("x-coordinate and width has to have the same unit")
    if yunit != hunit:
        raise ValueError("y-coordinate and height has to have the same unit")

    # Shift x left according to the horizontal anchor.
    if horiz == 'center':
        x -= width / 2.
    elif horiz == 'right':
        x -= width
    elif horiz != 'left':
        raise ValueError("Invalid horizontal position: '%s'" % horiz)

    # Shift y up according to the vertical anchor.
    if vert == 'middle':
        y -= height / 2.
    elif vert == 'bottom':
        y -= height
    elif vert != 'top':
        raise ValueError("Invalid vertical position: '%s'" % vert)

    # Re-attach the unit identifiers, if any.
    if xunit:
        x = "%s%s" % (x, xunit)
    if yunit:
        y = "%s%s" % (y, yunit)
    return (x, y)
class AutoID(object):
    """Generator for sequential XML id strings of the form ``id<n>``.

    The counter is shared class-wide; constructing an instance with a
    value (or passing one to :meth:`next_id`) resets the counter.
    """
    _nextid = 1

    def __init__(self, value=None):
        self._set_value(value)

    @classmethod
    def _set_value(cls, value=None):
        # A non-None value resets the shared counter.
        if value is not None:
            cls._nextid = value

    @classmethod
    def next_id(cls, value=None):
        cls._set_value(value)
        cls._nextid += 1
        # Return the id for the counter value *before* the increment.
        return "id%d" % (cls._nextid - 1)
| gpl-3.0 |
AlvinPH/StockTool | StockTool/core.py | 1 | 7480 |
from . import helpers
import pandas as pd
import numpy as np
from pandas import DataFrame, Series
from pandas_datareader import data
from datetime import datetime, timedelta
import re
import os
import requests
import time
class StockInfo():
    """Fetches Yahoo Finance price and corporate-action data for one
    Taiwan-listed stock (the ``.TW`` suffix is appended automatically).
    """

    def __init__(self, StockNumber):
        # Fall back to TSMC (2330) when given anything but a string.
        if not isinstance(StockNumber, str):
            print('StockNumber must be string')
            self.__StockNumber = '2330.TW'
        else:
            self.__StockNumber = StockNumber + '.TW'

    def get_StockNumber(self):
        """Return the full Yahoo ticker, e.g. ``'2330.TW'``."""
        return self.__StockNumber

    def fetch_StockPrice(self, StartTime, EndTime):
        """Download OHLCV price history for the given date range."""
        self.__StockPrice = data.DataReader(self.__StockNumber,
                                            'yahoo', StartTime, EndTime)

    def get_StockPrice(self):
        """Return the previously fetched price DataFrame."""
        return self.__StockPrice

    def fetch_StockActions(self, StartTime, EndTime):
        """Download dividend/split actions for the given date range."""
        self.__StockActions = data.DataReader(self.__StockNumber,
                                              'yahoo-actions', StartTime, EndTime)

    def get_StockActions(self):
        """Return the previously fetched actions DataFrame."""
        return self.__StockActions
class Crawler():
    """Downloads and caches daily TWSE / TPEX (OTC) closing-quote tables.

    Each trading day is written to ``<prefix>/<date>.xlsx`` (Minguo-year
    date in the filename); known holidays are tracked in ``offday*.xlsx``
    and the last fetched date in ``lastday*.txt`` so repeated runs only
    fetch what is missing.
    """

    def __init__(self, prefix='data'):
        # Ensure the cache directory exists.
        if not os.path.isdir(prefix):
            os.mkdir(prefix)
        self.prefix = prefix

    def get_tse_one_day(self, spec_date):
        """Fetch one day of TSE quotes.

        Returns a DataFrame of all listed stocks, or -1 on a
        non-trading day.
        """
        # BUGFIX: `logging` was used below but never imported at module
        # level, so the error path raised NameError; import it locally.
        import logging
        date_str = '{0}{1:02d}{2:02d}'.format(spec_date.year, spec_date.month, spec_date.day)
        url = 'http://www.twse.com.tw/exchangeReport/MI_INDEX'
        query_params = {
            'date': date_str,
            'response': 'json',
            'type': 'ALL',
            '_': str(round(time.time() * 1000) - 500)
        }

        # Get json data
        page = requests.get(url, params=query_params)
        if not page.ok:
            logging.error("Can not get TSE data at {}".format(date_str))
        content = page.json()

        # A trading day is recognised by a 16-column table in the payload;
        # otherwise the exchange was closed.
        isoffday = True
        for key in content.keys():
            if isinstance(content[key], list):
                if len(content[key][0]) == 16:
                    isoffday = False
                    break
        if isoffday:
            print('No data at this day %4d/%02d/%02d'%
                  (spec_date.year,spec_date.month, spec_date.day))
            return -1

        # `key` is left pointing at the quote table found above.
        data_df = DataFrame(data=content[key],
                            columns=['code','name','volume','transaction','turnover',
                                     'open','high','low','close','UD','difference',
                                     'last_buy', 'last_buy_volume',
                                     'last_sell','last_sell_volume','PE_ratio'])
        data_df = data_df.applymap(lambda x: re.sub(",","",x))# clear comma
        # Normalise the up/down marker HTML to plain '+'/'-'.
        data_df.replace({'UD':{'<p style= color:red>+</p>':'+',
                               '<p style= color:green>-</p>':'-'}},
                        inplace=True)
        return data_df

    def get_otc_one_day(self, spec_date):
        """Fetch one day of OTC (TPEX) quotes.

        Returns a DataFrame, or -1 on a non-trading day.  The TPEX API
        expects Minguo (ROC) years, hence the ``year - 1911``.
        """
        # BUGFIX: local import -- see get_tse_one_day().
        import logging
        date_str = '{0}/{1:02d}/{2:02d}'.format(spec_date.year-1911, spec_date.month, spec_date.day)
        ttime = str(int(time.time()*100))
        url = 'http://www.tpex.org.tw/web/stock/aftertrading/daily_close_quotes/stk_quote_result.php?l=zh-tw&d={}&_={}'.format(date_str, ttime)
        page = requests.get(url)
        if not page.ok:
            logging.error("Can not get OTC data at {}".format(date_str))
        content = page.json()

        # Empty main and managed-stock tables together mean a holiday.
        if (len(content['aaData']) + len(content['mmData'])) == 0:
            print('No data at this day ' + date_str)
            return -1

        data_df = DataFrame(data=content['aaData'] + content['mmData'],
                            columns=['code','name','close','difference','open',
                                     'high','low','avg','volume','turnover',
                                     'transaction','last_buy',
                                     'last_sell','NumOfShare','NextRefPrice',
                                     'NextUpperPrice', 'NextLowerPrice'])
        data_df = data_df.applymap(lambda x: re.sub(",","",x))# clear comma
        return data_df

    def check_all_tse_data(self):
        """Walk every date from the last fetched day (or 2004-02-11) to
        today, fetching and saving any missing TSE day files.
        """
        Filelist = os.listdir(self.prefix)

        # Load (or initialise) the set of known holidays.
        if 'offday.xlsx' in Filelist:
            offday_ser = pd.read_excel(self.prefix + '/offday.xlsx')
            offday_ser = offday_ser['date'].copy()
        else:
            offday_ser = Series(name='date', data='First')

        offday_update = False
        lastday_update = False
        Now = datetime.now()
        Nowdate = datetime(Now.year, Now.month, Now.day)

        # Resume from the recorded last day (Western year), else from the
        # first day covered by the TWSE API.
        if 'lastday.txt' in Filelist:
            with open(self.prefix + '/lastday.txt', 'r') as f:
                read_data = f.read()
                f.close()
            Startdate = datetime(int(read_data[0:4]),
                                 int(read_data[4:6]),
                                 int(read_data[6:8]))
        else:
            #Start from 2004(093)/02/11
            Startdate = datetime(2004, 2, 11)

        datediff = timedelta(days=1)
        while Startdate <= Nowdate:
            # Filenames use the Minguo year.
            date_str = '{0}{1:02d}{2:02d}'.\
                format(Startdate.year-1911,Startdate.month, Startdate.day)
            print('Read ' + date_str)
            if ('%s.xlsx' %(date_str)) not in Filelist:# not in FileList
                if (offday_ser != date_str).all():# not a offday
                    lastday_update = True
                    data_df = self.get_tse_one_day(Startdate) # collect data
                    if isinstance(data_df, DataFrame):# success
                        data_df.to_excel('{0}/{1}.xlsx'.format(self.prefix,date_str))# save data
                    else:# is an offday, update offday series
                        # NOTE(review): Series.set_value was removed in
                        # pandas >= 1.0 -- confirm the pinned pandas version.
                        offday_ser.set_value( len(offday_ser), date_str)
                        offday_update = True
                        print(date_str + 'is an offday')
                else:
                    print(date_str + ' is known as an offday')
            else:
                print(date_str + ' is in FileList')
            Startdate = Startdate + datediff

        # Persist holiday list and last-day marker (Western year) if changed.
        if offday_update:
            offday_ser.to_excel(self.prefix + '/offday.xlsx')
        if lastday_update:
            with open(self.prefix + '/lastday.txt', 'w') as f:
                date_str = '{0}{1:02d}{2:02d}'.\
                    format(Nowdate.year,Nowdate.month, Nowdate.day)
                f.write(date_str)
                f.close()

    def check_all_otc_data(self):
        """Same as check_all_tse_data(), but for the OTC (TPEX) market,
        with its own ``*OTC`` side-car files and a polite random delay
        between requests.
        """
        Filelist = os.listdir(self.prefix)

        if 'offdayOTC.xlsx' in Filelist:
            offday_ser = pd.read_excel(self.prefix + '/offdayOTC.xlsx')
            offday_ser = offday_ser['date'].copy()
        else:
            offday_ser = Series(name='date', data='First')

        offday_update = False
        lastday_update = False
        Now = datetime.now()
        Nowdate = datetime(Now.year, Now.month, Now.day)

        if 'lastdayOTC.txt' in Filelist:
            with open(self.prefix + '/lastdayOTC.txt', 'r') as f:
                read_data = f.read()
                f.close()
            Startdate = datetime(int(read_data[0:4]),
                                 int(read_data[4:6]),
                                 int(read_data[6:8]))
        else:
            #Start from 2007(096)/04/23
            Startdate = datetime(2007, 4, 23)

        datediff = timedelta(days=1)
        while Startdate <= Nowdate:
            date_str = '{0}{1:02d}{2:02d}'.\
                format(Startdate.year-1911,Startdate.month, Startdate.day)
            print('Read ' + date_str + ' OTC')
            if ('%sOTC.xlsx' %(date_str)) not in Filelist:# not in FileList
                if (offday_ser != date_str).all():# not a offday
                    lastday_update = True
                    # Throttle requests to avoid hammering the TPEX server.
                    time.sleep(np.random.random())
                    data_df = self.get_otc_one_day(Startdate) # collect data
                    if isinstance(data_df, DataFrame):# success
                        data_df.to_excel('{0}/{1}OTC.xlsx'.format(self.prefix,date_str))# save data
                    else:# is an offday, update offday series
                        # NOTE(review): see set_value remark above.
                        offday_ser.set_value( len(offday_ser), date_str)
                        offday_update = True
                        print(date_str + 'is an offday')
                else:
                    print(date_str + ' is known as an offday')
            else:
                print(date_str + ' is in FileList')
            Startdate = Startdate + datediff

        if offday_update:
            offday_ser.to_excel(self.prefix + '/offdayOTC.xlsx')
        if lastday_update:
            with open(self.prefix + '/lastdayOTC.txt', 'w') as f:
                date_str = '{0}{1:02d}{2:02d}'.\
                    format(Nowdate.year,Nowdate.month, Nowdate.day)
                f.write(date_str)
                f.close()
| bsd-2-clause |
dtroyer/python-openstacksdk | openstack/tests/unit/clustering/v1/test_event.py | 1 | 2273 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.unit import base
from openstack.clustering.v1 import event
# A representative event record as returned by the clustering (Senlin)
# v1 API; consumed by TestEvent.test_instantiate to verify that each
# JSON field maps onto the expected Event resource attribute.
FAKE = {
    'action': 'NODE_CREATE',
    'cluster_id': None,
    'id': 'ffaed25e-46f5-4089-8e20-b3b4722fd597',
    'level': '20',
    'oid': 'efff1c11-2ada-47da-bedd-2c9af4fd099a',
    'oname': 'node_create_b4a49016',
    'otype': 'NODEACTION',
    'project': '42d9e9663331431f97b75e25136307ff',
    'status': 'START',
    'status_reason': 'The action was abandoned.',
    'timestamp': '2016-10-10T12:46:36.000000',
    'user': '5e5bf8027826429c96af157f68dc9072'
}
class TestEvent(base.TestCase):
    """Unit tests for the clustering v1 Event resource.

    NOTE: the previous setUp() only delegated to super() and added
    nothing, so it has been removed.
    """

    def test_basic(self):
        # The resource must be wired to the correct endpoint and verbs.
        sot = event.Event()
        self.assertEqual('event', sot.resource_key)
        self.assertEqual('events', sot.resources_key)
        self.assertEqual('/events', sot.base_path)
        self.assertEqual('clustering', sot.service.service_type)
        self.assertTrue(sot.allow_get)
        self.assertTrue(sot.allow_list)

    def test_instantiate(self):
        # Each API field in FAKE must map onto its resource attribute.
        sot = event.Event(**FAKE)
        self.assertEqual(FAKE['id'], sot.id)
        self.assertEqual(FAKE['action'], sot.action)
        self.assertEqual(FAKE['cluster_id'], sot.cluster_id)
        self.assertEqual(FAKE['level'], sot.level)
        self.assertEqual(FAKE['oid'], sot.obj_id)
        self.assertEqual(FAKE['oname'], sot.obj_name)
        self.assertEqual(FAKE['otype'], sot.obj_type)
        self.assertEqual(FAKE['project'], sot.project_id)
        self.assertEqual(FAKE['status'], sot.status)
        self.assertEqual(FAKE['status_reason'], sot.status_reason)
        self.assertEqual(FAKE['timestamp'], sot.generated_at)
        self.assertEqual(FAKE['user'], sot.user_id)
| apache-2.0 |
HousekeepLtd/django | tests/test_client/tests.py | 71 | 31030 | # -*- coding: utf-8 -*-
"""
Testing using the Test Client
The test client is a class that can act like a simple
browser for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
``Client`` objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the ``Client`` instance.
This is not intended as a replacement for Twill, Selenium, or
other browser automation frameworks - it is here to allow
testing against the contexts and templates produced by a view,
rather than the HTML rendered to the end-user.
"""
from __future__ import unicode_literals
import datetime
from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponse
from django.test import (
Client, RequestFactory, SimpleTestCase, TestCase, override_settings,
)
from .views import get_view, post_view, trace_view
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='test_client.urls',)
class ClientTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='testclient',
first_name='Test', last_name='Client', email='testclient@example.com', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u2 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='inactive',
first_name='Inactive', last_name='User', email='testclient@example.com', is_staff=False, is_active=False,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u3 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='staff',
first_name='Staff', last_name='Member', email='testclient@example.com', is_staff=True, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
def test_get_view(self):
"GET a view"
# The data is ignored, but let's check it doesn't crash the system
# anyway.
data = {'var': '\xf2'}
response = self.client.get('/get_view/', data)
# Check some response details
self.assertContains(response, 'This is a test')
self.assertEqual(response.context['var'], '\xf2')
self.assertEqual(response.templates[0].name, 'GET Template')
def test_get_post_view(self):
"GET a view that normally expects POSTs"
response = self.client.get('/post_view/', {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, 'Empty GET Template')
self.assertTemplateUsed(response, 'Empty GET Template')
self.assertTemplateNotUsed(response, 'Empty POST Template')
def test_empty_post(self):
"POST an empty dictionary to a view"
response = self.client.post('/post_view/', {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, 'Empty POST Template')
self.assertTemplateNotUsed(response, 'Empty GET Template')
self.assertTemplateUsed(response, 'Empty POST Template')
def test_post(self):
"POST some data to a view"
post_data = {
'value': 37
}
response = self.client.post('/post_view/', post_data)
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['data'], '37')
self.assertEqual(response.templates[0].name, 'POST Template')
self.assertContains(response, 'Data received')
def test_trace(self):
"""TRACE a view"""
response = self.client.trace('/trace_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['method'], 'TRACE')
self.assertEqual(response.templates[0].name, 'TRACE Template')
def test_response_headers(self):
"Check the value of HTTP headers returned in a response"
response = self.client.get("/header_view/")
self.assertEqual(response['X-DJANGO-TEST'], 'Slartibartfast')
def test_response_attached_request(self):
"""
Check that the returned response has a ``request`` attribute with the
originating environ dict and a ``wsgi_request`` with the originating
``WSGIRequest`` instance.
"""
response = self.client.get("/header_view/")
self.assertTrue(hasattr(response, 'request'))
self.assertTrue(hasattr(response, 'wsgi_request'))
for key, value in response.request.items():
self.assertIn(key, response.wsgi_request.environ)
self.assertEqual(response.wsgi_request.environ[key], value)
def test_response_resolver_match(self):
"""
The response contains a ResolverMatch instance.
"""
response = self.client.get('/header_view/')
self.assertTrue(hasattr(response, 'resolver_match'))
def test_response_resolver_match_redirect_follow(self):
"""
The response ResolverMatch instance contains the correct
information when following redirects.
"""
response = self.client.get('/redirect_view/', follow=True)
self.assertEqual(response.resolver_match.url_name, 'get_view')
def test_response_resolver_match_regular_view(self):
"""
The response ResolverMatch instance contains the correct
information when accessing a regular view.
"""
response = self.client.get('/get_view/')
self.assertEqual(response.resolver_match.url_name, 'get_view')
def test_raw_post(self):
"POST raw data (with a content type) to a view"
test_doc = """<?xml version="1.0" encoding="utf-8"?>
<library><book><title>Blink</title><author>Malcolm Gladwell</author></book></library>
"""
response = self.client.post("/raw_post_view/", test_doc,
content_type="text/xml")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Book template")
self.assertEqual(response.content, b"Blink - Malcolm Gladwell")
def test_insecure(self):
"GET a URL through http"
response = self.client.get('/secure_view/', secure=False)
self.assertFalse(response.test_was_secure_request)
self.assertEqual(response.test_server_port, '80')
def test_secure(self):
"GET a URL through https"
response = self.client.get('/secure_view/', secure=True)
self.assertTrue(response.test_was_secure_request)
self.assertEqual(response.test_server_port, '443')
def test_redirect(self):
"GET a URL that redirects elsewhere"
response = self.client.get('/redirect_view/')
# Check that the response was a 302 (redirect)
self.assertRedirects(response, '/get_view/')
def test_redirect_with_query(self):
"GET a URL that redirects with given GET parameters"
response = self.client.get('/redirect_view/', {'var': 'value'})
# Check if parameters are intact
self.assertRedirects(response, '/get_view/?var=value')
def test_permanent_redirect(self):
"GET a URL that redirects permanently elsewhere"
response = self.client.get('/permanent_redirect_view/')
# Check that the response was a 301 (permanent redirect)
self.assertRedirects(response, '/get_view/', status_code=301)
def test_temporary_redirect(self):
"GET a URL that does a non-permanent redirect"
response = self.client.get('/temporary_redirect_view/')
# Check that the response was a 302 (non-permanent redirect)
self.assertRedirects(response, '/get_view/', status_code=302)
def test_redirect_to_strange_location(self):
"GET a URL that redirects to a non-200 page"
response = self.client.get('/double_redirect_view/')
# Check that the response was a 302, and that
# the attempt to get the redirection location returned 301 when retrieved
self.assertRedirects(response, '/permanent_redirect_view/', target_status_code=301)
def test_follow_redirect(self):
"A URL that redirects can be followed to termination."
response = self.client.get('/double_redirect_view/', follow=True)
self.assertRedirects(response, '/get_view/', status_code=302, target_status_code=200)
self.assertEqual(len(response.redirect_chain), 2)
def test_redirect_http(self):
"GET a URL that redirects to an http URI"
response = self.client.get('/http_redirect_view/', follow=True)
self.assertFalse(response.test_was_secure_request)
def test_redirect_https(self):
"GET a URL that redirects to an https URI"
response = self.client.get('/https_redirect_view/', follow=True)
self.assertTrue(response.test_was_secure_request)
def test_notfound_response(self):
"GET a URL that responds as '404:Not Found'"
response = self.client.get('/bad_view/')
# Check that the response was a 404, and that the content contains MAGIC
self.assertContains(response, 'MAGIC', status_code=404)
def test_valid_form(self):
"POST valid data to a form"
post_data = {
'text': 'Hello World',
'email': 'foo@example.com',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Valid POST Template")
def test_valid_form_with_hints(self):
"GET a form, providing hints in the GET data"
hints = {
'text': 'Hello World',
'multi': ('b', 'c', 'e')
}
response = self.client.get('/form_view/', data=hints)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Form GET Template")
# Check that the multi-value data has been rolled out ok
self.assertContains(response, 'Select a valid choice.', 0)
def test_incomplete_data_form(self):
"POST incomplete data to a form"
post_data = {
'text': 'Hello World',
'value': 37
}
response = self.client.post('/form_view/', post_data)
self.assertContains(response, 'This field is required.', 3)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'This field is required.')
self.assertFormError(response, 'form', 'single', 'This field is required.')
self.assertFormError(response, 'form', 'multi', 'This field is required.')
def test_form_error(self):
"POST erroneous data to a form"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'Enter a valid email address.')
def test_valid_form_with_template(self):
"POST valid data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'email': 'foo@example.com',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data OK')
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Valid POST Template")
def test_incomplete_data_form_with_template(self):
"POST incomplete data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'value': 37
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data has errors')
self.assertTemplateUsed(response, 'form_view.html')
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'This field is required.')
self.assertFormError(response, 'form', 'single', 'This field is required.')
self.assertFormError(response, 'form', 'multi', 'This field is required.')
def test_form_error_with_template(self):
"POST erroneous data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data has errors')
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'Enter a valid email address.')
def test_unknown_page(self):
"GET an invalid URL"
response = self.client.get('/unknown_view/')
# Check that the response was a 404
self.assertEqual(response.status_code, 404)
def test_url_parameters(self):
"Make sure that URL ;-parameters are not stripped."
response = self.client.get('/unknown_view/;some-parameter')
# Check that the path in the response includes it (ignore that it's a 404)
self.assertEqual(response.request['PATH_INFO'], '/unknown_view/;some-parameter')
def test_view_with_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_force_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_method_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_method_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_method_force_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_method_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_login_and_custom_redirect(self):
"Request a page that is protected with @login_required(redirect_field_name='redirect_to')"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_force_login_and_custom_redirect(self):
"""
Request a page that is protected with
@login_required(redirect_field_name='redirect_to')
"""
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_bad_login(self):
"Request a page that is protected with @login, but use bad credentials"
login = self.client.login(username='otheruser', password='nopassword')
self.assertFalse(login)
def test_view_with_inactive_login(self):
"Request a page that is protected with @login, but use an inactive login"
login = self.client.login(username='inactive', password='password')
self.assertFalse(login)
def test_view_with_inactive_force_login(self):
"Request a page that is protected with @login, but use an inactive login"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u2)
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'inactive')
def test_logout(self):
"Request a logout after logging in"
# Log in
self.client.login(username='testclient', password='password')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
def test_logout_with_force_login(self):
"Request a logout after logging in"
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
@override_settings(
AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend',
'test_client.auth_backends.TestClientBackend',
],
)
def test_force_login_with_backend(self):
"""
Request a page that is protected with @login_required when using
force_login() and passing a backend.
"""
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u1, backend='test_client.auth_backends.TestClientBackend')
self.assertEqual(self.u1.backend, 'test_client.auth_backends.TestClientBackend')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.signed_cookies")
def test_logout_cookie_sessions(self):
self.test_logout()
def test_view_with_permissions(self):
"Request a page that is protected with @permission_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/permission_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 302.
response = self.client.get('/permission_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/')
# TODO: Log in with right permissions and request the page again
def test_view_with_permissions_exception(self):
"Request a page that is protected with @permission_required but raises an exception"
# Get the page without logging in. Should result in 403.
response = self.client.get('/permission_protected_view_exception/')
self.assertEqual(response.status_code, 403)
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 403.
response = self.client.get('/permission_protected_view_exception/')
self.assertEqual(response.status_code, 403)
def test_view_with_method_permissions(self):
"Request a page that is protected with a @permission_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/permission_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 302.
response = self.client.get('/permission_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/')
# TODO: Log in with right permissions and request the page again
def test_external_redirect(self):
response = self.client.get('/django_project_redirect/')
self.assertRedirects(response, 'https://www.djangoproject.com/', fetch_redirect_response=False)
def test_session_modifying_view(self):
    """Request a page that modifies the session."""
    # The key must be absent before the view runs; assertRaises replaces
    # the older try/self.fail/except-pass pattern with the same semantics.
    with self.assertRaises(KeyError):
        self.client.session['tobacconist']

    self.client.post('/session_view/')
    # Check that the session was modified
    self.assertEqual(self.client.session['tobacconist'], 'hovercraft')
def test_view_with_exception(self):
    """Request a page that is known to throw an error."""
    # The original asserted this twice (callable-style assertRaises plus a
    # manual try/except of the same request); one context-manager check
    # covers the same contract without the duplication.
    with self.assertRaises(KeyError):
        self.client.get('/broken_view/')
def test_mail_sending(self):
"Test that mail is redirected to a dummy outbox during test setup"
response = self.client.get('/mail_sending_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Test message')
self.assertEqual(mail.outbox[0].body, 'This is a test email')
self.assertEqual(mail.outbox[0].from_email, 'from@example.com')
self.assertEqual(mail.outbox[0].to[0], 'first@example.com')
self.assertEqual(mail.outbox[0].to[1], 'second@example.com')
def test_reverse_lazy_decodes(self):
"Ensure reverse_lazy works in the test client"
data = {'var': 'data'}
response = self.client.get(reverse_lazy('get_view'), data)
# Check some response details
self.assertContains(response, 'This is a test')
def test_mass_mail_sending(self):
"Test that mass mail is redirected to a dummy outbox during test setup"
response = self.client.get('/mass_mail_sending_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, 'First Test message')
self.assertEqual(mail.outbox[0].body, 'This is the first test email')
self.assertEqual(mail.outbox[0].from_email, 'from@example.com')
self.assertEqual(mail.outbox[0].to[0], 'first@example.com')
self.assertEqual(mail.outbox[0].to[1], 'second@example.com')
self.assertEqual(mail.outbox[1].subject, 'Second Test message')
self.assertEqual(mail.outbox[1].body, 'This is the second test email')
self.assertEqual(mail.outbox[1].from_email, 'from@example.com')
self.assertEqual(mail.outbox[1].to[0], 'second@example.com')
self.assertEqual(mail.outbox[1].to[1], 'third@example.com')
def test_exception_following_nested_client_request(self):
"""
A nested test client request shouldn't clobber exception signals from
the outer client request.
"""
with self.assertRaisesMessage(Exception, 'exception message'):
self.client.get('/nesting_exception_view/')
@override_settings(
    MIDDLEWARE_CLASSES=['django.middleware.csrf.CsrfViewMiddleware'],
    ROOT_URLCONF='test_client.urls',
)
class CSRFEnabledClientTests(SimpleTestCase):
    def test_csrf_enabled_client(self):
        "A client can be instantiated with CSRF checks enabled"
        # enforce_csrf_checks=True re-enables CSRF validation, which the
        # default test client deliberately skips.
        csrf_client = Client(enforce_csrf_checks=True)

        # The normal client allows the post
        response = self.client.post('/post_view/', {})
        self.assertEqual(response.status_code, 200)

        # The CSRF-enabled client rejects it (no CSRF token was sent)
        response = csrf_client.post('/post_view/', {})
        self.assertEqual(response.status_code, 403)
class CustomTestClient(Client):
    # Marker attribute: lets CustomTestClientTest verify that this class,
    # rather than the default Client, was instantiated as self.client.
    i_am_customized = "Yes"
class CustomTestClientTest(SimpleTestCase):
    # The test framework instantiates client_class as self.client.
    client_class = CustomTestClient

    def test_custom_test_client(self):
        """A test case can specify a custom class for self.client."""
        # assertTrue is the idiomatic form of assertEqual(..., True) and
        # produces a clearer failure message.
        self.assertTrue(hasattr(self.client, "i_am_customized"))
_generic_view = lambda request: HttpResponse(status=200)
@override_settings(ROOT_URLCONF='test_client.urls')
class RequestFactoryTest(SimpleTestCase):
    """Tests for the request factory."""

    # A mapping between names of HTTP/1.1 methods and their test views.
    # Methods without a dedicated view share the 200-only _generic_view.
    http_methods_and_views = (
        ('get', get_view),
        ('post', post_view),
        ('put', _generic_view),
        ('patch', _generic_view),
        ('delete', _generic_view),
        ('head', _generic_view),
        ('options', _generic_view),
        ('trace', trace_view),
    )

    def setUp(self):
        self.request_factory = RequestFactory()

    def test_request_factory(self):
        """The request factory implements all the HTTP/1.1 methods."""
        for method_name, view in self.http_methods_and_views:
            # RequestFactory exposes one builder method per HTTP verb.
            method = getattr(self.request_factory, method_name)
            request = method('/somewhere/')
            response = view(request)
            self.assertEqual(response.status_code, 200)

    def test_get_request_from_factory(self):
        """
        The request factory returns a templated response for a GET request.
        """
        request = self.request_factory.get('/somewhere/')
        response = get_view(request)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'This is a test')

    def test_trace_request_from_factory(self):
        """The request factory returns an echo response for a TRACE request."""
        url_path = '/somewhere/'
        request = self.request_factory.trace(url_path)
        response = trace_view(request)
        # The TRACE view echoes the request line back in the body.
        protocol = request.META["SERVER_PROTOCOL"]
        echoed_request_line = "TRACE {} {}".format(url_path, protocol)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, echoed_request_line)
| bsd-3-clause |
ephoning/heroku-buildpack-python | vendor/pip-1.3.1/pip/commands/__init__.py | 63 | 2137 | """
Package containing all pip commands
"""
from pip.commands.bundle import BundleCommand
from pip.commands.completion import CompletionCommand
from pip.commands.freeze import FreezeCommand
from pip.commands.help import HelpCommand
from pip.commands.list import ListCommand
from pip.commands.search import SearchCommand
from pip.commands.show import ShowCommand
from pip.commands.install import InstallCommand
from pip.commands.uninstall import UninstallCommand
from pip.commands.unzip import UnzipCommand
from pip.commands.zip import ZipCommand
# Registry mapping each command's canonical name to its implementing class.
commands = {
    BundleCommand.name: BundleCommand,
    CompletionCommand.name: CompletionCommand,
    FreezeCommand.name: FreezeCommand,
    HelpCommand.name: HelpCommand,
    SearchCommand.name: SearchCommand,
    ShowCommand.name: ShowCommand,
    InstallCommand.name: InstallCommand,
    UninstallCommand.name: UninstallCommand,
    UnzipCommand.name: UnzipCommand,
    ZipCommand.name: ZipCommand,
    ListCommand.name: ListCommand,
}

# Preferred display order for help listings; commands absent from this list
# sort after all ordered ones (see _sort_commands).
commands_order = [
    InstallCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    SearchCommand,
    ZipCommand,
    UnzipCommand,
    BundleCommand,
    HelpCommand,
]
def get_summaries(ignore_hidden=True, ordered=True):
    """Yield (command name, command summary) tuples, optionally sorted.

    Hidden commands are skipped unless *ignore_hidden* is False.
    """
    items = _sort_commands(commands, commands_order) if ordered else commands.items()
    for name, cls in items:
        # Skip commands flagged as hidden unless the caller opted in.
        if ignore_hidden and cls.hidden:
            continue
        yield (name, cls.summary)
def get_similar_commands(name):
    """Command name auto-correct.

    Return the registered command name closest to *name*, or False when
    nothing is similar enough (False, not None, is the historical
    contract callers rely on).
    """
    # Imported lazily to keep command dispatch cheap on the common path.
    from difflib import get_close_matches

    close_commands = get_close_matches(name, commands.keys())
    # Conditional expression replaces the four-line if/else assignment.
    return close_commands[0] if close_commands else False
def _sort_commands(cmddict, order):
def keyfn(key):
try:
return order.index(key[1])
except ValueError:
# unordered items should come last
return 0xff
return sorted(cmddict.items(), key=keyfn)
| mit |
rtrigoso/somepolymath | node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/shell.py | 287 | 15340 | # -*- coding: utf-8 -*-
"""
pygments.lexers.shell
~~~~~~~~~~~~~~~~~~~~~
Lexers for various shells.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'PowerShellLexer', 'ShellSessionLexer']
line_re = re.compile('.*?\n')
class BashLexer(RegexLexer):
    """
    Lexer for (ba|k|)sh shell scripts.

    *New in Pygments 0.6.*
    """

    name = 'Bash'
    aliases = ['bash', 'sh', 'ksh']
    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
                 '.bashrc', 'bashrc', '.bash_*', 'bash_*']
    mimetypes = ['application/x-sh', 'application/x-shellscript']

    tokens = {
        'root': [
            include('basic'),
            # Expansions push dedicated states so their contents are
            # re-lexed recursively: $((...)), $(...), ${...} and `...`.
            (r'\$\(\(', Keyword, 'math'),
            (r'\$\(', Keyword, 'paren'),
            (r'\${#?', Keyword, 'curly'),
            (r'`', String.Backtick, 'backticks'),
            include('data'),
        ],
        'basic': [
            # Shell reserved words.
            (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
             r'select|continue|until|esac|elif)\s*\b',
             Keyword),
            # Shell builtins; (?!\.) avoids matching e.g. "set." in prose.
            (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
             r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
             Name.Builtin),
            (r'#.*\n', Comment),
            (r'\\[\w\W]', String.Escape),
            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
            (r'[\[\]{}()=]', Operator),
            (r'<<<', Operator),  # here-string
            # Here-document: \2 backreferences the delimiter word.
            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
            (r'&&|\|\|', Operator),
        ],
        'data': [
            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
            (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r';', Punctuation),
            (r'&', Punctuation),
            (r'\|', Punctuation),
            (r'\s+', Text),
            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
            (r'\d+(?= |\Z)', Number),
            (r'\$#?(\w+|.)', Name.Variable),
            (r'<', Text),
        ],
        'curly': [
            # ${...} parameter expansion body.
            (r'}', Keyword, '#pop'),
            (r':-', Keyword),
            (r'[a-zA-Z0-9_]+', Name.Variable),
            (r'[^}:"\'`$]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        'math': [
            # $((...)) arithmetic expansion.
            (r'\)\)', Keyword, '#pop'),
            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
            (r'\d+', Number),
            include('root'),
        ],
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }

    def analyse_text(text):
        # A shebang is a strong signal; a leading "$ " prompt is weak.
        if shebang_matches(text, r'(ba|z|)sh'):
            return 1
        if text.startswith('$ '):
            return 0.2
class BashSessionLexer(Lexer):
    """
    Lexer for simplistic shell sessions.

    Splits each line into an optional prompt, command text (lexed with
    BashLexer) and program output (Generic.Output).

    *New in Pygments 1.1.*
    """

    name = 'Bash Session'
    aliases = ['console']
    filenames = ['*.sh-session']
    mimetypes = ['application/x-shell-session']

    def get_tokens_unprocessed(self, text):
        bashlexer = BashLexer(**self.options)

        pos = 0
        curcode = ''          # command text buffered across prompt lines
        insertions = []       # prompt tokens to splice back at flush time

        for match in line_re.finditer(text):
            line = match.group()
            # Matches prompts like "(venv) user@host:~ $", "sh-4.2$", etc.
            m = re.match(r'^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)'
                         r'?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)', line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()

                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
            elif line.startswith('>'):
                # Continuation prompt: keep accumulating command text.
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:1])]))
                curcode += line[1:]
            else:
                # Output line: flush any buffered command first.
                if insertions:
                    toks = bashlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        # Flush a trailing command with no output after it.
        if insertions:
            for i, t, v in do_insertions(insertions,
                                         bashlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
class ShellSessionLexer(Lexer):
    """
    Lexer for shell sessions that works with different command prompts

    Same buffering scheme as BashSessionLexer, but with a simpler
    "user@host ... $/#/%" prompt regex and no "> " continuation handling.

    *New in Pygments 1.6.*
    """

    name = 'Shell Session'
    aliases = ['shell-session']
    filenames = ['*.shell-session']
    mimetypes = ['application/x-sh-session']

    def get_tokens_unprocessed(self, text):
        bashlexer = BashLexer(**self.options)

        pos = 0
        curcode = ''          # command text buffered across prompt lines
        insertions = []       # prompt tokens to splice back at flush time

        for match in line_re.finditer(text):
            line = match.group()
            m = re.match(r'^((?:\[?\S+@[^$#%]+)[$#%])(.*\n?)', line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()

                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
            else:
                # Output line: flush any buffered command first.
                if insertions:
                    toks = bashlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        # Flush a trailing command with no output after it.
        if insertions:
            for i, t, v in do_insertions(insertions,
                                         bashlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
class BatchLexer(RegexLexer):
    """
    Lexer for the DOS/Windows Batch file format.

    *New in Pygments 0.7.*
    """
    name = 'Batchfile'
    aliases = ['bat', 'dosbatch', 'winbatch']
    filenames = ['*.bat', '*.cmd']
    mimetypes = ['application/x-dos-batch']

    # Batch is case-insensitive and keywords may appear anywhere in a line.
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            # Lines can start with @ to prevent echo
            (r'^\s*@', Punctuation),
            (r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
            (r'".*?"', String.Double),
            (r"'.*?'", String.Single),
            # If made more specific, make sure you still allow expansions
            # like %~$VAR:zlt
            (r'%%?[~$:\w]+%?', Name.Variable),
            (r'::.*', Comment),  # Technically :: only works at BOL
            (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
            (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
            (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
            (r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
             r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
             r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
            (r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
            include('basic'),
            (r'.', Text),
        ],
        'echo': [
            # Escapes only valid within echo args?
            (r'\^\^|\^<|\^>|\^\|', String.Escape),
            (r'\n', Text, '#pop'),
            include('basic'),
            (r'[^\'"^]+', Text),
        ],
        'basic': [
            # Literals, labels and redirection shared by root and echo.
            (r'".*?"', String.Double),
            (r"'.*?'", String.Single),
            (r'`.*?`', String.Backtick),
            (r'-?\d+', Number),
            (r',', Punctuation),
            (r'=', Operator),
            (r'/\S+', Name),
            (r':\w+', Name.Label),
            (r'\w:\w+', Text),
            (r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
        ],
    }
class TcshLexer(RegexLexer):
    """
    Lexer for tcsh scripts.

    *New in Pygments 0.10.*
    """

    name = 'Tcsh'
    aliases = ['tcsh', 'csh']
    filenames = ['*.tcsh', '*.csh']
    mimetypes = ['application/x-csh']

    tokens = {
        'root': [
            include('basic'),
            (r'\$\(', Keyword, 'paren'),
            (r'\${#?', Keyword, 'curly'),
            (r'`', String.Backtick, 'backticks'),
            include('data'),
        ],
        'basic': [
            # tcsh control-flow keywords.
            (r'\b(if|endif|else|while|then|foreach|case|default|'
             r'continue|goto|breaksw|end|switch|endsw)\s*\b',
             Keyword),
            # tcsh builtins.
            # NOTE(review): popd, pushd, shift, source and suspend each
            # appear twice in this alternation -- harmless but redundant;
            # confirm against upstream Pygments before deduplicating.
            (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
             r'source|stop|suspend|source|suspend|telltc|time|'
             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
             r'ver|wait|warp|watchlog|where|which)\s*\b',
             Name.Builtin),
            (r'#.*\n', Comment),
            (r'\\[\w\W]', String.Escape),
            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
            (r'[\[\]{}()=]+', Operator),
            # Here-document: \2 backreferences the delimiter word.
            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
        ],
        'data': [
            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r'\s+', Text),
            (r'[^=\s\[\]{}()$"\'`\\]+', Text),
            (r'\d+(?= |\Z)', Number),
            (r'\$#?(\w+|.)', Name.Variable),
        ],
        'curly': [
            (r'}', Keyword, '#pop'),
            (r':-', Keyword),
            (r'[a-zA-Z0-9_]+', Name.Variable),
            (r'[^}:"\'`$]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }
class PowerShellLexer(RegexLexer):
    """
    For Windows PowerShell code.

    *New in Pygments 1.5.*
    """
    name = 'PowerShell'
    aliases = ['powershell', 'posh', 'ps1', 'psm1']
    filenames = ['*.ps1','*.psm1']
    mimetypes = ['text/x-powershell']

    flags = re.DOTALL | re.IGNORECASE | re.MULTILINE

    # Word lists below are interpolated into regexes via '|'.join(...).
    keywords = (
        'while validateset validaterange validatepattern validatelength '
        'validatecount until trap switch return ref process param parameter in '
        'if global: function foreach for finally filter end elseif else '
        'dynamicparam do default continue cmdletbinding break begin alias \\? '
        '% #script #private #local #global mandatory parametersetname position '
        'valuefrompipeline valuefrompipelinebypropertyname '
        'valuefromremainingarguments helpmessage try catch throw').split()

    operators = (
        'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
        'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
        'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
        'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
        'lt match ne not notcontains notlike notmatch or regex replace '
        'wildcard').split()

    verbs = (
        'write where wait use update unregister undo trace test tee take '
        'suspend stop start split sort skip show set send select scroll resume '
        'restore restart resolve resize reset rename remove register receive '
        'read push pop ping out new move measure limit join invoke import '
        'group get format foreach export expand exit enter enable disconnect '
        'disable debug cxnew copy convertto convertfrom convert connect '
        'complete compare clear checkpoint aggregate add').split()

    # Comment-based-help section names (.SYNOPSIS, .PARAMETER, ...).
    commenthelp = (
        'component description example externalhelp forwardhelpcategory '
        'forwardhelptargetname functionality inputs link '
        'notes outputs parameter remotehelprunspace role synopsis').split()

    tokens = {
        'root': [
            # we need to count pairs of parentheses for correct highlight
            # of '$(...)' blocks in strings
            (r'\(', Punctuation, 'child'),
            (r'\s+', Text),
            (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
             bygroups(Comment, String.Doc, Comment)),
            (r'#[^\n]*?$', Comment),
            # NOTE(review): upstream Pygments uses '(&lt;|<)#' here to cope
            # with HTML-escaped input; this copy has a duplicated plain '<'.
            # Behaviour is the same for plain text -- confirm before changing.
            (r'(<|<)#', Comment.Multiline, 'multline'),
            (r'@"\n', String.Heredoc, 'heredoc-double'),
            (r"@'\n.*?\n'@", String.Heredoc),
            # escaped syntax
            (r'`[\'"$@-]', Punctuation),
            (r'"', String.Double, 'string'),
            (r"'([^']|'')*'", String.Single),
            (r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
             Name.Variable),
            (r'(%s)\b' % '|'.join(keywords), Keyword),
            (r'-(%s)\b' % '|'.join(operators), Operator),
            (r'(%s)-[a-z_][a-z0-9_]*\b' % '|'.join(verbs), Name.Builtin),
            (r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant),  # .net [type]s
            (r'-[a-z_][a-z0-9_]*', Name),
            (r'\w+', Name),
            (r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
        ],
        'child': [
            (r'\)', Punctuation, '#pop'),
            include('root'),
        ],
        'multline': [
            (r'[^#&.]+', Comment.Multiline),
            (r'#(>|>)', Comment.Multiline, '#pop'),
            (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
            (r'[#&.]', Comment.Multiline),
        ],
        'string': [
            (r"`[0abfnrtv'\"\$]", String.Escape),
            (r'[^$`"]+', String.Double),
            (r'\$\(', Punctuation, 'child'),
            (r'""', String.Double),
            (r'[`$]', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'heredoc-double': [
            (r'\n"@', String.Heredoc, '#pop'),
            (r'\$\(', Punctuation, 'child'),
            # NOTE(review): the trailing '"]' in this pattern looks
            # accidental -- confirm against upstream Pygments.
            (r'[^@\n]+"]', String.Heredoc),
            (r".", String.Heredoc),
        ]
    }
| mit |
codeskyblue/AutomatorX | atx/ext/chromedriver.py | 3 | 2585 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# extension for https://sites.google.com/a/chromium.org/chromedriver/
# Experimental, maybe change in the future
# Created by <hzsunshx> 2017-01-20
from __future__ import absolute_import
import atexit
import six
from selenium import webdriver
if six.PY3:
import subprocess
from urllib.error import URLError
else:
from urllib2 import URLError
import subprocess32 as subprocess
class ChromeDriver(object):
    """Bridge between an ATX device handle and a chromedriver-backed
    selenium session attached to an Android WebView."""

    def __init__(self, d, port=9515):
        self._d = d
        self._port = port

    def _launch_webdriver(self):
        """Spawn a local chromedriver process.

        Returns True when the process is still alive after a 2 second
        grace period (assumed started), False when it exited immediately
        (launch failed, e.g. the port is already taken).
        """
        print("start chromedriver instance")
        proc = subprocess.Popen(['chromedriver', '--port={0}'.format(self._port)])
        try:
            proc.wait(timeout=2.0)
        except subprocess.TimeoutExpired:
            # Still running after the grace period: treat as started.
            return True
        return False

    def driver(self, package=None, attach=True, activity=None, process=None):
        """
        Build a selenium Remote driver attached to an Android WebView.

        Args:
        - package(string): default current running app
        - attach(bool): default true, Attach to an already-running app instead of launching the app with a clear data directory
        - activity(string): Name of the Activity hosting the WebView.
        - process(string): Process name of the Activity hosting the WebView (as given by ps).
          If not given, the process name is assumed to be the same as androidPackage.

        Returns:
            selenium driver
        """
        current = self._d.current_app()
        caps = {
            'chromeOptions': {
                'androidDeviceSerial': self._d.serial,
                'androidPackage': package or current.package,
                'androidUseRunningApp': attach,
                'androidProcess': process or current.package,
                'androidActivity': activity or current.activity,
            }
        }
        endpoint = 'http://localhost:%d' % self._port
        try:
            remote = webdriver.Remote(endpoint, caps)
        except URLError:
            # chromedriver is not listening yet: start it and retry once.
            self._launch_webdriver()
            remote = webdriver.Remote(endpoint, caps)
        # always quit driver when done
        atexit.register(remote.quit)
        return remote

    def windows_kill(self):
        """Force-kill every chromedriver.exe process (Windows only)."""
        subprocess.call(['taskkill', '/F', '/IM', 'chromedriver.exe', '/T'])
# Manual smoke test: connect to the default device, open a WebView-backed
# selenium session and tap the login link.
if __name__ == '__main__':
    import atx
    d = atx.connect()
    driver = ChromeDriver(d).driver()
    # u"登录" is the (Chinese) "Log in" link label.
    elem = driver.find_element_by_link_text(u"登录")
    elem.click()
    driver.quit()
| apache-2.0 |
pambot/SMSQuery | lib/jinja2/testsuite/lexnparse.py | 402 | 22314 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.lexnparse
~~~~~~~~~~~~~~~~~~~~~~~~~~
All the unittests regarding lexing, parsing and syntax.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Environment, Template, TemplateSyntaxError, \
UndefinedError, nodes
from jinja2._compat import next, iteritems, text_type, PY2
from jinja2.lexer import Token, TokenStream, TOKEN_EOF, \
TOKEN_BLOCK_BEGIN, TOKEN_BLOCK_END
env = Environment()

# How a string literal is spelled in Jinja syntax: on Python 2 drop the
# 'u' prefix that repr() adds to unicode strings; on Python 3 the plain
# repr() already matches Jinja's string syntax.
if PY2:
    def jinja_string_repr(string):
        return repr(string)[1:]
else:
    jinja_string_repr = repr
class TokenStreamTestCase(JinjaTestCase):
    """Tests for the TokenStream wrapper around lexer tokens."""

    # Minimal two-token stream (block begin/end) shared by both tests.
    test_tokens = [Token(1, TOKEN_BLOCK_BEGIN, ''),
                   Token(2, TOKEN_BLOCK_END, ''),
                   ]

    def test_simple(self):
        # Advancing moves `current` through the tokens and finally to EOF,
        # at which point the stream itself becomes falsy and `eos` truthy.
        ts = TokenStream(self.test_tokens, "foo", "bar")
        assert ts.current.type is TOKEN_BLOCK_BEGIN
        assert bool(ts)
        assert not bool(ts.eos)
        next(ts)
        assert ts.current.type is TOKEN_BLOCK_END
        assert bool(ts)
        assert not bool(ts.eos)
        next(ts)
        assert ts.current.type is TOKEN_EOF
        assert not bool(ts)
        assert bool(ts.eos)

    def test_iter(self):
        # Iterating yields the token objects without the trailing EOF token.
        token_types = [t.type for t in TokenStream(self.test_tokens, "foo", "bar")]
        assert token_types == ['block_begin', 'block_end', ]
class LexerTestCase(JinjaTestCase):
    """Tests for lexing: raw blocks, custom delimiters, escapes, newlines.

    NOTE(review): whitespace inside the multi-line template literals and
    their expected outputs looks normalized by extraction -- verify the
    literals against the upstream jinja2 testsuite.
    """

    def test_raw1(self):
        # Content of {% raw %} blocks is emitted verbatim, untouched by the lexer.
        tmpl = env.from_string('{% raw %}foo{% endraw %}|'
                               '{%raw%}{{ bar }}|{% baz %}{% endraw %}')
        assert tmpl.render() == 'foo|{{ bar }}|{% baz %}'

    def test_raw2(self):
        # '-' whitespace-control modifiers work on raw/endraw tags too.
        tmpl = env.from_string('1 {%- raw -%} 2 {%- endraw -%} 3')
        assert tmpl.render() == '123'

    def test_balancing(self):
        # '${...}' variable delimiters must balance nested braces correctly.
        env = Environment('{%', '%}', '${', '}')
        tmpl = env.from_string('''{% for item in seq
%}${{'foo': item}|upper}{% endfor %}''')
        assert tmpl.render(seq=list(range(3))) == "{'FOO': 0}{'FOO': 1}{'FOO': 2}"

    def test_comments(self):
        env = Environment('<!--', '-->', '{', '}')
        tmpl = env.from_string('''\
<ul>
<!--- for item in seq -->
<li>{item}</li>
<!--- endfor -->
</ul>''')
        assert tmpl.render(seq=list(range(3))) == ("<ul>\n <li>0</li>\n "
                                                   "<li>1</li>\n <li>2</li>\n</ul>")

    def test_string_escapes(self):
        # Round-trip: a char rendered through its Jinja string literal is itself.
        for char in u'\0', u'\u2668', u'\xe4', u'\t', u'\r', u'\n':
            tmpl = env.from_string('{{ %s }}' % jinja_string_repr(char))
            assert tmpl.render() == char
        assert env.from_string('{{ "\N{HOT SPRINGS}" }}').render() == u'\u2668'

    def test_bytefallback(self):
        from pprint import pformat
        tmpl = env.from_string(u'''{{ 'foo'|pprint }}|{{ 'bär'|pprint }}''')
        assert tmpl.render() == pformat('foo') + '|' + pformat(u'bär')

    def test_operators(self):
        # Every operator in the lexer table must be tokenized to its type;
        # bracket characters are skipped because they must balance.
        from jinja2.lexer import operators
        for test, expect in iteritems(operators):
            if test in '([{}])':
                continue
            stream = env.lexer.tokenize('{{ %s }}' % test)
            next(stream)
            assert stream.current.type == expect

    def test_normalizing(self):
        # All newline styles in the source are normalized to newline_sequence.
        for seq in '\r', '\r\n', '\n':
            env = Environment(newline_sequence=seq)
            tmpl = env.from_string('1\n2\r\n3\n4\n')
            result = tmpl.render()
            assert result.replace(seq, 'X') == '1X2X3X4'

    def test_trailing_newline(self):
        # keep_trailing_newline=False strips exactly one trailing newline.
        for keep in [True, False]:
            env = Environment(keep_trailing_newline=keep)
            for template,expected in [
                    ('', {}),
                    ('no\nnewline', {}),
                    ('with\nnewline\n', {False: 'with\nnewline'}),
                    ('with\nseveral\n\n\n', {False: 'with\nseveral\n\n'}),
            ]:
                tmpl = env.from_string(template)
                expect = expected.get(keep, template)
                result = tmpl.render()
                assert result == expect, (keep, template, result, expect)
class ParserTestCase(JinjaTestCase):
    """Tests for parsing: alternative delimiter sets, line statements and
    syntax-error messages.

    NOTE(review): whitespace inside the multi-line template literals looks
    normalized by extraction -- verify against the upstream testsuite.
    """

    def test_php_syntax(self):
        # PHP-style delimiters: <? ... ?> blocks, <?= ... ?> variables.
        env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->')
        tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->\
<? for item in seq -?>
<?= item ?>
<?- endfor ?>''')
        assert tmpl.render(seq=list(range(5))) == '01234'

    def test_erb_syntax(self):
        # ERB-style delimiters: <% ... %> blocks, <%= ... %> variables.
        env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>')
        tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>\
<% for item in seq -%>
<%= item %>
<%- endfor %>''')
        assert tmpl.render(seq=list(range(5))) == '01234'

    def test_comment_syntax(self):
        # HTML-comment style block delimiters with ${...} variables.
        env = Environment('<!--', '-->', '${', '}', '<!--#', '-->')
        tmpl = env.from_string('''\
<!--# I'm a comment, I'm not interesting -->\
<!-- for item in seq --->
${item}
<!--- endfor -->''')
        assert tmpl.render(seq=list(range(5))) == '01234'

    def test_balancing(self):
        # Nested braces inside the default {{ }} delimiters must balance.
        tmpl = env.from_string('''{{{'foo':'bar'}.foo}}''')
        assert tmpl.render() == 'bar'

    def test_start_comment(self):
        # A template may open with a multi-line comment.
        tmpl = env.from_string('''{# foo comment
and bar comment #}
{% macro blub() %}foo{% endmacro %}
{{ blub() }}''')
        assert tmpl.render().strip() == 'foo'

    def test_line_syntax(self):
        # Line statements ('%' prefix), first without then with line comments.
        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%')
        tmpl = env.from_string('''\
<%# regular comment %>
% for item in seq:
${item}
% endfor''')
        assert [int(x.strip()) for x in tmpl.render(seq=list(range(5))).split()] == \
            list(range(5))

        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##')
        tmpl = env.from_string('''\
<%# regular comment %>
% for item in seq:
${item} ## the rest of the stuff
% endfor''')
        assert [int(x.strip()) for x in tmpl.render(seq=list(range(5))).split()] == \
            list(range(5))

    def test_line_syntax_priority(self):
        # When the line-statement and line-comment prefixes overlap, the
        # longer/configured one must win in both orders.
        # XXX: why is the whitespace there in front of the newline?
        env = Environment('{%', '%}', '${', '}', '/*', '*/', '##', '#')
        tmpl = env.from_string('''\
/* ignore me.
I'm a multiline comment */
## for item in seq:
* ${item} # this is just extra stuff
## endfor''')
        assert tmpl.render(seq=[1, 2]).strip() == '* 1\n* 2'
        env = Environment('{%', '%}', '${', '}', '/*', '*/', '#', '##')
        tmpl = env.from_string('''\
/* ignore me.
I'm a multiline comment */
# for item in seq:
* ${item} ## this is just extra stuff
## extra stuff i just want to ignore
# endfor''')
        assert tmpl.render(seq=[1, 2]).strip() == '* 1\n\n* 2'

    def test_error_messages(self):
        # Each syntax error must produce the exact human-readable message.
        def assert_error(code, expected):
            try:
                Template(code)
            except TemplateSyntaxError as e:
                assert str(e) == expected, 'unexpected error message'
            else:
                assert False, 'that was supposed to be an error'

        assert_error('{% for item in seq %}...{% endif %}',
                     "Encountered unknown tag 'endif'. Jinja was looking "
                     "for the following tags: 'endfor' or 'else'. The "
                     "innermost block that needs to be closed is 'for'.")
        assert_error('{% if foo %}{% for item in seq %}...{% endfor %}{% endfor %}',
                     "Encountered unknown tag 'endfor'. Jinja was looking for "
                     "the following tags: 'elif' or 'else' or 'endif'. The "
                     "innermost block that needs to be closed is 'if'.")
        assert_error('{% if foo %}',
                     "Unexpected end of template. Jinja was looking for the "
                     "following tags: 'elif' or 'else' or 'endif'. The "
                     "innermost block that needs to be closed is 'if'.")
        assert_error('{% for item in seq %}',
                     "Unexpected end of template. Jinja was looking for the "
                     "following tags: 'endfor' or 'else'. The innermost block "
                     "that needs to be closed is 'for'.")
        assert_error('{% block foo-bar-baz %}',
                     "Block names in Jinja have to be valid Python identifiers "
                     "and may not contain hyphens, use an underscore instead.")
        assert_error('{% unknown_tag %}',
                     "Encountered unknown tag 'unknown_tag'.")
class SyntaxTestCase(JinjaTestCase):
    """Tests for expression syntax: calls, operators, literals, constants."""

    def test_call(self):
        env = Environment()
        env.globals['foo'] = lambda a, b, c, e, g: a + b + c + e + g
        tmpl = env.from_string("{{ foo('a', c='d', e='f', *['b'], **{'g': 'h'}) }}")
        assert tmpl.render() == 'abdfh'

    def test_slicing(self):
        tmpl = env.from_string('{{ [1, 2, 3][:] }}|{{ [1, 2, 3][::-1] }}')
        assert tmpl.render() == '[1, 2, 3]|[3, 2, 1]'

    def test_attr(self):
        # Attribute and subscript access are interchangeable on mappings.
        tmpl = env.from_string("{{ foo.bar }}|{{ foo['bar'] }}")
        assert tmpl.render(foo={'bar': 42}) == '42|42'

    def test_subscript(self):
        tmpl = env.from_string("{{ foo[0] }}|{{ foo[-1] }}")
        assert tmpl.render(foo=[0, 1, 2]) == '0|2'

    def test_tuple(self):
        tmpl = env.from_string('{{ () }}|{{ (1,) }}|{{ (1, 2) }}')
        assert tmpl.render() == '()|(1,)|(1, 2)'

    def test_math(self):
        # '/' is always true division in Jinja expressions.
        tmpl = env.from_string('{{ (1 + 1 * 2) - 3 / 2 }}|{{ 2**3 }}')
        assert tmpl.render() == '1.5|8'

    def test_div(self):
        tmpl = env.from_string('{{ 3 // 2 }}|{{ 3 / 2 }}|{{ 3 % 2 }}')
        assert tmpl.render() == '1|1.5|1'

    def test_unary(self):
        tmpl = env.from_string('{{ +3 }}|{{ -3 }}')
        assert tmpl.render() == '3|-3'

    def test_concat(self):
        # '~' stringifies and concatenates its operands.
        tmpl = env.from_string("{{ [1, 2] ~ 'foo' }}")
        assert tmpl.render() == '[1, 2]foo'

    def test_compare(self):
        tmpl = env.from_string('{{ 1 > 0 }}|{{ 1 >= 1 }}|{{ 2 < 3 }}|'
                               '{{ 2 == 2 }}|{{ 1 <= 1 }}')
        assert tmpl.render() == 'True|True|True|True|True'

    def test_inop(self):
        tmpl = env.from_string('{{ 1 in [1, 2, 3] }}|{{ 1 not in [1, 2, 3] }}')
        assert tmpl.render() == 'True|False'

    def test_literals(self):
        tmpl = env.from_string('{{ [] }}|{{ {} }}|{{ () }}')
        assert tmpl.render().lower() == '[]|{}|()'

    def test_bool(self):
        tmpl = env.from_string('{{ true and false }}|{{ false '
                               'or true }}|{{ not false }}')
        assert tmpl.render() == 'False|True|True'

    def test_grouping(self):
        tmpl = env.from_string('{{ (true and false) or (false and true) and not false }}')
        assert tmpl.render() == 'False'

    def test_django_attr(self):
        # Django-style integer attribute access maps to subscription.
        tmpl = env.from_string('{{ [1, 2, 3].0 }}|{{ [[1]].0.0 }}')
        assert tmpl.render() == '1|1'

    def test_conditional_expression(self):
        tmpl = env.from_string('''{{ 0 if true else 1 }}''')
        assert tmpl.render() == '0'

    def test_short_conditional_expression(self):
        # An 'if' without 'else' yields undefined when the condition fails.
        tmpl = env.from_string('<{{ 1 if false }}>')
        assert tmpl.render() == '<>'

        tmpl = env.from_string('<{{ (1 if false).bar }}>')
        self.assert_raises(UndefinedError, tmpl.render)

    def test_filter_priority(self):
        # Filters bind tighter than '+'.
        tmpl = env.from_string('{{ "foo"|upper + "bar"|upper }}')
        assert tmpl.render() == 'FOOBAR'

    def test_function_calls(self):
        # (should_fail, signature) pairs: star-args must come after
        # positional/keyword args, and **kwargs must come last.
        tests = [
            (True, '*foo, bar'),
            (True, '*foo, *bar'),
            (True, '*foo, bar=42'),
            (True, '**foo, *bar'),
            (True, '**foo, bar'),
            (False, 'foo, bar'),
            (False, 'foo, bar=42'),
            (False, 'foo, bar=23, *args'),
            (False, 'a, b=c, *d, **e'),
            (False, '*foo, **bar')
        ]
        for should_fail, sig in tests:
            if should_fail:
                self.assert_raises(TemplateSyntaxError,
                                   env.from_string, '{{ foo(%s) }}' % sig)
            else:
                # BUG FIX: the valid signatures used to be compiled as the
                # literal text 'foo(...)' (no expression delimiters), so
                # the parser never actually saw the call syntax.  Wrap them
                # in '{{ ... }}' so they are really exercised.
                env.from_string('{{ foo(%s) }}' % sig)

    def test_tuple_expr(self):
        # All of these tuple spellings must parse without error.
        for tmpl in [
                '{{ () }}',
                '{{ (1, 2) }}',
                '{{ (1, 2,) }}',
                '{{ 1, }}',
                '{{ 1, 2 }}',
                '{% for foo, bar in seq %}...{% endfor %}',
                '{% for x in foo, bar %}...{% endfor %}',
                '{% for x in foo, %}...{% endfor %}'
        ]:
            assert env.from_string(tmpl)

    def test_trailing_comma(self):
        tmpl = env.from_string('{{ (1, 2,) }}|{{ [1, 2,] }}|{{ {1: 2,} }}')
        assert tmpl.render().lower() == '(1, 2)|[1, 2]|{1: 2}'

    def test_block_end_name(self):
        # A name on {% endblock %} must match the opening block name.
        env.from_string('{% block foo %}...{% endblock foo %}')
        self.assert_raises(TemplateSyntaxError, env.from_string,
                           '{% block x %}{% endblock y %}')

    def test_constant_casing(self):
        # Constants are recognized in lowercase and title case, not uppercase.
        for const in True, False, None:
            tmpl = env.from_string('{{ %s }}|{{ %s }}|{{ %s }}' % (
                str(const), str(const).lower(), str(const).upper()
            ))
            assert tmpl.render() == '%s|%s|' % (const, const)

    def test_test_chaining(self):
        self.assert_raises(TemplateSyntaxError, env.from_string,
                           '{{ foo is string is sequence }}')
        assert env.from_string('{{ 42 is string or 42 is number }}'
                               ).render() == 'True'

    def test_string_concatenation(self):
        # Adjacent string literals are concatenated like in Python.
        tmpl = env.from_string('{{ "foo" "bar" "baz" }}')
        assert tmpl.render() == 'foobarbaz'

    def test_notin(self):
        bar = range(100)
        tmpl = env.from_string('''{{ not 42 in bar }}''')
        assert tmpl.render(bar=bar) == text_type(not 42 in bar)

    def test_implicit_subscribed_tuple(self):
        # foo[1, 2] subscribes with an implicit tuple key.
        class Foo(object):
            def __getitem__(self, x):
                return x
        t = env.from_string('{{ foo[1, 2] }}')
        assert t.render(foo=Foo()) == u'(1, 2)'

    def test_raw2(self):
        tmpl = env.from_string('{% raw %}{{ FOO }} and {% BAR %}{% endraw %}')
        assert tmpl.render() == '{{ FOO }} and {% BAR %}'

    def test_const(self):
        tmpl = env.from_string('{{ true }}|{{ false }}|{{ none }}|'
                               '{{ none is defined }}|{{ missing is defined }}')
        assert tmpl.render() == 'True|False|None|True|False'

    def test_neg_filter_priority(self):
        # The filter applies to the whole negated expression: -(1)|foo.
        node = env.parse('{{ -1|foo }}')
        assert isinstance(node.body[0].nodes[0], nodes.Filter)
        assert isinstance(node.body[0].nodes[0].node, nodes.Neg)

    def test_const_assign(self):
        # Assigning to a constant name (true/none) is a syntax error.
        constass1 = '''{% set true = 42 %}'''
        constass2 = '''{% for none in seq %}{% endfor %}'''
        for tmpl in constass1, constass2:
            self.assert_raises(TemplateSyntaxError, env.from_string, tmpl)

    def test_localset(self):
        # {% set %} inside a for loop is scoped to the loop body.
        tmpl = env.from_string('''{% set foo = 0 %}\
{% for item in [1, 2] %}{% set foo = 1 %}{% endfor %}\
{{ foo }}''')
        assert tmpl.render() == '0'

    def test_parse_unary(self):
        tmpl = env.from_string('{{ -foo["bar"] }}')
        assert tmpl.render(foo={'bar': 42}) == '-42'
        tmpl = env.from_string('{{ -foo["bar"]|abs }}')
        assert tmpl.render(foo={'bar': 42}) == '42'
class LstripBlocksTestCase(JinjaTestCase):
    """Tests for the `lstrip_blocks` / `trim_blocks` whitespace options.

    NOTE(review): whitespace inside the multi-line template literals and
    expected outputs looks normalized by extraction -- verify the literals
    against the upstream jinja2 testsuite.
    """

    def test_lstrip(self):
        # Whitespace before a block tag on its line is stripped.
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string(''' {% if True %}\n {% endif %}''')
        assert tmpl.render() == "\n"

    def test_lstrip_trim(self):
        # Combined with trim_blocks, the newline after the tag goes too.
        env = Environment(lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string(''' {% if True %}\n {% endif %}''')
        assert tmpl.render() == ""

    def test_no_lstrip(self):
        # '{%+' disables lstripping for that tag.
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string(''' {%+ if True %}\n {%+ endif %}''')
        assert tmpl.render() == " \n "

    def test_lstrip_endline(self):
        # Only whitespace-only prefixes are stripped, not text before a tag.
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string(''' hello{% if True %}\n goodbye{% endif %}''')
        assert tmpl.render() == " hello\n goodbye"

    def test_lstrip_inline(self):
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string(''' {% if True %}hello {% endif %}''')
        assert tmpl.render() == 'hello '

    def test_lstrip_nested(self):
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string(''' {% if True %}a {% if True %}b {% endif %}c {% endif %}''')
        assert tmpl.render() == 'a b c '

    def test_lstrip_left_chars(self):
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string(''' abc {% if True %}
hello{% endif %}''')
        assert tmpl.render() == ' abc \n hello'

    def test_lstrip_embeded_strings(self):
        # Tag delimiters inside string literals must not trigger stripping.
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string(''' {% set x = " {% str %} " %}{{ x }}''')
        assert tmpl.render() == ' {% str %} '

    def test_lstrip_preserve_leading_newlines(self):
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string('''\n\n\n{% set hello = 1 %}''')
        assert tmpl.render() == '\n\n\n'

    def test_lstrip_comment(self):
        env = Environment(lstrip_blocks=True, trim_blocks=False)
        tmpl = env.from_string(''' {# if True #}
hello
{#endif#}''')
        assert tmpl.render() == '\nhello\n'

    def test_lstrip_angle_bracket_simple(self):
        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string(''' <% if True %>hello <% endif %>''')
        assert tmpl.render() == 'hello '

    def test_lstrip_angle_bracket_comment(self):
        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string(''' <%# if True %>hello <%# endif %>''')
        assert tmpl.render() == 'hello '

    def test_lstrip_angle_bracket(self):
        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<%# regular comment %>
<% for item in seq %>
${item} ## the rest of the stuff
<% endfor %>''')
        assert tmpl.render(seq=range(5)) == \
            ''.join('%s\n' % x for x in range(5))

    def test_lstrip_angle_bracket_compact(self):
        env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<%#regular comment%>
<%for item in seq%>
${item} ## the rest of the stuff
<%endfor%>''')
        assert tmpl.render(seq=range(5)) == \
            ''.join('%s\n' % x for x in range(5))

    def test_php_syntax_with_manual(self):
        env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->
<? for item in seq -?>
<?= item ?>
<?- endfor ?>''')
        assert tmpl.render(seq=range(5)) == '01234'

    def test_php_syntax(self):
        env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->
<? for item in seq ?>
<?= item ?>
<? endfor ?>''')
        assert tmpl.render(seq=range(5)) == ''.join(' %s\n' % x for x in range(5))

    def test_php_syntax_compact(self):
        env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->
<?for item in seq?>
<?=item?>
<?endfor?>''')
        assert tmpl.render(seq=range(5)) == ''.join(' %s\n' % x for x in range(5))

    def test_erb_syntax(self):
        env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>
<% for item in seq %>
<%= item %>
<% endfor %>
''')
        assert tmpl.render(seq=range(5)) == ''.join(' %s\n' % x for x in range(5))

    def test_erb_syntax_with_manual(self):
        env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>
<% for item in seq -%>
<%= item %>
<%- endfor %>''')
        assert tmpl.render(seq=range(5)) == '01234'

    def test_erb_syntax_no_lstrip(self):
        # '<%+' keeps the leading whitespace even with lstrip_blocks on.
        env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>
<%+ for item in seq -%>
<%= item %>
<%- endfor %>''')
        assert tmpl.render(seq=range(5)) == ' 01234'

    def test_comment_syntax(self):
        env = Environment('<!--', '-->', '${', '}', '<!--#', '-->',
                          lstrip_blocks=True, trim_blocks=True)
        tmpl = env.from_string('''\
<!--# I'm a comment, I'm not interesting -->\
<!-- for item in seq --->
${item}
<!--- endfor -->''')
        assert tmpl.render(seq=range(5)) == '01234'
def suite():
    """Collect every lexing/parsing test case into a single TestSuite."""
    cases = (
        TokenStreamTestCase,
        LexerTestCase,
        ParserTestCase,
        SyntaxTestCase,
        LstripBlocksTestCase,
    )
    result = unittest.TestSuite()
    for case in cases:
        result.addTest(unittest.makeSuite(case))
    return result
| gpl-2.0 |
MoritzS/django | django/core/mail/backends/base.py | 129 | 1660 | """Base email backend class."""
class BaseEmailBackend:
    """
    Abstract base class that all email backends implement.

    Concrete backends must at least override send_messages().  A backend
    may also be used as a context manager, which calls open() on entry
    and close() on exit:

        with backend as connection:
            # do something with connection
            pass
    """
    def __init__(self, fail_silently=False, **kwargs):
        # When True, implementations should swallow delivery errors
        # instead of raising them.
        self.fail_silently = fail_silently

    def open(self):
        """
        Open a network connection.

        Meant to be overridden by backends that keep a persistent
        connection; tracking the connection state is entirely up to the
        subclass.  Applications may call this to force a single network
        connection to be reused while sending several mails (see the SMTP
        backend's send_messages() for a reference implementation).

        The default implementation does nothing.
        """
        pass

    def close(self):
        """Close a network connection. No-op by default."""
        pass

    def __enter__(self):
        # Clean up a half-opened connection before letting the
        # exception propagate.
        try:
            self.open()
        except Exception:
            self.close()
            raise
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def send_messages(self, email_messages):
        """
        Send one or more EmailMessage objects and return the number of email
        messages sent.
        """
        raise NotImplementedError('subclasses of BaseEmailBackend must override send_messages() method')
| bsd-3-clause |
CVML/scikit-learn | examples/linear_model/plot_sparse_recovery.py | 243 | 7461 | """
============================================================
Sparse recovery: feature selection for sparse linear models
============================================================
Given a small number of observations, we want to recover which features
of X are relevant to explain y. For this :ref:`sparse linear models
<l1_feature_selection>` can outperform standard statistical tests if the
true model is sparse, i.e. if a small fraction of the features are
relevant.
As detailed in :ref:`the compressive sensing notes
<compressive_sensing>`, the ability of L1-based approach to identify the
relevant variables depends on the sparsity of the ground truth, the
number of samples, the number of features, the conditioning of the
design matrix on the signal subspace, the amount of noise, and the
absolute value of the smallest non-zero coefficient [Wainwright2006]
(http://statistics.berkeley.edu/tech-reports/709.pdf).
Here we keep all parameters constant and vary the conditioning of the
design matrix. For a well-conditioned design matrix (small mutual
incoherence) we are exactly in compressive sensing conditions (i.i.d
Gaussian sensing matrix), and L1-recovery with the Lasso performs very
well. For an ill-conditioned matrix (high mutual incoherence),
regressors are very correlated, and the Lasso randomly selects one.
However, randomized-Lasso can recover the ground truth well.
In each situation, we first vary the alpha parameter setting the sparsity
of the estimated model and look at the stability scores of the randomized
Lasso. This analysis, knowing the ground truth, shows an optimal regime
in which relevant features stand out from the irrelevant ones. If alpha
is chosen too small, non-relevant variables enter the model. On the
opposite, if alpha is selected too large, the Lasso is equivalent to
stepwise regression, and thus brings no advantage over a univariate
F-test.
In a second step, we set alpha and compare the performance of different
feature selection methods, using the area under curve (AUC) of the
precision-recall.
"""
print(__doc__)
# Author: Alexandre Gramfort and Gael Varoquaux
# License: BSD 3 clause
import warnings
import matplotlib.pyplot as plt
import numpy as np
from scipy import linalg
from sklearn.linear_model import (RandomizedLasso, lasso_stability_path,
LassoLarsCV)
from sklearn.feature_selection import f_regression
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import auc, precision_recall_curve
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.utils.extmath import pinvh
from sklearn.utils import ConvergenceWarning
def mutual_incoherence(X_relevant, X_irelevant):
    """Compute the mutual incoherence between the relevant and irrelevant
    design columns, as defined by formula (26a) of [Wainwright2006].
    """
    # Pseudo-inverse of the (symmetric) Gram matrix of the relevant columns.
    gram_inv = pinvh(np.dot(X_relevant.T, X_relevant))
    projector = np.dot(np.dot(X_irelevant.T, X_relevant), gram_inv)
    return np.max(np.abs(projector).sum(axis=1))
# Run the whole experiment twice: once with a well-conditioned design
# (conditioning=1) and once with an ill-conditioned, highly correlated one.
for conditioning in (1, 1e-4):
    ###########################################################################
    # Simulate regression data with a correlated design
    n_features = 501
    n_relevant_features = 3
    noise_level = .2
    coef_min = .2
    # The Donoho-Tanner phase transition is around n_samples=25: below we
    # will completely fail to recover in the well-conditioned case
    n_samples = 25
    block_size = n_relevant_features

    rng = np.random.RandomState(42)

    # The coefficients of our model
    coef = np.zeros(n_features)
    coef[:n_relevant_features] = coef_min + rng.rand(n_relevant_features)

    # The correlation of our design: variables correlated by blocs of 3
    corr = np.zeros((n_features, n_features))
    for i in range(0, n_features, block_size):
        corr[i:i + block_size, i:i + block_size] = 1 - conditioning
    corr.flat[::n_features + 1] = 1
    # Cholesky factor used below to impose the block correlation structure.
    corr = linalg.cholesky(corr)

    # Our design
    X = rng.normal(size=(n_samples, n_features))
    X = np.dot(X, corr)
    # Keep [Wainwright2006] (26c) constant
    X[:n_relevant_features] /= np.abs(
        linalg.svdvals(X[:n_relevant_features])).max()
    X = StandardScaler().fit_transform(X.copy())

    # The output variable
    y = np.dot(X, coef)
    y /= np.std(y)
    # We scale the added noise as a function of the average correlation
    # between the design and the output variable
    y += noise_level * rng.normal(size=n_samples)
    mi = mutual_incoherence(X[:, :n_relevant_features],
                            X[:, n_relevant_features:])

    ###########################################################################
    # Plot stability selection path, using a high eps for early stopping
    # of the path, to save computation time
    alpha_grid, scores_path = lasso_stability_path(X, y, random_state=42,
                                                   eps=0.05)

    plt.figure()
    # We plot the path as a function of alpha/alpha_max to the power 1/3: the
    # power 1/3 scales the path less brutally than the log, and enables to
    # see the progression along the path
    hg = plt.plot(alpha_grid[1:] ** .333, scores_path[coef != 0].T[1:], 'r')
    hb = plt.plot(alpha_grid[1:] ** .333, scores_path[coef == 0].T[1:], 'k')
    ymin, ymax = plt.ylim()
    plt.xlabel(r'$(\alpha / \alpha_{max})^{1/3}$')
    plt.ylabel('Stability score: proportion of times selected')
    plt.title('Stability Scores Path - Mutual incoherence: %.1f' % mi)
    plt.axis('tight')
    plt.legend((hg[0], hb[0]), ('relevant features', 'irrelevant features'),
               loc='best')

    ###########################################################################
    # Plot the estimated stability scores for a given alpha

    # Use 6-fold cross-validation rather than the default 3-fold: it leads to
    # a better choice of alpha:
    # Stop the user warnings outputs- they are not necessary for the example
    # as it is specifically set up to be challenging.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', UserWarning)
        warnings.simplefilter('ignore', ConvergenceWarning)
        lars_cv = LassoLarsCV(cv=6).fit(X, y)

    # Run the RandomizedLasso: we use a paths going down to .1*alpha_max
    # to avoid exploring the regime in which very noisy variables enter
    # the model
    alphas = np.linspace(lars_cv.alphas_[0], .1 * lars_cv.alphas_[0], 6)
    clf = RandomizedLasso(alpha=alphas, random_state=42).fit(X, y)
    trees = ExtraTreesRegressor(100).fit(X, y)
    # Compare with F-score
    F, _ = f_regression(X, y)

    plt.figure()
    # Precision-recall AUC of each scoring method against the ground truth.
    for name, score in [('F-test', F),
                        ('Stability selection', clf.scores_),
                        ('Lasso coefs', np.abs(lars_cv.coef_)),
                        ('Trees', trees.feature_importances_),
                        ]:
        precision, recall, thresholds = precision_recall_curve(coef != 0,
                                                               score)
        plt.semilogy(np.maximum(score / np.max(score), 1e-4),
                     label="%s. AUC: %.3f" % (name, auc(recall, precision)))

    plt.plot(np.where(coef != 0)[0], [2e-4] * n_relevant_features, 'mo',
             label="Ground truth")
    plt.xlabel("Features")
    plt.ylabel("Score")
    # Plot only the 100 first coefficients
    plt.xlim(0, 100)
    plt.legend(loc='best')
    plt.title('Feature selection scores - Mutual incoherence: %.1f'
              % mi)

plt.show()
yavuzovski/playground | python/django/RESTTest/.venv/lib/python3.4/site-packages/django/contrib/gis/db/models/query.py | 46 | 32490 | import warnings
from django.contrib.gis.db.models.fields import (
GeometryField, LineStringField, PointField, get_srid_info,
)
from django.contrib.gis.db.models.lookups import GISLookup
from django.contrib.gis.db.models.sql import (
AreaField, DistanceField, GeomField, GMLField,
)
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.measure import Area, Distance
from django.db import connections
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import RawSQL
from django.db.models.fields import Field
from django.db.models.query import QuerySet
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
class GeoQuerySet(QuerySet):
"The Geographic QuerySet."
# ### GeoQuerySet Methods ###
    def area(self, tolerance=0.05, **kwargs):
        """
        Returns the area of the geographic field in an `area` attribute on
        each element of this GeoQuerySet.

        The `tolerance` keyword is only used on Oracle (spatial tolerance,
        in meters; defaults to 5 centimeters).
        """
        # Performing setup here rather than in `_spatial_attribute` so that
        # we can get the units for `AreaField`.
        procedure_args, geo_field = self._spatial_setup(
            'area', field_name=kwargs.get('field_name'))
        s = {'procedure_args': procedure_args,
             'geo_field': geo_field,
             'setup': False,
             }
        connection = connections[self.db]
        backend = connection.ops
        if backend.oracle:
            s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s'
            s['procedure_args']['tolerance'] = tolerance
            s['select_field'] = AreaField('sq_m')  # Oracle returns area in units of meters.
        elif backend.postgis or backend.spatialite:
            if backend.geography:
                # Geography fields support area calculation, returns square meters.
                s['select_field'] = AreaField('sq_m')
            elif not geo_field.geodetic(connection):
                # Getting the area units of the geographic field.
                s['select_field'] = AreaField(Area.unit_attname(geo_field.units_name(connection)))
            else:
                # TODO: Do we want to support raw number areas for geodetic fields?
                # NOTE(review): a generic Exception is raised here; callers
                # cannot catch anything more specific.
                raise Exception('Area on geodetic coordinate systems not supported.')
        return self._spatial_attribute('area', s, **kwargs)
def centroid(self, **kwargs):
"""
Returns the centroid of the geographic field in a `centroid`
attribute on each element of this GeoQuerySet.
"""
return self._geom_attribute('centroid', **kwargs)
def difference(self, geom, **kwargs):
"""
Returns the spatial difference of the geographic field in a `difference`
attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('difference', geom, **kwargs)
def distance(self, geom, **kwargs):
"""
Returns the distance from the given geographic field name to the
given geometry in a `distance` attribute on each element of the
GeoQuerySet.
Keyword Arguments:
`spheroid` => If the geometry field is geodetic and PostGIS is
the spatial database, then the more accurate
spheroid calculation will be used instead of the
quicker sphere calculation.
`tolerance` => Used only for Oracle. The tolerance is
in meters -- a default of 5 centimeters (0.05)
is used.
"""
return self._distance_attribute('distance', geom, **kwargs)
def envelope(self, **kwargs):
"""
Returns a Geometry representing the bounding box of the
Geometry field in an `envelope` attribute on each element of
the GeoQuerySet.
"""
return self._geom_attribute('envelope', **kwargs)
def force_rhr(self, **kwargs):
"""
Returns a modified version of the Polygon/MultiPolygon in which
all of the vertices follow the Right-Hand-Rule. By default,
this is attached as the `force_rhr` attribute on each element
of the GeoQuerySet.
"""
return self._geom_attribute('force_rhr', **kwargs)
def geojson(self, precision=8, crs=False, bbox=False, **kwargs):
"""
Returns a GeoJSON representation of the geometry field in a `geojson`
attribute on each element of the GeoQuerySet.
The `crs` and `bbox` keywords may be set to True if the user wants
the coordinate reference system and the bounding box to be included
in the GeoJSON representation of the geometry.
"""
backend = connections[self.db].ops
if not backend.geojson:
raise NotImplementedError('Only PostGIS and SpatiaLite support GeoJSON serialization.')
if not isinstance(precision, six.integer_types):
raise TypeError('Precision keyword must be set with an integer.')
options = 0
if crs and bbox:
options = 3
elif bbox:
options = 1
elif crs:
options = 2
s = {'desc': 'GeoJSON',
'procedure_args': {'precision': precision, 'options': options},
'procedure_fmt': '%(geo_col)s,%(precision)s,%(options)s',
}
return self._spatial_attribute('geojson', s, **kwargs)
def geohash(self, precision=20, **kwargs):
"""
Returns a GeoHash representation of the given field in a `geohash`
attribute on each element of the GeoQuerySet.
The `precision` keyword may be used to custom the number of
_characters_ used in the output GeoHash, the default is 20.
"""
s = {'desc': 'GeoHash',
'procedure_args': {'precision': precision},
'procedure_fmt': '%(geo_col)s,%(precision)s',
}
return self._spatial_attribute('geohash', s, **kwargs)
def gml(self, precision=8, version=2, **kwargs):
"""
Returns GML representation of the given field in a `gml` attribute
on each element of the GeoQuerySet.
"""
backend = connections[self.db].ops
s = {'desc': 'GML', 'procedure_args': {'precision': precision}}
if backend.postgis:
s['procedure_fmt'] = '%(version)s,%(geo_col)s,%(precision)s'
s['procedure_args'] = {'precision': precision, 'version': version}
if backend.oracle:
s['select_field'] = GMLField()
return self._spatial_attribute('gml', s, **kwargs)
def intersection(self, geom, **kwargs):
"""
Returns the spatial intersection of the Geometry field in
an `intersection` attribute on each element of this
GeoQuerySet.
"""
return self._geomset_attribute('intersection', geom, **kwargs)
def kml(self, **kwargs):
"""
Returns KML representation of the geometry field in a `kml`
attribute on each element of this GeoQuerySet.
"""
s = {'desc': 'KML',
'procedure_fmt': '%(geo_col)s,%(precision)s',
'procedure_args': {'precision': kwargs.pop('precision', 8)},
}
return self._spatial_attribute('kml', s, **kwargs)
def length(self, **kwargs):
"""
Returns the length of the geometry field as a `Distance` object
stored in a `length` attribute on each element of this GeoQuerySet.
"""
return self._distance_attribute('length', None, **kwargs)
def mem_size(self, **kwargs):
"""
Returns the memory size (number of bytes) that the geometry field takes
in a `mem_size` attribute on each element of this GeoQuerySet.
"""
return self._spatial_attribute('mem_size', {}, **kwargs)
def num_geom(self, **kwargs):
"""
Returns the number of geometries if the field is a
GeometryCollection or Multi* Field in a `num_geom`
attribute on each element of this GeoQuerySet; otherwise
the sets with None.
"""
return self._spatial_attribute('num_geom', {}, **kwargs)
def num_points(self, **kwargs):
"""
Returns the number of points in the first linestring in the
Geometry field in a `num_points` attribute on each element of
this GeoQuerySet; otherwise sets with None.
"""
return self._spatial_attribute('num_points', {}, **kwargs)
def perimeter(self, **kwargs):
"""
Returns the perimeter of the geometry field as a `Distance` object
stored in a `perimeter` attribute on each element of this GeoQuerySet.
"""
return self._distance_attribute('perimeter', None, **kwargs)
def point_on_surface(self, **kwargs):
"""
Returns a Point geometry guaranteed to lie on the surface of the
Geometry field in a `point_on_surface` attribute on each element
of this GeoQuerySet; otherwise sets with None.
"""
return self._geom_attribute('point_on_surface', **kwargs)
def reverse_geom(self, **kwargs):
"""
Reverses the coordinate order of the geometry, and attaches as a
`reverse` attribute on each element of this GeoQuerySet.
"""
s = {'select_field': GeomField()}
kwargs.setdefault('model_att', 'reverse_geom')
if connections[self.db].ops.oracle:
s['geo_field_type'] = LineStringField
return self._spatial_attribute('reverse', s, **kwargs)
def scale(self, x, y, z=0.0, **kwargs):
"""
Scales the geometry to a new size by multiplying the ordinates
with the given x,y,z scale factors.
"""
if connections[self.db].ops.spatialite:
if z != 0.0:
raise NotImplementedError('SpatiaLite does not support 3D scaling.')
s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s',
'procedure_args': {'x': x, 'y': y},
'select_field': GeomField(),
}
else:
s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s',
'procedure_args': {'x': x, 'y': y, 'z': z},
'select_field': GeomField(),
}
return self._spatial_attribute('scale', s, **kwargs)
def snap_to_grid(self, *args, **kwargs):
"""
Snap all points of the input geometry to the grid. How the
geometry is snapped to the grid depends on how many arguments
were given:
- 1 argument : A single size to snap both the X and Y grids to.
- 2 arguments: X and Y sizes to snap the grid to.
- 4 arguments: X, Y sizes and the X, Y origins.
"""
if False in [isinstance(arg, (float,) + six.integer_types) for arg in args]:
raise TypeError('Size argument(s) for the grid must be a float or integer values.')
nargs = len(args)
if nargs == 1:
size = args[0]
procedure_fmt = '%(geo_col)s,%(size)s'
procedure_args = {'size': size}
elif nargs == 2:
xsize, ysize = args
procedure_fmt = '%(geo_col)s,%(xsize)s,%(ysize)s'
procedure_args = {'xsize': xsize, 'ysize': ysize}
elif nargs == 4:
xsize, ysize, xorigin, yorigin = args
procedure_fmt = '%(geo_col)s,%(xorigin)s,%(yorigin)s,%(xsize)s,%(ysize)s'
procedure_args = {'xsize': xsize, 'ysize': ysize,
'xorigin': xorigin, 'yorigin': yorigin}
else:
raise ValueError('Must provide 1, 2, or 4 arguments to `snap_to_grid`.')
s = {'procedure_fmt': procedure_fmt,
'procedure_args': procedure_args,
'select_field': GeomField(),
}
return self._spatial_attribute('snap_to_grid', s, **kwargs)
def svg(self, relative=False, precision=8, **kwargs):
"""
Returns SVG representation of the geographic field in a `svg`
attribute on each element of this GeoQuerySet.
Keyword Arguments:
`relative` => If set to True, this will evaluate the path in
terms of relative moves (rather than absolute).
`precision` => May be used to set the maximum number of decimal
digits used in output (defaults to 8).
"""
relative = int(bool(relative))
if not isinstance(precision, six.integer_types):
raise TypeError('SVG precision keyword argument must be an integer.')
s = {
'desc': 'SVG',
'procedure_fmt': '%(geo_col)s,%(rel)s,%(precision)s',
'procedure_args': {
'rel': relative,
'precision': precision,
}
}
return self._spatial_attribute('svg', s, **kwargs)
def sym_difference(self, geom, **kwargs):
"""
Returns the symmetric difference of the geographic field in a
`sym_difference` attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('sym_difference', geom, **kwargs)
def translate(self, x, y, z=0.0, **kwargs):
"""
Translates the geometry to a new location using the given numeric
parameters as offsets.
"""
if connections[self.db].ops.spatialite:
if z != 0.0:
raise NotImplementedError('SpatiaLite does not support 3D translation.')
s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s',
'procedure_args': {'x': x, 'y': y},
'select_field': GeomField(),
}
else:
s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s',
'procedure_args': {'x': x, 'y': y, 'z': z},
'select_field': GeomField(),
}
return self._spatial_attribute('translate', s, **kwargs)
def transform(self, srid=4326, **kwargs):
"""
Transforms the given geometry field to the given SRID. If no SRID is
provided, the transformation will default to using 4326 (WGS84).
"""
if not isinstance(srid, six.integer_types):
raise TypeError('An integer SRID must be provided.')
field_name = kwargs.get('field_name')
self._spatial_setup('transform', field_name=field_name)
self.query.add_context('transformed_srid', srid)
return self._clone()
def union(self, geom, **kwargs):
"""
Returns the union of the geographic field with the given
Geometry in a `union` attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('union', geom, **kwargs)
# ### Private API -- Abstracted DRY routines. ###
    def _spatial_setup(self, att, desc=None, field_name=None, geo_field_type=None):
        """
        Perform the common set-up for executing the spatial function `att`.

        Returns a (procedure_args, geo_field) tuple where procedure_args
        maps 'function' to the backend routine name and 'geo_col' to the
        SQL selecting the geometry column.

        Raises NotImplementedError when the backend lacks the routine,
        and TypeError when no geometry field is found or it is not an
        instance of `geo_field_type` (when given).
        """
        # Does the spatial backend support this?
        connection = connections[self.db]
        func = getattr(connection.ops, att, False)
        if desc is None:
            desc = att
        if not func:
            raise NotImplementedError('%s stored procedure not available on '
                                      'the %s backend.' %
                                      (desc, connection.ops.name))
        # Initializing the procedure arguments.
        procedure_args = {'function': func}
        # Is there a geographic field in the model to perform this
        # operation on?
        geo_field = self._geo_field(field_name)
        if not geo_field:
            raise TypeError('%s output only available on GeometryFields.' % func)
        # If the `geo_field_type` keyword was used, then enforce that
        # type limitation.
        if geo_field_type is not None and not isinstance(geo_field, geo_field_type):
            raise TypeError('"%s" stored procedures may only be called on %ss.' % (func, geo_field_type.__name__))
        # Setting the procedure args.
        procedure_args['geo_col'] = self._geocol_select(geo_field, field_name)
        return procedure_args, geo_field
    def _spatial_attribute(self, att, settings, field_name=None, model_att=None):
        """
        DRY routine for calling a spatial stored procedure on a geometry column
        and attaching its output as an attribute of the model.

        Arguments:
         att:
           The name of the spatial attribute that holds the spatial
           SQL function to call.
         settings:
           Dictionary of internal settings to customize for the spatial
           procedure.  Recognized keys include 'desc', 'geom_args',
           'geom_field', 'procedure_args', 'procedure_fmt',
           'select_params', 'select_field', 'setup' and 'geo_field_type'.

        Public Keyword Arguments:
         field_name:
           The name of the geographic field to call the spatial
           function on.  May also be a lookup to a geometry field
           as part of a foreign key relation.
         model_att:
           The name of the model attribute to attach the output of
           the spatial function to.

        Always emits a RemovedInDjango20Warning: these GeoQuerySet
        methods are superseded by expression-based GeoDjango functions.
        """
        warnings.warn(
            "The %s GeoQuerySet method is deprecated. See GeoDjango Functions "
            "documentation to find the expression-based replacement." % att,
            RemovedInDjango20Warning, stacklevel=2
        )
        # Default settings.
        settings.setdefault('desc', None)
        settings.setdefault('geom_args', ())
        settings.setdefault('geom_field', None)
        settings.setdefault('procedure_args', {})
        settings.setdefault('procedure_fmt', '%(geo_col)s')
        settings.setdefault('select_params', [])
        connection = connections[self.db]
        # Performing setup for the spatial column, unless told not to.
        if settings.get('setup', True):
            default_args, geo_field = self._spatial_setup(
                att, desc=settings['desc'], field_name=field_name,
                geo_field_type=settings.get('geo_field_type'))
            for k, v in six.iteritems(default_args):
                settings['procedure_args'].setdefault(k, v)
        else:
            geo_field = settings['geo_field']
        # The attribute to attach to the model.
        if not isinstance(model_att, six.string_types):
            model_att = att
        # Special handling for any argument that is a geometry.
        for name in settings['geom_args']:
            # Using the field's get_placeholder() routine to get any needed
            # transformation SQL.
            geom = geo_field.get_prep_value(settings['procedure_args'][name])
            params = geo_field._get_db_prep_lookup('contains', geom, connection=connection)
            geom_placeholder = geo_field.get_placeholder(geom, None, connection)
            # Replacing the procedure format with that of any needed
            # transformation SQL.
            old_fmt = '%%(%s)s' % name
            new_fmt = geom_placeholder % '%%s'
            settings['procedure_fmt'] = settings['procedure_fmt'].replace(old_fmt, new_fmt)
            settings['select_params'].extend(params)
        # Getting the format for the stored procedure.
        fmt = '%%(function)s(%s)' % settings['procedure_fmt']
        # If the result of this function needs to be converted.
        if settings.get('select_field'):
            select_field = settings['select_field']
            if connection.ops.oracle:
                select_field.empty_strings_allowed = False
        else:
            select_field = Field()
        # Finally, setting the extra selection attribute with
        # the format string expanded with the stored procedure
        # arguments.
        self.query.add_annotation(
            RawSQL(fmt % settings['procedure_args'], settings['select_params'], select_field),
            model_att)
        return self
    def _distance_attribute(self, func, geom=None, tolerance=0.05, spheroid=False, **kwargs):
        """
        DRY routine for GeoQuerySet distance attribute routines
        (`distance`, `length` and `perimeter`).

        func:      one of 'distance', 'length' or 'perimeter'; anything
                   else raises ValueError.
        geom:      geometry to measure against (only used by 'distance').
        tolerance: tolerance, in meters, passed to Oracle routines.
        spheroid:  request the more accurate spheroid procedure for
                   geodetic fields.
        """
        # Setting up the distance procedure arguments.
        procedure_args, geo_field = self._spatial_setup(func, field_name=kwargs.get('field_name'))
        # If geodetic defaulting distance attribute to meters (Oracle and
        # PostGIS spherical distances return meters). Otherwise, use the
        # units of the geometry field.
        connection = connections[self.db]
        geodetic = geo_field.geodetic(connection)
        geography = geo_field.geography
        if geodetic:
            dist_att = 'm'
        else:
            dist_att = Distance.unit_attname(geo_field.units_name(connection))
        # Shortcut booleans for what distance function we're using and
        # whether the geometry field is 3D.
        distance = func == 'distance'
        length = func == 'length'
        perimeter = func == 'perimeter'
        if not (distance or length or perimeter):
            raise ValueError('Unknown distance function: %s' % func)
        geom_3d = geo_field.dim == 3
        # The field's _get_db_prep_lookup() is used to get any
        # extra distance parameters. Here we set up the
        # parameters that will be passed in to field's function.
        # 'POINT (0 0)' is only a placeholder for the lookup machinery
        # when no geometry parameter was supplied.
        lookup_params = [geom or 'POINT (0 0)', 0]
        # Getting the spatial backend operations.
        backend = connection.ops
        # If the spheroid calculation is desired, either by the `spheroid`
        # keyword or when calculating the length of geodetic field, make
        # sure the 'spheroid' distance setting string is passed in so we
        # get the correct spatial stored procedure.
        if spheroid or (backend.postgis and geodetic and
                        (not geography) and length):
            lookup_params.append('spheroid')
        lookup_params = geo_field.get_prep_value(lookup_params)
        params = geo_field._get_db_prep_lookup('distance_lte', lookup_params, connection=connection)
        # The `geom_args` flag is set to true if a geometry parameter was
        # passed in.
        geom_args = bool(geom)
        if backend.oracle:
            if distance:
                procedure_fmt = '%(geo_col)s,%(geom)s,%(tolerance)s'
            elif length or perimeter:
                procedure_fmt = '%(geo_col)s,%(tolerance)s'
            procedure_args['tolerance'] = tolerance
        else:
            # Getting whether this field is in units of degrees since the field may have
            # been transformed via the `transform` GeoQuerySet method.
            srid = self.query.get_context('transformed_srid')
            if srid:
                u, unit_name, s = get_srid_info(srid, connection)
                geodetic = unit_name.lower() in geo_field.geodetic_units
            if geodetic and (not connection.features.supports_distance_geodetic or connection.ops.spatialite):
                raise ValueError(
                    'This database does not support linear distance '
                    'calculations on geodetic coordinate systems.'
                )
            if distance:
                if srid:
                    # Setting the `geom_args` flag to false because we want to handle
                    # transformation SQL here, rather than the way done by default
                    # (which will transform to the original SRID of the field rather
                    # than to what was transformed to).
                    geom_args = False
                    procedure_fmt = '%s(%%(geo_col)s, %s)' % (backend.transform, srid)
                    if geom.srid is None or geom.srid == srid:
                        # If the geom parameter srid is None, it is assumed the coordinates
                        # are in the transformed units. A placeholder is used for the
                        # geometry parameter. `GeomFromText` constructor is also needed
                        # to wrap geom placeholder for SpatiaLite.
                        if backend.spatialite:
                            procedure_fmt += ', %s(%%%%s, %s)' % (backend.from_text, srid)
                        else:
                            procedure_fmt += ', %%s'
                    else:
                        # We need to transform the geom to the srid specified in `transform()`,
                        # so wrapping the geometry placeholder in transformation SQL.
                        # SpatiaLite also needs geometry placeholder wrapped in `GeomFromText`
                        # constructor.
                        if backend.spatialite:
                            procedure_fmt += (', %s(%s(%%%%s, %s), %s)' % (
                                backend.transform, backend.from_text,
                                geom.srid, srid))
                        else:
                            procedure_fmt += ', %s(%%%%s, %s)' % (backend.transform, srid)
                else:
                    # `transform()` was not used on this GeoQuerySet.
                    procedure_fmt = '%(geo_col)s,%(geom)s'
                if not geography and geodetic:
                    # Spherical distance calculation is needed (because the geographic
                    # field is geodetic). However, the PostGIS ST_distance_sphere/spheroid()
                    # procedures may only do queries from point columns to point geometries
                    # some error checking is required.
                    if not backend.geography:
                        if not isinstance(geo_field, PointField):
                            raise ValueError('Spherical distance calculation only supported on PointFields.')
                        if not str(Geometry(six.memoryview(params[0].ewkb)).geom_type) == 'Point':
                            raise ValueError(
                                'Spherical distance calculation only supported with '
                                'Point Geometry parameters'
                            )
                    # The `function` procedure argument needs to be set differently for
                    # geodetic distance calculations.
                    if spheroid:
                        # Call to distance_spheroid() requires spheroid param as well.
                        procedure_fmt += ",'%(spheroid)s'"
                        procedure_args.update({'function': backend.distance_spheroid, 'spheroid': params[1]})
                    else:
                        procedure_args.update({'function': backend.distance_sphere})
            elif length or perimeter:
                procedure_fmt = '%(geo_col)s'
                if not geography and geodetic and length:
                    # There's no `length_sphere`, and `length_spheroid` also
                    # works on 3D geometries.
                    procedure_fmt += ",'%(spheroid)s'"
                    procedure_args.update({'function': backend.length_spheroid, 'spheroid': params[1]})
                elif geom_3d and connection.features.supports_3d_functions:
                    # Use 3D variants of perimeter and length routines on supported backends.
                    if perimeter:
                        procedure_args.update({'function': backend.perimeter3d})
                    elif length:
                        procedure_args.update({'function': backend.length3d})
        # Setting up the settings for `_spatial_attribute`.
        s = {'select_field': DistanceField(dist_att),
             'setup': False,
             'geo_field': geo_field,
             'procedure_args': procedure_args,
             'procedure_fmt': procedure_fmt,
             }
        if geom_args:
            s['geom_args'] = ('geom',)
            s['procedure_args']['geom'] = geom
        elif geom:
            # The geometry is passed in as a parameter because we handled
            # transformation conditions in this routine.
            s['select_params'] = [backend.Adapter(geom)]
        return self._spatial_attribute(func, s, **kwargs)
def _geom_attribute(self, func, tolerance=0.05, **kwargs):
"""
DRY routine for setting up a GeoQuerySet method that attaches a
Geometry attribute (e.g., `centroid`, `point_on_surface`).
"""
s = {'select_field': GeomField()}
if connections[self.db].ops.oracle:
s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s'
s['procedure_args'] = {'tolerance': tolerance}
return self._spatial_attribute(func, s, **kwargs)
def _geomset_attribute(self, func, geom, tolerance=0.05, **kwargs):
"""
DRY routine for setting up a GeoQuerySet method that attaches a
Geometry attribute and takes a Geoemtry parameter. This is used
for geometry set-like operations (e.g., intersection, difference,
union, sym_difference).
"""
s = {
'geom_args': ('geom',),
'select_field': GeomField(),
'procedure_fmt': '%(geo_col)s,%(geom)s',
'procedure_args': {'geom': geom},
}
if connections[self.db].ops.oracle:
s['procedure_fmt'] += ',%(tolerance)s'
s['procedure_args']['tolerance'] = tolerance
return self._spatial_attribute(func, s, **kwargs)
    def _geocol_select(self, geo_field, field_name):
        """
        Helper routine for constructing the SQL to select the geographic
        column. Takes into account if the geographic field is in a
        ForeignKey relation to the current model.

        Returns the (quoted) SQL fragment for the geometry column, or
        raises ValueError when a related field cannot be located in the
        compiler's select list.
        """
        compiler = self.query.get_compiler(self.db)
        opts = self.model._meta
        if geo_field not in opts.fields:
            # Is this operation going to be on a related geographic field?
            # If so, it'll have to be added to the select related information
            # (e.g., if 'location__point' was given as the field name, then
            # chop the non-relational field and add select_related('location')).
            # Note: the operation really is defined as "must add select related!"
            self.query.add_select_related([field_name.rsplit(LOOKUP_SEP, 1)[0]])
            # Call pre_sql_setup() so that compiler.select gets populated.
            compiler.pre_sql_setup()
            for col, _, _ in compiler.select:
                if col.output_field == geo_field:
                    return col.as_sql(compiler, compiler.connection)[0]
            raise ValueError("%r not in compiler's related_select_cols" % geo_field)
        elif geo_field not in opts.local_fields:
            # This geographic field is inherited from another model, so we have to
            # use the db table for the _parent_ model instead.
            parent_model = geo_field.model._meta.concrete_model
            return self._field_column(compiler, geo_field, parent_model._meta.db_table)
        else:
            return self._field_column(compiler, geo_field)
# Private API utilities, subject to change.
def _geo_field(self, field_name=None):
"""
Returns the first Geometry field encountered or the one specified via
the `field_name` keyword. The `field_name` may be a string specifying
the geometry field on this GeoQuerySet's model, or a lookup string
to a geometry field via a ForeignKey relation.
"""
if field_name is None:
# Incrementing until the first geographic field is found.
for field in self.model._meta.fields:
if isinstance(field, GeometryField):
return field
return False
else:
# Otherwise, check by the given field name -- which may be
# a lookup to a _related_ geographic field.
return GISLookup._check_geo_field(self.model._meta, field_name)
def _field_column(self, compiler, field, table_alias=None, column=None):
"""
Helper function that returns the database column for the given field.
The table and column are returned (quoted) in the proper format, e.g.,
`"geoapp_city"."point"`. If `table_alias` is not specified, the
database table associated with the model of this `GeoQuerySet` will be
used. If `column` is specified, it will be used instead of the value
in `field.column`.
"""
if table_alias is None:
table_alias = compiler.query.get_meta().db_table
return "%s.%s" % (compiler.quote_name_unless_alias(table_alias),
compiler.connection.ops.quote_name(column or field.column))
| gpl-3.0 |
vaygr/ansible | lib/ansible/modules/windows/win_get_url.py | 11 | 4502 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Paul Durivage <paul.durivage@rackspace.com>, and others
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
# Metadata consumed by Ansible's tooling.  This Python file is
# documentation-only: the actual implementation lives in the
# accompanying PowerShell file (win_get_url.ps1), as noted above.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
# Module documentation: YAML embedded in a raw string, per Ansible's
# documentation-block convention.
DOCUMENTATION = r'''
---
module: win_get_url
version_added: "1.7"
short_description: Fetches a file from a given URL
description:
- Fetches a file from a URL and saves it locally.
- For non-Windows targets, use the M(get_url) module instead.
author:
- Paul Durivage (@angstwad)
- Takeshi Kuramochi (@tksarah)
options:
url:
description:
- The full URL of a file to download.
required: yes
dest:
description:
- The location to save the file at the URL.
- Be sure to include a filename and extension as appropriate.
required: yes
force:
description:
- If C(yes), will always download the file. If C(no), will only
download the file if it does not exist or the remote file has been
modified more recently than the local file.
- This works by sending an http HEAD request to retrieve last modified
time of the requested resource, so for this to work, the remote web
server must support HEAD requests.
type: bool
default: 'yes'
version_added: "2.0"
headers:
description:
- Add custom HTTP headers to a request (as a dictionary).
version_added: '2.4'
url_username:
description:
- Basic authentication username.
aliases: [ username ]
url_password:
description:
- Basic authentication password.
aliases: [ password ]
force_basic_auth:
description:
- If C(yes), will add a Basic authentication header on the initial request.
- If C(no), will use Microsoft's WebClient to handle authentication.
type: bool
default: 'no'
version_added: "2.5"
skip_certificate_validation:
description:
- This option is deprecated since v2.4, please use C(validate_certs) instead.
- If C(yes), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
type: bool
default: 'no'
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
- If C(skip_certificate_validation) was set, it overrides this option.
type: bool
default: 'yes'
version_added: '2.4'
proxy_url:
description:
- The full URL of the proxy server to download through.
version_added: "2.0"
proxy_username:
description:
- Proxy authentication username.
version_added: "2.0"
proxy_password:
description:
- Proxy authentication password.
version_added: "2.0"
use_proxy:
description:
- If C(no), it will not use a proxy, even if one is defined in an environment
variable on the target hosts.
type: bool
default: 'yes'
version_added: '2.4'
timeout:
description:
- Timeout in seconds for URL request.
default: 10
version_added : '2.4'
notes:
- For non-Windows targets, use the M(get_url) module instead.
'''
# Usage examples shown by `ansible-doc`.
EXAMPLES = r'''
- name: Download earthrise.jpg to specified path
win_get_url:
url: http://www.example.com/earthrise.jpg
dest: C:\Users\RandomUser\earthrise.jpg
- name: Download earthrise.jpg to specified path only if modified
win_get_url:
url: http://www.example.com/earthrise.jpg
dest: C:\Users\RandomUser\earthrise.jpg
force: no
- name: Download earthrise.jpg to specified path through a proxy server.
win_get_url:
url: http://www.example.com/earthrise.jpg
dest: C:\Users\RandomUser\earthrise.jpg
proxy_url: http://10.0.0.1:8080
proxy_username: username
proxy_password: password
'''
# Description of the values the module returns.
RETURN = r'''
dest:
description: destination file/path
returned: always
type: string
sample: C:\Users\RandomUser\earthrise.jpg
url:
description: requested url
returned: always
type: string
sample: http://www.example.com/earthrise.jpg
msg:
description: Error message, or HTTP status message from web-server
returned: always
type: string
sample: OK
status_code:
description: HTTP status code
returned: always
type: int
sample: 200
'''
| gpl-3.0 |
wendykan/Metrics | Python/ml_metrics/test/test_edit_distance.py | 8 | 1809 | #! /usr/bin/env python2.7
from __future__ import division
import unittest
import ml_metrics as metrics
class TestEditDistance(unittest.TestCase):
    """Tests for ml_metrics' Levenshtein edit distance implementation."""
    def test_levenshtein(self):
        # Known distances for classic word pairs, plus edge cases
        # (empty inputs, identical strings) and list arguments, for
        # which the distance is computed over sequence elements.
        self.assertEqual(metrics.levenshtein("intention", "execution"), 5)
        self.assertEqual(metrics.levenshtein("sitting", "kitten"), 3)
        self.assertEqual(metrics.levenshtein("Saturday", "Sunday"), 3)
        self.assertEqual(metrics.levenshtein("sitting", ""), 7)
        self.assertEqual(metrics.levenshtein("", "Ben"), 3)
        self.assertEqual(metrics.levenshtein("cat", "cat"), 0)
        self.assertEqual(metrics.levenshtein("hat", "cat"), 1)
        self.assertEqual(metrics.levenshtein("at", "cat"), 1)
        self.assertEqual(metrics.levenshtein("", "a"), 1)
        self.assertEqual(metrics.levenshtein("a", ""), 1)
        self.assertEqual(metrics.levenshtein("", ""), 0)
        self.assertEqual(metrics.levenshtein("ant", "aunt"), 1)
        self.assertEqual(metrics.levenshtein("Samantha", "Sam"), 5)
        self.assertEqual(metrics.levenshtein("Flomax", "Volmax"), 3)
        self.assertEqual(metrics.levenshtein([1], [1]), 0)
        self.assertEqual(metrics.levenshtein([1], [1,2]), 1)
        self.assertEqual(metrics.levenshtein([1], [1,10]), 1)
        self.assertEqual(metrics.levenshtein([1,2], [10,20]), 2)
        self.assertEqual(metrics.levenshtein([1,2], [10,20,30]), 3)
        self.assertEqual(metrics.levenshtein([3,3,4], [4,1,4,3]), 3)
    def test_levenshtein_normalized(self):
        # The normalized variant apparently divides the raw distance by
        # the longer sequence's length (5/9, 3/7, 3/8); true division
        # courtesy of `from __future__ import division` at file top.
        # The flag is accepted both positionally and as `normalize=`.
        self.assertEqual(metrics.levenshtein("intention", "execution", True), 5/9)
        self.assertEqual(metrics.levenshtein("sitting", "kitten", normalize=True), 3/7)
        self.assertEqual(metrics.levenshtein("Saturday", "Sunday", True), 3/8)
if __name__ == '__main__':
    unittest.main()
| bsd-2-clause |
grycap/clues | cluesplugins/kubernetes.py | 1 | 13204 | #!/usr/bin/env python
#
# CLUES - Cluster Energy Saving System
# Copyright (C) 2015 - GRyCAP - Universitat Politecnica de Valencia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
import requests
import base64
import json
import cpyutils.config
import clueslib.helpers as Helpers
from cpyutils.evaluate import TypedNumber, TypedClass, TypedList
from cpyutils.log import Log
from clueslib.node import NodeInfo
from clueslib.platform import LRMS
from clueslib.request import Request, ResourcesNeeded, JobInfo
_LOGGER = Log("PLUGIN-KUBERNETES")
class lrms(LRMS):
def _get_auth_header(self, auth):
"""
Generate the auth header needed to contact with the Kubernetes API server.
"""
auth_header = {}
if 'username' in auth and 'password' in auth:
passwd = auth['password']
user = auth['username']
auth_header = {'Authorization': 'Basic ' +
(base64.encodestring((user + ':' + passwd).encode('utf-8'))).strip().decode('utf-8')}
elif 'token' in auth:
token = auth['token']
auth_header = {'Authorization': 'Bearer ' + token}
return auth_header
def _create_request(self, method, url, auth_data, headers=None, body=None):
try:
if headers is None:
headers = {}
auth_header = self._get_auth_header(auth_data)
if auth_header:
headers.update(auth_header)
url = "%s%s" % (self._server_url, url)
resp = requests.request(method, url, verify=False, headers=headers, data=body)
if resp.status_code == 200:
return resp.json()
else:
_LOGGER.error("Error contacting Kubernetes API: %s - %s" % (resp.status_code, resp.text))
return None
except Exception as ex:
_LOGGER.error("Error contacting Kubernetes API: %s" % str(ex))
return None
    def __init__(self, KUBERNETES_SERVER=None, KUBERNETES_PODS_API_URL_PATH=None,
                 KUBERNETES_NODES_API_URL_PATH=None, KUBERNETES_TOKEN=None, KUBERNETES_NODE_MEMORY=None,
                 KUBERNETES_NODE_SLOTS=None, KUBERNETES_NODE_PODS=None):
        # Defaults come from the [KUBERNETES] section of the CLUES
        # configuration; explicit constructor arguments take precedence.
        # NOTE(review): Helpers.val_default presumably returns its first
        # argument unless it is None -- confirm in clueslib.helpers.
        config_kube = cpyutils.config.Configuration(
            "KUBERNETES",
            {
                "KUBERNETES_SERVER": "http://localhost:8080",
                "KUBERNETES_PODS_API_URL_PATH": "/api/v1/pods",
                "KUBERNETES_NODES_API_URL_PATH": "/api/v1/nodes",
                "KUBERNETES_TOKEN": None,
                "KUBERNETES_NODE_MEMORY": 1073741824,
                "KUBERNETES_NODE_SLOTS": 1,
                "KUBERNETES_NODE_PODS": 110,
            }
        )
        self._server_url = Helpers.val_default(KUBERNETES_SERVER, config_kube.KUBERNETES_SERVER)
        self._pods_api_url_path = Helpers.val_default(KUBERNETES_PODS_API_URL_PATH,
                                                      config_kube.KUBERNETES_PODS_API_URL_PATH)
        self._nodes_api_url_path = Helpers.val_default(KUBERNETES_NODES_API_URL_PATH,
                                                       config_kube.KUBERNETES_NODES_API_URL_PATH)
        token = Helpers.val_default(KUBERNETES_TOKEN, config_kube.KUBERNETES_TOKEN)
        self._node_memory = Helpers.val_default(KUBERNETES_NODE_MEMORY, config_kube.KUBERNETES_NODE_MEMORY)
        self._node_slots = Helpers.val_default(KUBERNETES_NODE_SLOTS, config_kube.KUBERNETES_NODE_SLOTS)
        self._node_pods = Helpers.val_default(KUBERNETES_NODE_PODS, config_kube.KUBERNETES_NODE_PODS)
        # A bearer token, when configured, is used by _get_auth_header().
        if token:
            self.auth_data = {"token": token}
        else:
            self.auth_data = {}
        # The LRMS id embeds the server URL so several Kubernetes
        # endpoints can coexist.
        LRMS.__init__(self, "KUBERNETES_%s" % self._server_url)
def _get_memory_in_bytes(self, str_memory):
str_memory = str_memory.lower()
if str_memory.strip()[-2:] in ['mi', 'gi', 'ki', 'ti']:
unit = str_memory.strip()[-2:][0]
memory = int(str_memory.strip()[:-2])
elif str_memory.strip()[-1:] in ['m', 'g', 'k', 't']:
unit = str_memory.strip()[-1:]
memory = int(str_memory.strip()[:-1])
else:
return int(str_memory)
if unit == 'k':
memory *= 1024
elif unit == 'm':
memory *= 1024 * 1024
elif unit == 'g':
memory *= 1024 * 1024 * 1024
elif unit == 't':
memory *= 1024 * 1024 * 1024 * 1024
return memory
def _get_node_used_resources(self, nodename, pods_data):
used_mem = 0
used_cpus = 0.0
used_pods = 0
system_pods = 0
if pods_data:
for pod in pods_data["items"]:
if "nodeName" in pod["spec"] and nodename == pod["spec"]["nodeName"]:
# do not count the number of pods in case finished jobs
if pod["status"]["phase"] not in ["Succeeded", "Failed"]:
# do not count the number of pods in case of system ones
if pod["metadata"]["namespace"] == "kube-system":
system_pods += 1
used_pods += 1
cpus, memory = self._get_pod_cpus_and_memory(pod)
used_mem += memory
used_cpus += cpus
return used_mem, used_cpus, used_pods, system_pods
    def get_nodeinfolist(self):
        """Build the CLUES NodeInfo table from the Kubernetes nodes API.

        Queries the nodes endpoint and, for every schedulable (untainted)
        node, computes free slots/memory/pods from the currently scheduled
        pods.  Nodes declared in /etc/clues2/kubernetes_vnodes.info that the
        API does not report are added as powered-off "virtual" nodes.
        """
        nodeinfolist = collections.OrderedDict()
        nodes_data = self._create_request('GET', self._nodes_api_url_path, self.auth_data)
        if nodes_data:
            pods_data = self._create_request('GET', self._pods_api_url_path, self.auth_data)
            if not pods_data:
                # Non-fatal: nodes are still listed, just with no usage data.
                _LOGGER.error("Error getting Kubernetes pod list. Node usage will not be obtained.")
            for node in nodes_data["items"]:
                name = node["metadata"]["name"]
                memory_total = self._get_memory_in_bytes(node["status"]["allocatable"]["memory"])
                # NOTE(review): int() assumes allocatable cpu is a whole
                # number of cores; it would fail on a millicore value such as
                # "3500m" -- confirm against the cluster, or parse with
                # _get_cpu_float.
                slots_total = int(node["status"]["allocatable"]["cpu"])
                pods_total = int(node["status"]["allocatable"]["pods"])
                skip_node = False
                # Get Taints: any scheduling-restricting taint excludes the
                # node from the pool entirely.
                if 'taints' in node["spec"] and node["spec"]['taints']:
                    for taint in node["spec"]['taints']:
                        if taint['effect'] in ["NoSchedule", "PreferNoSchedule", "NoExecute"]:
                            skip_node = True
                            _LOGGER.debug("Node %s is tainted with %s, skiping." % (name, taint['effect']))
                if not skip_node:
                    used_mem, used_cpus, used_pods, system_pods = self._get_node_used_resources(name, pods_data)
                    memory_free = memory_total - used_mem
                    slots_free = slots_total - used_cpus
                    pods_free = pods_total - used_pods
                    # A node is usable only if its "Ready" condition is True.
                    is_ready = True
                    for conditions in node["status"]["conditions"]:
                        if conditions['type'] == "Ready":
                            if conditions['status'] != "True":
                                is_ready = False
                    keywords = {'pods_free': TypedNumber(pods_free),
                                'nodeName': TypedClass(name, TypedClass.STRING)}
                    # Add labels as keywords
                    for key, value in list(node["metadata"]["labels"].items()):
                        keywords[key] = TypedClass(value, TypedClass.STRING)
                    nodeinfolist[name] = NodeInfo(name, slots_total, slots_free, memory_total, memory_free, keywords)
                    if is_ready:
                        nodeinfolist[name].state = NodeInfo.IDLE
                        # Only non-system pods flip the node to USED.
                        if (used_pods - system_pods) > 0:
                            nodeinfolist[name].state = NodeInfo.USED
                    else:
                        nodeinfolist[name].state = NodeInfo.OFF
        else:
            _LOGGER.error("Error getting Kubernetes node list.")
        # Add the "virtual" nodes: statically declared nodes that CLUES may
        # power on later; any that the API already reported are skipped.
        try:
            vnodes = json.load(open('/etc/clues2/kubernetes_vnodes.info', 'r'))
            for vnode in vnodes:
                name = vnode["name"]
                if name not in nodeinfolist:
                    keywords = {'pods_free': TypedNumber(self._node_pods),
                                'nodeName': TypedClass(name, TypedClass.STRING)}
                    # Per-vnode overrides; otherwise the configured defaults.
                    cpus = self._node_slots
                    if "cpu" in vnode:
                        cpus = int(vnode["cpu"])
                    memory = self._node_memory
                    if "memory" in vnode:
                        memory = self._get_memory_in_bytes(vnode["memory"])
                    if "queues" in vnode:
                        queues = vnode["queues"].split(",")
                        if queues:
                            keywords['queues'] = TypedList([TypedClass.auto(q) for q in queues])
                    if "keywords" in vnode:
                        # "keywords" is a comma-separated list of key=value pairs.
                        for keypair in vnode["keywords"].split(','):
                            parts = keypair.split('=')
                            keywords[parts[0].strip()] = TypedClass(parts[1].strip(), TypedClass.STRING)
                    nodeinfolist[name] = NodeInfo(name, cpus, cpus, memory, memory, keywords)
                    nodeinfolist[name].state = NodeInfo.OFF
        except Exception as ex:
            _LOGGER.error("Error processing file /etc/clues2/kubernetes_vnodes.info: %s" % str(ex))
        return nodeinfolist
def _get_cpu_float(self, cpu_info):
if cpu_info.strip()[-1:] == "m":
return float(cpu_info.strip()[:-1]) / 1000.0
else:
return float(cpu_info)
def _get_pod_cpus_and_memory(self, pod):
cpus = 0.0
memory = 0
for cont in pod["spec"]["containers"]:
if "resources" in cont:
if "requests" in cont["resources"]:
if "cpu" in cont["resources"]["requests"]:
cpus += self._get_cpu_float(cont["resources"]["requests"]["cpu"])
if "memory" in cont["resources"]["requests"]:
memory += self._get_memory_in_bytes(cont["resources"]["requests"]["memory"])
return cpus, memory
    def get_jobinfolist(self):
        '''Return the list of CLUES jobs built from the Kubernetes pod queue.

        Every pod outside the kube-system namespace becomes a JobInfo.  The
        job state is derived from the pod phase: Pending pods without an
        assigned host are PENDING, anything already scheduled or finished is
        SERVED, everything else stays UNKNOWN.
        '''
        jobinfolist = []
        pods_data = self._create_request('GET', self._pods_api_url_path, self.auth_data)
        if pods_data:
            for pod in pods_data["items"]:
                # System pods are not user jobs, so they are ignored.
                if pod["metadata"]["namespace"] != "kube-system":
                    job_id = pod["metadata"]["uid"]
                    state = pod["status"]["phase"] # Pending, Running, Succeeded, Failed or Unknown
                    hostIP = None
                    if "hostIP" in pod["status"]:
                        hostIP = pod["status"]["hostIP"] # IP of the node the pod was scheduled on
                    job_state = Request.UNKNOWN
                    if state == "Pending":
                        job_state = Request.PENDING
                        # A Pending pod that already has a host assigned is
                        # effectively scheduled, hence SERVED.
                        if hostIP:
                            job_state = Request.SERVED
                    elif state in ["Running", "Succeeded", "Failed"]:
                        job_state = Request.SERVED
                    cpus, memory = self._get_pod_cpus_and_memory(pod)
                    req_str = '(pods_free > 0)'
                    # NOTE(review): this clause uses '=' while the
                    # nodeSelector clauses below use '==' -- confirm both
                    # operators are accepted by the CLUES expression parser.
                    if 'nodeName' in pod["spec"] and pod["spec"]["nodeName"]:
                        req_str += ' && (nodeName = "%s")' % pod["spec"]["nodeName"]
                    # Add node selector labels
                    if 'nodeSelector' in pod['spec'] and pod['spec']['nodeSelector']:
                        for key, value in list(pod['spec']['nodeSelector'].items()):
                            req_str += ' && (%s == "%s")' % (key, value)
                    resources = ResourcesNeeded(cpus, memory, [req_str], 1)
                    job_info = JobInfo(resources, job_id, 1)
                    job_info.set_state(job_state)
                    jobinfolist.append(job_info)
        else:
            _LOGGER.error("Error getting Kubernetes pod list")
        return jobinfolist
if __name__ == '__main__':
    # This module is meant to be loaded as a CLUES LRMS plugin; it has no
    # standalone entry point.
    pass
| gpl-3.0 |
ekalosak/boto | tests/unit/auth/test_sigv4.py | 75 | 23443 | # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import copy
import pickle
import os
from tests.compat import unittest, mock
from tests.unit import MockServiceWithConfigTestCase
from boto.auth import HmacAuthV4Handler
from boto.auth import S3HmacAuthV4Handler
from boto.auth import detect_potential_s3sigv4
from boto.auth import detect_potential_sigv4
from boto.connection import HTTPRequest
from boto.provider import Provider
from boto.regioninfo import RegionInfo
class TestSigV4Handler(unittest.TestCase):
    """Tests for the generic AWS Signature Version 4 handler
    (HmacAuthV4Handler): header/query/URI canonicalization, credential-scope
    derivation, and pickling support."""
    def setUp(self):
        # Mock provider with fixed credentials; Glacier always signs with SigV4.
        self.provider = mock.Mock()
        self.provider.access_key = 'access_key'
        self.provider.secret_key = 'secret_key'
        self.request = HTTPRequest(
            'POST', 'https', 'glacier.us-east-1.amazonaws.com', 443,
            '/-/vaults/foo/archives', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
    def test_not_adding_empty_qs(self):
        # Signing a request with no params must not append a stray '?'.
        self.provider.security_token = None
        auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com', mock.Mock(), self.provider)
        req = copy.copy(self.request)
        auth.add_auth(req)
        self.assertEqual(req.path, '/-/vaults/foo/archives')
    def test_inner_whitespace_is_collapsed(self):
        # Canonical headers must collapse runs of inner spaces, except inside
        # quoted strings, per the SigV4 canonicalization rules.
        auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com',
                                 mock.Mock(), self.provider)
        self.request.headers['x-amz-archive-description'] = 'two  spaces'
        self.request.headers['x-amz-quoted-string'] = '  "a   b   c"  '
        headers = auth.headers_to_sign(self.request)
        self.assertEqual(headers, {'Host': 'glacier.us-east-1.amazonaws.com',
                                   'x-amz-archive-description': 'two  spaces',
                                   'x-amz-glacier-version': '2012-06-01',
                                   'x-amz-quoted-string': '  "a   b   c"  '})
        # Note the single space between the "two spaces".
        self.assertEqual(auth.canonical_headers(headers),
                         'host:glacier.us-east-1.amazonaws.com\n'
                         'x-amz-archive-description:two spaces\n'
                         'x-amz-glacier-version:2012-06-01\n'
                         'x-amz-quoted-string:"a   b   c"')
    def test_canonical_query_string(self):
        # Query params are sorted lexicographically ('Foo.1' before 'Foo.10').
        auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com',
                                 mock.Mock(), self.provider)
        request = HTTPRequest(
            'GET', 'https', 'glacier.us-east-1.amazonaws.com', 443,
            '/-/vaults/foo/archives', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
        request.params['Foo.1'] = 'aaa'
        request.params['Foo.10'] = 'zzz'
        query_string = auth.canonical_query_string(request)
        self.assertEqual(query_string, 'Foo.1=aaa&Foo.10=zzz')
    def test_query_string(self):
        # Non-ASCII param values must be percent-encoded as UTF-8.
        auth = HmacAuthV4Handler('sns.us-east-1.amazonaws.com',
                                 mock.Mock(), self.provider)
        params = {
            'Message': u'We \u2665 utf-8'.encode('utf-8'),
        }
        request = HTTPRequest(
            'POST', 'https', 'sns.us-east-1.amazonaws.com', 443,
            '/', None, params, {}, '')
        query_string = auth.query_string(request)
        self.assertEqual(query_string, 'Message=We%20%E2%99%A5%20utf-8')
    def test_canonical_uri(self):
        auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com',
                                 mock.Mock(), self.provider)
        request = HTTPRequest(
            'GET', 'https', 'glacier.us-east-1.amazonaws.com', 443,
            'x/./././x .html', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
        canonical_uri = auth.canonical_uri(request)
        # This should be both normalized & urlencoded.
        self.assertEqual(canonical_uri, 'x/x%20.html')
        auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com',
                                 mock.Mock(), self.provider)
        request = HTTPRequest(
            'GET', 'https', 'glacier.us-east-1.amazonaws.com', 443,
            'x/./././x/html/', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
        canonical_uri = auth.canonical_uri(request)
        # Trailing slashes should be preserved.
        self.assertEqual(canonical_uri, 'x/x/html/')
        request = HTTPRequest(
            'GET', 'https', 'glacier.us-east-1.amazonaws.com', 443,
            '/', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
        canonical_uri = auth.canonical_uri(request)
        # There should not be two-slashes.
        self.assertEqual(canonical_uri, '/')
        # Make sure Windows-style slashes are converted properly
        request = HTTPRequest(
            'GET', 'https', 'glacier.us-east-1.amazonaws.com', 443,
            '\\x\\x.html', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
        canonical_uri = auth.canonical_uri(request)
        self.assertEqual(canonical_uri, '/x/x.html')
    def test_credential_scope(self):
        # The region component of the credential scope is derived from the
        # endpoint host name; verify each host-name style.
        # test the AWS standard regions IAM endpoint
        auth = HmacAuthV4Handler('iam.amazonaws.com',
                                 mock.Mock(), self.provider)
        request = HTTPRequest(
            'POST', 'https', 'iam.amazonaws.com', 443,
            '/', '/',
            {'Action': 'ListAccountAliases', 'Version': '2010-05-08'},
            {
                'Content-Length': '44',
                'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
                'X-Amz-Date': '20130808T013210Z'
            },
            'Action=ListAccountAliases&Version=2010-05-08')
        credential_scope = auth.credential_scope(request)
        region_name = credential_scope.split('/')[1]
        self.assertEqual(region_name, 'us-east-1')
        # test the AWS GovCloud region IAM endpoint
        auth = HmacAuthV4Handler('iam.us-gov.amazonaws.com',
                                 mock.Mock(), self.provider)
        request = HTTPRequest(
            'POST', 'https', 'iam.us-gov.amazonaws.com', 443,
            '/', '/',
            {'Action': 'ListAccountAliases', 'Version': '2010-05-08'},
            {
                'Content-Length': '44',
                'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
                'X-Amz-Date': '20130808T013210Z'
            },
            'Action=ListAccountAliases&Version=2010-05-08')
        credential_scope = auth.credential_scope(request)
        region_name = credential_scope.split('/')[1]
        self.assertEqual(region_name, 'us-gov-west-1')
        # iam.us-west-1.amazonaws.com does not exist however this
        # covers the remaining region_name control structure for a
        # different region name
        auth = HmacAuthV4Handler('iam.us-west-1.amazonaws.com',
                                 mock.Mock(), self.provider)
        request = HTTPRequest(
            'POST', 'https', 'iam.us-west-1.amazonaws.com', 443,
            '/', '/',
            {'Action': 'ListAccountAliases', 'Version': '2010-05-08'},
            {
                'Content-Length': '44',
                'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
                'X-Amz-Date': '20130808T013210Z'
            },
            'Action=ListAccountAliases&Version=2010-05-08')
        credential_scope = auth.credential_scope(request)
        region_name = credential_scope.split('/')[1]
        self.assertEqual(region_name, 'us-west-1')
        # Test connections to custom locations, e.g. localhost:8080
        auth = HmacAuthV4Handler('localhost', mock.Mock(), self.provider,
                                 service_name='iam')
        request = HTTPRequest(
            'POST', 'http', 'localhost', 8080,
            '/', '/',
            {'Action': 'ListAccountAliases', 'Version': '2010-05-08'},
            {
                'Content-Length': '44',
                'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
                'X-Amz-Date': '20130808T013210Z'
            },
            'Action=ListAccountAliases&Version=2010-05-08')
        credential_scope = auth.credential_scope(request)
        timestamp, region, service, v = credential_scope.split('/')
        self.assertEqual(region, 'localhost')
        self.assertEqual(service, 'iam')
    def test_headers_to_sign(self):
        # The Host header includes the port only when it is non-default.
        auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com',
                                 mock.Mock(), self.provider)
        request = HTTPRequest(
            'GET', 'http', 'glacier.us-east-1.amazonaws.com', 80,
            'x/./././x .html', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
        headers = auth.headers_to_sign(request)
        # Port 80 & not secure excludes the port.
        self.assertEqual(headers['Host'], 'glacier.us-east-1.amazonaws.com')
        request = HTTPRequest(
            'GET', 'https', 'glacier.us-east-1.amazonaws.com', 443,
            'x/./././x .html', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
        headers = auth.headers_to_sign(request)
        # SSL port excludes the port.
        self.assertEqual(headers['Host'], 'glacier.us-east-1.amazonaws.com')
        request = HTTPRequest(
            'GET', 'https', 'glacier.us-east-1.amazonaws.com', 8080,
            'x/./././x .html', None, {},
            {'x-amz-glacier-version': '2012-06-01'}, '')
        headers = auth.headers_to_sign(request)
        # URL should include port.
        self.assertEqual(headers['Host'], 'glacier.us-east-1.amazonaws.com:8080')
    def test_region_and_service_can_be_overriden(self):
        auth = HmacAuthV4Handler('queue.amazonaws.com',
                                 mock.Mock(), self.provider)
        self.request.headers['X-Amz-Date'] = '20121121000000'
        auth.region_name = 'us-west-2'
        auth.service_name = 'sqs'
        scope = auth.credential_scope(self.request)
        self.assertEqual(scope, '20121121/us-west-2/sqs/aws4_request')
    def test_pickle_works(self):
        # The handler must survive a pickle round-trip (a real Provider is
        # used because mocks are not picklable).
        provider = Provider('aws', access_key='access_key',
                            secret_key='secret_key')
        auth = HmacAuthV4Handler('queue.amazonaws.com', None, provider)
        # Pickle it!
        pickled = pickle.dumps(auth)
        # Now restore it
        auth2 = pickle.loads(pickled)
        self.assertEqual(auth.host, auth2.host)
    def test_bytes_header(self):
        # Byte-string header values must still appear in the canonical request.
        auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com',
                                 mock.Mock(), self.provider)
        request = HTTPRequest(
            'GET', 'http', 'glacier.us-east-1.amazonaws.com', 80,
            'x/./././x .html', None, {},
            {'x-amz-glacier-version': '2012-06-01', 'x-amz-hash': b'f00'}, '')
        canonical = auth.canonical_request(request)
        self.assertIn('f00', canonical)
class TestS3HmacAuthV4Handler(unittest.TestCase):
    """Tests for the S3-specific SigV4 handler (S3HmacAuthV4Handler), which
    deviates from generic SigV4 in region/service-name detection from host
    names and in URI canonicalization (no path normalization)."""
    def setUp(self):
        self.provider = mock.Mock()
        self.provider.access_key = 'access_key'
        self.provider.secret_key = 'secret_key'
        self.provider.security_token = 'sekret_tokens'
        self.request = HTTPRequest(
            'GET', 'https', 's3-us-west-2.amazonaws.com', 443,
            '/awesome-bucket/?max-keys=0', None, {},
            {}, ''
        )
        # A virtual-hosted-style request with pre-set date/content hash so
        # canonical-request output is deterministic.
        self.awesome_bucket_request = HTTPRequest(
            method='GET',
            protocol='https',
            host='awesome-bucket.s3-us-west-2.amazonaws.com',
            port=443,
            path='/',
            auth_path=None,
            params={
                'max-keys': 0,
            },
            headers={
                'User-Agent': 'Boto',
                'X-AMZ-Content-sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
                'X-AMZ-Date': '20130605T193245Z',
            },
            body=''
        )
        self.auth = S3HmacAuthV4Handler(
            host='awesome-bucket.s3-us-west-2.amazonaws.com',
            config=mock.Mock(),
            provider=self.provider,
            region_name='s3-us-west-2'
        )
    def test_clean_region_name(self):
        # Untouched.
        cleaned = self.auth.clean_region_name('us-west-2')
        self.assertEqual(cleaned, 'us-west-2')
        # Stripped of the ``s3-`` prefix.
        cleaned = self.auth.clean_region_name('s3-us-west-2')
        self.assertEqual(cleaned, 'us-west-2')
        # Untouched (classic).
        cleaned = self.auth.clean_region_name('s3.amazonaws.com')
        self.assertEqual(cleaned, 's3.amazonaws.com')
        # Untouched.
        cleaned = self.auth.clean_region_name('something-s3-us-west-2')
        self.assertEqual(cleaned, 'something-s3-us-west-2')
    def test_region_stripping(self):
        auth = S3HmacAuthV4Handler(
            host='s3-us-west-2.amazonaws.com',
            config=mock.Mock(),
            provider=self.provider
        )
        self.assertEqual(auth.region_name, None)
        # What we wish we got.
        auth = S3HmacAuthV4Handler(
            host='s3-us-west-2.amazonaws.com',
            config=mock.Mock(),
            provider=self.provider,
            region_name='us-west-2'
        )
        self.assertEqual(auth.region_name, 'us-west-2')
        # What we actually get (i.e. ``s3-us-west-2``).
        self.assertEqual(self.auth.region_name, 'us-west-2')
    def test_determine_region_name(self):
        name = self.auth.determine_region_name('s3-us-west-2.amazonaws.com')
        self.assertEqual(name, 'us-west-2')
    def test_canonical_uri(self):
        request = HTTPRequest(
            'GET', 'https', 's3-us-west-2.amazonaws.com', 443,
            'x/./././x .html', None, {},
            {}, ''
        )
        canonical_uri = self.auth.canonical_uri(request)
        # S3 doesn't canonicalize the way other SigV4 services do.
        # This just urlencoded, no normalization of the path.
        self.assertEqual(canonical_uri, 'x/./././x%20.html')
    def test_determine_service_name(self):
        # The service name is 's3' for every S3 host-name style.
        # What we wish we got.
        name = self.auth.determine_service_name(
            's3.us-west-2.amazonaws.com'
        )
        self.assertEqual(name, 's3')
        # What we actually get.
        name = self.auth.determine_service_name(
            's3-us-west-2.amazonaws.com'
        )
        self.assertEqual(name, 's3')
        # What we wish we got with virtual hosting.
        name = self.auth.determine_service_name(
            'bucket.s3.us-west-2.amazonaws.com'
        )
        self.assertEqual(name, 's3')
        # What we actually get with virtual hosting.
        name = self.auth.determine_service_name(
            'bucket.s3-us-west-2.amazonaws.com'
        )
        self.assertEqual(name, 's3')
    def test_add_auth(self):
        # add_auth() must inject the payload hash header as a side effect.
        # The side-effects sideshow.
        self.assertFalse('x-amz-content-sha256' in self.request.headers)
        self.auth.add_auth(self.request)
        self.assertTrue('x-amz-content-sha256' in self.request.headers)
        the_sha = self.request.headers['x-amz-content-sha256']
        self.assertEqual(
            the_sha,
            'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
        )
    def test_host_header(self):
        host = self.auth.host_header(
            self.awesome_bucket_request.host,
            self.awesome_bucket_request
        )
        self.assertEqual(host, 'awesome-bucket.s3-us-west-2.amazonaws.com')
    def test_canonical_query_string(self):
        qs = self.auth.canonical_query_string(self.awesome_bucket_request)
        self.assertEqual(qs, 'max-keys=0')
    def test_correct_handling_of_plus_sign(self):
        request = HTTPRequest(
            'GET', 'https', 's3-us-west-2.amazonaws.com', 443,
            'hello+world.txt', None, {},
            {}, ''
        )
        canonical_uri = self.auth.canonical_uri(request)
        # Ensure that things are properly quoted.
        self.assertEqual(canonical_uri, 'hello%2Bworld.txt')
        request = HTTPRequest(
            'GET', 'https', 's3-us-west-2.amazonaws.com', 443,
            'hello%2Bworld.txt', None, {},
            {}, ''
        )
        canonical_uri = self.auth.canonical_uri(request)
        # Verify double escaping hasn't occurred.
        self.assertEqual(canonical_uri, 'hello%2Bworld.txt')
    def test_mangle_path_and_params(self):
        # Query args embedded in the raw path must be folded into ``params``.
        request = HTTPRequest(
            method='GET',
            protocol='https',
            host='awesome-bucket.s3-us-west-2.amazonaws.com',
            port=443,
            # LOOK AT THIS PATH. JUST LOOK AT IT.
            path='/?delete&max-keys=0',
            auth_path=None,
            params={
                'key': 'why hello there',
                # This gets overwritten, to make sure back-compat is maintained.
                'max-keys': 1,
            },
            headers={
                'User-Agent': 'Boto',
                'X-AMZ-Content-sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
                'X-AMZ-Date': '20130605T193245Z',
            },
            body=''
        )
        mod_req = self.auth.mangle_path_and_params(request)
        self.assertEqual(mod_req.path, '/?delete&max-keys=0')
        self.assertEqual(mod_req.auth_path, '/')
        self.assertEqual(mod_req.params, {
            'max-keys': '0',
            'key': 'why hello there',
            'delete': ''
        })
    def test_canonical_request(self):
        expected = """GET
/
max-keys=0
host:awesome-bucket.s3-us-west-2.amazonaws.com
user-agent:Boto
x-amz-content-sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
x-amz-date:20130605T193245Z
host;user-agent;x-amz-content-sha256;x-amz-date
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"""
        authed_req = self.auth.canonical_request(self.awesome_bucket_request)
        self.assertEqual(authed_req, expected)
        # Now the way ``boto.s3`` actually sends data.
        request = copy.copy(self.awesome_bucket_request)
        request.path = request.auth_path = '/?max-keys=0'
        request.params = {}
        expected = """GET
/
max-keys=0
host:awesome-bucket.s3-us-west-2.amazonaws.com
user-agent:Boto
x-amz-content-sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
x-amz-date:20130605T193245Z
host;user-agent;x-amz-content-sha256;x-amz-date
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"""
        # Pre-mangle it. In practice, this happens as part of ``add_auth``,
        # but that's a side-effect that's hard to test.
        request = self.auth.mangle_path_and_params(request)
        authed_req = self.auth.canonical_request(request)
        self.assertEqual(authed_req, expected)
    def test_non_string_headers(self):
        # Integer header values (e.g. Content-Length) must be stringified.
        self.awesome_bucket_request.headers['Content-Length'] = 8
        canonical_headers = self.auth.canonical_headers(
            self.awesome_bucket_request.headers)
        self.assertEqual(
            canonical_headers,
            'content-length:8\n'
            'user-agent:Boto\n'
            'x-amz-content-sha256:e3b0c44298fc1c149afbf4c8996fb92427ae'
            '41e4649b934ca495991b7852b855\n'
            'x-amz-date:20130605T193245Z'
        )
class FakeS3Connection(object):
    """Minimal stand-in for an S3 connection exposing only what the
    ``detect_potential_s3sigv4`` decorator inspects (``host`` and ``_mexe``)."""
    def __init__(self, *args, **kwargs):
        self.host = kwargs.pop('host', None)
    @detect_potential_s3sigv4
    def _required_auth_capability(self):
        # Sentinel: the decorator substitutes ['hmac-v4-s3'] when SigV4 is
        # required by the host/region or opted into via config/env.
        return ['nope']
    def _mexe(self, *args, **kwargs):
        pass
class FakeEC2Connection(object):
    """Minimal stand-in for an EC2 connection exposing only what the
    ``detect_potential_sigv4`` decorator inspects (``region`` and ``_mexe``)."""
    def __init__(self, *args, **kwargs):
        self.region = kwargs.pop('region', None)
    @detect_potential_sigv4
    def _required_auth_capability(self):
        # Sentinel: the decorator substitutes ['hmac-v4'] when SigV4 is
        # required by the region or opted into via config/env.
        return ['nope']
    def _mexe(self, *args, **kwargs):
        pass
class TestS3SigV4OptIn(MockServiceWithConfigTestCase):
    """SigV4 opt-in/opt-out behavior for S3: default is opt-out, forced for
    regions that only support SigV4, and opt-in via config or environment."""
    connection_class = FakeS3Connection
    def test_sigv4_opt_out(self):
        # Default is opt-out.
        fake = FakeS3Connection(host='s3.amazonaws.com')
        self.assertEqual(fake._required_auth_capability(), ['nope'])
    def test_sigv4_non_optional(self):
        # Requires SigV4.
        for region in ['.cn-north', '.eu-central', '-eu-central']:
            fake = FakeS3Connection(host='s3' + region + '-1.amazonaws.com')
            self.assertEqual(
                fake._required_auth_capability(), ['hmac-v4-s3'])
    def test_sigv4_opt_in_config(self):
        # Opt-in via the config.
        self.config = {
            's3': {
                'use-sigv4': True,
            },
        }
        fake = FakeS3Connection()
        self.assertEqual(fake._required_auth_capability(), ['hmac-v4-s3'])
    def test_sigv4_opt_in_env(self):
        # Opt-in via the ENV.
        self.environ['S3_USE_SIGV4'] = True
        fake = FakeS3Connection(host='s3.amazonaws.com')
        self.assertEqual(fake._required_auth_capability(), ['hmac-v4-s3'])
class TestSigV4OptIn(MockServiceWithConfigTestCase):
    """SigV4 opt-in/opt-out behavior for EC2: default is opt-out, forced for
    SigV4-only regions (cn-north-1), and opt-in via config or environment."""
    connection_class = FakeEC2Connection
    def setUp(self):
        super(TestSigV4OptIn, self).setUp()
        self.standard_region = RegionInfo(
            name='us-west-2',
            endpoint='ec2.us-west-2.amazonaws.com'
        )
        self.sigv4_region = RegionInfo(
            name='cn-north-1',
            endpoint='ec2.cn-north-1.amazonaws.com.cn'
        )
    def test_sigv4_opt_out(self):
        # Default is opt-out.
        fake = FakeEC2Connection(region=self.standard_region)
        self.assertEqual(fake._required_auth_capability(), ['nope'])
    def test_sigv4_non_optional(self):
        # Requires SigV4.
        fake = FakeEC2Connection(region=self.sigv4_region)
        self.assertEqual(fake._required_auth_capability(), ['hmac-v4'])
    def test_sigv4_opt_in_config(self):
        # Opt-in via the config.
        self.config = {
            'ec2': {
                'use-sigv4': True,
            },
        }
        fake = FakeEC2Connection(region=self.standard_region)
        self.assertEqual(fake._required_auth_capability(), ['hmac-v4'])
    def test_sigv4_opt_in_env(self):
        # Opt-in via the ENV.
        self.environ['EC2_USE_SIGV4'] = True
        fake = FakeEC2Connection(region=self.standard_region)
        self.assertEqual(fake._required_auth_capability(), ['hmac-v4'])
| mit |
nesdis/djongo | tests/django_tests/tests/v21/tests/utils_tests/test_hashable.py | 50 | 1177 | from django.test import SimpleTestCase
from django.utils.hashable import make_hashable
class TestHashable(SimpleTestCase):
    """Tests for ``django.utils.hashable.make_hashable``, which converts
    arbitrarily nested unhashable containers into hashable equivalents."""
    def test_equal(self):
        # (input, expected) pairs: lists and dicts become tuples, sets become
        # frozen/equal-as-set values, recursively.
        tests = (
            ([], ()),
            (['a', 1], ('a', 1)),
            ({}, ()),
            ({'a'}, ('a',)),
            (frozenset({'a'}), {'a'}),
            ({'a': 1}, (('a', 1),)),
            (('a', ['b', 1]), ('a', ('b', 1))),
            (('a', {'b': 1}), ('a', (('b', 1),))),
        )
        for value, expected in tests:
            with self.subTest(value=value):
                self.assertEqual(make_hashable(value), expected)
    def test_count_equal(self):
        # Element order of converted dicts is not part of the contract, so
        # compare as multisets rather than exact sequences.
        tests = (
            ({'a': 1, 'b': ['a', 1]}, (('a', 1), ('b', ('a', 1)))),
            ({'a': 1, 'b': ('a', [1, 2])}, (('a', 1), ('b', ('a', (1, 2))))),
        )
        for value, expected in tests:
            with self.subTest(value=value):
                self.assertCountEqual(make_hashable(value), expected)
    def test_unhashable(self):
        # An explicitly unhashable object must propagate TypeError.
        class Unhashable:
            __hash__ = None
        with self.assertRaisesMessage(TypeError, "unhashable type: 'Unhashable'"):
            make_hashable(Unhashable())
| agpl-3.0 |
helloweishi/ostinato | test/rpctest.py | 20 | 18805 | #! /usr/bin/env python
# standard modules
import logging
import os
import subprocess
import sys
import time
sys.path.insert(1, '../binding')
from core import ost_pb, DroneProxy
from rpc import RpcError
from protocols.mac_pb2 import mac
from protocols.ip4_pb2 import ip4, Ip4
class Test:
    """Plain record for a single test's ``name`` and ``passed`` result."""
    pass
class TestSuite:
    """Collects results of sequentially executed tests and prints a report.

    Usage: call test_begin(name), run the test, call test_end(result);
    when the whole script ran to the end, call complete() and report().
    """
    def __init__(self):
        self.results = []       # finished Test records, in execution order
        self.total = 0          # number of tests finished
        self.passed = 0         # number of tests that passed
        self.completed = False  # set by complete() when the suite ran fully
        # Initialized here so test_end() without a matching test_begin()
        # raises the intended Exception instead of an AttributeError.
        self.running = None
    def test_begin(self, name):
        """Start a new test called *name* and print its banner."""
        test = Test()
        test.name = name
        test.passed = False
        self.running = test
        print('-----------------------------------------------------------')
        print('@@TEST: %s' % name)
        print('-----------------------------------------------------------')
    def test_end(self, result):
        """Finish the running test, record *result* and print the verdict.

        Raises:
            Exception: if no test is currently running.
        """
        if self.running:
            self.running.passed = result
            self.results.append(self.running)
            self.total = self.total + 1
            if result:
                self.passed = self.passed + 1
            self.running = None
            print('@@RESULT: %s' % ('PASS' if result else 'FAIL'))
        else:
            raise Exception('Test end without a test begin')
    def report(self):
        """Print a summary table of all recorded results."""
        print('===========================================================')
        print('TEST REPORT')
        print('===========================================================')
        for test in self.results:
            print('%s: %d' % (test.name, test.passed))
        print('Passed: %d/%d' % (self.passed, self.total))
        print('Completed: %d' % (self.completed))
    def complete(self):
        """Mark the whole suite as having run to completion."""
        self.completed = True
    def passed(self):
        """Return True if every test passed and the suite completed.

        BUG FIX: previously referenced the bare names ``passed``/``total``,
        which raised NameError when called.
        NOTE(review): the instance attribute ``self.passed`` assigned in
        __init__ shadows this method on instances, so it is only reachable
        as ``TestSuite.passed(suite)``; consider renaming one of the two.
        """
        return self.passed == self.total and self.completed
# initialize defaults
host_name = '127.0.0.1'
# -1 means "not discovered yet"; a loopback port found below fills these in.
tx_port_number = -1
rx_port_number = -1
drone_version = ['0', '0', '0']
# tshark is used for capture verification later in the script.
if sys.platform == 'win32':
    tshark = r'C:\Program Files\Wireshark\tshark.exe'
else:
    tshark = 'tshark'
# setup logging
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
print('')
print('This test uses the following topology -')
print('')
print('  +-------+          ')
print('  |       |Tx--->----+')
print('  | Drone |          |')
print('  |       |Rx---<----+')
print('  +-------+          ')
print('')
print('A loopback port is used as both the Tx and Rx ports')
print('')
suite = TestSuite()
# DroneProxy only stores the endpoint here; the connection is made later.
drone = DroneProxy(host_name)
try:
# ----------------------------------------------------------------- #
# TESTCASE: Verify any RPC before checkVersion() fails and the server
# closes the connection
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('anyRpcBeforeCheckVersionFails')
drone.channel.connect(drone.host, drone.port)
try:
port_id_list = drone.getPortIdList()
except RpcError as e:
if ('compatibility check pending' in str(e)):
passed = True
else:
raise
finally:
drone.channel.disconnect()
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify DroneProxy.connect() fails for incompatible version
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('connectFailsForIncompatibleVersion')
try:
drone.proxy_version = '0.1.1'
drone.connect()
except RpcError as e:
if ('needs client version' in str(e)):
passed = True
drone_version = str(e).split()[-1].split('.')
else:
raise
finally:
drone.proxy_version = None
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify checkVersion() fails for invalid client version format
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('checkVersionFailsForInvalidClientVersion')
try:
drone.proxy_version = '0-1-1'
drone.connect()
except RpcError as e:
if ('invalid version' in str(e)):
passed = True
else:
raise
finally:
drone.proxy_version = None
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify checkVersion() returns incompatible if the 'major'
# part of the <major.minor.revision> numbering format is
# different than the server's version and the server closes
# the connection
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('checkVersionReturnsIncompatForDifferentMajorVersion')
try:
drone.proxy_version = (str(int(drone_version[0])+1)
+ '.' + drone_version[1])
drone.connect()
except RpcError as e:
#FIXME: How to check for a closed connection?
if ('needs client version' in str(e)):
passed = True
else:
raise
finally:
drone.proxy_version = None
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify checkVersion() returns incompatible if the 'minor'
# part of the <major.minor.revision> numbering format is
# different than the server's version and the server closes
# the connection
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('checkVersionReturnsIncompatForDifferentMinorVersion')
try:
drone.proxy_version = (drone_version[0]
+ '.' + str(int(drone_version[1])+1))
drone.connect()
except RpcError as e:
#FIXME: How to check for a closed connection?
if ('needs client version' in str(e)):
passed = True
else:
raise
finally:
drone.proxy_version = None
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify checkVersion() returns compatible if the 'revision'
# part of the <major.minor.revision> numbering format is
# different than the server's version
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('checkVersionReturnsCompatForDifferentRevisionVersion')
try:
drone.proxy_version = (drone_version[0]
+ '.' + drone_version[1]
+ '.' + '999')
drone.connect()
passed = True
except RpcError as e:
raise
finally:
drone.proxy_version = None
suite.test_end(passed)
# ----------------------------------------------------------------- #
# Baseline Configuration for subsequent testcases
# ----------------------------------------------------------------- #
# connect to drone
log.info('connecting to drone(%s:%d)'
% (drone.hostName(), drone.portNumber()))
drone.connect()
# retreive port id list
log.info('retreiving port list')
port_id_list = drone.getPortIdList()
# retreive port config list
log.info('retreiving port config for all ports')
port_config_list = drone.getPortConfig(port_id_list)
if len(port_config_list.port) == 0:
log.warning('drone has no ports!')
sys.exit(1)
# iterate port list to find a loopback port to use as the tx/rx port id
print('Port List')
print('---------')
for port in port_config_list.port:
print('%d.%s (%s)' % (port.port_id.id, port.name, port.description))
# use a loopback port as default tx/rx port
if ('lo' in port.name or 'loopback' in port.description.lower()):
tx_port_number = port.port_id.id
rx_port_number = port.port_id.id
if tx_port_number < 0 or rx_port_number < 0:
log.warning('loopback port not found')
sys.exit(1)
print('Using port %d as tx/rx port(s)' % tx_port_number)
tx_port = ost_pb.PortIdList()
tx_port.port_id.add().id = tx_port_number;
rx_port = ost_pb.PortIdList()
rx_port.port_id.add().id = rx_port_number;
# add a stream
stream_id = ost_pb.StreamIdList()
stream_id.port_id.CopyFrom(tx_port.port_id[0])
stream_id.stream_id.add().id = 1
log.info('adding tx_stream %d' % stream_id.stream_id[0].id)
drone.addStream(stream_id)
# configure the stream
stream_cfg = ost_pb.StreamConfigList()
stream_cfg.port_id.CopyFrom(tx_port.port_id[0])
s = stream_cfg.stream.add()
s.stream_id.id = stream_id.stream_id[0].id
s.core.is_enabled = True
s.control.num_packets = 10
# setup stream protocols as mac:eth2:ip4:udp:payload
p = s.protocol.add()
p.protocol_id.id = ost_pb.Protocol.kMacFieldNumber
p.Extensions[mac].dst_mac = 0x001122334455
p.Extensions[mac].src_mac = 0x00aabbccddee
p = s.protocol.add()
p.protocol_id.id = ost_pb.Protocol.kEth2FieldNumber
p = s.protocol.add()
p.protocol_id.id = ost_pb.Protocol.kIp4FieldNumber
# reduce typing by creating a shorter reference to p.Extensions[ip4]
ip = p.Extensions[ip4]
ip.src_ip = 0x01020304
ip.dst_ip = 0x05060708
ip.dst_ip_mode = Ip4.e_im_inc_host
s.protocol.add().protocol_id.id = ost_pb.Protocol.kUdpFieldNumber
s.protocol.add().protocol_id.id = ost_pb.Protocol.kPayloadFieldNumber
log.info('configuring tx_stream %d' % stream_id.stream_id[0].id)
drone.modifyStream(stream_cfg)
# clear tx/rx stats
log.info('clearing tx/rx stats')
drone.clearStats(tx_port)
drone.clearStats(rx_port)
# ----------------------------------------------------------------- #
# TODO:
# TESTCASE: Verify a RPC with missing required fields in request fails
# and subsequently passes when the fields are initialized
# ----------------------------------------------------------------- #
# passed = False
# suite.test_begin('rpcWithMissingRequiredFieldsFails')
# pid = ost_pb.PortId()
# try:
# sid_list = drone.getStreamIdList(pid)
# except RpcError as e:
# if ('missing required fields in request' in str(e)):
# passed = True
# else:
# raise
#
# try:
# pid.id = tx_port_number
# sid_list = drone.getStreamIdList(pid)
# except RpcError as e:
# passed = False
# raise
# finally:
# suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify invoking addStream() during transmit fails
# TESTCASE: Verify invoking modifyStream() during transmit fails
# TESTCASE: Verify invoking deleteStream() during transmit fails
# ----------------------------------------------------------------- #
sid = ost_pb.StreamIdList()
sid.port_id.CopyFrom(tx_port.port_id[0])
sid.stream_id.add().id = 2
passed = False
suite.test_begin('addStreamDuringTransmitFails')
drone.startTransmit(tx_port)
try:
log.info('adding tx_stream %d' % sid.stream_id[0].id)
drone.addStream(sid)
except RpcError as e:
if ('Port Busy' in str(e)):
passed = True
else:
raise
finally:
drone.stopTransmit(tx_port)
suite.test_end(passed)
passed = False
suite.test_begin('modifyStreamDuringTransmitFails')
scfg = ost_pb.StreamConfigList()
scfg.port_id.CopyFrom(tx_port.port_id[0])
s = scfg.stream.add()
s.stream_id.id = sid.stream_id[0].id
s.protocol.add().protocol_id.id = ost_pb.Protocol.kMacFieldNumber
s.protocol.add().protocol_id.id = ost_pb.Protocol.kArpFieldNumber
s.protocol.add().protocol_id.id = ost_pb.Protocol.kPayloadFieldNumber
drone.startTransmit(tx_port)
try:
log.info('configuring tx_stream %d' % sid.stream_id[0].id)
drone.modifyStream(scfg)
except RpcError as e:
if ('Port Busy' in str(e)):
passed = True
else:
raise
finally:
drone.stopTransmit(tx_port)
suite.test_end(passed)
passed = False
suite.test_begin('deleteStreamDuringTransmitFails')
drone.startTransmit(tx_port)
try:
log.info('deleting tx_stream %d' % sid.stream_id[0].id)
drone.deleteStream(sid)
except RpcError as e:
if ('Port Busy' in str(e)):
passed = True
else:
raise
finally:
drone.stopTransmit(tx_port)
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify invoking startTransmit() during transmit is a NOP,
# not a restart
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('startTransmitDuringTransmitIsNopNotRestart')
drone.startCapture(rx_port)
drone.startTransmit(tx_port)
try:
log.info('sleeping for 4s ...')
time.sleep(4)
log.info('starting transmit multiple times')
drone.startTransmit(tx_port)
time.sleep(1)
drone.startTransmit(tx_port)
time.sleep(1)
drone.startTransmit(tx_port)
time.sleep(1)
log.info('waiting for transmit to finish ...')
time.sleep(5)
drone.stopTransmit(tx_port)
drone.stopCapture(rx_port)
buff = drone.getCaptureBuffer(rx_port.port_id[0])
drone.saveCaptureBuffer(buff, 'capture.pcap')
log.info('dumping Rx capture buffer')
cap_pkts = subprocess.check_output([tshark, '-r', 'capture.pcap'])
print(cap_pkts)
if '5.6.7.8' in cap_pkts:
passed = True
os.remove('capture.pcap')
except RpcError as e:
raise
finally:
drone.stopTransmit(tx_port)
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify invoking startCapture() during capture is a NOP,
# not a restart
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('startCaptureDuringTransmitIsNopNotRestart')
try:
drone.startCapture(rx_port)
drone.startTransmit(tx_port)
log.info('sleeping for 4s ...')
time.sleep(4)
log.info('starting capture multiple times')
drone.startCapture(rx_port)
time.sleep(1)
drone.startCapture(rx_port)
time.sleep(1)
drone.startCapture(rx_port)
time.sleep(1)
log.info('waiting for transmit to finish ...')
time.sleep(5)
drone.stopTransmit(tx_port)
drone.stopCapture(rx_port)
buff = drone.getCaptureBuffer(rx_port.port_id[0])
drone.saveCaptureBuffer(buff, 'capture.pcap')
log.info('dumping Rx capture buffer')
cap_pkts = subprocess.check_output([tshark, '-r', 'capture.pcap'])
print(cap_pkts)
if '5.6.7.8' in cap_pkts:
passed = True
os.remove('capture.pcap')
except RpcError as e:
raise
finally:
drone.stopTransmit(tx_port)
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify invoking stopTransmit() when transmit is not running
# is a NOP
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('stopTransmitWhenTransmitNotRunningIsNop')
try:
tx_stats = drone.getStats(tx_port)
log.info('--> (tx_stats)' + tx_stats.__str__())
if tx_stats.port_stats[0].state.is_transmit_on:
raise Exception('Unexpected transmit ON state')
log.info('stopping transmit multiple times')
drone.stopTransmit(tx_port)
time.sleep(1)
drone.stopTransmit(tx_port)
time.sleep(1)
drone.stopTransmit(tx_port)
# if we reached here, that means there was no exception
passed = True
except RpcError as e:
raise
finally:
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify invoking stopCapture() when capture is not running
# is a NOP
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('stopCaptureWhenCaptureNotRunningIsNop')
try:
rx_stats = drone.getStats(rx_port)
log.info('--> (rx_stats)' + rx_stats.__str__())
if rx_stats.port_stats[0].state.is_capture_on:
raise Exception('Unexpected capture ON state')
log.info('stopping capture multiple times')
drone.stopCapture(rx_port)
time.sleep(1)
drone.stopCapture(rx_port)
time.sleep(1)
drone.stopCapture(rx_port)
# if we reached here, that means there was no exception
passed = True
except RpcError as e:
raise
finally:
suite.test_end(passed)
# ----------------------------------------------------------------- #
# TESTCASE: Verify startCapture(), startTransmit() sequence captures the
# first packet
# TESTCASE: Verify stopTransmit(), stopCapture() sequence captures the
# last packet
# ----------------------------------------------------------------- #
passed = False
suite.test_begin('startStopTransmitCaptureOrderCapturesAllPackets')
try:
drone.startCapture(rx_port)
drone.startTransmit(tx_port)
log.info('waiting for transmit to finish ...')
time.sleep(12)
drone.stopTransmit(tx_port)
drone.stopCapture(rx_port)
log.info('getting Rx capture buffer')
buff = drone.getCaptureBuffer(rx_port.port_id[0])
drone.saveCaptureBuffer(buff, 'capture.pcap')
log.info('dumping Rx capture buffer')
cap_pkts = subprocess.check_output([tshark, '-r', 'capture.pcap'])
print(cap_pkts)
if '5.6.7.8' in cap_pkts and '5.6.7.17' in cap_pkts:
passed = True
os.remove('capture.pcap')
except RpcError as e:
raise
finally:
drone.stopTransmit(tx_port)
suite.test_end(passed)
suite.complete()
# delete streams
log.info('deleting tx_stream %d' % stream_id.stream_id[0].id)
drone.deleteStream(stream_id)
# bye for now
drone.disconnect()
except Exception as ex:
log.exception(ex)
finally:
suite.report()
if not suite.passed:
sys.exit(2);
| gpl-3.0 |
devs4v/devs4v-information-retrieval15 | project/venv/lib/python2.7/site-packages/nltk/cluster/gaac.py | 10 | 5812 | # Natural Language Toolkit: Group Average Agglomerative Clusterer
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Trevor Cohn <tacohn@cs.mu.oz.au>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function, unicode_literals
try:
import numpy
except ImportError:
pass
from nltk.cluster.util import VectorSpaceClusterer, Dendrogram, cosine_distance
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class GAAClusterer(VectorSpaceClusterer):
    """
    The Group Average Agglomerative starts with each of the N vectors as singleton
    clusters. It then iteratively merges pairs of clusters which have the
    closest centroids. This continues until there is only one cluster. The
    order of merges gives rise to a dendrogram: a tree with the earlier merges
    lower than later merges. The membership of a given number of clusters c, 1
    <= c <= N, can be found by cutting the dendrogram at depth c.

    This clusterer uses the cosine similarity metric only, which allows for
    efficient speed-up in the clustering process.
    """

    def __init__(self, num_clusters=1, normalise=True, svd_dimensions=None):
        """
        :param num_clusters: the number of clusters to cut the dendrogram at
        :param normalise: whether to normalise vectors to unit length
        :param svd_dimensions: number of dimensions to reduce to using SVD,
            or None for no reduction
        """
        VectorSpaceClusterer.__init__(self, normalise, svd_dimensions)
        self._num_clusters = num_clusters
        # Dendrogram recording the merge history; built by cluster().
        self._dendrogram = None
        self._groups_values = None

    def cluster(self, vectors, assign_clusters=False, trace=False):
        # The dendrogram stores the merge order; seed it with the raw vectors
        # as float64 so centroid arithmetic is done in floating point.
        self._dendrogram = Dendrogram(
            [numpy.array(vector, numpy.float64) for vector in vectors])
        return VectorSpaceClusterer.cluster(self, vectors, assign_clusters, trace)

    def cluster_vectorspace(self, vectors, trace=False):
        """Agglomeratively merge ``vectors`` until ``num_clusters`` remain."""
        # Variables describing the initial situation: N singleton clusters.
        N = len(vectors)
        cluster_len = [1] * N
        cluster_count = N
        index_map = numpy.arange(N)
        # Construct the (upper-triangular) distance matrix.
        dims = (N, N)
        # NOTE: use numpy.float64 explicitly -- the old ``numpy.float`` alias
        # for the builtin float was deprecated in NumPy 1.20 and removed in
        # NumPy 1.24, so it raises AttributeError on current NumPy releases.
        dist = numpy.ones(dims, dtype=numpy.float64) * numpy.inf
        for i in range(N):
            for j in range(i + 1, N):
                dist[i, j] = cosine_distance(vectors[i], vectors[j])
        while cluster_count > max(self._num_clusters, 1):
            # Merge the pair of clusters with the smallest distance.
            i, j = numpy.unravel_index(dist.argmin(), dims)
            if trace:
                print("merging %d and %d" % (i, j))
            # Update similarities for merging i and j.
            self._merge_similarities(dist, cluster_len, i, j)
            # Remove j from further consideration.
            dist[:, j] = numpy.inf
            dist[j, :] = numpy.inf
            # Merge the clusters.
            cluster_len[i] = cluster_len[i] + cluster_len[j]
            self._dendrogram.merge(index_map[i], index_map[j])
            cluster_count -= 1
            # Update the index map to reflect the indexes if we
            # had removed j.
            index_map[j + 1:] -= 1
            index_map[j] = N
        self.update_clusters(self._num_clusters)

    def _merge_similarities(self, dist, cluster_len, i, j):
        # The new cluster i merged from i and j adopts the average of
        # i and j's similarity to each other cluster, weighted by the
        # number of points in the clusters i and j.
        i_weight = cluster_len[i]
        j_weight = cluster_len[j]
        weight_sum = i_weight + j_weight
        # Update for x < i.
        dist[:i, i] = dist[:i, i] * i_weight + dist[:i, j] * j_weight
        dist[:i, i] /= weight_sum
        # Update for i < x < j.
        dist[i, i + 1:j] = dist[i, i + 1:j] * i_weight + dist[i + 1:j, j] * j_weight
        # Update for i < j < x.
        dist[i, j + 1:] = dist[i, j + 1:] * i_weight + dist[j, j + 1:] * j_weight
        dist[i, i + 1:] /= weight_sum

    def update_clusters(self, num_clusters):
        """Recompute the centroids for a ``num_clusters``-way dendrogram cut."""
        clusters = self._dendrogram.groups(num_clusters)
        self._centroids = []
        for cluster in clusters:
            assert len(cluster) > 0
            if self._should_normalise:
                centroid = self._normalise(cluster[0])
            else:
                centroid = numpy.array(cluster[0])
            for vector in cluster[1:]:
                if self._should_normalise:
                    centroid += self._normalise(vector)
                else:
                    centroid += vector
            centroid /= float(len(cluster))
            self._centroids.append(centroid)
        self._num_clusters = len(self._centroids)

    def classify_vectorspace(self, vector):
        """Return the index of the cluster whose centroid is nearest."""
        best = None
        for i in range(self._num_clusters):
            centroid = self._centroids[i]
            dist = cosine_distance(vector, centroid)
            if not best or dist < best[0]:
                best = (dist, i)
        return best[1]

    def dendrogram(self):
        """
        :return: The dendrogram representing the current clustering
        :rtype:  Dendrogram
        """
        return self._dendrogram

    def num_clusters(self):
        return self._num_clusters

    def __repr__(self):
        return '<GroupAverageAgglomerative Clusterer n=%d>' % self._num_clusters
def demo():
    """
    Non-interactive demonstration of the clusterers with simple 2-D data.
    """
    from nltk.cluster import GAAClusterer
    # Sample data: six points in the plane.
    raw_points = [[3, 3], [1, 2], [4, 2], [4, 0], [2, 3], [3, 1]]
    vectors = []
    for point in raw_points:
        vectors.append(numpy.array(point))
    # Run the GAAC clusterer, cutting the dendrogram into 4 clusters.
    clusterer = GAAClusterer(4)
    clusters = clusterer.cluster(vectors, True)
    print('Clusterer:', clusterer)
    print('Clustered:', vectors)
    print('As:', clusters)
    print()
    # Display the merge tree.
    clusterer.dendrogram().show()
    # Assign a previously unseen vector to its nearest cluster.
    query = numpy.array([3, 3])
    print('classify(%s):' % query, end=' ')
    print(clusterer.classify(query))
    print()
# Run the demonstration when this module is executed as a script.
if __name__ == '__main__':
    demo()
| mit |
jni/networkx | networkx/algorithms/smetric.py | 76 | 1206 | import networkx as nx
#from networkx.generators.smax import li_smax_graph
def s_metric(G, normalized=True):
    """Return the s-metric of graph.

    The s-metric is defined as the sum of the products deg(u)*deg(v)
    for every edge (u,v) in G. If ``normalized`` is true, the intent is to
    construct the s-max graph, compute its s-metric, and return the
    normalized s value -- but normalization is not yet implemented and
    raises an error.

    Parameters
    ----------
    G    : graph
           The graph used to compute the s-metric.
    normalized : bool (optional)
           Normalize the value.

    Returns
    -------
    s : float
        The s-metric of the graph.

    References
    ----------
    .. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger,
           Towards a Theory of Scale-Free Graphs:
           Definition, Properties, and Implications (Extended Version), 2005.
           http://arxiv.org/abs/cond-mat/0501169
    """
    if normalized:
        # Normalization needs the s-max graph (see the commented-out
        # li_smax_graph import at the top of this module), which is not
        # implemented yet.
        raise nx.NetworkXError("Normalization not implemented")
    # Sum deg(u)*deg(v) over every edge; a generator expression avoids
    # materialising an intermediate list of all the products.
    return float(sum(G.degree(u) * G.degree(v) for (u, v) in G.edges_iter()))
| bsd-3-clause |
beni55/django | tests/gis_tests/geo3d/tests.py | 17 | 12625 | from __future__ import unicode_literals
import os
import re
from unittest import skipUnless
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geos import HAS_GEOS
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango20Warning
if HAS_GEOS:
from django.contrib.gis.db.models import Union, Extent3D
from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon
from .models import (City3D, Interstate2D, Interstate3D, InterstateProj2D,
InterstateProj3D, Point2D, Point3D, MultiPoint3D, Polygon2D, Polygon3D)
if HAS_GDAL:
from django.contrib.gis.utils import LayerMapping, LayerMapError
data_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data'))
city_file = os.path.join(data_path, 'cities', 'cities.shp')
vrt_file = os.path.join(data_path, 'test_vrt', 'test_vrt.vrt')
# The coordinates of each city, with Z values corresponding to their
# altitude in meters.
city_data = (
    ('Houston', (-95.363151, 29.763374, 18)),
    ('Dallas', (-96.801611, 32.782057, 147)),
    ('Oklahoma City', (-97.521157, 34.464642, 380)),
    ('Wellington', (174.783117, -41.315268, 14)),
    ('Pueblo', (-104.609252, 38.255001, 1433)),
    ('Lawrence', (-95.235060, 38.971823, 251)),
    ('Chicago', (-87.650175, 41.850385, 181)),
    ('Victoria', (-123.305196, 48.462611, 15)),
)
# Reference mapping of city name to its full (lon, lat, Z) coordinate
# triple; tests index element [2] for the altitude.
city_dict = {name: coords for name, coords in city_data}
# 3D freeway data derived from the National Elevation Dataset:
# http://seamless.usgs.gov/products/9arc.php
# Each entry is (name, 3D LINESTRING WKT, expected tuple of Z values).
interstate_data = (
    ('I-45',
     'LINESTRING(-95.3708481 29.7765870 11.339,-95.3694580 29.7787980 4.536,'
     '-95.3690305 29.7797359 9.762,-95.3691886 29.7812450 12.448,'
     '-95.3696447 29.7850144 10.457,-95.3702511 29.7868518 9.418,'
     '-95.3706724 29.7881286 14.858,-95.3711632 29.7896157 15.386,'
     '-95.3714525 29.7936267 13.168,-95.3717848 29.7955007 15.104,'
     '-95.3717719 29.7969804 16.516,-95.3717305 29.7982117 13.923,'
     '-95.3717254 29.8000778 14.385,-95.3719875 29.8013539 15.160,'
     '-95.3720575 29.8026785 15.544,-95.3721321 29.8040912 14.975,'
     '-95.3722074 29.8050998 15.688,-95.3722779 29.8060430 16.099,'
     '-95.3733818 29.8076750 15.197,-95.3741563 29.8103686 17.268,'
     '-95.3749458 29.8129927 19.857,-95.3763564 29.8144557 15.435)',
     (11.339, 4.536, 9.762, 12.448, 10.457, 9.418, 14.858,
      15.386, 13.168, 15.104, 16.516, 13.923, 14.385, 15.16,
      15.544, 14.975, 15.688, 16.099, 15.197, 17.268, 19.857,
      15.435),
     ),
)
# Bounding box polygon for inner-loop of Houston (in projected coordinate
# system 32140), with elevation values from the National Elevation Dataset
# (see above). Structure: (2D POLYGON WKT, Z value per ring vertex).
bbox_data = (
    'POLYGON((941527.97 4225693.20,962596.48 4226349.75,963152.57 4209023.95,'
    '942051.75 4208366.38,941527.97 4225693.20))',
    (21.71, 13.21, 9.12, 16.40, 21.71)
)
@skipUnless(HAS_GDAL, "GDAL is required for Geo3DTest.")
@skipUnlessDBFeature("gis_enabled", "supports_3d_functions")
class Geo3DTest(TestCase):
    """
    Only a subset of the PostGIS routines are 3D-enabled, and this TestCase
    tries to test the features that can handle 3D and that are also
    available within GeoDjango. For more information, see the PostGIS docs
    on the routines that support 3D:
    http://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
    """

    def _load_interstate_data(self):
        """Create 2D and 3D interstate fixtures, in both geographic and
        projected coordinate systems, from the module-level data."""
        # Interstate (2D / 3D and Geographic/Projected variants)
        for name, line, exp_z in interstate_data:
            line_3d = GEOSGeometry(line, srid=4269)
            # Strip the Z coordinate to derive the matching 2D geometry.
            line_2d = LineString([l[:2] for l in line_3d.coords], srid=4269)
            # Creating a geographic and projected version of the
            # interstate in both 2D and 3D.
            Interstate3D.objects.create(name=name, line=line_3d)
            InterstateProj3D.objects.create(name=name, line=line_3d)
            Interstate2D.objects.create(name=name, line=line_2d)
            InterstateProj2D.objects.create(name=name, line=line_2d)

    def _load_city_data(self):
        """Create a 3D point fixture for each entry in ``city_data``."""
        for name, pnt_data in city_data:
            City3D.objects.create(name=name, point=Point(*pnt_data, srid=4326))

    def _load_polygon_data(self):
        """Create matching 2D and 3D polygon fixtures from ``bbox_data``."""
        bbox_wkt, bbox_z = bbox_data
        bbox_2d = GEOSGeometry(bbox_wkt, srid=32140)
        # Zip each 2D ring vertex with its elevation to build the 3D ring.
        bbox_3d = Polygon(tuple((x, y, z) for (x, y), z in zip(bbox_2d[0].coords, bbox_z)), srid=32140)
        Polygon2D.objects.create(name='2D BBox', poly=bbox_2d)
        Polygon3D.objects.create(name='3D BBox', poly=bbox_3d)

    def test_3d_hasz(self):
        """
        Make sure data is 3D and has expected Z values -- shouldn't change
        because of coordinate system.
        """
        self._load_interstate_data()
        for name, line, exp_z in interstate_data:
            interstate = Interstate3D.objects.get(name=name)
            interstate_proj = InterstateProj3D.objects.get(name=name)
            for i in [interstate, interstate_proj]:
                self.assertTrue(i.line.hasz)
                self.assertEqual(exp_z, tuple(i.line.z))
        self._load_city_data()
        for name, pnt_data in city_data:
            city = City3D.objects.get(name=name)
            z = pnt_data[2]
            self.assertTrue(city.point.hasz)
            self.assertEqual(z, city.point.z)

    def test_3d_polygons(self):
        """
        Test the creation of polygon 3D models.
        """
        self._load_polygon_data()
        p3d = Polygon3D.objects.get(name='3D BBox')
        self.assertTrue(p3d.poly.hasz)
        self.assertIsInstance(p3d.poly, Polygon)
        self.assertEqual(p3d.poly.srid, 32140)

    def test_3d_layermapping(self):
        """
        Testing LayerMapping on 3D models.
        """
        point_mapping = {'point': 'POINT'}
        mpoint_mapping = {'mpoint': 'MULTIPOINT'}
        # The VRT is 3D, but should still be able to map sans the Z.
        lm = LayerMapping(Point2D, vrt_file, point_mapping, transform=False)
        lm.save()
        self.assertEqual(3, Point2D.objects.count())
        # The city shapefile is 2D, and won't be able to fill the coordinates
        # in the 3D model -- thus, a LayerMapError is raised.
        self.assertRaises(LayerMapError, LayerMapping,
            Point3D, city_file, point_mapping, transform=False)
        # 3D model should take 3D data just fine.
        lm = LayerMapping(Point3D, vrt_file, point_mapping, transform=False)
        lm.save()
        self.assertEqual(3, Point3D.objects.count())
        # Making sure LayerMapping.make_multi works right, by converting
        # a Point25D into a MultiPoint25D.
        lm = LayerMapping(MultiPoint3D, vrt_file, mpoint_mapping, transform=False)
        lm.save()
        self.assertEqual(3, MultiPoint3D.objects.count())

    def test_kml(self):
        """
        Test GeoQuerySet.kml() with Z values.
        """
        self._load_city_data()
        h = City3D.objects.kml(precision=6).get(name='Houston')
        # KML should be 3D.
        # `SELECT ST_AsKML(point, 6) FROM geo3d_city3d WHERE name = 'Houston';`
        ref_kml_regex = re.compile(r'^<Point><coordinates>-95.363\d+,29.763\d+,18</coordinates></Point>$')
        self.assertTrue(ref_kml_regex.match(h.kml))

    def test_geojson(self):
        """
        Test GeoQuerySet.geojson() with Z values.
        """
        self._load_city_data()
        h = City3D.objects.geojson(precision=6).get(name='Houston')
        # GeoJSON should be 3D
        # `SELECT ST_AsGeoJSON(point, 6) FROM geo3d_city3d WHERE name='Houston';`
        ref_json_regex = re.compile(r'^{"type":"Point","coordinates":\[-95.363151,29.763374,18(\.0+)?\]}$')
        self.assertTrue(ref_json_regex.match(h.geojson))

    def test_union(self):
        """
        Testing the Union aggregate of 3D models.
        """
        # PostGIS query that returned the reference EWKT for this test:
        # `SELECT ST_AsText(ST_Union(point)) FROM geo3d_city3d;`
        self._load_city_data()
        ref_ewkt = (
            'SRID=4326;MULTIPOINT(-123.305196 48.462611 15,-104.609252 38.255001 1433,'
            '-97.521157 34.464642 380,-96.801611 32.782057 147,-95.363151 29.763374 18,'
            '-95.23506 38.971823 251,-87.650175 41.850385 181,174.783117 -41.315268 14)'
        )
        ref_union = GEOSGeometry(ref_ewkt)
        union = City3D.objects.aggregate(Union('point'))['point__union']
        self.assertTrue(union.hasz)
        # Ordering of points in the resulting geometry may vary between implementations
        self.assertSetEqual({p.ewkt for p in ref_union}, {p.ewkt for p in union})

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_extent(self):
        """
        Testing the Extent3D aggregate for 3D models.
        """
        self._load_city_data()
        # `SELECT ST_Extent3D(point) FROM geo3d_city3d;`
        ref_extent3d = (-123.305196, -41.315268, 14, 174.783117, 48.462611, 1433)
        extent1 = City3D.objects.aggregate(Extent3D('point'))['point__extent3d']
        extent2 = City3D.objects.extent3d()

        def check_extent3d(extent3d, tol=6):
            # Compare each component of the 3D extent to the reference.
            for ref_val, ext_val in zip(ref_extent3d, extent3d):
                self.assertAlmostEqual(ref_val, ext_val, tol)

        for e3d in [extent1, extent2]:
            check_extent3d(e3d)
        self.assertIsNone(City3D.objects.none().extent3d())
        self.assertIsNone(City3D.objects.none().aggregate(Extent3D('point'))['point__extent3d'])

    def test_perimeter(self):
        """
        Testing GeoQuerySet.perimeter() on 3D fields.
        """
        self._load_polygon_data()
        # Reference query for values below:
        # `SELECT ST_Perimeter3D(poly), ST_Perimeter2D(poly) FROM geo3d_polygon3d;`
        ref_perim_3d = 76859.2620451
        ref_perim_2d = 76859.2577803
        tol = 6
        self.assertAlmostEqual(ref_perim_2d,
                               Polygon2D.objects.perimeter().get(name='2D BBox').perimeter.m,
                               tol)
        self.assertAlmostEqual(ref_perim_3d,
                               Polygon3D.objects.perimeter().get(name='3D BBox').perimeter.m,
                               tol)

    def test_length(self):
        """
        Testing GeoQuerySet.length() on 3D fields.
        """
        # ST_Length_Spheroid Z-aware, and thus does not need to use
        # a separate function internally.
        # `SELECT ST_Length_Spheroid(line, 'SPHEROID["GRS 1980",6378137,298.257222101]')
        # FROM geo3d_interstate[2d|3d];`
        self._load_interstate_data()
        tol = 3
        ref_length_2d = 4368.1721949481
        ref_length_3d = 4368.62547052088
        self.assertAlmostEqual(ref_length_2d,
                               Interstate2D.objects.length().get(name='I-45').length.m,
                               tol)
        self.assertAlmostEqual(ref_length_3d,
                               Interstate3D.objects.length().get(name='I-45').length.m,
                               tol)
        # Making sure `ST_Length3D` is used on for a projected
        # and 3D model rather than `ST_Length`.
        # `SELECT ST_Length(line) FROM geo3d_interstateproj2d;`
        ref_length_2d = 4367.71564892392
        # `SELECT ST_Length3D(line) FROM geo3d_interstateproj3d;`
        ref_length_3d = 4368.16897234101
        self.assertAlmostEqual(ref_length_2d,
                               InterstateProj2D.objects.length().get(name='I-45').length.m,
                               tol)
        self.assertAlmostEqual(ref_length_3d,
                               InterstateProj3D.objects.length().get(name='I-45').length.m,
                               tol)

    def test_scale(self):
        """
        Testing GeoQuerySet.scale() on Z values.
        """
        self._load_city_data()
        # Mapping of City name to reference Z values.
        zscales = (-3, 4, 23)
        for zscale in zscales:
            for city in City3D.objects.scale(1.0, 1.0, zscale):
                # X/Y are scaled by 1.0 (unchanged); only Z is scaled.
                self.assertEqual(city_dict[city.name][2] * zscale, city.scale.z)

    def test_translate(self):
        """
        Testing GeoQuerySet.translate() on Z values.
        """
        self._load_city_data()
        ztranslations = (5.23, 23, -17)
        for ztrans in ztranslations:
            for city in City3D.objects.translate(0, 0, ztrans):
                # X/Y are translated by 0 (unchanged); only Z is shifted.
                self.assertEqual(city_dict[city.name][2] + ztrans, city.translate.z)
| bsd-3-clause |
sysadminmatmoz/odoo-clearcorp | project_budget/__init__.py | 3 | 1052 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project_budget | agpl-3.0 |
google/contentbox | third_party/django/core/management/commands/dumpdata.py | 105 | 9939 | from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
from django.db import router, DEFAULT_DB_ALIAS
from django.utils.datastructures import SortedDict
from optparse import make_option
class Command(BaseCommand):
    """Management command that serializes database contents to a fixture."""

    # Command-line options accepted in addition to the BaseCommand defaults.
    option_list = BaseCommand.option_list + (
        make_option('--format', default='json', dest='format',
            help='Specifies the output serialization format for fixtures.'),
        make_option('--indent', default=None, dest='indent', type='int',
            help='Specifies the indent level to use when pretty-printing output'),
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a specific database to dump '
                'fixtures from. Defaults to the "default" database.'),
        make_option('-e', '--exclude', dest='exclude',action='append', default=[],
            help='An appname or appname.ModelName to exclude (use multiple --exclude to exclude multiple apps/models).'),
        make_option('-n', '--natural', action='store_true', dest='use_natural_keys', default=False,
            help='Use natural keys if they are available.'),
        make_option('-a', '--all', action='store_true', dest='use_base_manager', default=False,
            help="Use Django's base manager to dump all models stored in the database, including those that would otherwise be filtered or modified by a custom manager."),
        make_option('--pks', dest='primary_keys', help="Only dump objects with "
            "given primary keys. Accepts a comma seperated list of keys. "
            "This option will only work when you specify one model."),
    )
    help = ("Output the contents of the database as a fixture of the given "
            "format (using each model's default manager unless --all is "
            "specified).")
    args = '[appname appname.ModelName ...]'

    def handle(self, *app_labels, **options):
        """Collect the requested apps/models, then serialize them to stdout.

        ``app_labels`` may be empty (dump everything), app names, or
        ``app.Model`` specifiers; ``--exclude`` entries take the same forms.
        Raises CommandError on unknown apps/models or invalid option combos.
        """
        from django.db.models import get_app, get_apps, get_model
        # Pull each option into a local for readability below.
        format = options.get('format')
        indent = options.get('indent')
        using = options.get('database')
        excludes = options.get('exclude')
        show_traceback = options.get('traceback')
        use_natural_keys = options.get('use_natural_keys')
        use_base_manager = options.get('use_base_manager')
        pks = options.get('primary_keys')
        # --pks is a comma-separated string; normalize to a list.
        if pks:
            primary_keys = pks.split(',')
        else:
            primary_keys = []
        # Resolve each --exclude entry to either a model or a whole app.
        excluded_apps = set()
        excluded_models = set()
        for exclude in excludes:
            if '.' in exclude:
                app_label, model_name = exclude.split('.', 1)
                model_obj = get_model(app_label, model_name)
                if not model_obj:
                    raise CommandError('Unknown model in excludes: %s' % exclude)
                excluded_models.add(model_obj)
            else:
                try:
                    app_obj = get_app(exclude)
                    excluded_apps.add(app_obj)
                except ImproperlyConfigured:
                    raise CommandError('Unknown app in excludes: %s' % exclude)
        # Build app_list: app -> list of models (or None for "all models").
        if len(app_labels) == 0:
            if primary_keys:
                raise CommandError("You can only use --pks option with one model")
            app_list = SortedDict((app, None) for app in get_apps() if app not in excluded_apps)
        else:
            if len(app_labels) > 1 and primary_keys:
                raise CommandError("You can only use --pks option with one model")
            app_list = SortedDict()
            for label in app_labels:
                try:
                    # Try to interpret the label as "app.Model".
                    app_label, model_label = label.split('.')
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    if app in excluded_apps:
                        continue
                    model = get_model(app_label, model_label)
                    if model is None:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
                    if app in app_list.keys():
                        if app_list[app] and model not in app_list[app]:
                            app_list[app].append(model)
                    else:
                        app_list[app] = [model]
                except ValueError:
                    if primary_keys:
                        raise CommandError("You can only use --pks option with one model")
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    if app in excluded_apps:
                        continue
                    app_list[app] = None
        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            try:
                serializers.get_serializer(format)
            except serializers.SerializerDoesNotExist:
                pass
            raise CommandError("Unknown serialization format: %s" % format)

        def get_objects():
            # Collate the objects to be serialized, lazily, in an order that
            # satisfies natural-key dependencies (see sort_dependencies).
            for model in sort_dependencies(app_list.items()):
                if model in excluded_models:
                    continue
                if not model._meta.proxy and router.allow_syncdb(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = model._default_manager
                    queryset = objects.using(using).order_by(model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    for obj in queryset.iterator():
                        yield obj

        try:
            # Disable the trailing newline BaseCommand would append, so the
            # serialized stream is written verbatim.
            self.stdout.ending = None
            serializers.serialize(format, get_objects(), indent=indent,
                    use_natural_keys=use_natural_keys, stream=self.stdout)
        except Exception as e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
def sort_dependencies(app_list):
    """Sort a list of (app, modellist) pairs into a single list of models.

    The single list of models is sorted so that any model with a natural key
    is serialized before a normal model, and any model with a natural key
    dependency has its dependencies serialized first.

    Raises:
        CommandError: if the dependency graph contains a cycle that cannot
            be resolved (a full pass promotes no model).
    """
    from django.db.models import get_model, get_models

    # Process the list of models, and get the list of dependencies
    model_dependencies = []
    models = set()
    for app, model_list in app_list:
        if model_list is None:
            # ``None`` means "every model in this app".
            model_list = get_models(app)
        for model in model_list:
            models.add(model)
            # Add any explicitly defined dependencies
            if hasattr(model, 'natural_key'):
                deps = getattr(model.natural_key, 'dependencies', [])
                if deps:
                    deps = [get_model(*d.split('.')) for d in deps]
            else:
                deps = []
            # Now add a dependency for any FK or M2M relation with
            # a model that defines a natural key
            for field in model._meta.fields:
                if hasattr(field.rel, 'to'):
                    rel_model = field.rel.to
                    if hasattr(rel_model, 'natural_key') and rel_model != model:
                        deps.append(rel_model)
            for field in model._meta.many_to_many:
                rel_model = field.rel.to
                if hasattr(rel_model, 'natural_key') and rel_model != model:
                    deps.append(rel_model)
            model_dependencies.append((model, deps))
    model_dependencies.reverse()

    # Now sort the models to ensure that dependencies are met. This
    # is done by repeatedly iterating over the input list of models.
    # If all the dependencies of a given model are in the final list,
    # that model is promoted to the end of the final list. This process
    # continues until the input list is empty, or we do a full iteration
    # over the input models without promoting a model to the final list.
    # If we do a full iteration without a promotion, that means there are
    # circular dependencies in the list.
    model_list = []
    while model_dependencies:
        skipped = []
        changed = False
        while model_dependencies:
            model, deps = model_dependencies.pop()
            # A model is ready once every dependency is either already in
            # the final list or not part of this serialization run at all.
            if all(d not in models or d in model_list for d in deps):
                model_list.append(model)
                changed = True
            else:
                skipped.append((model, deps))
        if not changed:
            raise CommandError("Can't resolve dependencies for %s in serialized app list." %
                ', '.join('%s.%s' % (model._meta.app_label, model._meta.object_name)
                for model, deps in sorted(skipped, key=lambda obj: obj[0].__name__))
            )
        model_dependencies = skipped
    return model_list
| apache-2.0 |
Cinntax/home-assistant | homeassistant/components/notion/config_flow.py | 4 | 2056 | """Config flow to configure the Notion integration."""
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client
from .const import DOMAIN
@callback
def configured_instances(hass):
    """Return the usernames of every Notion config entry already set up."""
    entries = hass.config_entries.async_entries(DOMAIN)
    return {entry.data[CONF_USERNAME] for entry in entries}
@config_entries.HANDLERS.register(DOMAIN)
class NotionFlowHandler(config_entries.ConfigFlow):
    """Handle a Notion config flow.

    Credentials are validated against the Notion cloud API (via
    ``aionotion.async_get_client``) before a config entry is created.
    """
    # Schema version of the config entries created by this handler.
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL

    async def _show_form(self, errors=None):
        """Show the username/password form to the user."""
        data_schema = vol.Schema(
            {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
        )
        return self.async_show_form(
            step_id="user", data_schema=data_schema, errors=errors or {}
        )

    async def async_step_import(self, import_config):
        """Import a config entry from configuration.yaml."""
        # YAML import is handled exactly like interactive setup.
        return await self.async_step_user(import_config)

    async def async_step_user(self, user_input=None):
        """Handle the start of the config flow."""
        # Imported lazily so the aionotion dependency is only loaded when
        # the flow actually runs.
        from aionotion import async_get_client
        from aionotion.errors import NotionError
        if not user_input:
            return await self._show_form()
        # One entry per username: re-show the form with an error rather
        # than creating a duplicate.
        if user_input[CONF_USERNAME] in configured_instances(self.hass):
            return await self._show_form({CONF_USERNAME: "identifier_exists"})
        session = aiohttp_client.async_get_clientsession(self.hass)
        try:
            # Constructing the client performs the credential check.
            await async_get_client(
                user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session
            )
        except NotionError:
            return await self._show_form({"base": "invalid_credentials"})
        return self.async_create_entry(title=user_input[CONF_USERNAME], data=user_input)
| apache-2.0 |
kaushik94/tardis | tardis/io/tests/test_atomic.py | 2 | 1654 | import pytest
from astropy.tests.helper import assert_quantity_allclose
from astropy import units as u
from tardis import constants as const
@pytest.fixture
def basic_atom_data(kurucz_atomic_data):
    """Per-element atom data table (symbol, mass, ...) from the Kurucz dataset."""
    return kurucz_atomic_data.atom_data
@pytest.fixture
def ionization_data(kurucz_atomic_data):
    """Ionization-energy table from the Kurucz dataset."""
    return kurucz_atomic_data.ionization_data
@pytest.fixture
def levels(kurucz_atomic_data):
    """Energy-level table from the Kurucz dataset."""
    return kurucz_atomic_data.levels
@pytest.fixture
def lines(kurucz_atomic_data):
    """Line-transition table from the Kurucz dataset."""
    return kurucz_atomic_data.lines
def test_atom_data_basic_atom_data(basic_atom_data):
    """He row (Z=2): symbol lookup, and mass in grams vs. 4.002602 u."""
    assert basic_atom_data.loc[2, "symbol"] == "He"
    assert_quantity_allclose(
        basic_atom_data.at[2, "mass"] * u.Unit("g"), 4.002602 * const.u.cgs
    )
def test_atom_data_ionization_data(ionization_data):
    """He I ionization energy (stored in erg) matches the reference eV value."""
    assert_quantity_allclose(
        ionization_data.loc[(2, 1)] * u.Unit("erg"), 24.587387936 * u.Unit("eV")
    )
def test_atom_data_levels(levels):
    """Level (2, 0, 2) energy, stored in erg, converts to the reference wavenumber."""
    assert_quantity_allclose(
        u.Quantity(levels.at[(2, 0, 2), "energy"], u.Unit("erg")).to(
            # Spectral equivalency converts energy -> wavenumber.
            u.Unit("cm-1"), equivalencies=u.spectral()
        ),
        166277.542 * u.Unit("cm-1"),
    )
def test_atom_data_lines(lines):
    """Line (2, 0, 0, 6) wavelength, stored in cm, matches 584.335 Angstrom."""
    assert_quantity_allclose(
        lines.at[(2, 0, 0, 6), "wavelength_cm"] * u.Unit("cm"),
        584.335 * u.Unit("Angstrom"),
    )
def test_atomic_reprepare(kurucz_atomic_data):
kurucz_atomic_data.prepare_atom_data([14, 20])
lines = kurucz_atomic_data.lines.reset_index()
assert lines["atomic_number"].isin([14, 20]).all()
assert len(lines.loc[lines["atomic_number"] == 14]) > 0
assert len(lines.loc[lines["atomic_number"] == 20]) > 0
| bsd-3-clause |
MarcJoan/django | tests/test_client_regress/urls.py | 352 | 2521 | from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# URLconf for the test-client regression suite: each entry exists to
# exercise one specific behaviour of django.test.Client.
urlpatterns = [
    # Re-use everything from the base test_client URLconf.
    url(r'', include('test_client.urls')),
    url(r'^no_template_view/$', views.no_template_view),
    url(r'^staff_only/$', views.staff_only_view),
    url(r'^get_view/$', views.get_view),
    url(r'^request_data/$', views.request_data),
    url(r'^request_data_extended/$', views.request_data, {'template': 'extended.html', 'data': 'bacon'}),
    url(r'^arg_view/(?P<name>.+)/$', views.view_with_argument, name='arg_view'),
    url(r'^nested_view/$', views.nested_view, name='nested_view'),
    url(r'^login_protected_redirect_view/$', views.login_protected_redirect_view),
    # Redirect chains of various lengths and destinations.
    url(r'^redirects/$', RedirectView.as_view(url='/redirects/further/')),
    url(r'^redirects/further/$', RedirectView.as_view(url='/redirects/further/more/')),
    url(r'^redirects/further/more/$', RedirectView.as_view(url='/no_template_view/')),
    url(r'^redirect_to_non_existent_view/$', RedirectView.as_view(url='/non_existent_view/')),
    url(r'^redirect_to_non_existent_view2/$', RedirectView.as_view(url='/redirect_to_non_existent_view/')),
    # Self- and circular redirects, for redirect-loop detection tests.
    url(r'^redirect_to_self/$', RedirectView.as_view(url='/redirect_to_self/')),
    url(r'^redirect_to_self_with_changing_query_view/$', views.redirect_to_self_with_changing_query_view),
    url(r'^circular_redirect_1/$', RedirectView.as_view(url='/circular_redirect_2/')),
    url(r'^circular_redirect_2/$', RedirectView.as_view(url='/circular_redirect_3/')),
    url(r'^circular_redirect_3/$', RedirectView.as_view(url='/circular_redirect_1/')),
    # Cross-host redirect (scheme + host + port change).
    url(r'^redirect_other_host/$', RedirectView.as_view(url='https://otherserver:8443/no_template_view/')),
    # Session machinery.
    url(r'^set_session/$', views.set_session_view),
    url(r'^check_session/$', views.check_session_view),
    url(r'^request_methods/$', views.request_methods_view),
    # Response-content edge cases (unicode, binary, JSON, headers).
    url(r'^check_unicode/$', views.return_unicode),
    url(r'^check_binary/$', views.return_undecodable_binary),
    url(r'^json_response/$', views.return_json_response),
    url(r'^parse_unicode_json/$', views.return_json_file),
    url(r'^check_headers/$', views.check_headers),
    url(r'^check_headers_redirect/$', RedirectView.as_view(url='/check_headers/')),
    # Request-body reading behaviour.
    url(r'^body/$', views.body),
    url(r'^read_all/$', views.read_all),
    url(r'^read_buffer/$', views.read_buffer),
    url(r'^request_context_view/$', views.request_context_view),
    url(r'^render_template_multiple_times/$', views.render_template_multiple_times),
]
| bsd-3-clause |
winhamwr/selenium | py/test/selenium/webdriver/firefox/ff_explicit_wait_tests.py | 3 | 1182 | #!/usr/bin/python
#
# Copyright 2011 WebDriver committers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from selenium import webdriver
from selenium.test.selenium.webdriver.support import webdriverwait_tests
from selenium.test.selenium.webdriver.common.webserver import SimpleWebServer
def setup_module(module):
    # Start a local HTTP server serving the test pages, then launch a real
    # Firefox browser; both are attached as class attributes so the tests
    # and teardown_module can reach them.
    webserver = SimpleWebServer()
    webserver.start()
    FirefoxWebDriverWaitTest.webserver = webserver
    FirefoxWebDriverWaitTest.driver = webdriver.Firefox()
class FirefoxWebDriverWaitTest(webdriverwait_tests.WebDriverWaitTest):
    """Run the shared WebDriverWait test suite against Firefox.

    All test cases are inherited; setup_module/teardown_module manage the
    live driver and web server stored as class attributes.
    """
    pass
def teardown_module(module):
    # Quit the browser first, then stop the server it was talking to.
    FirefoxWebDriverWaitTest.driver.quit()
    FirefoxWebDriverWaitTest.webserver.stop()
| apache-2.0 |
MobinRanjbar/hue | desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/commands/update_permissions.py | 35 | 1239 | from django.core.management.base import BaseCommand
from django.contrib.auth.management import create_permissions as _create_permissions
from django_extensions.management.utils import signalcommand
# Compatibility shim: expose a uniform get_app/get_all_apps/get_models/
# create_permissions API across Django versions.
try:
    # Django >= 1.7: the AppConfig-based app registry.
    from django.apps import apps as django_apps
    # Placeholder so the call site can pass get_models() uniformly; the
    # new-style create_permissions below ignores its ``models`` argument.
    get_models = lambda: None
    get_app = django_apps.get_app_config
    get_all_apps = django_apps.get_app_configs

    def create_permissions(app, models, verbosity):
        # New-style signature dropped the ``models`` argument.
        _create_permissions(app, verbosity)
except ImportError:
    # Django < 1.7: fall back to the legacy get_models/get_app helpers.
    from django.db.models import get_models, get_app
    django_apps = None

    def get_all_apps():
        # Derive the set of installed apps from the registered models.
        apps = set()
        for model in get_models():
            apps.add(get_app(model._meta.app_label))
        return apps
    create_permissions = _create_permissions
class Command(BaseCommand):
    """Management command that re-creates missing model permissions."""
    args = '<app app ...>'
    help = 'reloads permissions for specified apps, or all apps if no args are specified'

    @signalcommand
    def handle(self, *args, **options):
        # Resolve the target apps: everything installed when no labels are
        # given, otherwise look each label up individually.
        apps = set()
        if not args:
            apps = get_all_apps()
        else:
            for arg in args:
                apps.add(get_app(arg))
        for app in apps:
            # ``get_models()`` is a no-op placeholder on Django >= 1.7 (see
            # the compatibility shim at module level).
            create_permissions(app, get_models(), int(options.get('verbosity', 3)))
| apache-2.0 |
Dhole/miniBoy | helpers/gen.py | 1 | 1027 |
# Generator for the Game Boy CPU's ALU opcode table (0x80-0xBF): prints one
# C SET_OP(...) line per opcode, ready to paste into the emulator source.

# Operand registers in opcode order (index == low three bits of the opcode).
regs = ["B", "C", "D", "E", "H", "L", "(HL)", "A"]

# Argument templates for the generated SET_OP lines:
_ACC = "A, %s"      # accumulator ops: A plus the register operand
_ONE = "%s, NULL"   # single-operand ops: the register only
# One row per block of 16 opcodes: (base opcode,
#   op for base+0x0..0x7, op for base+0x8..0xF), where each op is
#   (mnemonic template, C handler name, argument template).
_ROWS = [
    (0x80, ("ADD A,%s", "op_add", _ACC), ("ADC A,%s", "op_adc", _ACC)),
    (0x90, ("SUB %s", "op_sub", _ONE), ("SBC A,%s", "op_sbc", _ACC)),
    (0xA0, ("AND %s", "op_and", _ONE), ("XOR %s", "op_xor", _ONE)),
    (0xB0, ("OR %s", "op_or", _ONE), ("CP %s", "op_cp", _ONE)),
]

for base, low_op, high_op in _ROWS:
    for offset in range(16):
        mnemonic, handler, arg_tpl = low_op if offset < 8 else high_op
        # First pass builds the C line skeleton; %%02X survives it so the
        # second pass can fill in the opcode and register name.
        template = '\tSET_OP(0x%%02X, "%s", %s, %s, 4);' % (mnemonic, handler, arg_tpl)
        reg = regs[offset % 8]
        print(template % (base + offset, reg, reg))
iivic/BoiseStateX | openedx/core/djangoapps/user_api/preferences/views.py | 9 | 10880 | """
NOTE: this API is WIP and has not yet been approved. Do not use this API
without talking to Christina or Andy.
For more information, see:
https://openedx.atlassian.net/wiki/display/TNL/User+API
"""
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import permissions
from django.db import transaction
from django.utils.translation import ugettext as _
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from openedx.core.lib.api.permissions import IsUserInUrlOrStaff
from ..errors import UserNotFound, UserNotAuthorized, PreferenceValidationError, PreferenceUpdateError
from .api import (
get_user_preference, get_user_preferences, set_user_preference, update_user_preferences, delete_user_preference
)
class PreferencesView(APIView):
    """
    **Use Cases**

        Get or update the user's preference information. Updates are only
        supported through merge patch. Preference values of null in a
        patch request are treated as requests to remove the preference.

    **Example Requests**

        GET /api/user/v1/preferences/{username}/

        PATCH /api/user/v1/preferences/{username}/ with content_type "application/merge-patch+json"

    **Response Values for GET**

        If no user exists with the specified username, an HTTP 404 "Not
        Found" response is returned.

        If a user without "is_staff" access requests preferences for a
        different user, an HTTP 404 "Not Found" message is returned.

        If the user makes the request for her own account, or makes a
        request for another account and has "is_staff" access, an HTTP 200
        "OK" response is returned. The response contains a JSON dictionary
        with a key/value pair (of type String) for each preference.

        The list of preferences depends on your implementation. By default,
        the list includes the following preferences.

        * account_privacy: The user's setting for sharing her personal
          profile. Possible values are "all_users" or "private".
        * pref-lan: The user's preferred language, as set in account
          settings.

    **Response Values for PATCH**

        Users can only modify their own preferences. If the
        requesting user does not have the specified username and has staff
        access, the request returns an HTTP 403 "Forbidden" response. If
        the requesting user does not have staff access, the request
        returns an HTTP 404 "Not Found" response to avoid revealing the
        existence of the account.

        If no user exists with the specified username, an HTTP 404 "Not
        Found" response is returned.

        If "application/merge-patch+json" is not the specified content
        type, a 415 "Unsupported Media Type" response is returned.

        If validation errors prevent the update, this method returns a 400
        "Bad Request" response that includes a "field_errors" field that
        lists all error messages.

        If a failure at the time of the update prevents the update, a 400
        "Bad Request" error is returned. The JSON collection contains
        specific errors.

        If the update is successful, an HTTP 204 "No Content" response is
        returned with no additional content.
    """
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
    permission_classes = (permissions.IsAuthenticated, IsUserInUrlOrStaff)
    # MergePatchParser enforces the "application/merge-patch+json" content
    # type (other content types produce the 415 documented above).
    parser_classes = (MergePatchParser,)

    def get(self, request, username):
        """
        GET /api/user/v1/preferences/{username}/
        """
        try:
            user_preferences = get_user_preferences(request.user, username=username)
        except UserNotAuthorized:
            # Requester is not allowed to read this user's preferences.
            return Response(status=status.HTTP_403_FORBIDDEN)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)
        return Response(user_preferences)

    def patch(self, request, username):
        """
        PATCH /api/user/v1/preferences/{username}/
        """
        # An empty merge patch is meaningless; reject it up front with 400.
        if not request.data or not getattr(request.data, "keys", None):
            error_message = _("No data provided for user preference update")
            return Response(
                {
                    "developer_message": error_message,
                    "user_message": error_message
                },
                status=status.HTTP_400_BAD_REQUEST
            )
        try:
            # Apply the whole patch atomically: either every key is applied
            # or none are.
            with transaction.commit_on_success():
                update_user_preferences(request.user, request.data, user=username)
        except UserNotAuthorized:
            return Response(status=status.HTTP_403_FORBIDDEN)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)
        except PreferenceValidationError as error:
            # Per-preference validation messages, keyed by preference name.
            return Response(
                {"field_errors": error.preference_errors},
                status=status.HTTP_400_BAD_REQUEST
            )
        except PreferenceUpdateError as error:
            return Response(
                {
                    "developer_message": error.developer_message,
                    "user_message": error.user_message
                },
                status=status.HTTP_400_BAD_REQUEST
            )
        return Response(status=status.HTTP_204_NO_CONTENT)
class PreferencesDetailView(APIView):
    """
    **Use Cases**

        Get, create, update, or delete a specific user preference.

    **Example Requests**

        GET /api/user/v1/preferences/{username}/{preference_key}

        PUT /api/user/v1/preferences/{username}/{preference_key}

        DELETE /api/user/v1/preferences/{username}/{preference_key}

    **Response Values for GET**

        If the specified username or preference does not exist, an HTTP
        404 "Not Found" response is returned.

        If a user without "is_staff" access requests preferences for a
        different user, a 404 error is returned.

        If the user makes the request for her own account, or makes a
        request for another account and has "is_staff" access, an HTTP 200
        "OK" response is returned that contains a JSON string.

    **Response Values for PUT**

        Users can only modify their own preferences. If the
        requesting user does not have the specified username and has staff
        access, the request returns an HTTP 403 "Forbidden" response. If
        the requesting user does not have staff access, the request
        returns an HTTP 404 "Not Found" response to avoid revealing the
        existence of the account.

        If the specified preference does not exist, an HTTP 404 "Not
        Found" response is returned.

        If the request is successful, a 204 "No Content" status is returned
        with no additional content.

    **Response Values for DELETE**

        Users can only delete their own preferences. If the
        requesting user does not have the specified username and has staff
        access, the request returns an HTTP 403 "Forbidden" response. If
        the requesting user does not have staff access, the request
        returns an HTTP 404 "Not Found" response to avoid revealing the
        existence of the account.

        If the specified preference does not exist, an HTTP 404 "Not
        Found" response is returned.

        If the update is successful, an HTTP 204 "No Content" response is
        returned with no additional content.
    """
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
    permission_classes = (permissions.IsAuthenticated, IsUserInUrlOrStaff)

    def get(self, request, username, preference_key):
        """
        GET /api/user/v1/preferences/{username}/{preference_key}
        """
        try:
            value = get_user_preference(request.user, preference_key, username=username)
            # There was no preference with that key, raise a 404.
            if value is None:
                return Response(status=status.HTTP_404_NOT_FOUND)
        except UserNotAuthorized:
            return Response(status=status.HTTP_403_FORBIDDEN)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)
        return Response(value)

    def put(self, request, username, preference_key):
        """
        PUT /api/user/v1/preferences/{username}/{preference_key}
        """
        try:
            # ``request.data`` is the raw new value for this one preference.
            set_user_preference(request.user, preference_key, request.data, username=username)
        except UserNotAuthorized:
            return Response(status=status.HTTP_403_FORBIDDEN)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)
        except PreferenceValidationError as error:
            # Surface only this key's validation messages.
            return Response(
                {
                    "developer_message": error.preference_errors[preference_key]["developer_message"],
                    "user_message": error.preference_errors[preference_key]["user_message"]
                },
                status=status.HTTP_400_BAD_REQUEST
            )
        except PreferenceUpdateError as error:
            return Response(
                {
                    "developer_message": error.developer_message,
                    "user_message": error.user_message
                },
                status=status.HTTP_400_BAD_REQUEST
            )
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, username, preference_key):
        """
        DELETE /api/user/v1/preferences/{username}/{preference_key}
        """
        try:
            preference_existed = delete_user_preference(request.user, preference_key, username=username)
        except UserNotAuthorized:
            return Response(status=status.HTTP_403_FORBIDDEN)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)
        except PreferenceUpdateError as error:
            return Response(
                {
                    "developer_message": error.developer_message,
                    "user_message": error.user_message
                },
                status=status.HTTP_400_BAD_REQUEST
            )
        # Deleting a nonexistent preference is reported as 404, not as a
        # silent success.
        if not preference_existed:
            return Response(status=status.HTTP_404_NOT_FOUND)
        return Response(status=status.HTTP_204_NO_CONTENT)
| agpl-3.0 |
isaacyeaton/pyadisi | pyadisi/pyqtgraph/widgets/LayoutWidget.py | 49 | 3425 | from ..Qt import QtGui, QtCore
__all__ = ['LayoutWidget']
class LayoutWidget(QtGui.QWidget):
    """
    Convenience class used for laying out QWidgets in a grid.
    (It's just a little less effort to use than QGridLayout)

    Widgets are placed at an automatically advancing (row, column) cursor
    unless an explicit cell is requested.
    """
    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.layout = QtGui.QGridLayout()
        self.setLayout(self.layout)
        # Bookkeeping: widget -> (row, col), and row -> {col: widget}.
        self.items = {}
        self.rows = {}
        self.currentRow = 0
        self.currentCol = 0

    def nextRow(self):
        """Advance to next row for automatic widget placement"""
        self.currentRow += 1
        self.currentCol = 0

    def nextColumn(self, colspan=1):
        """Advance to next column, while returning the current column number
        (generally only for internal use--called by addWidget)"""
        self.currentCol += colspan
        return self.currentCol - colspan

    def nextCol(self, *args, **kargs):
        """Alias of nextColumn"""
        return self.nextColumn(*args, **kargs)

    def addLabel(self, text=' ', row=None, col=None, rowspan=1, colspan=1, **kargs):
        """
        Create a QLabel with *text* and place it in the next available cell (or in the cell specified)
        All extra keyword arguments are passed to QLabel().
        Returns the created widget.
        """
        # Use a distinct local name rather than shadowing the *text* argument.
        label = QtGui.QLabel(text, **kargs)
        self.addItem(label, row, col, rowspan, colspan)
        return label

    def addLayout(self, row=None, col=None, rowspan=1, colspan=1, **kargs):
        """
        Create an empty LayoutWidget and place it in the next available cell (or in the cell specified)
        All extra keyword arguments are passed to :func:`LayoutWidget.__init__ <pyqtgraph.LayoutWidget.__init__>`
        Returns the created widget.
        """
        layout = LayoutWidget(**kargs)
        self.addItem(layout, row, col, rowspan, colspan)
        return layout

    def addWidget(self, item, row=None, col=None, rowspan=1, colspan=1):
        """
        Add a widget to the layout and place it in the next available cell (or in the cell specified).

        ``row='next'`` advances to a fresh row first; ``row=None`` keeps the
        current row; ``col=None`` takes the next free column in that row.
        """
        if row == 'next':
            self.nextRow()
            row = self.currentRow
        elif row is None:
            row = self.currentRow
        if col is None:
            col = self.nextCol(colspan)
        if row not in self.rows:
            self.rows[row] = {}
        self.rows[row][col] = item
        self.items[item] = (row, col)
        self.layout.addWidget(item, row, col, rowspan, colspan)

    # Fix: addLabel/addLayout call self.addItem, but no such method existed
    # (QWidget does not provide one), so both raised AttributeError.
    addItem = addWidget

    def getWidget(self, row, col):
        """Return the widget in (*row*, *col*)"""
        # Fix: previously read the nonexistent ``self.row`` attribute, which
        # raised AttributeError on every call; the mapping is ``self.rows``.
        return self.rows[row][col]
| bsd-3-clause |
Aasmi/scikit-learn | sklearn/cluster/tests/test_birch.py | 342 | 5603 | """
Tests for the birch clustering algorithm.
"""
from scipy import sparse
import numpy as np
from sklearn.cluster.tests.common import generate_clustered_data
from sklearn.cluster.birch import Birch
from sklearn.cluster.hierarchical import AgglomerativeClustering
from sklearn.datasets import make_blobs
from sklearn.linear_model import ElasticNet
from sklearn.metrics import pairwise_distances_argmin, v_measure_score
from sklearn.utils.testing import assert_greater_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
def test_n_samples_leaves_roots():
    """Sample counts recorded in the root and across all leaves must both
    equal the number of fitted samples."""
    X, y = make_blobs(n_samples=10)
    brc = Birch()
    brc.fit(X)
    # Sum per-subcluster sample counts at the root and over every leaf.
    n_samples_root = sum([sc.n_samples_ for sc in brc.root_.subclusters_])
    n_samples_leaves = sum([sc.n_samples_ for leaf in brc._get_leaves()
                            for sc in leaf.subclusters_])
    assert_equal(n_samples_leaves, X.shape[0])
    assert_equal(n_samples_root, X.shape[0])
def test_partial_fit():
    """fit(X) and successive partial_fit calls must build the same model."""
    X, y = make_blobs(n_samples=100)
    brc = Birch(n_clusters=3)
    brc.fit(X)
    brc_partial = Birch(n_clusters=None)
    brc_partial.partial_fit(X[:50])
    brc_partial.partial_fit(X[50:])
    assert_array_equal(brc_partial.subcluster_centers_,
                       brc.subcluster_centers_)
    # Test that same global labels are obtained after calling partial_fit
    # with None
    brc_partial.set_params(n_clusters=3)
    brc_partial.partial_fit(None)
    assert_array_equal(brc_partial.subcluster_labels_, brc.subcluster_labels_)
def test_birch_predict():
    """predict() must assign each sample to its nearest subcluster centroid."""
    rng = np.random.RandomState(0)
    X = generate_clustered_data(n_clusters=3, n_features=3,
                                n_samples_per_cluster=10)
    # n_samples * n_samples_per_cluster
    shuffle_indices = np.arange(30)
    rng.shuffle(shuffle_indices)
    X_shuffle = X[shuffle_indices, :]
    brc = Birch(n_clusters=4, threshold=1.)
    brc.fit(X_shuffle)
    centroids = brc.subcluster_centers_
    # Predicting the training data must reproduce the fitted labels, and
    # those labels must agree perfectly with nearest-centroid assignment.
    assert_array_equal(brc.labels_, brc.predict(X_shuffle))
    nearest_centroid = pairwise_distances_argmin(X_shuffle, centroids)
    assert_almost_equal(v_measure_score(nearest_centroid, brc.labels_), 1.0)
def test_n_clusters():
    """The ``n_clusters`` parameter: int value, clusterer instance,
    invalid estimator, and too-coarse threshold behaviours."""
    X, y = make_blobs(n_samples=100, centers=10)
    brc1 = Birch(n_clusters=10)
    brc1.fit(X)
    assert_greater(len(brc1.subcluster_centers_), 10)
    assert_equal(len(np.unique(brc1.labels_)), 10)
    # Test that n_clusters = Agglomerative Clustering gives
    # the same results.
    gc = AgglomerativeClustering(n_clusters=10)
    brc2 = Birch(n_clusters=gc)
    brc2.fit(X)
    assert_array_equal(brc1.subcluster_labels_, brc2.subcluster_labels_)
    assert_array_equal(brc1.labels_, brc2.labels_)
    # Test that the wrong global clustering step raises an Error.
    clf = ElasticNet()
    brc3 = Birch(n_clusters=clf)
    assert_raises(ValueError, brc3.fit, X)
    # Test that a small number of clusters raises a warning.
    brc4 = Birch(threshold=10000.)
    assert_warns(UserWarning, brc4.fit, X)
def test_sparse_X():
    """Fitting on dense X and on its CSR form must give identical labels
    and subcluster centers."""
    X, y = make_blobs(n_samples=100, centers=10)
    brc = Birch(n_clusters=10)
    brc.fit(X)
    csr = sparse.csr_matrix(X)
    brc_sparse = Birch(n_clusters=10)
    brc_sparse.fit(csr)
    assert_array_equal(brc.labels_, brc_sparse.labels_)
    assert_array_equal(brc.subcluster_centers_,
                       brc_sparse.subcluster_centers_)
def check_branching_factor(node, branching_factor):
    """Recursively assert that ``node`` and every descendant node hold at
    most ``branching_factor`` subclusters."""
    children = node.subclusters_
    assert_greater_equal(branching_factor, len(children))
    for child in children:
        if not child.child_:
            continue
        check_branching_factor(child.child_, branching_factor)
def test_branching_factor():
    """No CF-tree node may exceed ``branching_factor`` subclusters, and a
    branching factor of one must be rejected."""
    X, y = make_blobs()
    branching_factor = 9
    # Purposefully set a low threshold to maximize the subclusters.
    brc = Birch(n_clusters=None, branching_factor=branching_factor,
                threshold=0.01)
    brc.fit(X)
    check_branching_factor(brc.root_, branching_factor)
    brc = Birch(n_clusters=3, branching_factor=branching_factor,
                threshold=0.01)
    brc.fit(X)
    check_branching_factor(brc.root_, branching_factor)
    # Raises error when branching_factor is set to one.
    brc = Birch(n_clusters=None, branching_factor=1, threshold=0.01)
    assert_raises(ValueError, brc.fit, X)
def check_threshold(birch_instance, threshold):
    """Walk the linked list of leaves and assert that every subcluster's
    radius stays within ``threshold``."""
    leaf = birch_instance.dummy_leaf_.next_leaf_
    while leaf:
        for subcluster in leaf.subclusters_:
            assert_greater_equal(threshold, subcluster.radius)
        leaf = leaf.next_leaf_
def test_threshold():
    """Leaf subclusters must keep their radius below the fitted threshold."""
    X, y = make_blobs(n_samples=80, centers=4)
    brc = Birch(threshold=0.5, n_clusters=None)
    brc.fit(X)
    check_threshold(brc, 0.5)
    brc = Birch(threshold=5.0, n_clusters=None)
    brc.fit(X)
    check_threshold(brc, 5.)
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.