path
stringlengths 23
146
| source_code
stringlengths 0
261k
|
|---|---|
data/RoseOu/flasky/venv/lib/python2.7/site-packages/pygments/styles/default.py
|
"""
pygments.styles.default
~~~~~~~~~~~~~~~~~~~~~~~
The default highlighting style.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class DefaultStyle(Style):
    """
    The default style (inspired by Emacs 22).
    """
    # NOTE(review): in the extracted source every colour value was truncated
    # (unterminated string literals — a syntax error).  The values below are
    # restored from the upstream Pygments default style; verify against the
    # pinned Pygments release before shipping.

    background_color = "#f8f8f8"
    default_style = ""

    styles = {
        Whitespace:                "#bbbbbb",

        Comment:                   "italic #408080",
        Comment.Preproc:           "noitalic #BC7A00",

        Keyword:                   "bold #008000",
        Keyword.Pseudo:            "nobold",
        Keyword.Type:              "nobold #B00040",

        Operator:                  "#666666",
        Operator.Word:             "bold #AA22FF",

        Name.Builtin:              "#008000",
        Name.Function:             "#0000FF",
        Name.Class:                "bold #0000FF",
        Name.Namespace:            "bold #0000FF",
        Name.Exception:            "bold #D2413A",
        Name.Variable:             "#19177C",
        Name.Constant:             "#880000",
        Name.Label:                "#A0A000",
        Name.Entity:               "bold #999999",
        Name.Attribute:            "#7D9029",
        Name.Tag:                  "bold #008000",
        Name.Decorator:            "#AA22FF",

        String:                    "#BA2121",
        String.Doc:                "italic",
        String.Interpol:           "bold #BB6688",
        String.Escape:             "bold #BB6622",
        String.Regex:              "#BB6688",
        String.Symbol:             "#19177C",
        String.Other:              "#008000",

        Number:                    "#666666",

        Generic.Heading:           "bold #000080",
        Generic.Subheading:        "bold #800080",
        Generic.Deleted:           "#A00000",
        Generic.Inserted:          "#00A000",
        Generic.Error:             "#FF0000",
        Generic.Emph:              "italic",
        Generic.Strong:            "bold",
        Generic.Prompt:            "bold #000080",
        Generic.Output:            "#888",
        Generic.Traceback:         "#04D",

        Error:                     "border:#FF0000"
    }
|
data/QingdaoU/OnlineJudge/utils/captcha/views.py
|
from django.http import HttpResponse
from utils.captcha import Captcha
def show_captcha(request):
    """Render a freshly generated captcha for this request as a GIF image."""
    image_data = Captcha(request).display()
    return HttpResponse(image_data, content_type="image/gif")
|
data/RDFLib/rdflib/rdflib/plugins/sparql/results/csvresults.py
|
"""
This module implements a parser and serializer for the CSV SPARQL result
formats
http://www.w3.org/TR/sparql11-results-csv-tsv/
"""
import codecs
import csv
from rdflib import Variable, BNode, URIRef, Literal, py3compat
from rdflib.query import Result, ResultSerializer, ResultParser
class CSVResultParser(ResultParser):
    """Parse SPARQL SELECT results in the W3C CSV serialization
    (http://www.w3.org/TR/sparql11-results-csv-tsv/).
    """

    def __init__(self):
        self.delim = ","

    def parse(self, source):
        """Read CSV rows from `source` and return a SELECT-typed Result."""
        r = Result('SELECT')

        # A zero-length read tells us whether the stream yields bytes or
        # text without consuming anything; byte streams get a utf-8 reader.
        if isinstance(source.read(0), py3compat.bytestype):
            source = codecs.getreader('utf-8')(source)

        reader = csv.reader(source, delimiter=self.delim)
        # BUGFIX: use the builtin next() (Python 2.6+ and 3.x) instead of the
        # Python-2-only reader.next() method.
        r.vars = [Variable(x) for x in next(reader)]
        r.bindings = []
        for row in reader:
            r.bindings.append(self.parseRow(row, r.vars))
        return r

    def parseRow(self, row, v):
        """Zip variables with converted cell values, dropping unbound ('')."""
        return dict((var, val)
                    for var, val in zip(v, [self.convertTerm(t)
                                            for t in row]) if val is not None)

    def convertTerm(self, t):
        """Map a CSV cell to an RDF term: '' -> unbound, '_:x' -> BNode,
        http(s) URIs -> URIRef, anything else -> plain Literal."""
        if t == "":
            return None
        if t.startswith("_:"):
            return BNode(t)
        if t.startswith("http://") or t.startswith("https://"):
            return URIRef(t)
        return Literal(t)
class CSVResultSerializer(ResultSerializer):
    """Serialize SPARQL SELECT results in the W3C CSV format."""

    def __init__(self, result):
        ResultSerializer.__init__(self, result)
        self.delim = ","
        # CSV has no way to express ASK/CONSTRUCT/DESCRIBE results.
        if result.type != "SELECT":
            raise Exception(
                "CSVSerializer can only serialize select query results")

    def serialize(self, stream, encoding='utf-8'):
        """Write a header row of variable names, then one row per binding."""
        if py3compat.PY3:
            # csv.writer wants a text stream on Python 3; wrap the binary
            # target.  FIX: dropped the redundant function-local
            # `import codecs` — the module is already imported at file top.
            stream = codecs.getwriter(encoding)(stream)
        out = csv.writer(stream, delimiter=self.delim)

        vs = [self.serializeTerm(v, encoding) for v in self.result.vars]
        out.writerow(vs)
        for row in self.result.bindings:
            # Unbound variables serialize as the empty string.
            out.writerow([self.serializeTerm(
                row.get(v), encoding) for v in self.result.vars])

    def serializeTerm(self, term, encoding):
        """Render one term as a CSV cell ('' when unbound); bytes on py2."""
        if term is None:
            return ""
        if not py3compat.PY3:
            return term.encode(encoding)
        else:
            return term
|
data/OpenAssets/openassets/openassets/__init__.py
|
"""
Reference implementation of the Open Assets Protocol.
"""
__version__ = '1.3'
|
data/JasonGiedymin/Flask-Module-Scaffold/src/myapp/apps/dummy/__init__.py
|
# Register this package as a Flask "module" via the project's helpers.
from myapp import utils

# presumably the final dotted component of this package's name — TODO confirm
module_name = utils.getFinalName(__name__)
module = utils.getModule(__name__, subdomain=module_name)

# Imported for side effects only: view functions attach themselves to
# `module` at import time.
import views
import views.morepages
|
data/Yelp/mrjob/tests/test_retry.py
|
from mrjob.retry import RetryGoRound
from mrjob.retry import RetryWrapper
from tests.py2 import Mock
from tests.py2 import TestCase
class RetryGoRoundTestCase(TestCase):
    """Tests for RetryGoRound, which rotates between wrapped objects when a
    call raises a recoverable exception (here: IOError)."""

    def test_empty(self):
        # at least one wrapped object is required
        self.assertRaises(
            ValueError, RetryGoRound, [], lambda ex: isinstance(ex, IOError))

    def test_success(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', return_value=1)
        a2 = Mock()
        a2.f = Mock(__name__='f', return_value=2)
        a = RetryGoRound([a1, a2], lambda ex: isinstance(ex, IOError))
        # no failure: the first wrapped object handles everything
        self.assertEqual(a.f(), 1)
        self.assertEqual(a1.f.call_count, 1)
        self.assertEqual(a2.f.call_count, 0)

    def test_one_failure(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=IOError)
        a1.x = 100
        a2 = Mock()
        a2.f = Mock(__name__='f', return_value=2)
        a2.x = 200
        a = RetryGoRound([a1, a2], lambda ex: isinstance(ex, IOError))
        # attribute access follows the currently-active object
        self.assertEqual(a.x, 100)
        self.assertEqual(a.f(), 2)
        # after a1 failed once, a2 stays the active object
        self.assertEqual(a.x, 200)
        self.assertEqual(a.f(), 2)
        self.assertEqual(a1.f.call_count, 1)
        self.assertEqual(a2.f.call_count, 2)

    def test_all_fail(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=IOError)
        a1.x = 100
        a2 = Mock()
        a2.f = Mock(__name__='f', side_effect=IOError)
        a2.x = 200
        a = RetryGoRound([a1, a2], lambda ex: isinstance(ex, IOError))
        self.assertEqual(a.x, 100)
        # every alternative failed: the error propagates
        self.assertRaises(IOError, a.f)
        self.assertEqual(a.x, 100)
        self.assertRaises(IOError, a.f)
        self.assertEqual(a1.f.call_count, 2)
        self.assertEqual(a2.f.call_count, 2)

    def test_unrecoverable_error(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=ValueError)
        a2 = Mock()
        a2.f = Mock(__name__='f', return_value=2)
        a = RetryGoRound([a1, a2], lambda ex: isinstance(ex, IOError))
        # ValueError is not retriable, so a2 is never consulted
        self.assertRaises(ValueError, a.f)
        self.assertRaises(ValueError, a.f)
        self.assertEqual(a1.f.call_count, 2)
        self.assertEqual(a2.f.call_count, 0)

    def test_can_wrap_around(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=[IOError, 1])
        a2 = Mock()
        a2.f = Mock(__name__='f', side_effect=[2, IOError])
        a = RetryGoRound([a1, a2], lambda ex: isinstance(ex, IOError))
        # first call fails over a1 -> a2; second call fails over a2 -> a1
        self.assertEqual(a.f(), 2)
        self.assertEqual(a.f(), 1)
        self.assertEqual(a1.f.call_count, 2)
        self.assertEqual(a2.f.call_count, 2)

    def test_wrapping(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=IOError)
        a2 = Mock()
        a2.f = Mock(__name__='f', return_value=2)
        a = RetryGoRound([a1, a2], lambda ex: isinstance(ex, IOError))
        # positional and keyword args are forwarded verbatim on retry
        self.assertEqual(a.f('foo', bar='baz'), 2)
        a1.f.assert_called_once_with('foo', bar='baz')
        a2.f.assert_called_once_with('foo', bar='baz')
        # the wrapper preserves the wrapped callable's __name__
        self.assertEqual(a.f.__name__, 'f')
class RetryWrapperTestCase(TestCase):
    """Tests for RetryWrapper, which retries a single wrapped object with
    backoff until success or `max_tries` is exhausted."""

    def test_success(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=None)
        a = RetryWrapper(
            a1,
            retry_if=lambda x: True,
            backoff=0.0001,
            max_tries=2
        )
        # success on the first try: no retry happens
        a.f()
        a1.f.assert_called_once_with()

    def test_failure(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=[IOError, 1])
        a = RetryWrapper(
            a1,
            retry_if=lambda x: True,
            backoff=0.0001,
            max_tries=2
        )
        # one failure, then the retry succeeds and its value is returned
        self.assertEqual(a.f(), 1)
        self.assertEqual(a1.f.call_count, 2)

    def test_failure_raises_if_all_tries_fail(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=[IOError, IOError])
        a = RetryWrapper(
            a1,
            retry_if=lambda x: True,
            backoff=0.0001,
            max_tries=2
        )
        # max_tries exhausted: the last error propagates
        with self.assertRaises(IOError):
            a.f()
        self.assertEqual(a1.f.call_count, 2)

    def test_try_till_success(self):
        a1 = Mock()
        a1.f = Mock(__name__='f', side_effect=[IOError, IOError, None])
        # max_tries=0 means "retry forever until success"
        a = RetryWrapper(
            a1,
            retry_if=lambda x: True,
            backoff=0.0001,
            max_tries=0
        )
        a.f()
        self.assertEqual(a1.f.call_count, 3)
|
data/ab77/netflix-proxy/auth/pbkdf2_sha256_hash.py
|
# Print a plaintext password (from argv, or freshly generated) together with
# its PBKDF2-SHA256 hash.  NOTE: Python 2 script (uses the print statement).
import sys
from passlib.hash import pbkdf2_sha256
from passlib.utils import generate_password
try:
    plaintext = sys.argv[1]
except IndexError:
    # no password supplied on the command line: generate a random one
    plaintext = generate_password()
# 200k rounds / 16-byte salt are passed explicitly rather than relying on
# passlib's defaults
print plaintext, pbkdf2_sha256.encrypt(plaintext, rounds=200000, salt_size=16)
|
data/TylerTemp/docpie/docpie/example/git/git_clone.py
|
'''
usage: python git.py clone [options] [--] <repo> [<dir>]
options:
-v, --verbose be more verbose
-q, --quiet be more quiet
--progress force progress reporting
-n, --no-checkout don't create a checkout
--bare create a bare repository
--mirror create a mirror repository (implies bare)
-l, --local to clone from a local repository
--no-hardlinks don't use local hardlinks, always copy
-s, --shared setup as shared repository
--recursive initialize submodules in the clone
--recurse-submodules initialize submodules in the clone
--template <template-directory>
directory from which templates will be used
--reference <repo> reference repository
-o, --origin <branch>
use <branch> instead of 'origin' to track upstream
-b, --branch <branch>
checkout <branch> instead of the remote's HEAD
-u, --upload-pack <path>
path to git-upload-pack on the remote
--depth <depth> create a shallow clone of that depth
'''
from docpie import docpie

if __name__ == '__main__':
    # docpie parses the module docstring above as the CLI specification and
    # returns the matched arguments as a dict
    print(docpie(__doc__, name='git.py'))
|
data/Parsely/streamparse/streamparse/dsl/__init__.py
|
"""
Python Storm Topology DSL
"""
from .stream import Grouping, Stream
from .topology import Topology
|
data/Piratenfraktion-Berlin/OwnTube/videoportal/BitTornadoABC/BitTornado/parseargs.py
|
from types import *
from cStringIO import StringIO
def splitLine(line, COLS=80, indent=10):
    """Word-wrap `line` to at most COLS columns, prefixing every output
    line with `indent` spaces.  When the requested indent would leave
    fewer than 15 usable columns, fall back to a single-space indent.
    """
    pad = " " * indent
    usable = COLS - (len(pad) + 1)
    if pad and usable < 15:
        usable = COLS - 2
        pad = " "
    pieces = []
    column = 0
    for word in line.split():
        if column == 0:
            # very first word: start the first indented line
            pieces.append(pad + word)
            column = len(word)
        elif column + len(word) >= usable:
            # word would overflow the current line: wrap
            pieces.append('\n' + pad + word)
            column = len(word)
        else:
            pieces.append(' ' + word)
            column += len(word) + 1
    return ''.join(pieces)
def formatDefinitions(options, COLS, presets = {}):
    # Render a human-readable usage listing: one "--name <arg>" header per
    # option followed by its wrapped doc string, with the (possibly
    # preset-overridden) default appended.
    # NOTE(review): the mutable default `presets={}` is only read, never
    # mutated, so the shared-default pitfall does not bite here.
    s = StringIO()
    for (longname, default, doc) in options:
        s.write('--' + longname + ' <arg>\n')
        default = presets.get(longname, default)
        # show integral defaults as plain ints (IntType/LongType are the
        # Python 2 `types` module constants)
        if type(default) in (IntType, LongType):
            try:
                default = int(default)
            except:
                pass
        if default is not None:
            doc += ' (defaults to ' + repr(default) + ')'
        s.write(splitLine(doc, COLS, 10))
        s.write('\n\n')
    return s.getvalue()
def usage(string):
    # Abort argument parsing by raising; callers catch ValueError and
    # display `string` to the user.
    raise ValueError(string)
def defaultargs(options):
    """Build the config dict of default values from `options`, a list of
    (longname, default, doc) triples.  Options whose default is None are
    required and therefore omitted here.
    """
    return dict(
        (name, value)
        for name, value, _doc in options
        if value is not None
    )
def parseargs(argv, options, minargs = None, maxargs = None, presets = {}):
    # Parse `argv` against `options` (a list of (longname, default, doc)
    # triples) and return (config_dict, positional_args).  Values are coerced
    # to the type of the option's default; None defaults mark required
    # options.  NOTE: Python 2 code (has_key, `except E, e`, long()).
    config = {}
    longkeyed = {}
    for option in options:
        longname, default, doc = option
        longkeyed[longname] = option
        config[longname] = default
    # presets override the declared defaults
    for longname in presets.keys():
        config[longname] = presets[longname]
    options = []
    args = []
    pos = 0
    while pos < len(argv):
        if argv[pos][:2] != '--':
            # bare token: positional argument
            args.append(argv[pos])
            pos += 1
        else:
            # every --key consumes exactly one following value token
            if pos == len(argv) - 1:
                usage('parameter passed in at end with no value')
            key, value = argv[pos][2:], argv[pos+1]
            pos += 2
            if not longkeyed.has_key(key):
                usage('unknown key --' + key)
            longname, default, doc = longkeyed[key]
            try:
                # coerce the string value to the type of the current default
                t = type(config[longname])
                if t is NoneType or t is StringType:
                    config[longname] = value
                elif t in (IntType, LongType):
                    config[longname] = long(value)
                elif t is FloatType:
                    config[longname] = float(value)
                else:
                    assert 0
            except ValueError, e:
                usage('wrong format of --%s - %s' % (key, str(e)))
    # options still None were never supplied and have no default: required
    for key, value in config.items():
        if value is None:
            usage("Option --%s is required." % key)
    if minargs is not None and len(args) < minargs:
        usage("Must supply at least %d args." % minargs)
    if maxargs is not None and len(args) > maxargs:
        usage("Too many args - %d max." % maxargs)
    return (config, args)
def test_parseargs():
    # Smoke tests for parseargs.
    # NOTE(review): the try/except blocks below only check that ValueError
    # is tolerated — they never assert that it was actually raised, so a
    # silently-passing call also "passes".  The first one
    # (parseargs([], [('a', 'x', '')])) in fact succeeds.
    assert parseargs(('d', '--a', 'pq', 'e', '--b', '3', '--c', '4.5', 'f'), (('a', 'x', ''), ('b', 1, ''), ('c', 2.3, ''))) == ({'a': 'pq', 'b': 3, 'c': 4.5}, ['d', 'e', 'f'])
    assert parseargs([], [('a', 'x', '')]) == ({'a': 'x'}, [])
    # a repeated option: the last occurrence wins
    assert parseargs(['--a', 'x', '--a', 'y'], [('a', '', '')]) == ({'a': 'y'}, [])
    try:
        parseargs([], [('a', 'x', '')])
    except ValueError:
        pass
    try:
        # unknown option --a
        parseargs(['--a', 'x'], [])
    except ValueError:
        pass
    try:
        # --a given with no value token after it
        parseargs(['--a'], [('a', 'x', '')])
    except ValueError:
        pass
    try:
        # fewer positional args than minargs
        parseargs([], [], 1, 2)
    except ValueError:
        pass
    assert parseargs(['x'], [], 1, 2) == ({}, ['x'])
    assert parseargs(['x', 'y'], [], 1, 2) == ({}, ['x', 'y'])
    try:
        # more positional args than maxargs
        parseargs(['x', 'y', 'z'], [], 1, 2)
    except ValueError:
        pass
    try:
        # float string where an int default demands an integer
        parseargs(['--a', '2.0'], [('a', 3, '')])
    except ValueError:
        pass
    try:
        # non-numeric string where a float default demands a float
        parseargs(['--a', 'z'], [('a', 2.1, '')])
    except ValueError:
        pass
|
data/SickRage/SickRage/lib/sqlalchemy/dialects/mssql/information_schema.py
|
from ... import Table, MetaData, Column
from ...types import String, Unicode, UnicodeText, Integer, TypeDecorator
from ... import cast
from ... import util
from ...sql import expression
from ...ext.compiler import compiles
ischema = MetaData()
class CoerceUnicode(TypeDecorator):
    """Bind-parameter type that decodes Python 2 byte strings to unicode
    using the dialect's configured encoding before they hit the driver."""
    impl = Unicode

    def process_bind_param(self, value, dialect):
        # only byte strings on py2 need decoding; everything else passes through
        if util.py2k and isinstance(value, util.binary_type):
            value = value.decode(dialect.encoding)
        return value

    def bind_expression(self, bindvalue):
        # wrap the bind so SQL Server 2005+ gets an explicit CAST to Unicode
        return _cast_on_2005(bindvalue)
class _cast_on_2005(expression.ColumnElement):
    """Marker element holding a bind value; compiled to CAST(... AS Unicode)
    on SQL Server 2005+ and to the plain bind on older versions."""
    def __init__(self, bindvalue):
        self.bindvalue = bindvalue
@compiles(_cast_on_2005)
def _compile(element, compiler, **kw):
    # Compiler hook for _cast_on_2005: pre-2005 servers don't support the
    # CAST form used here, so they receive the raw bind value instead.
    from . import base
    if compiler.dialect.server_version_info < base.MS_2005_VERSION:
        return compiler.process(element.bindvalue, **kw)
    else:
        return compiler.process(cast(element.bindvalue, Unicode), **kw)
# Reflected INFORMATION_SCHEMA views used by the MSSQL dialect.  Each Column
# maps the SQL column name to a lowercase Python-side `key`.

schemata = Table("SCHEMATA", ischema,
    Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
    Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
    Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
    schema="INFORMATION_SCHEMA")

tables = Table("TABLES", ischema,
    Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("TABLE_TYPE", String(convert_unicode=True), key="table_type"),
    schema="INFORMATION_SCHEMA")

columns = Table("COLUMNS", ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
    Column("IS_NULLABLE", Integer, key="is_nullable"),
    Column("DATA_TYPE", String, key="data_type"),
    Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
    Column("CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length"),
    Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
    Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
    Column("COLUMN_DEFAULT", Integer, key="column_default"),
    Column("COLLATION_NAME", String, key="collation_name"),
    schema="INFORMATION_SCHEMA")

constraints = Table("TABLE_CONSTRAINTS", ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
    Column("CONSTRAINT_TYPE", String(convert_unicode=True), key="constraint_type"),
    schema="INFORMATION_SCHEMA")

column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
    Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
    schema="INFORMATION_SCHEMA")

key_constraints = Table("KEY_COLUMN_USAGE", ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
    Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
    Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
    schema="INFORMATION_SCHEMA")

ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema,
    Column("CONSTRAINT_CATALOG", CoerceUnicode, key="constraint_catalog"),
    Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"),
    Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
    # BUGFIX: was "UNIQUE_CONSTRAINT_CATLOG" — INFORMATION_SCHEMA has no such
    # column, so any query selecting it would fail on the server.
    Column("UNIQUE_CONSTRAINT_CATALOG", CoerceUnicode,
           key="unique_constraint_catalog"),
    Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode,
           key="unique_constraint_schema"),
    Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode,
           key="unique_constraint_name"),
    Column("MATCH_OPTION", String, key="match_option"),
    Column("UPDATE_RULE", String, key="update_rule"),
    Column("DELETE_RULE", String, key="delete_rule"),
    schema="INFORMATION_SCHEMA")

views = Table("VIEWS", ischema,
    Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
    Column("CHECK_OPTION", String, key="check_option"),
    Column("IS_UPDATABLE", String, key="is_updatable"),
    schema="INFORMATION_SCHEMA")
|
data/IanLewis/kay/kay/auth/decorators.py
|
"""
A decorators related authentication.
:Copyright: (c) 2009 Accense Technology, Inc.,
Ian Lewis <IanMLewis@gmail.com>
All rights reserved.
:license: BSD, see LICENSE for more details.
"""
from functools import update_wrapper
from google.appengine.api import users
from werkzeug import redirect
from werkzeug.exceptions import Forbidden
from kay.utils import (
create_login_url, create_logout_url
)
from kay.utils.decorators import auto_adapt_to_methods
def login_required(func):
    """View decorator: anonymous users receive a 403 for XHR requests, or a
    redirect to the login URL otherwise; signed-in users fall through."""
    def wrapper(request, *args, **kwargs):
        if not request.user.is_anonymous():
            return func(request, *args, **kwargs)
        # Anonymous: an AJAX caller cannot usefully follow a login redirect.
        if request.is_xhr:
            return Forbidden()
        return redirect(create_login_url(request.url))
    update_wrapper(wrapper, func)
    return wrapper
login_required = auto_adapt_to_methods(login_required)
def admin_required(func):
    # View decorator: require an App Engine admin user.  Anonymous users are
    # redirected to login; signed-in non-admins get a 403 page that offers a
    # logout link (so they can switch accounts).
    def inner(request, *args, **kwargs):
        if not request.user.is_admin:
            if request.user.is_anonymous():
                return redirect(create_login_url(request.url))
            else:
                raise Forbidden(
                    description =
                    '<p>You don\'t have the permission to access the requested resource.'
                    ' It is either read-protected or not readable by the server.</p>'
                    ' Maybe you want <a href="%s">logout</a>?' %
                    create_logout_url(request.url)
                )
        return func(request, *args, **kwargs)
    update_wrapper(inner, func)
    return inner
admin_required = auto_adapt_to_methods(admin_required)
|
data/Parsely/pykafka/pykafka/balancedconsumer.py
|
from __future__ import division
"""
Author: Emmett Butler
"""
__license__ = """
Copyright 2015 Parse.ly, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ["BalancedConsumer"]
import itertools
import logging
import socket
import sys
import time
import traceback
from uuid import uuid4
import weakref
from kazoo.client import KazooClient
from kazoo.handlers.gevent import SequentialGeventHandler
from kazoo.exceptions import NoNodeException, NodeExistsError
from kazoo.recipe.watchers import ChildrenWatch
from .common import OffsetType
from .exceptions import KafkaException, PartitionOwnedError, ConsumerStoppedException
from .handlers import GEventHandler
from .simpleconsumer import SimpleConsumer
from .utils.compat import range, get_bytes, itervalues, iteritems, get_string
try:
from . import rdkafka
except ImportError:
rdkafka = False
log = logging.getLogger(__name__)
def _catch_thread_exception(fn):
"""Sets self._worker_exception when fn raises an exception"""
def wrapped(self, *args, **kwargs):
try:
ret = fn(self, *args, **kwargs)
except Exception:
self._worker_exception = sys.exc_info()
else:
return ret
return wrapped
class BalancedConsumer(object):
"""
A self-balancing consumer for Kafka that uses ZooKeeper to communicate
with other balancing consumers.
Maintains a single instance of SimpleConsumer, periodically using the
consumer rebalancing algorithm to reassign partitions to this
SimpleConsumer.
"""
def __init__(self,
             topic,
             cluster,
             consumer_group,
             fetch_message_max_bytes=1024 * 1024,
             num_consumer_fetchers=1,
             auto_commit_enable=False,
             auto_commit_interval_ms=60 * 1000,
             queued_max_messages=2000,
             fetch_min_bytes=1,
             fetch_wait_max_ms=100,
             offsets_channel_backoff_ms=1000,
             offsets_commit_max_retries=5,
             auto_offset_reset=OffsetType.EARLIEST,
             consumer_timeout_ms=-1,
             rebalance_max_retries=5,
             rebalance_backoff_ms=2 * 1000,
             zookeeper_connection_timeout_ms=6 * 1000,
             zookeeper_connect='127.0.0.1:2181',
             zookeeper=None,
             auto_start=True,
             reset_offset_on_start=False,
             post_rebalance_callback=None,
             use_rdkafka=False,
             compacted_topic=False):
    """Create a BalancedConsumer instance

    :param topic: The topic this consumer should consume
    :type topic: :class:`pykafka.topic.Topic`
    :param cluster: The cluster to which this consumer should connect
    :type cluster: :class:`pykafka.cluster.Cluster`
    :param consumer_group: The name of the consumer group this consumer
        should join.
    :type consumer_group: bytes
    :param fetch_message_max_bytes: The number of bytes of messages to
        attempt to fetch with each fetch request
    :type fetch_message_max_bytes: int
    :param num_consumer_fetchers: The number of workers used to make
        FetchRequests
    :type num_consumer_fetchers: int
    :param auto_commit_enable: If true, periodically commit to kafka the
        offset of messages already fetched by this consumer. This also
        requires that `consumer_group` is not `None`.
    :type auto_commit_enable: bool
    :param auto_commit_interval_ms: The frequency (in milliseconds) at which
        the consumer's offsets are committed to kafka. This setting is
        ignored if `auto_commit_enable` is `False`.
    :type auto_commit_interval_ms: int
    :param queued_max_messages: The maximum number of messages buffered for
        consumption in the internal
        :class:`pykafka.simpleconsumer.SimpleConsumer`
    :type queued_max_messages: int
    :param fetch_min_bytes: The minimum amount of data (in bytes) that the
        server should return for a fetch request. If insufficient data is
        available, the request will block until sufficient data is available.
    :type fetch_min_bytes: int
    :param fetch_wait_max_ms: The maximum amount of time (in milliseconds)
        that the server will block before answering a fetch request if
        there isn't sufficient data to immediately satisfy `fetch_min_bytes`.
    :type fetch_wait_max_ms: int
    :param offsets_channel_backoff_ms: Backoff time to retry failed offset
        commits and fetches.
    :type offsets_channel_backoff_ms: int
    :param offsets_commit_max_retries: The number of times the offset commit
        worker should retry before raising an error.
    :type offsets_commit_max_retries: int
    :param auto_offset_reset: What to do if an offset is out of range. This
        setting indicates how to reset the consumer's internal offset
        counter when an `OffsetOutOfRangeError` is encountered.
    :type auto_offset_reset: :class:`pykafka.common.OffsetType`
    :param consumer_timeout_ms: Amount of time (in milliseconds) the
        consumer may spend without messages available for consumption
        before returning None.
    :type consumer_timeout_ms: int
    :param rebalance_max_retries: The number of times the rebalance should
        retry before raising an error.
    :type rebalance_max_retries: int
    :param rebalance_backoff_ms: Backoff time (in milliseconds) between
        retries during rebalance.
    :type rebalance_backoff_ms: int
    :param zookeeper_connection_timeout_ms: The maximum time (in
        milliseconds) that the consumer waits while establishing a
        connection to zookeeper.
    :type zookeeper_connection_timeout_ms: int
    :param zookeeper_connect: Comma-separated (ip1:port1,ip2:port2) strings
        indicating the zookeeper nodes to which to connect.
    :type zookeeper_connect: str
    :param zookeeper: A KazooClient connected to a Zookeeper instance.
        If provided, `zookeeper_connect` is ignored.
    :type zookeeper: :class:`kazoo.client.KazooClient`
    :param auto_start: Whether the consumer should begin communicating
        with zookeeper after __init__ is complete. If false, communication
        can be started with `start()`.
    :type auto_start: bool
    :param reset_offset_on_start: Whether the consumer should reset its
        internal offset counter to `self._auto_offset_reset` and commit that
        offset immediately upon starting up
    :type reset_offset_on_start: bool
    :param post_rebalance_callback: A function to be called when a rebalance is
        in progress. This function should accept three arguments: the
        :class:`pykafka.balancedconsumer.BalancedConsumer` instance that just
        completed its rebalance, a dict of partitions that it owned before the
        rebalance, and a dict of partitions it owns after the rebalance. These dicts
        map partition ids to the most recently known offsets for those partitions.
        This function can optionally return a dictionary mapping partition ids to
        offsets. If it does, the consumer will reset its offsets to the supplied
        values before continuing consumption.
        Note that the BalancedConsumer is in a poorly defined state at
        the time this callback runs, so that accessing its properties
        (such as `held_offsets` or `partitions`) might yield confusing
        results. Instead, the callback should really rely on the
        provided partition-id dicts, which are well-defined.
    :type post_rebalance_callback: function
    :param use_rdkafka: Use librdkafka-backed consumer if available
    :type use_rdkafka: bool
    :param compacted_topic: Set to read from a compacted topic. Forces
        consumer to use less stringent message ordering logic because compacted
        topics do not provide offsets in strict incrementing order.
    :type compacted_topic: bool
    """
    self._cluster = cluster
    if not isinstance(consumer_group, bytes):
        raise TypeError("consumer_group must be a bytes object")
    self._consumer_group = consumer_group
    self._topic = topic

    # plain storage of the tuning knobs; they are forwarded verbatim to the
    # internal SimpleConsumer in _get_internal_consumer()
    self._auto_commit_enable = auto_commit_enable
    self._auto_commit_interval_ms = auto_commit_interval_ms
    self._fetch_message_max_bytes = fetch_message_max_bytes
    self._fetch_min_bytes = fetch_min_bytes
    self._rebalance_max_retries = rebalance_max_retries
    self._num_consumer_fetchers = num_consumer_fetchers
    self._queued_max_messages = queued_max_messages
    self._fetch_wait_max_ms = fetch_wait_max_ms
    self._rebalance_backoff_ms = rebalance_backoff_ms
    self._consumer_timeout_ms = consumer_timeout_ms
    self._offsets_channel_backoff_ms = offsets_channel_backoff_ms
    self._offsets_commit_max_retries = offsets_commit_max_retries
    self._auto_offset_reset = auto_offset_reset
    self._zookeeper_connect = zookeeper_connect
    self._zookeeper_connection_timeout_ms = zookeeper_connection_timeout_ms
    self._reset_offset_on_start = reset_offset_on_start
    self._post_rebalance_callback = post_rebalance_callback
    self._generation_id = -1
    self._running = False
    # worker threads report errors via these two fields
    # (see _raise_worker_exceptions)
    self._worker_exception = None
    self._worker_trace_logged = False
    self._is_compacted_topic = compacted_topic

    # rdkafka is an optional extra; gevent and rdkafka are incompatible
    if not rdkafka and use_rdkafka:
        raise ImportError("use_rdkafka requires rdkafka to be installed")
    if isinstance(self._cluster.handler, GEventHandler) and use_rdkafka:
        raise ImportError("use_rdkafka cannot be used with gevent")
    self._use_rdkafka = rdkafka and use_rdkafka

    self._rebalancing_lock = cluster.handler.Lock()
    self._consumer = None
    # unique id registered in zookeeper to identify this consumer instance
    self._consumer_id = get_bytes("{hostname}:{uuid}".format(
        hostname=socket.gethostname(),
        uuid=uuid4()
    ))
    self._setting_watches = True

    self._topic_path = '/consumers/{group}/owners/{topic}'.format(
        group=self._consumer_group,
        topic=self._topic.name)
    self._consumer_id_path = '/consumers/{group}/ids'.format(
        group=self._consumer_group)

    self._zookeeper = None
    # only tear down the zk connection in stop() if we created it ourselves
    self._owns_zookeeper = zookeeper is None
    if zookeeper is not None:
        self._zookeeper = zookeeper
    if auto_start is True:
        self.start()
def __del__(self):
    # Best-effort cleanup at garbage collection; stop() is idempotent-safe
    # here because _running gates it.
    log.debug("Finalising {}".format(self))
    if self._running:
        self.stop()
def __repr__(self):
    # e.g. "<pykafka.balancedconsumer.BalancedConsumer at 0x... (consumer_group=b'g')>"
    return "<{module}.{name} at {id_} (consumer_group={group})>".format(
        module=self.__class__.__module__,
        name=self.__class__.__name__,
        id_=hex(id(self)),
        group=self._consumer_group
    )
def _raise_worker_exceptions(self):
    """Raises exceptions encountered on worker threads"""
    if self._worker_exception is not None:
        _, ex, tb = self._worker_exception
        # log the traceback only once per stored exception, but re-raise
        # on every call so user threads keep seeing the failure
        if not self._worker_trace_logged:
            self._worker_trace_logged = True
            log.error("Exception encountered in worker thread:\n%s",
                      "".join(traceback.format_tb(tb)))
        raise ex
@property
def topic(self):
    """The topic this consumer consumes"""
    return self._topic
@property
def partitions(self):
    """A list of the partitions that this consumer consumes"""
    # empty dict before the first internal consumer has been created
    return self._consumer.partitions if self._consumer else dict()
@property
def _partitions(self):
    """Convenient shorthand for set of partitions internally held"""
    return set(
        [] if self.partitions is None else itervalues(self.partitions))
@property
def held_offsets(self):
    """Return a map from partition id to held offset for each partition"""
    # None (not {}) before the internal consumer exists
    if not self._consumer:
        return None
    return self._consumer.held_offsets
def start(self):
    """Open connections and join a consumer group."""
    try:
        # connect to zookeeper unless the caller supplied a client
        if self._zookeeper is None:
            self._setup_zookeeper(self._zookeeper_connect,
                                  self._zookeeper_connection_timeout_ms)
        self._zookeeper.ensure_path(self._topic_path)
        self._add_self()
        self._running = True
        self._set_watches()
        self._rebalance()
    except Exception:
        # any failure during startup leaves zk nodes behind; stop() cleans up
        log.exception("Stopping consumer in response to error")
        self.stop()
def stop(self):
    """Close the zookeeper connection and stop consuming.

    This method should be called as part of a graceful shutdown process.
    """
    log.debug("Stopping {}".format(self))
    # flip _running under the rebalance lock so an in-flight rebalance
    # can't race with shutdown
    with self._rebalancing_lock:
        self._running = False
    if self._consumer is not None:
        self._consumer.stop()
    if self._owns_zookeeper:
        # this consumer created the zk connection: tear it down wholesale,
        # which also removes our ephemeral nodes
        self._zookeeper.stop()
    else:
        # shared zk client: explicitly release owned partitions and our
        # registration node
        self._remove_partitions(self._get_held_partitions())
        try:
            self._zookeeper.delete(self._path_self)
        except NoNodeException:
            pass
def _setup_zookeeper(self, zookeeper_connect, timeout):
    """Open a connection to a ZooKeeper host.

    :param zookeeper_connect: The 'ip:port' address of the zookeeper node to
        which to connect.
    :type zookeeper_connect: str
    :param timeout: Connection timeout (in milliseconds)
    :type timeout: int
    """
    # kazoo expects seconds, our API takes milliseconds
    kazoo_kwargs = {'timeout': timeout / 1000}
    # a gevent-based cluster handler requires kazoo's gevent handler too
    if isinstance(self._cluster.handler, GEventHandler):
        kazoo_kwargs['handler'] = SequentialGeventHandler()
    self._zookeeper = KazooClient(zookeeper_connect, **kazoo_kwargs)
    self._zookeeper.start()
def _setup_internal_consumer(self, partitions=None, start=True):
    """Instantiate an internal SimpleConsumer instance

    Returns False when the post-rebalance callback failed, True otherwise.
    """
    if partitions is None:
        partitions = []
    # only replace the internal consumer when the partition set changed
    if partitions != self._partitions:
        cns = self._get_internal_consumer(partitions=list(partitions), start=start)
        if self._post_rebalance_callback is not None:
            old_offsets = (self._consumer.held_offsets
                           if self._consumer else dict())
            new_offsets = cns.held_offsets
            try:
                reset_offsets = self._post_rebalance_callback(
                    self, old_offsets, new_offsets)
            except Exception:
                # surface the callback failure via the worker-exception
                # channel rather than crashing the rebalance thread
                log.exception("post rebalance callback threw an exception")
                self._worker_exception = sys.exc_info()
                return False
            # the callback may dictate offsets to resume from
            if reset_offsets:
                cns.reset_offsets(partition_offsets=[
                    (cns.partitions[id_], offset) for
                    (id_, offset) in iteritems(reset_offsets)])
        self._consumer = cns
    return True
def _get_internal_consumer(self, partitions=None, start=True):
    """Instantiate a SimpleConsumer for internal use.

    If there is already a SimpleConsumer instance held by this object,
    disable its workers and mark it for garbage collection before
    creating a new one.
    """
    if partitions is None:
        partitions = []
    reset_offset_on_start = self._reset_offset_on_start
    if self._consumer is not None:
        self._consumer.stop()
        # only honor reset_offset_on_start for the very first consumer;
        # replacements during rebalance must keep committed offsets
        reset_offset_on_start = False
    # pick the rdkafka-backed implementation when enabled and available
    Cls = (rdkafka.RdKafkaSimpleConsumer
           if self._use_rdkafka else SimpleConsumer)
    return Cls(
        self._topic,
        self._cluster,
        consumer_group=self._consumer_group,
        partitions=partitions,
        auto_commit_enable=self._auto_commit_enable,
        auto_commit_interval_ms=self._auto_commit_interval_ms,
        fetch_message_max_bytes=self._fetch_message_max_bytes,
        fetch_min_bytes=self._fetch_min_bytes,
        num_consumer_fetchers=self._num_consumer_fetchers,
        queued_max_messages=self._queued_max_messages,
        fetch_wait_max_ms=self._fetch_wait_max_ms,
        consumer_timeout_ms=self._consumer_timeout_ms,
        offsets_channel_backoff_ms=self._offsets_channel_backoff_ms,
        offsets_commit_max_retries=self._offsets_commit_max_retries,
        auto_offset_reset=self._auto_offset_reset,
        reset_offset_on_start=reset_offset_on_start,
        auto_start=start,
        compacted_topic=self._is_compacted_topic,
        generation_id=self._generation_id,
        consumer_id=self._consumer_id
    )
def _decide_partitions(self, participants, consumer_id=None):
    """Decide which partitions belong to this consumer.

    Uses the consumer rebalancing algorithm described here
    http://kafka.apache.org/documentation.html

    It is very important that the participants array is sorted,
    since this algorithm runs on each consumer and indexes into the same
    array. The same array index operation must return the same
    result on each consumer.

    :param participants: Sorted list of ids of all other consumers in this
        consumer group.
    :type participants: Iterable of `bytes`
    :param consumer_id: The ID of the consumer for which to generate a partition
        assignment. Defaults to `self._consumer_id`
    """
    # Deterministic partition ordering -- identical on every consumer.
    slug = lambda part: '-'.join([str(part.topic.name), str(part.leader.id), str(part.id)])
    ordered_parts = sorted(self._topic.partitions.values(), key=slug)
    sorted_participants = sorted(participants)
    idx = sorted_participants.index(consumer_id or self._consumer_id)
    # Evenly divide the partitions; the first `remainder` consumers each
    # take one extra partition.
    quotient, remainder = divmod(len(ordered_parts), len(sorted_participants))
    start = quotient * idx + min(idx, remainder)
    count = quotient + (1 if idx < remainder else 0)
    owned = set(ordered_parts[start:start + count])
    log.info('%s: Balancing %i participants for %i partitions. Owning %i partitions.',
             self._consumer_id, len(sorted_participants), len(ordered_parts),
             len(owned))
    log.debug('My partitions: %s', [slug(p) for p in owned])
    return owned
def _get_participants(self):
    """Use zookeeper to get the other consumers of this topic.

    :return: A sorted list of the ids of other consumers of this
        consumer's topic
    """
    try:
        consumer_ids = self._zookeeper.get_children(self._consumer_id_path)
    except NoNodeException:
        # No registration znode at all -- the group has no members yet.
        log.debug("Consumer group doesn't exist. "
                  "No participants to find")
        return []
    participants = []
    for consumer_id in consumer_ids:
        try:
            topic, stat = self._zookeeper.get("%s/%s" % (self._consumer_id_path, consumer_id))
            # Only consumers registered against our topic count.
            if topic == self._topic.name:
                participants.append(get_bytes(consumer_id))
        except NoNodeException:
            # Consumer disappeared between listing and reading; skip it.
            pass
    return sorted(participants)
def _build_watch_callback(self, fn, proxy):
"""Return a function that's safe to use as a ChildrenWatch callback
Fixes the issue from https://github.com/Parsely/pykafka/issues/345
"""
def _callback(children):
try:
proxy.__repr__()
except ReferenceError:
return False
return fn(proxy, children)
return _callback
def _set_watches(self):
    """Set watches in zookeeper that will trigger rebalances.

    Rebalances should be triggered whenever a broker, topic, or consumer
    znode is changed in zookeeper. This ensures that the balance of the
    consumer group remains up-to-date with the current state of the
    cluster.
    """
    # Use a weakref proxy so the long-lived watch callbacks do not keep
    # this consumer object alive after it is otherwise unreferenced.
    proxy = weakref.proxy(self)
    _brokers_changed = self._build_watch_callback(BalancedConsumer._brokers_changed, proxy)
    _topics_changed = self._build_watch_callback(BalancedConsumer._topics_changed, proxy)
    _consumers_changed = self._build_watch_callback(BalancedConsumer._consumers_changed, proxy)
    # Flag suppresses rebalances triggered by the initial watch registration.
    self._setting_watches = True
    broker_path = '/brokers/ids'
    try:
        self._broker_watcher = ChildrenWatch(
            self._zookeeper, broker_path,
            _brokers_changed
        )
    except NoNodeException:
        # Missing /brokers/ids almost certainly means no Kafka cluster.
        raise Exception(
            'The broker_path "%s" does not exist in your '
            'ZooKeeper cluster -- is your Kafka cluster running?'
            % broker_path)
    self._topics_watcher = ChildrenWatch(
        self._zookeeper,
        '/brokers/topics',
        _topics_changed
    )
    self._consumer_watcher = ChildrenWatch(
        self._zookeeper, self._consumer_id_path,
        _consumers_changed
    )
    self._setting_watches = False
def _add_self(self):
    """Register this consumer in zookeeper.

    This method ensures that the number of participants is at most the
    number of partitions.
    """
    # Refuse to join when the group is already saturated: a consumer
    # beyond the partition count would own nothing.
    if len(self._get_participants()) >= len(self._topic.partitions):
        raise KafkaException("Cannot add consumer: more consumers than partitions")
    # Ephemeral znode: registration disappears if this process dies.
    self._zookeeper.create(
        self._path_self, self._topic.name, ephemeral=True, makepath=True)
@property
def _path_self(self):
    """Path where this consumer should be registered in zookeeper"""
    consumer_id_str = get_string(self._consumer_id)
    return '{path}/{id_}'.format(path=self._consumer_id_path, id_=consumer_id_str)
def _update_member_assignment(self):
    """Decide and assign new partitions for this consumer.

    Retries up to ``self._rebalance_max_retries`` times when another
    consumer still owns a partition this consumer should acquire.
    """
    for i in range(self._rebalance_max_retries):
        try:
            # Build the participant list, registering ourselves if needed.
            participants = self._get_participants()
            if self._consumer_id not in participants:
                self._add_self()
                participants.append(self._consumer_id)
            new_partitions = self._decide_partitions(participants)
            if not new_partitions:
                log.warning("No partitions assigned to consumer %s",
                            self._consumer_id)
            # Reconcile zookeeper ownership with the newly decided set:
            # release what we no longer own, claim what we gained.
            current_zk_parts = self._get_held_partitions()
            self._remove_partitions(current_zk_parts - new_partitions)
            self._add_partitions(new_partitions - current_zk_parts)
            if self._setup_internal_consumer(new_partitions):
                log.info('Rebalancing Complete.')
                break
        except PartitionOwnedError as ex:
            if i == self._rebalance_max_retries - 1:
                log.warning('Failed to acquire partition %s after %d retries.',
                            ex.partition, i)
                raise
            log.info('Unable to acquire partition %s. Retrying', ex.partition)
            # Linearly increasing backoff before the next attempt.
            self._cluster.handler.sleep(i * (self._rebalance_backoff_ms / 1000))
def _rebalance(self):
    """Start the rebalancing process for this consumer

    This method is called whenever a zookeeper watch is triggered.
    """
    # Persist current offsets before partition ownership changes hands.
    if self._consumer is not None:
        self.commit_offsets()
    # Serialize rebalances: concurrent watch events must not interleave.
    with self._rebalancing_lock:
        if not self._running:
            raise ConsumerStoppedException
        log.info('Rebalancing consumer "%s" for topic "%s".' % (
            self._consumer_id, self._topic.name))
        self._update_member_assignment()
def _path_from_partition(self, p):
"""Given a partition, return its path in zookeeper.
:type p: :class:`pykafka.partition.Partition`
"""
return "%s/%s-%s" % (self._topic_path, p.leader.id, p.id)
def _remove_partitions(self, partitions):
"""Remove partitions from the zookeeper registry for this consumer.
:param partitions: The partitions to remove.
:type partitions: Iterable of :class:`pykafka.partition.Partition`
"""
for p in partitions:
self._zookeeper.delete(self._path_from_partition(p))
def _add_partitions(self, partitions):
    """Add partitions to the zookeeper registry for this consumer.

    :param partitions: The partitions to add.
    :type partitions: Iterable of :class:`pykafka.partition.Partition`
    """
    for partition in partitions:
        try:
            # Ephemeral znode: ownership disappears if this consumer dies.
            self._zookeeper.create(
                self._path_from_partition(partition),
                value=get_bytes(self._consumer_id),
                ephemeral=True
            )
        except NodeExistsError:
            # Another consumer already owns it; the caller retries later.
            raise PartitionOwnedError(partition)
def _get_held_partitions(self):
    """Build a set of partitions zookeeper says we own"""
    zk_partition_ids = set()
    all_partitions = self._zookeeper.get_children(self._topic_path)
    for partition_slug in all_partitions:
        try:
            owner_id, stat = self._zookeeper.get(
                '{path}/{slug}'.format(
                    path=self._topic_path, slug=partition_slug))
            if owner_id == get_bytes(self._consumer_id):
                # Slug format is "<leader_id>-<partition_id>"; keep the
                # partition id only.
                zk_partition_ids.add(int(partition_slug.split('-')[1]))
        except NoNodeException:
            # Znode vanished between listing and reading; treat as unowned.
            pass
    return set(self._topic.partitions[_id] for _id in zk_partition_ids)
@_catch_thread_exception
def _brokers_changed(self, brokers):
    # Zookeeper watch callback; returning False unregisters the watch.
    if not self._running:
        return False
    # Ignore events generated while watches are first being installed.
    if self._setting_watches:
        return
    log.debug("Rebalance triggered by broker change ({})".format(
        self._consumer_id))
    self._rebalance()
@_catch_thread_exception
def _consumers_changed(self, consumers):
    # Zookeeper watch callback; returning False unregisters the watch.
    if not self._running:
        return False
    # Ignore events generated while watches are first being installed.
    if self._setting_watches:
        return
    log.debug("Rebalance triggered by consumer change ({})".format(
        self._consumer_id))
    self._rebalance()
@_catch_thread_exception
def _topics_changed(self, topics):
    # Zookeeper watch callback; returning False unregisters the watch.
    if not self._running:
        return False
    # Ignore events generated while watches are first being installed.
    if self._setting_watches:
        return
    log.debug("Rebalance triggered by topic change ({})".format(
        self._consumer_id))
    self._rebalance()
def reset_offsets(self, partition_offsets=None):
    """Reset offsets for the specified partitions

    Issue an OffsetRequest for each partition and set the appropriate
    returned offset in the consumer's internal offset counter.

    :param partition_offsets: (`partition`, `timestamp_or_offset`) pairs to
        reset where `partition` is the partition for which to reset the offset
        and `timestamp_or_offset` is EITHER the timestamp of the message
        whose offset the partition should have OR the new offset the
        partition should have

    :type partition_offsets: Sequence of tuples of the form
        (:class:`pykafka.partition.Partition`, int)

    NOTE: If an instance of `timestamp_or_offset` is treated by kafka as
    an invalid offset timestamp, this function directly sets the consumer's
    internal offset counter for that partition to that instance of
    `timestamp_or_offset`. On the next fetch request, the consumer attempts
    to fetch messages starting from that offset. See the following link
    for more information on what kafka treats as a valid offset timestamp:
    https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol

    :raises ConsumerStoppedException: if the internal consumer is stopped.
    """
    # Surface any exception raised on a background worker thread first.
    self._raise_worker_exceptions()
    if not self._consumer:
        raise ConsumerStoppedException("Internal consumer is stopped")
    # Delegate the actual reset to the wrapped simple consumer.
    self._consumer.reset_offsets(partition_offsets=partition_offsets)
def consume(self, block=True):
    """Get one message from the consumer

    Returns `None` when no message arrives before the consumer timeout.

    :param block: Whether to block while waiting for a message
    :type block: bool
    """
    def consumer_timed_out():
        """Indicates whether the consumer has received messages recently"""
        if self._consumer_timeout_ms == -1:
            # -1 disables the consumer timeout entirely.
            return False
        disp = (time.time() - self._last_message_time) * 1000.0
        return disp > self._consumer_timeout_ms
    message = None
    self._last_message_time = time.time()
    while message is None and not consumer_timed_out():
        # Propagate errors raised on background worker threads.
        self._raise_worker_exceptions()
        try:
            message = self._consumer.consume(block=block)
        except (ConsumerStoppedException, AttributeError):
            # AttributeError presumably covers self._consumer being None
            # during a rebalance -- TODO confirm. Only re-raise when this
            # balanced consumer has actually been stopped.
            if not self._running:
                raise ConsumerStoppedException
            continue
        if message:
            self._last_message_time = time.time()
        if not block:
            return message
    return message
def __iter__(self):
"""Yield an infinite stream of messages until the consumer times out"""
while True:
message = self.consume(block=True)
if not message:
raise StopIteration
yield message
def commit_offsets(self):
    """Commit offsets for this consumer's partitions

    Uses the offset commit/fetch API

    :raises KafkaException: if the internal consumer has not been started.
    """
    # Surface any exception raised on a background worker thread first.
    self._raise_worker_exceptions()
    if not self._consumer:
        raise KafkaException("Cannot commit offsets - consumer not started")
    return self._consumer.commit_offsets()
|
data/OpenSlides/OpenSlides/tests/integration/core/test_views.py
|
import json
from django.core.urlresolvers import reverse
from django.dispatch import receiver
from rest_framework import status
from rest_framework.test import APIClient
from openslides import __version__ as version
from openslides.core.config import ConfigVariable, config
from openslides.core.models import CustomSlide, Projector
from openslides.core.signals import config_signal
from openslides.utils.rest_api import ValidationError
from openslides.utils.test import TestCase
class ProjectorAPI(TestCase):
    """
    Tests for the projector detail endpoint.
    """
    def test_slide_on_default_projector(self):
        # A valid custom slide referenced by the projector config should be
        # expanded into a full element description in the API response.
        self.client.login(username='admin', password='admin')
        customslide = CustomSlide.objects.create(title='title_que1olaish5Wei7que6i', text='text_aishah8Eh7eQuie5ooji')
        default_projector = Projector.objects.get(pk=1)
        default_projector.config = {
            'aae4a07b26534cfb9af4232f361dce73': {'name': 'core/customslide', 'id': customslide.id}}
        default_projector.save()
        response = self.client.get(reverse('projector-detail', args=['1']))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(json.loads(response.content.decode()), {
            'id': 1,
            'elements': {
                'aae4a07b26534cfb9af4232f361dce73':
                    {'id': customslide.id,
                     'uuid': 'aae4a07b26534cfb9af4232f361dce73',
                     'name': 'core/customslide'}},
            'scale': 0,
            'scroll': 0})

    def test_invalid_slide_on_default_projector(self):
        # An unknown slide name yields an error entry in the element map
        # rather than an HTTP error status.
        self.client.login(username='admin', password='admin')
        default_projector = Projector.objects.get(pk=1)
        default_projector.config = {
            'fc6ef43b624043068c8e6e7a86c5a1b0': {'name': 'invalid_slide'}}
        default_projector.save()
        response = self.client.get(reverse('projector-detail', args=['1']))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(json.loads(response.content.decode()), {
            'id': 1,
            'elements': {
                'fc6ef43b624043068c8e6e7a86c5a1b0':
                    {'name': 'invalid_slide',
                     'uuid': 'fc6ef43b624043068c8e6e7a86c5a1b0',
                     'error': 'Projector element does not exist.'}},
            'scale': 0,
            'scroll': 0})
class VersionView(TestCase):
    """
    Tests the version info view.
    """
    def test_get(self):
        # The endpoint reports the OpenSlides version plus installed plugins;
        # the plugin metadata below comes from the test environment setup.
        self.client.login(username='admin', password='admin')
        response = self.client.get(reverse('core_version'))
        self.assertEqual(json.loads(response.content.decode()), {
            'openslides_version': version,
            'plugins': [
                {'verbose_name': 'OpenSlides Test Plugin',
                 'description': 'This is a test plugin for OpenSlides.',
                 'version': 'unknown'}]})
class ConfigViewSet(TestCase):
    """
    Tests requests to deal with config variables.
    """
    def test_retrieve(self):
        # A stored config value is returned as a key/value pair.
        self.client.login(username='admin', password='admin')
        config['test_var_aeW3Quahkah1phahCheo'] = 'test_value_Oovoojieme7eephaed2A'
        response = self.client.get(reverse('config-detail', args=['test_var_aeW3Quahkah1phahCheo']))
        self.assertEqual(
            response.data,
            {'key': 'test_var_aeW3Quahkah1phahCheo',
             'value': 'test_value_Oovoojieme7eephaed2A'})

    def test_update(self):
        # A PUT with a valid value updates the stored config.
        self.client = APIClient()
        self.client.login(username='admin', password='admin')
        response = self.client.put(
            reverse('config-detail', args=['test_var_Xeiizi7ooH8Thuk5aida']),
            {'value': 'test_value_Phohx3oopeichaiTheiw'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(config['test_var_Xeiizi7ooH8Thuk5aida'], 'test_value_Phohx3oopeichaiTheiw')

    def test_update_wrong_datatype(self):
        # A non-convertible string sent to an integer field is rejected.
        self.client = APIClient()
        self.client.login(username='admin', password='admin')
        response = self.client.put(
            reverse('config-detail', args=['test_var_ohhii4iavoh5Phoh5ahg']),
            {'value': 'test_value_string'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {'detail': "Wrong datatype. Expected <class 'int'>, got <class 'str'>."})

    def test_update_wrong_datatype_that_can_be_converted(self):
        """
        Try to send a string that can be converted to an integer to an integer
        field.
        """
        self.client = APIClient()
        self.client.login(username='admin', password='admin')
        response = self.client.put(
            reverse('config-detail', args=['test_var_ohhii4iavoh5Phoh5ahg']),
            {'value': '12345'})
        self.assertEqual(response.status_code, 200)

    def test_update_good_choice(self):
        # A value from the declared choices is accepted and stored.
        self.client = APIClient()
        self.client.login(username='admin', password='admin')
        response = self.client.put(
            reverse('config-detail', args=['test_var_wei0Rei9ahzooSohK1ph']),
            {'value': 'key_2_yahb2ain1aeZ1lea1Pei'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(config['test_var_wei0Rei9ahzooSohK1ph'], 'key_2_yahb2ain1aeZ1lea1Pei')

    def test_update_bad_choice(self):
        # A value outside the declared choices is rejected with 400.
        self.client = APIClient()
        self.client.login(username='admin', password='admin')
        response = self.client.put(
            reverse('config-detail', args=['test_var_wei0Rei9ahzooSohK1ph']),
            {'value': 'test_value_bad_string'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {'detail': 'Invalid input. Choice does not match.'})

    def test_update_validator_ok(self):
        # A value passing the custom validator is accepted.
        self.client = APIClient()
        self.client.login(username='admin', password='admin')
        response = self.client.put(
            reverse('config-detail', args=['test_var_Hi7Oje8Oith7goopeeng']),
            {'value': 'valid_string'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(config['test_var_Hi7Oje8Oith7goopeeng'], 'valid_string')

    def test_update_validator_invalid(self):
        # The custom validator rejects this value with a 400 response.
        self.client = APIClient()
        self.client.login(username='admin', password='admin')
        response = self.client.put(
            reverse('config-detail', args=['test_var_Hi7Oje8Oith7goopeeng']),
            {'value': 'invalid_string'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {'detail': 'Invalid input.'})

    def test_update_only_with_key(self):
        # A PUT without a value payload is rejected with 400.
        self.client = APIClient()
        self.client.login(username='admin', password='admin')
        response = self.client.put(
            reverse('config-detail', args=['test_var_Xeiizi7ooH8Thuk5aida']))
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {'detail': 'Invalid input. Config value is missing.'})

    def test_metadata_with_hidden(self):
        # Hidden config variables must not appear in the OPTIONS metadata.
        self.client.login(username='admin', password='admin')
        response = self.client.options(reverse('config-list'))
        filter_obj = filter(
            lambda item: item['key'] == 'test_var_pud2zah2teeNaiP7IoNa',
            response.data['config_groups'][0]['subgroups'][0]['items'])
        self.assertEqual(len(list(filter_obj)), 0)
def validator_for_testing(value):
    """
    Validator for testing. Raises ValidationError for the one magic
    invalid value; accepts everything else silently.
    """
    invalid = 'invalid_string'
    if value == invalid:
        raise ValidationError({'detail': 'Invalid input.'})
@receiver(config_signal, dispatch_uid='set_simple_config_view_integration_config_test')
def set_simple_config_view_integration_config_test(sender, **kwargs):
    """
    Sets a simple config view with some config variables but without
    grouping.
    """
    # Plain string variable with a label.
    yield ConfigVariable(
        name='test_var_aeW3Quahkah1phahCheo',
        default_value=None,
        label='test_label_aeNahsheu8phahk8taYo')
    # String variable used by the plain update tests.
    yield ConfigVariable(
        name='test_var_Xeiizi7ooH8Thuk5aida',
        default_value='')
    # Integer variable used by the datatype tests.
    yield ConfigVariable(
        name='test_var_ohhii4iavoh5Phoh5ahg',
        default_value=0,
        input_type='integer')
    # Choice variable used by the good/bad choice tests.
    yield ConfigVariable(
        name='test_var_wei0Rei9ahzooSohK1ph',
        default_value='key_1_Queit2juchoocos2Vugh',
        input_type='choice',
        choices=(
            {'value': 'key_1_Queit2juchoocos2Vugh', 'display_name': 'label_1_Queit2juchoocos2Vugh'},
            {'value': 'key_2_yahb2ain1aeZ1lea1Pei', 'display_name': 'label_2_yahb2ain1aeZ1lea1Pei'}))
    # Variable with a custom validator (validator_for_testing).
    yield ConfigVariable(
        name='test_var_Hi7Oje8Oith7goopeeng',
        default_value='',
        validators=(validator_for_testing,))
    # Hidden variable; must not show up in the OPTIONS metadata.
    yield ConfigVariable(
        name='test_var_pud2zah2teeNaiP7IoNa',
        default_value=None,
        label='test_label_xaing7eefaePheePhei6',
        hidden=True)
|
data/StackStorm/st2/st2common/st2common/util/uid.py
|
"""
Module containing model UID related utility functions.
"""
from st2common.models.db.stormbase import UIDFieldMixin
__all__ = [
'parse_uid'
]
def parse_uid(uid):
    """
    Parse UID string.

    :return: (ResourceType, uid_remainder)
    :rtype: ``tuple``
    """
    separator = UIDFieldMixin.UID_SEPARATOR
    if separator not in uid:
        raise ValueError('Invalid uid: %s' % (uid))
    parts = uid.split(separator)
    if len(parts) < 2:
        raise ValueError('Invalid or malformed uid: %s' % (uid))
    # First component names the resource type; the rest is opaque payload.
    return (parts[0], parts[1:])
|
data/Pylons/substanced/substanced/sdi/views/login.py
|
from pyramid.httpexceptions import (
HTTPForbidden,
HTTPFound
)
from pyramid.renderers import get_renderer
from pyramid.session import check_csrf_token
from pyramid.security import (
remember,
forget,
Authenticated,
NO_PERMISSION_REQUIRED,
)
from ...util import get_oid
from .. import mgmt_view
from substanced.interfaces import IUserLocator
from substanced.principal import DefaultUserLocator
from substanced.event import LoggedIn
@mgmt_view(
    name='login',
    renderer='templates/login.pt',
    tab_condition=False,
    permission=NO_PERMISSION_REQUIRED
)
@mgmt_view(
    renderer='templates/login.pt',
    context=HTTPForbidden,
    permission=NO_PERMISSION_REQUIRED,
    tab_condition=False
)
@mgmt_view(
    renderer='templates/forbidden.pt',
    context=HTTPForbidden,
    permission=NO_PERMISSION_REQUIRED,
    effective_principals=Authenticated,
    tab_condition=False
)
def login(context, request):
    """Render the SDI login form and authenticate submitted credentials.

    Also registered as the HTTPForbidden view: anonymous users get the
    login form, already-authenticated users get the forbidden template.
    Returns an HTTPFound redirect to ``came_from`` on successful login.
    """
    login_url = request.sdiapi.mgmt_path(request.context, 'login')
    referrer = request.url
    if '/auditstream-sse' in referrer:
        # The SSE endpoint handles auth itself; never show it a login form.
        return HTTPForbidden()
    if login_url in referrer:
        # Don't use the login form itself as came_from (redirect loop);
        # send the user to the management root instead.
        referrer = request.sdiapi.mgmt_path(request.virtual_root)
    came_from = request.session.setdefault('sdi.came_from', referrer)
    login = ''
    password = ''
    if 'form.submitted' in request.params:
        try:
            check_csrf_token(request)
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt. check_csrf_token raises
            # BadCSRFToken (an Exception subclass) on failure.
            request.sdiapi.flash('Failed login (CSRF)', 'danger')
        else:
            login = request.params['login']
            password = request.params['password']
            adapter = request.registry.queryMultiAdapter(
                (context, request),
                IUserLocator
            )
            if adapter is None:
                adapter = DefaultUserLocator(context, request)
            user = adapter.get_user_by_login(login)
            if user is not None and user.check_password(password):
                # Successful login: drop came_from, set auth headers,
                # notify listeners, and redirect.
                request.session.pop('sdi.came_from', None)
                headers = remember(request, get_oid(user))
                request.registry.notify(LoggedIn(login, user, context, request))
                return HTTPFound(location=came_from, headers=headers)
            request.sdiapi.flash('Failed login', 'danger')
    template = get_renderer('substanced.sdi.views:templates/login.pt'
                            ).implementation()
    return dict(
        url=request.sdiapi.mgmt_path(request.virtual_root, '@@login'),
        came_from=came_from,
        login=login,
        password=password,
        login_template=template,
    )
@mgmt_view(
    name='logout',
    tab_condition=False,
    permission=NO_PERMISSION_REQUIRED
)
def logout(request):
    """Forget the user's credentials and redirect to the management root."""
    forget_headers = forget(request)
    destination = request.sdiapi.mgmt_path(request.context)
    return HTTPFound(location=destination, headers=forget_headers)
|
data/PyTables/PyTables/examples/attributes1.py
|
import numpy as np
import tables
# Create a new HDF5 file and demonstrate attaching attributes of several
# Python types to an array node.
fileh = tables.open_file("attributes1.h5", mode="w",
                         title="Testing attributes")
root = fileh.root
a = np.array([1, 2, 4], np.int32)
hdfarray = fileh.create_array(root, 'array', a, "Integer array")
# Attach attributes of assorted types to the array node. Non-scalar
# values like the dict below are presumably serialized by PyTables
# rather than stored natively -- verify against the PyTables docs.
hdfarray.attrs.string = "This is an example"
hdfarray.attrs.char = "1"
hdfarray.attrs.int = 12
hdfarray.attrs.float = 12.32
hdfarray.attrs.object = {"a": 32.1, "b": 1, "c": [1, 2]}
fileh.close()
|
data/SneakersInc/HoneyMalt/src/HoneyMalt/transforms/__init__.py
|
# Package metadata for the HoneyMalt transforms package.
__author__ = 'catalyst256'
__copyright__ = 'Copyright 2014, Honeymalt Project'
__credits__ = []
__license__ = 'GPL'
__version__ = '0.1'
__maintainer__ = 'catalyst256'
__email__ = 'catalyst256@gmail.com'
__status__ = 'Development'
# Transform modules exported via `from HoneyMalt.transforms import *`.
__all__ = [
    'kipposensor',
    'kipposearchdate',
    'kipposearchip',
    'kippofiles',
    'kippoinput',
    'kippogeoip',
    'kippocreds',
    'kipposessions',
    'kippoip',
    'common'
]
|
data/SmileyChris/easy-thumbnails/easy_thumbnails/management/commands/thumbnail_cleanup.py
|
import gc
import os
import time
from datetime import datetime, date, timedelta
from optparse import make_option
from django.core.files.storage import get_storage_class
from django.core.management.base import BaseCommand
from easy_thumbnails.conf import settings
from easy_thumbnails.models import Source
class ThumbnailCollectionCleaner(object):
    """
    Remove thumbnails and DB references to non-existing source images.
    """
    # Running statistics, reported by print_stats().
    sources = 0
    thumbnails = 0
    thumbnails_deleted = 0
    source_refs_deleted = 0
    execution_time = 0

    def _get_absolute_path(self, path):
        # Resolve a storage-relative path against MEDIA_ROOT.
        return os.path.join(settings.MEDIA_ROOT, path)

    def _get_relative_path(self, path):
        return os.path.relpath(path, settings.MEDIA_ROOT)

    def _check_if_exists(self, storage, path):
        # NOTE(review): on a storage error this prints a message and falls
        # through, implicitly returning None -- which callers treat the same
        # as "file does not exist". A transient backend failure could
        # therefore trigger deletions. Verify this best-effort behavior is
        # intended.
        try:
            return storage.exists(path)
        except Exception as e:
            print("Something went wrong when checking existance of %s:" % path)
            print(str(e))

    def _delete_sources_by_id(self, ids):
        # Bulk-delete Source rows (cascades per the model's definition).
        Source.objects.all().filter(id__in=ids).delete()

    def clean_up(self, dry_run=False, verbosity=1, last_n_days=0,
                 cleanup_path=None, storage=None):
        """
        Iterate through sources. Delete database references to sources
        not existing, including its corresponding thumbnails (files and
        database references).

        :param dry_run: if True, only report what would be deleted.
        :param verbosity: 0 silences the per-file progress output.
        :param last_n_days: if > 0, only consider sources modified in the
            last N days.
        :param cleanup_path: if given, restrict cleanup to sources whose
            name starts with this path.
        :param storage: storage backend; defaults to an instance of
            THUMBNAIL_DEFAULT_STORAGE.
        """
        if dry_run:
            print ("Dry run...")
        if not storage:
            storage = get_storage_class(settings.THUMBNAIL_DEFAULT_STORAGE)()
        sources_to_delete = []
        time_start = time.time()
        query = Source.objects.all()
        if last_n_days > 0:
            today = date.today()
            query = query.filter(
                modified__range=(today - timedelta(days=last_n_days), today))
        if cleanup_path:
            query = query.filter(name__startswith=cleanup_path)
        for source in queryset_iterator(query):
            self.sources += 1
            abs_source_path = self._get_absolute_path(source.name)
            if not self._check_if_exists(storage, abs_source_path):
                # Source image is gone: queue the DB row for deletion and
                # remove its thumbnail files from storage.
                if verbosity > 0:
                    print ("Source not present:", abs_source_path)
                self.source_refs_deleted += 1
                sources_to_delete.append(source.id)
                for thumb in source.thumbnails.all():
                    self.thumbnails_deleted += 1
                    abs_thumbnail_path = self._get_absolute_path(thumb.name)
                    if self._check_if_exists(storage, abs_thumbnail_path):
                        if not dry_run:
                            storage.delete(abs_thumbnail_path)
                        if verbosity > 0:
                            print ("Deleting thumbnail:", abs_thumbnail_path)
            # Flush deletions in batches to bound memory and query size.
            if len(sources_to_delete) >= 1000 and not dry_run:
                self._delete_sources_by_id(sources_to_delete)
                sources_to_delete = []
        if not dry_run:
            self._delete_sources_by_id(sources_to_delete)
        self.execution_time = round(time.time() - time_start)

    def print_stats(self):
        """
        Print statistics about the cleanup performed.
        """
        print(
            "{0:-<48}".format(str(datetime.now().strftime('%Y-%m-%d %H:%M '))))
        print("{0:<40} {1:>7}".format("Sources checked:", self.sources))
        print("{0:<40} {1:>7}".format(
            "Source references deleted from DB:", self.source_refs_deleted))
        print("{0:<40} {1:>7}".format("Thumbnails deleted from disk:",
                                      self.thumbnails_deleted))
        print("(Completed in %s seconds)\n" % self.execution_time)
def queryset_iterator(queryset, chunksize=1000):
    """
    Iterate over a queryset in primary-key order, `chunksize` rows at a time.

    The queryset iterator helps to keep the memory consumption down.
    And also making it easier to process for weaker computers.

    :param queryset: the queryset to iterate; rows must have an orderable pk.
    :param chunksize: maximum number of rows fetched per database query.
    """
    try:
        # The highest pk tells us when to stop; an empty queryset has none.
        last_pk = queryset.order_by('-pk')[0].pk
    except IndexError:
        # BUGFIX: the original crashed with IndexError on an empty
        # queryset; an empty queryset simply yields nothing.
        return
    primary_key = 0
    queryset = queryset.order_by('pk')
    while primary_key < last_pk:
        for row in queryset.filter(pk__gt=primary_key)[:chunksize]:
            primary_key = row.pk
            yield row
        # Encourage collection of per-chunk query/result caches.
        gc.collect()
class Command(BaseCommand):
    help = """ Deletes thumbnails that no longer have an original file. """
    # optparse-style option declarations (pre-Django-1.8 command API).
    option_list = BaseCommand.option_list + (
        make_option(
            '--dry-run',
            action='store_true',
            dest='dry_run',
            default=False,
            help='Dry run the execution.'),
        make_option(
            '--last-n-days',
            action='store',
            dest='last_n_days',
            default=0,
            type='int',
            help='The number of days back in time to clean thumbnails for.'),
        make_option(
            '--path',
            action='store',
            dest='cleanup_path',
            type='string',
            help='Specify a path to clean up.'),
    )

    def handle(self, *args, **options):
        # Entry point: run the cleaner with the parsed CLI options, then
        # print a summary of what was (or would be) deleted.
        tcc = ThumbnailCollectionCleaner()
        tcc.clean_up(
            dry_run=options.get('dry_run', False),
            verbosity=int(options.get('verbosity', 1)),
            last_n_days=int(options.get('last_n_days', 0)),
            cleanup_path=options.get('cleanup_path'))
        tcc.print_stats()
|
data/MirantisWorkloadMobility/CloudFerry/cloudferry/bin/main.py
|
from fabric import main as fab_main
from cloudferry import fabfile
def main():
    """Run the CloudFerry fabfile through Fabric's command-line entry point."""
    fabfile_path = fabfile.__file__
    # Fabric needs the .py source file, not the compiled .pyc.
    if fabfile_path.endswith('.pyc'):
        fabfile_path = fabfile_path[:-1]
    fab_main.main([fabfile_path])


if __name__ == '__main__':
    main()
|
data/Orange-OpenSource/bagpipe-bgp/bagpipe/exabgp/message/update/attribute/origin.py
|
"""
attributes.py
Created by Thomas Mangin on 2009-11-05.
Copyright (c) 2009-2012 Exa Networks. All rights reserved.
Modified by Orange - 2014
"""
from bagpipe.exabgp.message.update.attribute import AttributeID,Flag,Attribute
class Origin (Attribute):
    """BGP ORIGIN path attribute (well-known, transitive)."""
    ID = AttributeID.ORIGIN
    FLAG = Flag.TRANSITIVE
    MULTIPLE = False

    # Wire values for the origin code.
    IGP = 0x00
    EGP = 0x01
    INCOMPLETE = 0x02

    def __init__ (self, origin):
        self.origin = origin

    def pack (self):
        # Single-octet attribute payload.
        return self._attribute(chr(self.origin))

    def __len__ (self):
        return len(self.pack())

    def __str__ (self):
        names = {0x00: 'IGP', 0x01: 'EGP', 0x02: 'INCOMPLETE'}
        return names.get(self.origin, 'INVALID')

    def __repr__ (self):
        return str(self)

    def __cmp__ (self, other):
        # Equal (0) only for another Origin with the same code; -1 otherwise.
        if isinstance(other, Origin) and self.origin == other.origin:
            return 0
        return -1
|
data/Socialsquare/Franklin/skills/migrations/0009_auto__add_unique_skill_slug__add_unique_project_slug__add_unique_train.py
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
import re
from django.utils.text import slugify
class Migration(SchemaMigration):
def forwards(self, orm):
    """Backfill empty/duplicate slugs, then add unique constraints."""
    if not db.dry_run:
        # Ensure every Skill/Project/TrainingBit has a non-empty slug and
        # deduplicate with "-<n>" suffixes before the unique indexes below.
        for _class in [orm.Skill, orm.Project, orm.TrainingBit]:
            for obj in _class.objects.all():
                # Lazy queryset: captures the slug value as it is now,
                # before any reassignment below.
                objs_with_slug = _class.objects.filter(slug__exact=obj.slug)
                if obj.slug == '':
                    obj.slug = slugify(obj.name)
                    if obj.slug == '':
                        # Name slugified to nothing; use a placeholder.
                        obj.slug = 'empty-name'
                if objs_with_slug.count() > 1:
                    # Find existing "<slug>-<n>" suffixes and pick the next n.
                    existing_slugs = _class.objects.\
                        filter(slug__regex='^' + obj.slug + r'-\d+').\
                        values_list('slug', flat=True)
                    if len(existing_slugs) > 0:
                        last_existing_slug = sorted(existing_slugs)[-1]
                        m = re.match(r'^.*-(\d+)$', last_existing_slug)
                        id_counter = int(m.group(1)) + 1
                    else:
                        id_counter = 1
                    obj.slug = '%s-%u' % (obj.slug, id_counter)
                obj.save()
    db.create_unique('skills_skill', ['slug'])
    db.create_unique('skills_project', ['slug'])
    db.create_unique('skills_trainingbit', ['slug'])
def backwards(self, orm):
db.delete_unique('skills_trainingbit', ['slug'])
db.delete_unique('skills_project', ['slug'])
db.delete_unique('skills_skill', ['slug'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['auth.Permission']"})
},
'auth.permission': {
'Meta': {'object_name': 'Permission', 'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'db_table': "'django_content_type'", 'object_name': 'ContentType', 'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'global_change_lab.user': {
'Meta': {'object_name': 'User'},
'datetime_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['auth.Group']", 'related_name': "'user_set'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'blank': 'True', 'null': 'True', 'max_length': '100'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'skills_completed': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['skills.Skill']", 'related_name': "'users_completed'"}),
'skills_in_progress': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['skills.Skill']", 'related_name': "'users_in_progress'"}),
'trainingbits_completed': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['skills.TrainingBit']", 'related_name': "'users_completed'"}),
'trainingbits_in_progress': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['skills.TrainingBit']", 'related_name': "'users_in_progress'"}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['auth.Permission']", 'related_name': "'user_set'"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
},
'skills.comment': {
'Meta': {'object_name': 'Comment'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['global_change_lab.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_flagged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'to': "orm['skills.Comment']", 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['skills.Project']"}),
'text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'updated_at': ('skills.models.AutoDateTimeField', [], {})
},
'skills.image': {
'Meta': {'object_name': 'Image'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['global_change_lab.User']", 'related_name': "'uploaded_images'"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '36'}),
'image': ('django.db.models.fields.files.ImageField', [], {'blank': 'True', 'max_length': '100'}),
'updated_at': ('skills.models.AutoDateTimeField', [], {})
},
'skills.like': {
'Meta': {'object_name': 'Like'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['global_change_lab.User']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('skills.models.AutoDateTimeField', [], {})
},
'skills.project': {
'Meta': {'object_name': 'Project'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['global_change_lab.User']"}),
'content': ('django.db.models.fields.TextField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'blank': 'True', 'max_length': '100'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_flagged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'link_title': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100'}),
'link_url': ('django.db.models.fields.URLField', [], {'blank': 'True', 'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60'}),
'trainingbit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['skills.TrainingBit']"}),
'updated_at': ('skills.models.AutoDateTimeField', [], {}),
'video': ('embed_video.fields.EmbedVideoField', [], {'blank': 'True', 'null': 'True', 'max_length': '200'})
},
'skills.skill': {
'Meta': {'object_name': 'Skill'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['global_change_lab.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60'}),
'trainingbits': ('sortedm2m.fields.SortedManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['skills.TrainingBit']"}),
'updated_at': ('skills.models.AutoDateTimeField', [], {})
},
'skills.topic': {
'Meta': {'object_name': 'Topic'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['global_change_lab.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'skills': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['skills.Skill']"}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'trainingbits': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'to': "orm['skills.TrainingBit']"}),
'updated_at': ('skills.models.AutoDateTimeField', [], {})
},
'skills.trainingbit': {
'Meta': {'object_name': 'TrainingBit', 'ordering': "['-created_at']"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['global_change_lab.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'blank': 'True', 'max_length': '100'}),
'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'json_content': ('django.db.models.fields.TextField', [], {'default': '\'{"learn":[],"act":[],"share":[]}\''}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'recommended': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60'}),
'updated_at': ('skills.models.AutoDateTimeField', [], {})
}
}
complete_apps = ['skills']
|
data/ProstoKSI/django-voter/voter/urls.py
|
try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import patterns, url
# Old-style (Django < 1.10) string-view patterns(); both routes capture the
# target object's type name and primary key from the URL.
urlpatterns = patterns('voter.views',
    url(r'^like/(?P<obj_type>[\w]+)/(?P<obj_id>[\d]+)/$', 'set_like', name='ratings_like'),
    url(r'^dislike/(?P<obj_type>[\w]+)/(?P<obj_id>[\d]+)/$', 'set_dislike', name='ratings_dislike'),
)
|
data/Pylons/substanced/substanced/principal/tests/test_subscribers.py
|
import unittest
from pyramid import testing
class Test_principal_added(unittest.TestCase):
    """Tests for the ``principal_added`` subscriber.

    Events carry the added object plus a ``loading`` flag; ``loading=True``
    (dump loading in progress) short-circuits the subscriber.
    """
    def _callFUT(self, event):
        from ..subscribers import principal_added
        return principal_added(event)
    def test_event_wo_loading_attr(self):
        # An event without a 'loading' attribute is a programming error.
        event = testing.DummyResource()
        event.object = testing.DummyResource()
        self.assertRaises(AttributeError, self._callFUT, event)
    def test_event_w_loading_True(self):
        # No-op while a dump is being loaded.
        event = testing.DummyResource(loading=True)
        result = self._callFUT(event)
        self.assertEqual(result, None)
    def test_wo_principals_service(self):
        # Without a principals service in the lineage, a ValueError is raised.
        from zope.interface import directlyProvides
        from ...interfaces import IFolder
        event = testing.DummyResource(loading=False)
        root = testing.DummyResource()
        directlyProvides(root, IFolder)
        event.object = root['testing'] = testing.DummyResource()
        self.assertRaises(ValueError, self._callFUT, event)
    def test_user_not_in_groups(self):
        # A user whose name is not used by any group is accepted.
        from ...testing import make_site
        from ...interfaces import IUser
        site = make_site()
        user = testing.DummyResource(__provides__=IUser)
        site['user'] = user
        event = testing.DummyResource(object=user, loading=False)
        self._callFUT(event)
    def test_user_in_groups(self):
        # A user whose name collides with an existing group is rejected.
        from ...testing import make_site
        from ...interfaces import IUser
        site = make_site()
        groups = site['principals']['groups']
        groups['user'] = testing.DummyResource()
        user = testing.DummyResource(__provides__=IUser)
        site['user'] = user
        event = testing.DummyResource(object=user, loading=False)
        self.assertRaises(ValueError, self._callFUT, event)
    def test_group_not_in_users(self):
        # A group whose name is not used by any user is accepted.
        from ...testing import make_site
        site = make_site()
        group = testing.DummyResource()
        site['groups'] = group
        event = testing.DummyResource(object=group, loading=False)
        self._callFUT(event)
    def test_group_in_users(self):
        # A group whose name collides with an existing user is rejected.
        from ...testing import make_site
        site = make_site()
        users = site['principals']['users']
        users['group'] = testing.DummyResource()
        group = testing.DummyResource()
        site['group'] = group
        event = testing.DummyResource(object=group, loading=False)
        self.assertRaises(ValueError, self._callFUT, event)
class Test_user_will_be_removed(unittest.TestCase):
    """Tests for the ``user_will_be_removed`` subscriber."""
    def _callFUT(self, event):
        from ..subscribers import user_will_be_removed
        return user_will_be_removed(event)
    def test_loading(self):
        # No-op while a dump is being loaded.
        event = testing.DummyResource(loading=True, moving=None)
        result = self._callFUT(event)
        self.assertEqual(result, None)
    def test_moving(self):
        # No-op when the user is merely being moved, not removed.
        event = testing.DummyResource(loading=False, moving=True)
        result = self._callFUT(event)
        self.assertEqual(result, None)
    def test_it(self):
        # Removing a user commits suicide on each related password reset.
        from ...interfaces import IFolder
        parent = testing.DummyResource(__provides__=IFolder)
        user = testing.DummyResource()
        reset = testing.DummyResource()
        def commit_suicide():
            reset.committed = True
        reset.commit_suicide = commit_suicide
        objectmap = DummyObjectMap((reset,))
        parent.__objectmap__ = objectmap
        parent['user'] = user
        event = testing.DummyResource(object=user, loading=False, moving=None)
        self._callFUT(event)
        self.assertTrue(reset.committed)
    def test_it_moving(self):
        event = testing.DummyResource(object=None, loading=False)
        event.moving = True
        self.assertEqual(self._callFUT(event), None)
class Test_user_added(unittest.TestCase):
    """Tests for the ``user_added`` subscriber."""
    def _callFUT(self, event):
        from ..subscribers import user_added
        return user_added(event)
    def test_loading(self):
        # No-op while a dump is being loaded.
        event = testing.DummyResource(loading=True)
        result = self._callFUT(event)
        self.assertEqual(result, None)
    def test_it_user_has_no_oid(self):
        # The subscriber requires the user to have been assigned an oid.
        user = testing.DummyResource()
        event = testing.DummyResource(object=user, loading=False)
        event.registry = DummyRegistry()
        self.assertRaises(AttributeError, self._callFUT, event)
    def test_it(self):
        # The user gets an ACL granting self-service permissions by oid.
        from pyramid.security import Allow
        user = testing.DummyResource()
        user.__oid__ = 1
        event = testing.DummyResource(object=user, loading=False)
        event.registry = DummyRegistry()
        self._callFUT(event)
        self.assertEqual(
            user.__acl__,
            [(Allow, 1, ('sdi.view',
                         'sdi.edit-properties',
                         'sdi.change-password',
                         ))])
class Test_acl_maybe_added(unittest.TestCase):
    """Tests for the ``acl_maybe_added`` subscriber."""
    def _callFUT(self, event):
        from ..subscribers import acl_maybe_added
        return acl_maybe_added(event)
    def test_moving(self):
        # Moving objects keep their references; subscriber returns False.
        event = DummyEvent(moving=True, loading=False)
        self.assertEqual(self._callFUT(event), False)
    def test_loading(self):
        event = DummyEvent(moving=None, loading=True)
        self.assertEqual(self._callFUT(event), False)
    def test_objectmap_is_None(self):
        # Nothing to connect when no objectmap can be found.
        event = DummyEvent(moving=None, object=None, loading=False)
        self.assertEqual(self._callFUT(event), None)
    def test_no_acls(self):
        # Resources without __acl__ produce no objectmap connections.
        from substanced.interfaces import IFolder
        resource1 = testing.DummyResource(__provides__=IFolder)
        resource2 = testing.DummyResource()
        resource1['resource2'] = resource2
        objectmap = DummyObjectMap()
        resource1.__objectmap__ = objectmap
        event = DummyEvent(moving=None, object=resource1, loading=False)
        self._callFUT(event)
        self.assertEqual(objectmap.connections, [])
    def test_with_acls(self):
        # Integer (oid) principals in ACLs are connected; strings are not.
        from ...interfaces import PrincipalToACLBearing
        from substanced.interfaces import IFolder
        resource1 = testing.DummyResource(__provides__=IFolder)
        resource2 = testing.DummyResource()
        resource1['resource2'] = resource2
        resource1.__acl__ = [(None, 'fred', None), (None, 1, None)]
        resource2.__acl__ = [(None, 'bob', None), (None, 2, None)]
        objectmap = DummyObjectMap()
        resource1.__objectmap__ = objectmap
        event = DummyEvent(moving=None, object=resource1, loading=False)
        self._callFUT(event)
        self.assertEqual(
            objectmap.connections,
            [(2, resource2, PrincipalToACLBearing),
             (1, resource1, PrincipalToACLBearing)]
            )
class Test_acl_modified(unittest.TestCase):
    """Tests for the ``acl_modified`` subscriber."""
    def _callFUT(self, event):
        from ..subscribers import acl_modified
        return acl_modified(event)
    def test_objectmap_is_None(self):
        event = DummyEvent(object=None)
        self.assertEqual(self._callFUT(event), None)
    def test_gardenpath(self):
        # Oid principals added to the ACL are connected; removed ones are
        # disconnected.  String principals are ignored in both directions.
        from ...interfaces import PrincipalToACLBearing
        resource = testing.DummyResource()
        objectmap = DummyObjectMap()
        resource.__objectmap__ = objectmap
        event = DummyEvent(
            object=resource,
            new_acl=[(None, 'fred', None), (None, 1, None)],
            old_acl=[(None, 'bob', None), (None, 2, None)],
            )
        self._callFUT(event)
        self.assertEqual(
            objectmap.connections,
            [(1, resource, PrincipalToACLBearing)]
            )
        self.assertEqual(
            objectmap.disconnections,
            [(2, resource, PrincipalToACLBearing)]
            )
class DummyObjectMap(object):
    """Test double for an object map; records connect/disconnect calls."""

    def __init__(self, result=()):
        # Canned value handed back by targets(); immutable default is safe.
        self.result = result
        # Call records, each a (source, target, reftype) tuple in call order.
        self.connections, self.disconnections = [], []

    def targets(self, object, reftype):
        return self.result

    def connect(self, source, target, reftype):
        self.connections.append((source, target, reftype))

    def disconnect(self, source, target, reftype):
        self.disconnections.append((source, target, reftype))
class DummyEvent(object):
    """Test double: an event whose attributes come from keyword arguments."""

    def __init__(self, **kw):
        # Each keyword becomes an instance attribute of the same name.
        for name, value in kw.items():
            setattr(self, name, value)
class DummyRegistry(object):
    """Test double for a registry whose subscribers() call is a no-op."""

    def subscribers(self, *arg):
        # Accept any positional arguments and deliberately do nothing.
        return None
|
data/Infinidat/gitpy/tests/test_basic.py
|
import unittest
import os
import commands
from utils import get_temporary_location
from utils import delete_repository
from gitpy import LocalRepository
from gitpy import find_repository
from gitpy.exceptions import GitException
class EmptyRepositoryTest(unittest.TestCase):
    """Base fixture: a repository path that does not yet exist on disk."""
    def setUp(self):
        self.dirname = get_temporary_location()
        self.repo = LocalRepository(self.dirname)
        # Sanity: neither the directory nor a valid repo exists up front.
        self.assertFalse(os.path.exists(self.dirname))
        self.assertFalse(self.repo.isValid())
    def tearDown(self):
        # Only clean up if a test actually created the repository.
        if os.path.exists(self.dirname):
            delete_repository(self.repo)
class BasicRepositories(EmptyRepositoryTest):
    """Basic init and configuration behavior of a fresh repository."""
    def testRepositoryInit(self):
        # init() must create the work tree and the .git metadata directory.
        self.repo.init()
        self.assertTrue(self.repo.isValid())
        self.failUnless(os.path.isdir(self.dirname))
        self.failUnless(os.path.isdir(os.path.join(self.dirname, ".git")))
    def testConfiguration(self):
        # Config values round-trip, coming back as strings.
        self.repo.init()
        self.repo.config.setParameter('a.b.c', 2)
        self.assertEquals(self.repo.config.getParameter('a.b.c'), '2')
    def testRepositoryInitWhenExists(self):
        # init() also works when the target directory already exists.
        os.mkdir(self.dirname)
        self.repo.init()
        self.failUnless(os.path.isdir(self.dirname))
        self.failUnless(os.path.isdir(os.path.join(self.dirname, ".git")))
class ModifiedRepositoryTest(EmptyRepositoryTest):
    """Fixture: an initialized repository with one untracked file."""
    # Name of the file created in the working directory by setUp.
    FILENAME = "test.txt"
    def setUp(self):
        super(ModifiedRepositoryTest, self).setUp()
        self.repo.init()
        # Python 2 print-to-file syntax; the untracked file dirties the tree.
        with open(os.path.join(self.repo.path, self.FILENAME), "wb") as f:
            print >>f, "Hey!"
        self.assertFalse(self.repo.isWorkingDirectoryClean())
class ModifiedRepositories(ModifiedRepositoryTest):
    """Status, staging and committing against a dirty working directory."""
    def testStatus(self):
        untracked = self.repo.getUntrackedFiles()
        self.assertEquals(untracked, [self.FILENAME])
    def testAdding(self):
        # Staging all untracked files moves them to the staged set but the
        # working directory is still considered dirty until committed.
        untracked_files = self.repo.getUntrackedFiles()
        for u in untracked_files:
            self.repo.add(u)
        self.assertEquals(self.repo.getStagedFiles(), untracked_files)
        self.assertFalse(self.repo.isWorkingDirectoryClean())
    def testCommitting(self):
        # Committing staged files leaves a clean tree and empty stage.
        self.repo.addAll()
        self.assertNotEquals(self.repo.getStagedFiles(), [])
        c = self.repo.commit(message="test commit")
        self.assertTrue(self.repo.isWorkingDirectoryClean())
        self.assertEquals(self.repo.getStagedFiles(), [])
class CleaningUntrackedFiles(ModifiedRepositoryTest):
    """cleanUntrackedFiles() removes untracked files and empty directories."""
    def _clean(self):
        self.repo.cleanUntrackedFiles()
        self.failIf(self.repo.getUntrackedFiles())
    def testCleaningUpUntrackedFiles(self):
        # Create a dirty file, clean, then verify directories are removed too.
        with open(os.path.join(self.repo.path, "dirty_file"), "wb") as f:
            print >> f, "data"
        self.failUnless(self.repo.getUntrackedFiles())
        self._clean()
        dirpath = os.path.join(self.repo.path, "unused_directory")
        os.mkdir(dirpath)
        self._clean()
        self.failIf(os.path.exists(dirpath))
class TestAPI(ModifiedRepositoryTest):
    """Module-level API: find_repository() locates the enclosing repo."""
    def test_find_repository(self):
        # From a deeply nested subdirectory, find_repository() must walk up
        # to the repository root; cwd is always restored afterwards.
        prev_path = os.path.realpath(".")
        subpath = os.path.join(self.repo.path, "a", "b", "c")
        os.makedirs(subpath)
        os.chdir(subpath)
        try:
            repo = find_repository()
        finally:
            os.chdir(prev_path)
        self.failUnless(repo.path == self.repo.path)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
data/Yelp/Testify/test/utils/test_turtle.py
|
import testify as T
from testify.contrib.doctestcase import DocTestCase
class TurtleTestCase(T.TestCase):
    """Behavioral tests for testify's Turtle mock object."""
    @T.setup
    def build_turtle(self):
        # A fresh turtle per test; turtles record calls and fabricate returns.
        self.leonardo = T.turtle.Turtle()
    def test_call(self):
        """Just call a turtle"""
        ret = self.leonardo()
        assert ret
        T.assert_length(self.leonardo.returns, 1)
        T.assert_call(self.leonardo, 0)
        T.assert_equal(ret, self.leonardo.returns[0])
    def test_attribute(self):
        """Check our attribute access"""
        # Arbitrary attribute chains are auto-created and chainable.
        assert self.leonardo.is_awesome().and_can_chain().whatever_he_wants()
    def test_call_record(self):
        """Check that calls are recorded"""
        self.leonardo(1, 2, 3, quatro=4)
        T.assert_length(self.leonardo.calls, 1)
        T.assert_call(self.leonardo, 0, 1, 2, 3, quatro=4)
        self.leonardo(5, six=6)
        T.assert_call(self.leonardo, 1, 5, six=6)
    def test_attribute_setting(self):
        """Check that we can set attributes and pull them back out"""
        self.leonardo.color = "blue"
        T.assert_equal(self.leonardo.color, "blue")
    def test_attribute_persistence(self):
        """When an attribute is built, it should be persisted"""
        # Repeated access must return the very same child turtle object.
        weapon = self.leonardo.weapon
        T.assert_equal(weapon, self.leonardo.weapon)
        assert weapon is self.leonardo.weapon
class DocTest(DocTestCase):
    # Run the doctests embedded in testify's turtle module.
    module = T.turtle
|
data/XiaoMi/minos/client/deploy_chronos.py
|
import argparse
import os
import service_config
import subprocess
import sys
import urlparse
import deploy_utils
from log import Log
# Job types managed by this deploy script.
ALL_JOBS = ["chronos"]
def _get_chronos_service_config(args):
  # Resolve the parsed service config once and cache it on the args object.
  args.chronos_config = deploy_utils.get_service_config(args)
def generate_zk_jaas_config(args):
  # Build the ZooKeeper client JAAS section; empty when security is off.
  if not deploy_utils.is_security_enabled(args):
    return ""
  config_dict = args.chronos_config.configuration.generated_files["jaas.conf"]
  # Quote every value except booleans and values already containing quotes.
  # NOTE(review): relies on Python 2 dict iteration order — the first key
  # (skipped by [1:] and by the `!= keys()[0]` filter) is the header line.
  for key, value in config_dict.items()[1:]:
    if value != "true" and value != "false" and value.find("\"") == -1:
      config_dict[key] = "\"" + value + "\""
  header_line = config_dict["headerLine"]
  return "Client {\n %s\n%s;\n};" % (header_line,
    "\n".join([" %s=%s" % (key, value)
      for (key, value) in config_dict.iteritems() if key != config_dict.keys()[0]]))
def generate_configs(args, job_name, host_id, instance_id):
  # Render chronos.cfg and jaas.conf, then merge in any raw config files.
  chronos_cfg_dict = args.chronos_config.configuration.generated_files["chronos.cfg"]
  hosts = args.chronos_config.jobs[job_name].hosts
  chronos_cfg = deploy_utils.generate_properties_file(args, chronos_cfg_dict)
  config_files = {
    "chronos.cfg": chronos_cfg,
    "jaas.conf" : generate_zk_jaas_config(args),
  }
  config_files.update(args.chronos_config.configuration.raw_files)
  return config_files
def generate_run_scripts_params(args, host, job_name, host_id, instance_id):
  # Collect the template parameters used to render the start.sh run script.
  job = args.chronos_config.jobs[job_name]
  supervisor_client = deploy_utils.get_supervisor_client(host,
      "chronos", args.chronos_config.cluster.name, job_name, instance_id=instance_id)
  artifact_and_version = "chronos-" + args.chronos_config.cluster.version
  # Put every jar under the package lib directory on the classpath.
  jar_dirs = "$package_dir/lib/*"
  # NOTE(review): log_level is computed but never used below — confirm.
  log_level = deploy_utils.get_service_log_level(args, args.chronos_config)
  params = job.get_arguments(args, args.chronos_config.cluster, args.chronos_config.jobs,
    args.chronos_config.arguments_dict, job_name, host_id, instance_id)
  script_dict = {
    "artifact": artifact_and_version,
    "job_name": job_name,
    "jar_dirs": jar_dirs,
    "run_dir": supervisor_client.get_run_dir(),
    "params": params,
  }
  return script_dict
def generate_start_script(args, host, job_name, host_id, instance_id):
  # Render start.sh from its template with the job's parameters.
  script_params = generate_run_scripts_params(args, host, job_name, host_id, instance_id)
  return deploy_utils.create_run_script(
      "%s/start.sh.tmpl" % deploy_utils.get_template_dir(),
      script_params)
def install(args):
  # Install the chronos package tarball via the generic installer.
  _get_chronos_service_config(args)
  deploy_utils.install_service(args, "chronos", args.chronos_config, "chronos")
def cleanup(args):
  # Remove deployed data for every selected job/host/instance, after an
  # interactive confirmation that yields a cleanup token.
  _get_chronos_service_config(args)
  cleanup_token = deploy_utils.confirm_cleanup(args,
      "chronos", args.chronos_config)
  for job_name in args.job or ALL_JOBS:
    hosts = args.chronos_config.jobs[job_name].hosts
    args.task_map = deploy_utils.parse_args_host_and_task(args, hosts)
    for host_id in args.task_map.keys() or hosts.keys():
      for instance_id in args.task_map.get(host_id) or range(hosts[host_id].instance_num):
        # Single-instance hosts use -1 as a sentinel instance id.
        instance_id = -1 if not deploy_utils.is_multiple_instances(host_id, hosts) else instance_id
        deploy_utils.cleanup_job("chronos", args.chronos_config,
            hosts[host_id].ip, job_name, instance_id, cleanup_token)
def bootstrap_job(args, host, job_name, host_id, instance_id, cleanup_token):
  # Bootstrap one job instance, then start it immediately.
  args.chronos_config.parse_generated_config_files(args, job_name, host_id, instance_id)
  deploy_utils.bootstrap_job(args, "chronos", "chronos",
      args.chronos_config, host, job_name, instance_id, cleanup_token, '0')
  start_job(args, host, job_name, host_id, instance_id)
def bootstrap(args):
  # Bootstrap every selected job/host/instance after confirmation.
  _get_chronos_service_config(args)
  cleanup_token = deploy_utils.confirm_bootstrap("chronos", args.chronos_config)
  for job_name in args.job or ALL_JOBS:
    hosts = args.chronos_config.jobs[job_name].hosts
    args.task_map = deploy_utils.parse_args_host_and_task(args, hosts)
    for host_id in args.task_map.keys() or hosts.keys():
      for instance_id in args.task_map.get(host_id) or range(hosts[host_id].instance_num):
        # Single-instance hosts use -1 as a sentinel instance id.
        instance_id = -1 if not deploy_utils.is_multiple_instances(host_id, hosts) else instance_id
        bootstrap_job(args, hosts[host_id].ip, job_name, host_id, instance_id, cleanup_token)
def start_job(args, host, job_name, host_id, instance_id):
  # Regenerate configs and the start script, then launch one job instance.
  args.chronos_config.parse_generated_config_files(args, job_name, host_id, instance_id)
  config_files = generate_configs(args, job_name, host_id, instance_id)
  start_script = generate_start_script(args, host, job_name, host_id, instance_id)
  http_url = deploy_utils.get_http_service_uri(host,
      args.chronos_config.jobs[job_name].base_port, instance_id)
  deploy_utils.start_job(args, "chronos", "chronos", args.chronos_config,
      host, job_name, instance_id, start_script, http_url, **config_files)
def start(args):
  # Start every selected job/host/instance (with optional confirmation).
  if not args.skip_confirm:
    deploy_utils.confirm_start(args)
  _get_chronos_service_config(args)
  for job_name in args.job or ALL_JOBS:
    hosts = args.chronos_config.jobs[job_name].hosts
    args.task_map = deploy_utils.parse_args_host_and_task(args, hosts)
    for host_id in args.task_map.keys() or hosts.keys():
      for instance_id in args.task_map.get(host_id) or range(hosts[host_id].instance_num):
        # Single-instance hosts use -1 as a sentinel instance id.
        instance_id = -1 if not deploy_utils.is_multiple_instances(host_id, hosts) else instance_id
        start_job(args, hosts[host_id].ip, job_name, host_id, instance_id)
def stop_job(args, host, job_name, instance_id):
  # Thin wrapper that stops one job instance on one host.
  deploy_utils.stop_job("chronos", args.chronos_config, host, job_name, instance_id)
def stop(args):
  # Stop every selected job/host/instance (with optional confirmation).
  if not args.skip_confirm:
    deploy_utils.confirm_stop(args)
  _get_chronos_service_config(args)
  for job_name in args.job or ALL_JOBS:
    hosts = args.chronos_config.jobs[job_name].hosts
    args.task_map = deploy_utils.parse_args_host_and_task(args, hosts)
    for host_id in args.task_map.keys() or hosts.keys():
      for instance_id in args.task_map.get(host_id) or range(hosts[host_id].instance_num):
        # Single-instance hosts use -1 as a sentinel instance id.
        instance_id = -1 if not deploy_utils.is_multiple_instances(host_id, hosts) else instance_id
        stop_job(args, hosts[host_id].ip, job_name, instance_id)
def restart(args):
  # Stop all selected instances first, then wait for each to stop and
  # start it again (two passes, so the whole service goes down at once).
  if not args.skip_confirm:
    deploy_utils.confirm_restart(args)
  _get_chronos_service_config(args)
  for job_name in args.job or ALL_JOBS:
    hosts = args.chronos_config.jobs[job_name].hosts
    args.task_map = deploy_utils.parse_args_host_and_task(args, hosts)
    for host_id in args.task_map.keys() or hosts.keys():
      for instance_id in args.task_map.get(host_id) or range(hosts[host_id].instance_num):
        # Single-instance hosts use -1 as a sentinel instance id.
        instance_id = -1 if not deploy_utils.is_multiple_instances(host_id, hosts) else instance_id
        stop_job(args, hosts[host_id].ip, job_name, instance_id)
  for job_name in args.job or ALL_JOBS:
    hosts = args.chronos_config.jobs[job_name].hosts
    args.task_map = deploy_utils.parse_args_host_and_task(args, hosts)
    for host_id in args.task_map.keys() or hosts.keys():
      for instance_id in args.task_map.get(host_id) or range(hosts[host_id].instance_num):
        instance_id = -1 if not deploy_utils.is_multiple_instances(host_id, hosts) else instance_id
        deploy_utils.wait_for_job_stopping("chronos",
            args.chronos_config.cluster.name, job_name, hosts[host_id].ip, instance_id)
        start_job(args, hosts[host_id].ip, job_name, host_id, instance_id)
def show(args):
  # Print the status of every selected job/host/instance.
  _get_chronos_service_config(args)
  for job_name in args.job or ALL_JOBS:
    hosts = args.chronos_config.jobs[job_name].hosts
    args.task_map = deploy_utils.parse_args_host_and_task(args, hosts)
    for host_id in args.task_map.keys() or hosts.keys():
      for instance_id in args.task_map.get(host_id) or range(hosts[host_id].instance_num):
        # Single-instance hosts use -1 as a sentinel instance id.
        instance_id = -1 if not deploy_utils.is_multiple_instances(host_id, hosts) else instance_id
        deploy_utils.show_job("chronos", args.chronos_config,
            hosts[host_id].ip, job_name, instance_id)
def run_shell(args):
  # chronos has no interactive shell; fail loudly.
  Log.print_critical("'shell' command is not supported!")
def pack(args):
  # Packing a client tarball is not applicable to chronos; fail loudly.
  Log.print_critical("'pack' command is not supported!")
def rolling_update(args):
  # Restart instances of one job one at a time, waiting for each to stop
  # and start again, pausing args.time_interval between instances.
  if not args.job:
    Log.print_critical("You must specify the job name to do rolling update")
  _get_chronos_service_config(args)
  # Only the first job name given is updated.
  job_name = args.job[0]
  if not args.skip_confirm:
    deploy_utils.confirm_action(args, "rolling_update")
  Log.print_info("Rolling updating %s" % job_name)
  hosts = args.chronos_config.jobs[job_name].hosts
  # No delay before the first instance; subsequent ones wait time_interval.
  wait_time = 0
  args.task_map = deploy_utils.parse_args_host_and_task(args, hosts)
  for host_id in args.task_map.keys() or hosts.iterkeys():
    for instance_id in args.task_map.get(host_id) or range(hosts[host_id].instance_num):
      # Single-instance hosts use -1 as a sentinel instance id.
      instance_id = -1 if not deploy_utils.is_multiple_instances(host_id, hosts) else instance_id
      deploy_utils.confirm_rolling_update(host_id, instance_id, wait_time)
      stop_job(args, hosts[host_id].ip, job_name, instance_id)
      deploy_utils.wait_for_job_stopping("chronos",
          args.chronos_config.cluster.name, job_name, hosts[host_id].ip, instance_id)
      start_job(args, hosts[host_id].ip, job_name, host_id, instance_id)
      deploy_utils.wait_for_job_starting("chronos",
          args.chronos_config.cluster.name, job_name, hosts[host_id].ip, instance_id)
      wait_time = args.time_interval
  Log.print_success("Rolling updating %s success" % job_name)
if __name__ == '__main__':
  # BUG FIX: the original called test(), which is not defined anywhere in
  # this module or its imports, so direct execution always raised a
  # NameError.  This module is meant to be driven by the minos deploy tool;
  # make direct invocation fail with a clear message instead.
  Log.print_critical("deploy_chronos.py must be run via the minos deploy tool, not directly.")
|
data/Yubico/u2fval/u2fval/config.py
|
import sys
import imp
import errno
import os
from u2fval import default_settings
import logging
import logging.config
# Public API of this module: the merged settings dict.
__all__ = [
    'settings'
]
# Path of the user-editable settings file; overridable via the
# U2FVAL_SETTINGS environment variable.  (os.path.join with a single
# argument is a no-op; kept as-is.)
SETTINGS_FILE = os.getenv('U2FVAL_SETTINGS', os.path.join(
    '/etc/yubico/u2fval/u2fval.conf'))
# logging.conf is looked up in the same directory as the settings file.
LOG_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(SETTINGS_FILE)),
                               'logging.conf')
# Maps CONFIG_FILE_NAME -> key in the settings dict.
VALUES = {
    'DATABASE_CONFIGURATION': 'db',
    'USE_MEMCACHED': 'mc',
    'MEMCACHED_SERVERS': 'mc_hosts',
    'METADATA': 'metadata',
    'ALLOW_UNTRUSTED': 'allow_untrusted'
}
def parse(conf, settings=None):
    """Copy known configuration values from *conf* into *settings*.

    For each (CONF_NAME, settings_key) pair in ``VALUES``, the attribute
    ``conf.CONF_NAME`` is copied to ``settings[settings_key]``; missing
    attributes are silently skipped.  Returns the settings dict.

    BUG FIX: the original signature used a mutable default argument
    (``settings={}``), so every default-argument call shared and mutated
    one dict across calls.  A per-call fresh dict preserves the behavior
    of this module's call sites without the shared-state footgun.
    """
    if settings is None:
        settings = {}
    for confkey, settingskey in VALUES.items():
        try:
            # getattr is the idiomatic form of conf.__getattribute__(key).
            settings[settingskey] = getattr(conf, confkey)
        except AttributeError:
            pass
    return settings
# Start from the packaged defaults, then overlay the user's settings file.
settings = parse(default_settings)
# imp.load_source would normally drop a .pyc next to the settings file;
# suppress bytecode generation while importing it, then restore the flag.
dont_write_bytecode = sys.dont_write_bytecode
try:
    sys.dont_write_bytecode = True
    user_settings = imp.load_source('user_settings', SETTINGS_FILE)
    settings = parse(user_settings, settings)
except IOError as e:
    # A missing or unreadable settings file is fine; anything else is not.
    if e.errno not in [errno.ENOENT, errno.EACCES]:
        raise e
finally:
    sys.dont_write_bytecode = dont_write_bytecode
try:
    logging.config.fileConfig(LOG_CONFIG_FILE)
except:
    # NOTE(review): bare except deliberately covers any failure reading
    # the logging config; fall back to basic console logging and warn.
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)
    log.warning("Unable to configure logging. Logging to console.")
|
data/blueboxgroup/giftwrap/setup.py
|
import setuptools
# All package metadata lives in setup.cfg; pbr reads it at build time.
setuptools.setup(
    setup_requires=['pbr'],
    pbr=True)
|
data/OP2/PyOP2/pyop2/exceptions.py
|
"""OP2 exception types"""
# --- TypeError subclasses: wrong Python type passed to an OP2 API -------
class DataTypeError(TypeError):
    """Invalid type for data."""
class DimTypeError(TypeError):
    """Invalid type for dimension."""
class ArityTypeError(TypeError):
    """Invalid type for arity."""
class IndexTypeError(TypeError):
    """Invalid type for index."""
class NameTypeError(TypeError):
    """Invalid type for name."""
class SetTypeError(TypeError):
    """Invalid type for :class:`pyop2.op2.Set`."""
class SizeTypeError(TypeError):
    """Invalid type for size."""
class SubsetIndexOutOfBounds(TypeError):
    """Out of bound index."""
class SparsityTypeError(TypeError):
    """Invalid type for :class:`pyop2.op2.Sparsity`."""
class MapTypeError(TypeError):
    """Invalid type for :class:`pyop2.op2.Map`."""
class DataSetTypeError(TypeError):
    """Invalid type for :class:`pyop2.op2.DataSet`."""
class MatTypeError(TypeError):
    """Invalid type for :class:`pyop2.op2.Mat`."""
class DatTypeError(TypeError):
    """Invalid type for :class:`pyop2.op2.Dat`."""
class KernelTypeError(TypeError):
    """Invalid type for :class:`pyop2.op2.Kernel`."""
# --- ValueError subclasses: right type but an illegal value -------------
class DataValueError(ValueError):
    """Illegal value for data."""
class IndexValueError(ValueError):
    """Illegal value for index."""
class ModeValueError(ValueError):
    """Illegal value for mode."""
class IterateValueError(ValueError):
    """Illegal value for iterate."""
class SetValueError(ValueError):
    """Illegal value for :class:`pyop2.op2.Set`."""
class MapValueError(ValueError):
    """Illegal value for :class:`pyop2.op2.Map`."""
# --- RuntimeError subclasses: environment/configuration failures --------
class ConfigurationError(RuntimeError):
    """Illegal configuration value or type."""
class CompilationError(RuntimeError):
    """Error during JIT compilation"""
data/Pylons/substanced/substanced/db/tests/test_init.py
|
import unittest
from pyramid import testing
class Test_root_factory(unittest.TestCase):
    """Tests for substanced.db's root_factory."""
    def setUp(self):
        self.config = testing.setUp()
    def tearDown(self):
        testing.tearDown()
    def _callFUT(self, request, transaction, get_connection, evolve_packages):
        # FUT = "function under test"; its collaborators are injected so the
        # tests can use the Dummy* stubs defined below.
        from .. import root_factory
        return root_factory(request, transaction, get_connection,
                            evolve_packages)
    def _makeRequest(self, app_root=None):
        # Request stub whose registry's content factory creates *app_root*.
        request = Dummy()
        request.registry = DummyRegistry()
        request.registry.content = Dummy()
        request.registry.content.create = lambda *arg: app_root
        return request
    def test_without_app_root(self):
        # ZODB root lacks 'app_root': the factory must create one via the
        # registry's content factory, commit, savepoint and run evolution.
        txn = DummyTransaction()
        root = {}
        gc = Dummy_get_connection(root)
        ep = DummyFunction(True)
        app_root = object()
        request = self._makeRequest(app_root)
        result = self._callFUT(request, txn, gc, ep)
        self.assertEqual(result, app_root)
        self.assertTrue(txn.committed)
        self.assertTrue(txn.savepointed)
        self.assertTrue(ep.called)
    def test_with_app_root(self):
        # An existing 'app_root' is returned as-is without committing.
        txn = DummyTransaction()
        app_root = object()
        root = {'app_root':app_root}
        gc = Dummy_get_connection(root)
        ep = DummyFunction(True)
        request = testing.DummyRequest()
        result = self._callFUT(request, txn, gc, ep)
        self.assertEqual(result, app_root)
        self.assertFalse(txn.committed)
class Test_includeme(unittest.TestCase):
    """Tests for substanced.db's includeme hook."""
    def test_it(self):
        # includeme must register both ZODB connection lifecycle subscribers
        # (opened and will-close), in that order.
        from .. import (
            includeme,
            connection_opened,
            connection_will_close,
            ZODBConnectionOpened,
            ZODBConnectionWillClose,
            )
        config = DummyConfig()
        includeme(config)
        self.assertEqual(
            config.subscriptions,
            [(connection_opened, ZODBConnectionOpened),
             (connection_will_close, ZODBConnectionWillClose),
            ]
            )
class Test_connection_opened(unittest.TestCase):
    """Tests for substanced.db's connection_opened handler."""
    def test_it(self):
        from .. import connection_opened
        event = DummyEvent()
        connection_opened(event)
        # The handler snapshots the connection's (loads, stores) counters on
        # the request for later delta computation at close time.
        self.assertEqual(event.request._zodb_tx_counts, (0,0))
class Test_connection_will_close(unittest.TestCase):
    """Tests for substanced.db's connection_will_close handler."""
    def _callFUT(self, event, statsd_incr):
        # statsd_incr is injected so the tests can record emitted metrics.
        from .. import connection_will_close
        return connection_will_close(event, statsd_incr)
    def test_no_tx_counts(self):
        # Without a _zodb_tx_counts snapshot the handler is a no-op.
        event = DummyEvent()
        result = self._callFUT(event, None)
        self.assertEqual(result, None)
    def test_with_postitive_tx_counts(self):
        # Connection reports (5, 5) against a snapshot of (1, 1): deltas of
        # 4 loads / 4 stores must be reported through statsd_incr.
        event = DummyEvent(5,5)
        event.request._zodb_tx_counts = (1, 1)
        L = []
        def statsd_incr(name, num, registry=None):
            L.append((name, num))
        self._callFUT(event, statsd_incr)
        self.assertEqual(
            L,
            [('zodb.loads', 4), ('zodb.stores', 4)]
            )
    def test_with_zero_tx_counts(self):
        # Zero deltas: statsd_incr must not be called at all (passing None
        # would raise if the handler tried to invoke it).
        event = DummyEvent(1,1)
        event.request._zodb_tx_counts = (1, 1)
        L = []
        self._callFUT(event, None)
        self.assertEqual(
            L,
            []
            )
class DummyTransaction(object):
    """Transaction stub recording whether commit()/savepoint() were invoked."""
    # Class-level defaults; instance attributes shadow them once a call occurs.
    committed = False
    savepointed = False
    def savepoint(self):
        self.savepointed = True
    def commit(self):
        self.committed = True
class Dummy_get_connection(object):
    """Stands in for get_connection(): calling it yields itself, and its
    root() returns a fixed mapping supplied at construction time."""
    def __init__(self, root):
        self._root = root
    def __call__(self, request):
        # Mimic get_connection(request) returning a connection object.
        return self
    def root(self):
        return self._root
class DummyFunction(object):
    """Callable stub that records its invocation and returns a canned result."""
    called = False
    def __init__(self, result):
        self.result = result
    def __call__(self, *args, **kw):
        # Remember that (and how) we were called for later assertions.
        self.called = True
        self.args, self.kw = args, kw
        return self.result
class Dummy(object):
    """Generic attribute bag used to build ad-hoc stubs in these tests."""
    pass
class DummyRegistry(object):
    """Registry stub that remembers the last event passed to notify()."""
    def notify(self, evt):
        # Capture the event so tests can assert on what was broadcast.
        self.event = evt
class DummyConfig(object):
    """Configurator stub recording (subscriber, event_type) registrations."""
    def __init__(self):
        # Registrations in the order add_subscriber() was called.
        self.subscriptions = []
    def add_subscriber(self, fn, event_type):
        pair = (fn, event_type)
        self.subscriptions.append(pair)
class DummyConnection(object):
    """ZODB connection stub reporting fixed load/store transfer counts."""
    def __init__(self, loads, stores):
        self.loads, self.stores = loads, stores
    def getTransferCounts(self):
        # Mirrors ZODB's Connection.getTransferCounts() -> (loads, stores).
        return (self.loads, self.stores)
class DummyEvent(object):
    """Event stub carrying a dummy request plus a connection with fixed
    transfer counts, as expected by the connection lifecycle handlers."""
    def __init__(self, loads=0, stores=0):
        # Attribute creation order is irrelevant; both are plain stubs.
        self.conn = DummyConnection(loads, stores)
        self.request = testing.DummyRequest()
|
data/ImageEngine/gaffer/python/GafferImageUI/__init__.py
|
from _GafferImageUI import *
import DisplayUI
from FormatPlugValueWidget import FormatPlugValueWidget
from ChannelMaskPlugValueWidget import ChannelMaskPlugValueWidget
import OpenImageIOReaderUI
import ImageReaderUI
import ImageViewToolbar
import ImageTransformUI
import ConstantUI
import ImageSwitchUI
import ColorSpaceUI
import ImageContextVariablesUI
import ImageStatsUI
import DeleteChannelsUI
import ObjectToImageUI
import ClampUI
import ImageWriterUI
import GradeUI
import ImageTimeWarpUI
import ImageSamplerUI
import MergeUI
import ImageNodeUI
import ChannelDataProcessorUI
import ImageProcessorUI
import ImageMetadataUI
import DeleteImageMetadataUI
import CopyImageMetadataUI
import ImageLoopUI
import ShuffleUI
import PremultiplyUI
import UnpremultiplyUI
import CropUI
import ResizeUI
import ResampleUI
import LUTUI
import CDLUI
import DisplayTransformUI
import OffsetUI
import BlurUI
import ShapeUI
import TextUI
import WarpUI
import UVWarpUI
__import__( "IECore" ).loadConfig( "GAFFER_STARTUP_PATHS", {}, subdirectory = "GafferImageUI" )
|
data/adamlwgriffiths/Pyrr/pyrr/tests/test_quaternion.py
|
try:
import unittest2 as unittest
except:
import unittest
import numpy as np
from pyrr import quaternion
class test_quaternion(unittest.TestCase):
    """Unit tests for pyrr.quaternion.

    Quaternions use pyrr's [x, y, z, w] layout, where [0., 0., 0., 1.] is
    the identity; rotation constructors follow the half-angle convention
    q = [sin(t/2) * axis, cos(t/2)].
    """
    def test_import(self):
        import pyrr
        pyrr.quaternion
        from pyrr import quaternion
    def test_create(self):
        result = quaternion.create()
        np.testing.assert_almost_equal(result, [0., 0., 0., 1.], decimal=5)
        # np.float was a deprecated alias of the builtin float (removed in
        # NumPy 1.24); comparing the dtype against float is equivalent.
        self.assertTrue(result.dtype == float)
    def test_create_parameters(self):
        result = quaternion.create(1.0, 2.0, 3.0, 4.0)
        np.testing.assert_almost_equal(result, [1.0, 2.0, 3.0, 4.0], decimal=5)
        self.assertTrue(result.dtype == float)
    def test_create_from_x_rotation(self):
        q = quaternion.create_from_x_rotation(np.pi)
        self.assertTrue(np.allclose(q, [1., 0., 0., 0.]))
        q = quaternion.create_from_x_rotation(np.pi / 2.)
        self.assertTrue(np.allclose(q, [np.sqrt(0.5), 0., 0., np.sqrt(0.5)]))
        q = quaternion.create_from_x_rotation(-np.pi / 2.)
        self.assertTrue(np.allclose(q, [-np.sqrt(0.5), 0., 0., np.sqrt(0.5)]))
    def test_create_from_y_rotation(self):
        q = quaternion.create_from_y_rotation(np.pi)
        self.assertTrue(np.allclose(q, [0., 1., 0., 0.]))
        q = quaternion.create_from_y_rotation(np.pi / 2.)
        self.assertTrue(np.allclose(q, [0., np.sqrt(0.5), 0., np.sqrt(0.5)]))
        q = quaternion.create_from_y_rotation(-np.pi / 2.)
        # BUGFIX: the original computed q here but never asserted on it.
        self.assertTrue(np.allclose(q, [0., -np.sqrt(0.5), 0., np.sqrt(0.5)]))
    def test_create_from_z_rotation(self):
        q = quaternion.create_from_z_rotation(np.pi)
        self.assertTrue(np.allclose(q, [0., 0., 1., 0.]))
        q = quaternion.create_from_z_rotation(np.pi / 2.)
        self.assertTrue(np.allclose(q, [0., 0., np.sqrt(0.5), np.sqrt(0.5)]))
        q = quaternion.create_from_z_rotation(-np.pi / 2.)
        # BUGFIX: the original computed q here but never asserted on it.
        self.assertTrue(np.allclose(q, [0., 0., -np.sqrt(0.5), np.sqrt(0.5)]))
    def test_create_from_axis_rotation(self):
        result = quaternion.create_from_axis_rotation([0.57735, 0.57735, 0.57735], np.pi)
        np.testing.assert_almost_equal(result, [5.77350000e-01, 5.77350000e-01, 5.77350000e-01, 6.12323400e-17], decimal=3)
        self.assertTrue(result.dtype == float)
    def test_create_from_axis_rotation_non_normalised(self):
        # The axis should be normalised internally, giving the same result.
        result = quaternion.create_from_axis_rotation([1., 1., 1.], np.pi)
        np.testing.assert_almost_equal(result, [5.77350000e-01, 5.77350000e-01, 5.77350000e-01, 6.12323400e-17], decimal=3)
        self.assertTrue(result.dtype == float)
    def test_create_from_matrix_unit(self):
        result = quaternion.create_from_matrix(np.eye(3))
        np.testing.assert_almost_equal(result, [0., 0., 0., 1.], decimal=5)
        self.assertTrue(result.dtype == float)
    def test_create_from_matrix_x(self):
        # 180-degree rotation about X expressed as a rotation matrix.
        result = quaternion.create_from_matrix([
            [1., 0., 0.],
            [0., -1., 0.],
            [0., 0., -1.],
        ])
        np.testing.assert_almost_equal(result, [1., 0., 0., 0.], decimal=5)
        self.assertTrue(result.dtype == float)
    def test_create_from_matrix_y(self):
        result = quaternion.create_from_matrix([
            [-1., 0., 0.],
            [0., 1., 0.],
            [0., 0., -1.],
        ])
        np.testing.assert_almost_equal(result, [0., 1., 0., 0.], decimal=5)
        self.assertTrue(result.dtype == float)
    def test_create_from_matrix_z(self):
        result = quaternion.create_from_matrix([
            [-1., 0., 0.],
            [0., -1., 0.],
            [0., 0., 1.],
        ])
        np.testing.assert_almost_equal(result, [0., 0., 1., 0.], decimal=5)
        self.assertTrue(result.dtype == float)
    @unittest.skip('Not implemented')
    def test_create_from_eulers(self):
        pass
    @unittest.skip('Not implemented')
    def test_create_from_inverse_of_eulers(self):
        pass
    def test_cross(self):
        # Composing opposite rotations must yield the identity quaternion.
        q1 = quaternion.create_from_x_rotation(np.pi / 2.0)
        q2 = quaternion.create_from_x_rotation(-np.pi / 2.0)
        result = quaternion.cross(q1, q2)
        np.testing.assert_almost_equal(result, quaternion.create(), decimal=5)
    def test_is_zero_length(self):
        result = quaternion.is_zero_length([1., 0., 0., 0.])
        self.assertFalse(result)
    def test_is_zero_length_zero(self):
        result = quaternion.is_zero_length([0., 0., 0., 0.])
        self.assertTrue(result)
    def test_is_non_zero_length(self):
        result = quaternion.is_non_zero_length([1., 0., 0., 0.])
        self.assertTrue(result)
    def test_is_non_zero_length_zero(self):
        result = quaternion.is_non_zero_length([0., 0., 0., 0.])
        self.assertFalse(result)
    def test_squared_length_identity(self):
        result = quaternion.squared_length([0., 0., 0., 1.])
        np.testing.assert_almost_equal(result, 1., decimal=5)
    def test_squared_length(self):
        result = quaternion.squared_length([1., 1., 1., 1.])
        np.testing.assert_almost_equal(result, 4., decimal=5)
    def test_squared_length_batch(self):
        result = quaternion.squared_length([
            [0., 0., 0., 1.],
            [1., 1., 1., 1.],
        ])
        np.testing.assert_almost_equal(result, [1., 4.], decimal=5)
    def test_length_identity(self):
        result = quaternion.length([0., 0., 0., 1.])
        np.testing.assert_almost_equal(result, 1., decimal=5)
    def test_length(self):
        result = quaternion.length([1., 1., 1., 1.])
        np.testing.assert_almost_equal(result, 2., decimal=5)
    def test_length_batch(self):
        result = quaternion.length([
            [0., 0., 0., 1.],
            [1., 1., 1., 1.],
        ])
        np.testing.assert_almost_equal(result, [1., 2.], decimal=5)
    def test_normalise_identity(self):
        # Normalising the identity quaternion is a no-op.
        result = quaternion.normalise([0., 0., 0., 1.])
        np.testing.assert_almost_equal(result, [0., 0., 0., 1.], decimal=5)
    def test_normalise_non_identity(self):
        result = quaternion.normalise([1., 2., 3., 4.])
        np.testing.assert_almost_equal(result, [1. / np.sqrt(30.), np.sqrt(2. / 15.), np.sqrt(3. / 10.), 2. * np.sqrt(2. / 15.)], decimal=5)
    def test_normalise_batch(self):
        result = quaternion.normalise([
            [0., 0., 0., 1.],
            [1., 2., 3., 4.],
        ])
        expected = [
            [0., 0., 0., 1.],
            [1. / np.sqrt(30.), np.sqrt(2. / 15.), np.sqrt(3. / 10.), 2. * np.sqrt(2. / 15.)],
        ]
        np.testing.assert_almost_equal(result, expected, decimal=5)
    def test_rotation_angle(self):
        result = quaternion.rotation_angle([5.77350000e-01, 5.77350000e-01, 5.77350000e-01, 6.12323400e-17])
        np.testing.assert_almost_equal(result, np.pi, decimal=5)
    def test_rotation_axis(self):
        result = quaternion.rotation_axis([5.77350000e-01, 5.77350000e-01, 5.77350000e-01, 6.12323400e-17])
        np.testing.assert_almost_equal(result, [0.57735, 0.57735, 0.57735], decimal=5)
    def test_dot_adjacent(self):
        result = quaternion.dot([1., 0., 0., 0.], [0., 1., 0., 0.])
        np.testing.assert_almost_equal(result, 0.0, decimal=5)
    def test_dot_parallel(self):
        result = quaternion.dot([0., 1., 0., 0.], [0., 1., 0., 0.])
        np.testing.assert_almost_equal(result, 1.0, decimal=5)
    def test_dot_angle(self):
        result = quaternion.dot([.2, .2, 0., 0.], [2., -.2, 0., 0.])
        np.testing.assert_almost_equal(result, 0.36, decimal=5)
    def test_dot_batch(self):
        result = quaternion.dot([
            [1., 0., 0., 0.],
            [0., 1., 0., 0.],
            [.2, .2, 0., 0.]
        ], [
            [0., 1., 0., 0.],
            [0., 1., 0., 0.],
            [2., -.2, 0., 0.]
        ])
        expected = [0., 1., 0.36]
        np.testing.assert_almost_equal(result, expected, decimal=5)
    def test_conjugate(self):
        result = quaternion.conjugate([0., 0., 0., 1.])
        np.testing.assert_almost_equal(result, [0., 0., 0., 1.], decimal=5)
    def test_conjugate_rotation(self):
        # Conjugation negates the vector part, leaving w unchanged.
        result = quaternion.conjugate([5.77350000e-01, 5.77350000e-01, 5.77350000e-01, 6.12323400e-17])
        np.testing.assert_almost_equal(result, [-0.57735, -0.57735, -0.57735, 6.12323e-17], decimal=5)
    @unittest.skip('Not implemented')
    def test_power(self):
        pass
    def test_inverse(self):
        result = quaternion.inverse([0., 0., 0., 1.])
        np.testing.assert_almost_equal(result, [0., 0., 0., 1.], decimal=5)
    def test_inverse_rotation(self):
        result = quaternion.inverse([5.77350000e-01, 5.77350000e-01, 5.77350000e-01, 6.12323400e-17])
        np.testing.assert_almost_equal(result, [-0.577351, -0.577351, -0.577351, 6.12324e-17], decimal=5)
    def test_inverse_non_unit(self):
        q = [1, 2, 3, 4]
        result = quaternion.inverse(q)
        expected = quaternion.conjugate(q) / quaternion.length(q)
        np.testing.assert_almost_equal(result, expected, decimal=5)
    def test_negate_unit(self):
        result = quaternion.negate([0., 0., 0., 1.])
        np.testing.assert_almost_equal(result, [0., 0., 0., -1.], decimal=5)
    def test_negate(self):
        result = quaternion.negate([1., 2., 3., 4.])
        np.testing.assert_almost_equal(result, [-1., -2., -3., -4.], decimal=5)
    def test_apply_to_vector_unit_x(self):
        result = quaternion.apply_to_vector([0., 0., 0., 1.], [1., 0., 0.])
        np.testing.assert_almost_equal(result, [1., 0., 0.], decimal=5)
    def test_apply_to_vector_x(self):
        q = quaternion.create_from_x_rotation(np.pi)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [1., 0., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [0.,-1., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [0., 0.,-1.]))
        q = quaternion.create_from_x_rotation(np.pi / 2.)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [1., 0., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [0., 0., 1.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [0.,-1., 0.]))
        q = quaternion.create_from_x_rotation(-np.pi / 2.)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [1., 0., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [0., 0.,-1.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [0., 1., 0.]))
    def test_apply_to_vector_y(self):
        q = quaternion.create_from_y_rotation(np.pi)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [-1., 0., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [0., 1., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [0., 0.,-1.]))
        q = quaternion.create_from_y_rotation(np.pi / 2.)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [0., 0.,-1.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [0., 1., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [1., 0., 0.]))
        q = quaternion.create_from_y_rotation(-np.pi / 2.)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [0., 0., 1.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [0., 1., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [-1., 0., 0.]))
    def test_apply_to_vector_z(self):
        q = quaternion.create_from_z_rotation(np.pi)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [-1., 0., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [0.,-1., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [0., 0., 1.]))
        q = quaternion.create_from_z_rotation(np.pi / 2.)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [0., 1., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [-1., 0., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [0., 0., 1.]))
        q = quaternion.create_from_z_rotation(-np.pi / 2.)
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [1., 0., 0.]), [0.,-1., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 1., 0.]), [1., 0., 0.]))
        self.assertTrue(np.allclose(quaternion.apply_to_vector(q, [0., 0., 1.]), [0., 0., 1.]))
    def test_identity(self):
        # Verify the quaternion group identities (i*1 = 1*i = i, i^2 = j^2 =
        # k^2 = ijk = -1, and the cyclic products ij = k etc.).
        i = quaternion.create(1., 0., 0., 0.)
        j = quaternion.create(0., 1., 0., 0.)
        k = quaternion.create(0., 0., 1., 0.)
        one = quaternion.create(0., 0., 0., 1.)
        i1 = quaternion.cross(i, one)
        j1 = quaternion.cross(j, one)
        k1 = quaternion.cross(k, one)
        _1i = quaternion.cross(one, i)
        _1j = quaternion.cross(one, j)
        _1k = quaternion.cross(one, k)
        # BUGFIX: np.allclose(a, b, x) passes x as the *rtol* parameter; the
        # original three-argument calls silently used an array as a tolerance
        # instead of performing the intended three-way equality check.
        self.assertTrue(np.allclose(i1, _1i) and np.allclose(_1i, i))
        self.assertTrue(np.allclose(j1, _1j) and np.allclose(_1j, j))
        self.assertTrue(np.allclose(k1, _1k) and np.allclose(_1k, k))
        ii = quaternion.cross(i, i)
        kk = quaternion.cross(k, k)
        jj = quaternion.cross(j, j)
        ijk = quaternion.cross(quaternion.cross(i, j), k)
        self.assertTrue(np.allclose(ii, -one))
        self.assertTrue(np.allclose(jj, -one))
        self.assertTrue(np.allclose(kk, -one))
        self.assertTrue(np.allclose(ijk, -one))
        ij = quaternion.cross(i, j)
        ji = quaternion.cross(j, i)
        jk = quaternion.cross(j, k)
        kj = quaternion.cross(k, j)
        ki = quaternion.cross(k, i)
        ik = quaternion.cross(i, k)
        self.assertTrue(np.allclose(ij, k))
        self.assertTrue(np.allclose(ji, -k))
        self.assertTrue(np.allclose(jk, i))
        self.assertTrue(np.allclose(kj, -i))
        self.assertTrue(np.allclose(ki, j))
        self.assertTrue(np.allclose(ik, -j))
        ijkk = quaternion.cross(quaternion.cross(ij, k), k)
        ijk2 = quaternion.cross(ij, quaternion.cross(k, k))
        ij_m1 = quaternion.cross(ij, -one)
        self.assertTrue(np.allclose(ijkk, ijk2))
        self.assertTrue(np.allclose(ijk2, ij_m1))
# Allow running this test module directly: python test_quaternion.py
if __name__ == '__main__':
    unittest.main()
|
data/OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/datatypes/domain/zone.py
|
import copy
from openmdao.lib.datatypes.domain.flow import FlowSolution
from openmdao.lib.datatypes.domain.grid import GridCoordinates
# Supported coordinate-system identifiers for Zone.coordinate_system.
CARTESIAN = 'Cartesian'
CYLINDRICAL = 'Cylindrical'
_COORD_SYSTEMS = (CARTESIAN, CYLINDRICAL)
class Zone(object):
    """ One zone in a possibly multi-zone :class:`DomainObj`. """
    def __init__(self):
        # Mesh coordinates and the flow solution defined on them.
        self.grid_coordinates = GridCoordinates()
        self.flow_solution = FlowSolution()
        # Optional reference-state object (must support .copy(); see extract).
        self.reference_state = None
        # Backing field for the validated `coordinate_system` property.
        self._coordinate_system = CARTESIAN
        self.right_handed = True
        # Symmetry metadata: kind, axis, and number of replicated instances.
        self.symmetry = None
        self.symmetry_axis = None
        self.symmetry_instances = 1
    @property
    def shape(self):
        """ Coordinate index limits, not including 'ghost/rind' planes. """
        return self.grid_coordinates.shape
    @property
    def extent(self):
        """ Coordinate ranges, not including 'ghost/rind' planes. """
        return self.grid_coordinates.extent
    def _get_coord_sys(self):
        return self._coordinate_system
    def _set_coord_sys(self, sys):
        # Reject anything other than the two supported systems.
        if sys in _COORD_SYSTEMS:
            self._coordinate_system = sys
        else:
            raise ValueError('invalid coordinate system %r' % sys)
    coordinate_system = property(_get_coord_sys, _set_coord_sys,
                                 doc='Coordinate system in use.')
    def copy(self):
        """ Returns a deep copy of self. """
        return copy.deepcopy(self)
    def is_equivalent(self, other, logger, tolerance=0.):
        """
        Test if self and `other` are equivalent.

        other: :class:`Zone`
            Zone to check against.

        logger: :class:`Logger` or None
            Used to log debug messages that will indicate what if anything is
            not equivalent.

        tolerance: float
            The maximum relative difference in array values to be considered
            equivalent.
        """
        # NOTE(review): the docstring allows logger=None, but every mismatch
        # path below calls logger.debug() unconditionally — a None logger
        # would raise AttributeError; confirm expected usage.
        if not isinstance(other, Zone):
            logger.debug('other is not a Zone object.')
            return False
        if self.coordinate_system != other.coordinate_system:
            logger.debug('coordinate_systems are not equal.')
            return False
        if self.right_handed != other.right_handed:
            logger.debug('handedness is not equal.')
            return False
        if self.symmetry != other.symmetry:
            logger.debug('symmetry is not equal.')
            return False
        if self.symmetry_axis != other.symmetry_axis:
            logger.debug('symmetry_axis is not equal.')
            return False
        if self.symmetry_instances != other.symmetry_instances:
            logger.debug('symmetry_instances is not equal.')
            return False
        # Grid and flow comparisons log their own details.
        if not self.grid_coordinates.is_equivalent(other.grid_coordinates,
                                                   logger, tolerance):
            return False
        if not self.flow_solution.is_equivalent(other.flow_solution, logger,
                                                tolerance):
            return False
        return True
    def extract(self, imin, imax, jmin=None, jmax=None, kmin=None, kmax=None,
                grid_ghosts=None, flow_ghosts=None):
        """
        Construct a new :class:`Zone` from grid and flow data extracted
        from the specified region. Symmetry data is copied.

        imin, imax, jmin, jmax, kmin, kmax: int
            Specifies the region to extract neglecting ghost/rind planes.
            Negative values are relative to the size in that dimension,
            so -1 refers to the last element. For 2D zones omit kmin and kmax.
            For 1D zones omit jmin, jmax, kmin, and kmax.

        grid_ghosts: int[]
            The number of ghost/rind planes for the new zone's grid.
            If ``None`` the grid's existing specification is used.

        flow_ghosts: int[]
            The number of ghost/rind planes for the new zone's flow solution.
            If ``None`` the flow's existing specification is used.
        """
        zone = Zone()
        # Grid and flow are extracted independently over the same region.
        zone.grid_coordinates = \
            self.grid_coordinates.extract(imin, imax, jmin, jmax, kmin, kmax,
                                          grid_ghosts)
        zone.flow_solution = \
            self.flow_solution.extract(imin, imax, jmin, jmax, kmin, kmax,
                                       flow_ghosts)
        if self.reference_state is not None:
            zone.reference_state = self.reference_state.copy()
        # Carry over all scalar metadata unchanged.
        zone.coordinate_system = self.coordinate_system
        zone.right_handed = self.right_handed
        zone.symmetry = self.symmetry
        zone.symmetry_axis = self.symmetry_axis
        zone.symmetry_instances = self.symmetry_instances
        return zone
    def extend(self, axis, delta, grid_points, flow_points, normal=None):
        """
        Construct a new :class:`Zone` by linearly extending the grid and
        replicating the flow. Symmetry data is copied.

        axis: 'i', 'j', or 'k'
            Index axis to extend.

        delta: float.
            Fractional amount to move for each point. Multiplies the 'edge'
            delta in the `axis` direction or the appropriate component of
            `normal`. A negative value adds points before the current
            zero-index of `axis`.

        grid_points: int >= 0
            Number of points to add in `axis` dimension.

        flow_points: int >= 0
            Number of points to add in `axis` dimension.

        normal: float[]
            For cases where only a single point exists in the `axis` direction,
            this specifies the direction to move. If not specified, an
            axis-aligned direction is selected based on minimum grid extent.
        """
        zone = Zone()
        # Zero points means "keep as-is": copy rather than extend.
        if grid_points > 0:
            zone.grid_coordinates = \
                self.grid_coordinates.extend(axis, delta, grid_points, normal)
        else:
            zone.grid_coordinates = self.grid_coordinates.copy()
        if flow_points > 0:
            zone.flow_solution = \
                self.flow_solution.extend(axis, delta, flow_points)
        else:
            zone.flow_solution = self.flow_solution.copy()
        if self.reference_state is not None:
            zone.reference_state = self.reference_state.copy()
        zone.coordinate_system = self.coordinate_system
        zone.right_handed = self.right_handed
        zone.symmetry = self.symmetry
        zone.symmetry_axis = self.symmetry_axis
        zone.symmetry_instances = self.symmetry_instances
        return zone
    def make_cartesian(self, axis='z'):
        """
        Convert to Cartesian coordinate system.

        axis: string
            Specifies which is the cylinder axis ('z' or 'x').
        """
        if self.coordinate_system != CARTESIAN:
            # Flow is converted before the grid here (the opposite order of
            # make_cylindrical) — presumably because the flow conversion
            # needs the grid still in cylindrical form; preserve this order.
            self.flow_solution.make_cartesian(self.grid_coordinates, axis)
            self.grid_coordinates.make_cartesian(axis)
            self.coordinate_system = CARTESIAN
    def make_cylindrical(self, axis='z'):
        """
        Convert to cylindrical coordinate system.

        axis: string
            Specifies which is the cylinder axis ('z' or 'x').
        """
        if self.coordinate_system != CYLINDRICAL:
            # Grid first, then flow (mirror image of make_cartesian): the
            # flow conversion appears to expect cylindrical grid coordinates.
            self.grid_coordinates.make_cylindrical(axis)
            self.flow_solution.make_cylindrical(self.grid_coordinates, axis)
            self.coordinate_system = CYLINDRICAL
    def make_left_handed(self):
        """ Convert to left-handed coordinate system. """
        if self.right_handed:
            # Handedness flip is a Z mirror of both grid and flow.
            self.grid_coordinates.flip_z()
            self.flow_solution.flip_z()
            self.right_handed = False
    def make_right_handed(self):
        """ Convert to right-handed coordinate system. """
        if not self.right_handed:
            self.grid_coordinates.flip_z()
            self.flow_solution.flip_z()
            self.right_handed = True
    def translate(self, delta_x, delta_y, delta_z):
        """
        Translate coordinates.

        delta_x, delta_y, delta_z: float
            Amount of translation along the corresponding axis.
        """
        # Translation only makes sense on Cartesian coordinates.
        if self.coordinate_system == CARTESIAN:
            self.grid_coordinates.translate(delta_x, delta_y, delta_z)
        else:
            raise RuntimeError('Zone not in cartesian coordinates')
    def rotate_about_x(self, deg):
        """
        Rotate about the X axis.

        deg: float (degrees)
            Amount of rotation.
        """
        if self.coordinate_system == CARTESIAN:
            # Both grid and vector flow quantities must rotate together.
            self.grid_coordinates.rotate_about_x(deg)
            self.flow_solution.rotate_about_x(deg)
        else:
            raise RuntimeError('Zone not in cartesian coordinates')
    def rotate_about_y(self, deg):
        """
        Rotate about the Y axis.

        deg: float (degrees)
            Amount of rotation.
        """
        if self.coordinate_system == CARTESIAN:
            self.grid_coordinates.rotate_about_y(deg)
            self.flow_solution.rotate_about_y(deg)
        else:
            raise RuntimeError('Zone not in cartesian coordinates')
    def rotate_about_z(self, deg):
        """
        Rotate about the Z axis.

        deg: float (degrees)
            Amount of rotation.
        """
        if self.coordinate_system == CARTESIAN:
            self.grid_coordinates.rotate_about_z(deg)
            self.flow_solution.rotate_about_z(deg)
        else:
            raise RuntimeError('Zone not in cartesian coordinates')
    def promote(self):
        """ Promote from N-dimensional to N+1 dimensional index space. """
        self.grid_coordinates.promote()
        self.flow_solution.promote()
    def demote(self):
        """ Demote from N-dimensional to N-1 dimensional index space. """
        self.grid_coordinates.demote()
        self.flow_solution.demote()
|
data/RobotWebTools/rosbridge_suite/rosbridge_library/test/experimental/complex_srv+tcp/test_non-ros_service_server_complex-srv.py
|
import sys
import socket
import time
from random import randint
from rosbridge_library.util import json
# Connection and protocol tuning for talking to a rosbridge TCP server.
tcp_socket_timeout = 10              # seconds before recv() raises a timeout
max_msg_length = 20000               # maximum bytes read per recv() call
rosbridge_ip = "localhost"
rosbridge_port = 9090
# Service advertised by this (non-ROS) external service provider.
service_type = "rosbridge_library/TestNestedService"
service_name = "nested_srv"
# Outgoing fragmentation: split responses into chunks of this many chars.
send_fragment_size = 1000
send_fragment_delay = 0.000
# Incoming fragmentation hints sent to rosbridge when advertising.
receive_fragment_size = 10
receive_message_intervall = 0.0
def calculate_service_response(request):
    """Build the JSON service_response for an incoming call_service request.

    The payload is a fixed nested message ({"data": {"data": 42.0}}); the
    request's "id" is echoed back so rosbridge can match the reply.
    """
    parsed = json.loads(request)
    # Arguments are decoded but unused: this test server always answers 42.0.
    args = parsed["args"]
    payload = {"data": {"data": 42.0}}
    # IMPORTANT (from original): base64-encode binary payloads to keep the
    # JSON valid; clients then use .decode("base64","strict").
    reply = {"op": "service_response",
             "id": parsed["id"],
             "data": payload
             }
    return json.dumps(reply)
buffer = ""
def connect_tcp_socket():
    """Open and return a TCP connection to the configured rosbridge server."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(tcp_socket_timeout)
    sock.connect((rosbridge_ip, rosbridge_port))
    return sock
def advertise_service():
    """Ask rosbridge to advertise our service, including fragmentation hints."""
    advert = {"op":"advertise_service",
              "type": service_type,
              "service": service_name,
              "fragment_size": receive_fragment_size,
              "message_intervall": receive_message_intervall
              }
    # str() keeps the original byte-string send semantics on Python 2.
    tcp_socket.send(str(json.dumps(advert)))
def unadvertise_service():
    """Ask rosbridge to stop advertising our service."""
    unadvert = {"op":"unadvertise_service",
                "service": service_name
                }
    tcp_socket.send(str(json.dumps(unadvert)))
def wait_for_service_request():
    # Block until a complete "call_service" message has been received and
    # return it as a JSON string.  The message may arrive whole, or split
    # into rosbridge "fragment" messages that are reassembled here.
    # Returns None on timeout or unrecoverable parse failure.
    data = None
    global buffer
    try:
        done = False
        global buffer
        while not done:
            incoming = tcp_socket.recv(max_msg_length)
            if incoming == '':
                print "connection closed by peer"
                sys.exit(1)
            # Accumulate partial reads until the buffer parses.
            buffer = buffer + incoming
            # Fast path: the whole buffer is one complete JSON message.
            try:
                data_object = json.loads(buffer)
                if data_object["op"] == "call_service":
                    data = buffer
                    done = True
                    return data
            except Exception, e:
                pass
            # Slow path: split back-to-back JSON objects on "}{" boundaries,
            # re-bracket each piece, and reassemble fragments by "num".
            try:
                result_string = buffer.split("}{")
                result = []
                for fragment in result_string:
                    if fragment[0] != "{":
                        fragment = "{"+fragment
                    if fragment[len(fragment)-1] != "}":
                        fragment = fragment + "}"
                    result.append(json.loads(fragment))
                try:
                    fragment_count = len(result)
                    # First fragment announces the expected total count.
                    announced = int(result[0]["total"])
                    if fragment_count == announced:
                        reconstructed = ""
                        sorted_result = [None] * fragment_count
                        unsorted_result = []
                        for fragment in result:
                            unsorted_result.append(fragment)
                            # Place each fragment by its "num" index so
                            # out-of-order arrival is handled.
                            sorted_result[int(fragment["num"])] = fragment
                        for fragment in sorted_result:
                            reconstructed = reconstructed + fragment["data"]
                        buffer = ""
                        done = True
                        print "reconstructed message from", len(result), "fragments"
                        return reconstructed
                except Exception, e:
                    # Not all fragments have arrived yet; keep receiving.
                    print "not possible to defragment:", buffer
                    print e
            except Exception, e:
                print "defrag_error:", buffer
                print e
                pass
    except Exception, e:
        # Typically a socket timeout; caller treats None as "no request".
        pass
    return data
def send_service_response(response):
    # Forward one response message (or fragment) to rosbridge.
    tcp_socket.send(response)
def list_of_fragments(full_message, fragment_size):
    """Split *full_message* into rosbridge "fragment" protocol messages.

    Returns a list of JSON fragment-op strings when more than one fragment
    is needed, otherwise a single-element list holding the raw message.
    A random id ties all fragments of one message together.

    full_message: str -- the complete message to send.
    fragment_size: int > 0 -- maximum payload characters per fragment.
    """
    message_id = randint(0, 64000)

    # Slice the message into fragment_size-sized chunks.
    fragments = []
    cursor = 0
    while cursor < len(full_message):
        fragments.append(full_message[cursor:cursor + fragment_size])
        cursor += fragment_size

    # Single (or empty) messages are sent unwrapped, per the protocol.
    # BUGFIX: the original appended str(fragment) here, reusing the stale
    # loop variable — which raised NameError for an empty full_message.
    if len(fragments) <= 1:
        return [full_message]

    fragmented_messages_list = []
    for count, fragment in enumerate(fragments):
        fragmented_message_object = {"op": "fragment",
                                     "id": str(message_id),
                                     "data": str(fragment),
                                     "num": count,
                                     "total": len(fragments)
                                     }
        fragmented_messages_list.append(json.dumps(fragmented_message_object))
    return fragmented_messages_list
# --- script entry: connect, advertise, then serve requests until Ctrl-C ---
tcp_socket = connect_tcp_socket()
advertise_service()
print "service provider started and waiting for requests"
try:
    while True:
        data = None
        try:
            data = wait_for_service_request()
            # Empty string means the peer closed the connection.
            if data == '':
                break
            elif data != None and len(data) > 0:
                response = calculate_service_response(data)
                print "response calculated, now splitting into fragments.."
                fragment_list = list_of_fragments(response, send_fragment_size)
                print "sending", len(fragment_list), "messages as response"
                for fragment in fragment_list:
                    send_service_response(fragment)
                    # Throttle between fragments to avoid flooding rosbridge.
                    time.sleep(send_fragment_delay)
        except Exception, e:
            print e
            pass
except KeyboardInterrupt:
    # Best-effort cleanup: unadvertise and close before exiting.
    try:
        unadvertise_service()
        tcp_socket.close()
    except Exception, e:
        print e
    print "non-ros_service_server stopped because user pressed \"Ctrl-C\""
|
data/Neohapsis/bbqsql/scripts/test_server.py
|
"""This is a simple webserver vulnerable to SQLi injection
make your query string look like this: http://127.0.0.1:8090/time?row_index=1&character_index=1&character_value=95&comparator=>&sleep=1
command line usage:
python ./test_server.py [--rows=50 --cols=150]
:rows - this controls how many rows of random data to use for the database
:cols - this controls how many columns of random data to use for the database
"""
import eventlet
from eventlet import wsgi
from eventlet.green import time
from urlparse import parse_qs
from random import random,choice
# Fake "database" rows the simulated SQLi extracts character by character,
# and the comparator symbols accepted in the query string.
datas = ['hello','world']
comparators = ['<','=','>','false']
def parse_response(env, start_response):
    '''Parse out all necessary information and determine if the query resulted in a match'''
    # Small random delay so timing measurements look realistic.
    delay = random()
    time.sleep(delay/10)
    try:
        params = parse_qs(env['QUERY_STRING'])
        row_index = int(params['row_index'][0])
        char_index = int(params['character_index'][0]) - 1
        test_char = int(params['character_value'][0])
        # Map the comparator symbol onto cmp()-style results:
        # '<' -> -1, '=' -> 0, '>' -> 1, 'false' -> 2 (never matches).
        comparator = comparators.index(params['comparator'][0]) - 1
        try:
            sleep_int = float(params['sleep'].pop(0))
        except KeyError:
            sleep_int = 1
        current_character = datas[row_index][char_index]
        # Three-way compare; equivalent to Python 2's cmp() but also valid
        # on Python 3, where cmp() was removed.
        ordering = ((ord(current_character) > test_char) -
                    (ord(current_character) < test_char))
        truth = (ordering == comparator)
        response = types[env['PATH_INFO']](test_char, current_character, comparator, sleep_int, start_response,truth)
        return response
    except Exception:
        # Any malformed query string yields a 400.  (Was a bare except:,
        # which also swallowed SystemExit/KeyboardInterrupt.)
        start_response('400 Bad Request', [('Content-Type', 'text/plain')])
        return ['error\r\n']
def time_based_blind(test_char, current_character, comparator, sleep_int, start_response, truth):
    """Simulate a time-based blind SQLi oracle.

    A matching guess (truth == True) delays the response by ``sleep_int``
    seconds; a miss responds immediately.  The body is identical either way.
    """
    pause = sleep_int if truth else 0
    time.sleep(pause)
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['Hello!\r\n']
def boolean_based_error(test_char, current_character, comparator, env, start_response, truth):
    """Simulate an error-based SQLi oracle.

    A matching guess returns 200 with a success body; a miss returns a 404
    error page, so the status code alone leaks the comparison result.
    """
    if not truth:
        start_response('404 File Not Found', [('Content-Type', 'text/plain')])
        return ['file not found: error\r\n']
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['Hello, im a bigger cheese in this cruel World!\r\n']
def boolean_based_size(test_char, current_character, comparator, env, start_response, truth):
    """Simulate a boolean-based SQLi oracle.

    Both outcomes return 200; only the response body (and therefore its
    length) differs between a match and a miss.
    """
    start_response('200 OK', [('Content-Type', 'text/plain')])
    if truth:
        return ['Hello, you just submitted a query and i found a match\r\n']
    return ['Hello, no match!\r\n']
types = {'/time':time_based_blind,'/error':boolean_based_error,'/boolean':boolean_based_size}
# Script entry point.  NOTE: this file is Python 2 (print statements, xrange,
# and filter() returning a list).
if __name__ == "__main__":
    # Usage banner.
    print "\n"
    print "bbqsql http server\n\n"
    print "used to unit test boolean, blind, and error based sql injection"
    print "use the following syntax: http://127.0.0.1:8090/time?row_index=1&character_index=1&character_value=95&comparator=>&sleep=1"
    print "path can be set to /time, /error, or /boolean"
    print "\n"
    from sys import argv
    import re
    # Printable ASCII characters used to build random row data.
    CHARSET = [chr(x) for x in xrange(32,127)]
    rre = re.compile(u'--rows=[0-9]+')
    cre = re.compile(u'--cols=[0-9]+')
    # Python 2 filter() returns the list of argv entries matching each flag.
    rows = filter(rre.match,argv)
    cols = filter(cre.match,argv)
    if rows and cols:
        rows = rows[0]
        cols = cols[0]
        CHARSET = [chr(x) for x in xrange(32,127)]
        # NOTE(review): the parsed rows/cols values are never used below; the
        # regenerated dataset is hard-coded to 5 rows x 100 chars — confirm intent.
        datas = []
        for asdf in range(5):
            datas.append("")
            for fdsa in range(100):
                datas[-1] += choice(CHARSET)
    # Serve forever on port 8090 using eventlet's WSGI server.
    wsgi.server(eventlet.listen(('', 8090)), parse_response)
|
data/T-002/pycast/pycast/errors/meansquarederror.py
|
from pycast.errors.baseerrormeasure import BaseErrorMeasure
class MeanSquaredError(BaseErrorMeasure):
    """Error measure implementing the mean squared error (MSE).

    Background: http://en.wikipedia.org/wiki/Mean_squared_error
    """
    def _calculate(self, startingPercentage, endPercentage, startDate, endDate):
        """Average the squared local errors over the selected interval.

        Called by :py:meth:`BaseErrorMeasure.get_error`; both percentage
        parameters are already validated at this point.

        :param float startingPercentage: Start of the interval, in [0.0, 100.0].
            Local errors before this percentage are ignored (25.0 skips the
            first 25% of all calculated errors).
        :param float endPercentage: End of the interval, in [0.0, 100.0].
            Local errors after this percentage are ignored (90.0 skips the
            last 10% of all local errors).
        :param float startDate: Epoch marking the start date for the calculation.
        :param float endDate: Epoch marking the end date for the calculation.
        :return: The mean of the selected squared local errors.
        :rtype: float
        """
        selectedErrors = self._get_error_values(startingPercentage, endPercentage, startDate, endDate)
        return float(sum(selectedErrors)) / float(len(selectedErrors))
    def local_error(self, originalValue, calculatedValue):
        """Return the squared deviation between two corresponding values.

        :param list originalValue: Values of the original data point.
        :param list calculatedValue: Matching values of the calculated TimeSeries.
        :return: The squared difference of the first components.
        :rtype: numeric
        """
        deviation = calculatedValue[0] - originalValue[0]
        return deviation ** 2.0
MSE = MeanSquaredError
|
data/JeremyOT/Toto/toto/workerconnection.py
|
import toto
import cPickle as pickle
import zlib
import logging
from threading import Thread
from tornado.options import options
from tornado.gen import Task
from collections import deque
from time import time
from uuid import uuid4
from traceback import format_exc
from toto.options import safe_define
# Worker-related configuration options.  safe_define avoids re-registering an
# option that some other module already defined.
safe_define("worker_compression_module", type=str, help="The module to use for compressing and decompressing messages to workers. The module must have 'decompress' and 'compress' methods. If not specified, no compression will be used. Only the default instance will be affected")
safe_define("worker_serialization_module", type=str, help="The module to use for serializing and deserializing messages to workers. The module must have 'dumps' and 'loads' methods. If not specified, cPickle will be used. Only the default instance will be affected")
safe_define("worker_serialization_mime", type=str, default='application/pickle', help="Used by HttpWorkerConnection in its Content-Type header.")
safe_define("worker_timeout", default=10.0, help="The default worker (instance()) will wait at least this many seconds before retrying a request (if retry is true), or timing out (if retry is false). Negative values will never retry or timeout. Note: This abs(value) is also the minimum resolution of any request-specific timeouts. Must not be 0.")
safe_define("worker_auto_retry", default=False, help="If True, the default timeout behavior of a worker RPC will be to retry instead of failing when the timeout is reached.")
safe_define("worker_retry_count", default=0, help="The maximum number of times to retry a request after timeout. Used by HttpWorkerConnection instead of worker_auto_retry.")
safe_define("worker_address", default='', help="This is the address that toto.workerconnection.invoke(method, params) will send tasks too (As specified in the worker conf file). A comma separated list may be used to round-robin load balance tasks between workers.")
safe_define("worker_transport", default='zmq', help="Either zmq or http to select which transport to use for worker communication.")
# Sentinel message values; presumably consumed by the transport
# implementations — they are not referenced elsewhere in this file.
WORKER_SOCKET_CONNECT = 'CONNECT'
WORKER_SOCKET_DISCONNECT = 'DISCONNECT'
class WorkerConnection(object):
  '''Client-side handle for making RPCs against the remote worker service(s)
  or worker/router reachable at ``address`` (either an enumerable of address
  strings or one comma separated string).  Active messages are re-checked at
  most every ``abs(timeout)`` seconds for retry/timeout handling, and that
  value is also the default timeout for new calls — ``timeout`` must not be
  ``0``.

  An object or module exposing ``compress``/``decompress`` may be passed as
  ``compression`` to compress messages (it must match the algorithm used on
  the worker side; messages are uncompressed by default), and one exposing
  ``dumps``/``loads`` may be passed to replace the default ``cPickle``
  serialization.  ``auto_retry`` sets whether messages are retried by
  default; retries can heavily congest the worker service, so use with
  caution.
  '''
  def __getattr__(self, path):
    # Any attribute lookup yields a callable proxy for that RPC method path.
    return WorkerInvocation(path, self)
  def log_error(self, error):
    # Default error logging: just the repr of the error object.
    logging.error(repr(error))
  def enable_traceback_logging(self):
    # Replace log_error on this instance with a bound method that logs the
    # full traceback instead of only repr(error) (Python 2 ``new`` module).
    from new import instancemethod
    from traceback import format_exc
    def log_error(self, e):
      logging.error(format_exc())
    self.log_error = instancemethod(log_error, self)
  @classmethod
  def instance(cls):
    '''Return the shared ``WorkerConnection`` configured by the ``worker_*``
    options, creating it on first use.  Import the ``workerconnection``
    module within your ``TotoService`` and run it with ``--help`` to see all
    available options.
    '''
    if hasattr(cls, '_instance'):
      return cls._instance
    if options.worker_transport == 'http':
      from toto.httpworkerconnection import HTTPWorkerConnection
      cls._instance = HTTPWorkerConnection.instance()
    else:
      from toto.zmqworkerconnection import ZMQWorkerConnection
      cls._instance = ZMQWorkerConnection.instance()
    return cls._instance
class WorkerInvocation(object):
  '''Callable proxy for one dotted RPC method path on a ``WorkerConnection``.

  Calling the proxy forwards to ``connection.invoke(path, ...)``; attribute
  access goes back through the connection, which extends the dotted path.
  '''
  def __init__(self, path, connection):
    self._path = path
    self._connection = connection
  def __call__(self, *args, **kwargs):
    return self._connection.invoke(self._path, *args, **kwargs)
  def __getattr__(self, path):
    return getattr(self._connection, '.'.join((self._path, path)))
|
data/Julian/jsonschema/jsonschema/tests/test_validators.py
|
from collections import deque
from contextlib import contextmanager
import json
from jsonschema import FormatChecker, ValidationError
from jsonschema.tests.compat import mock, unittest
from jsonschema.validators import (
RefResolutionError, UnknownType, Draft3Validator,
Draft4Validator, RefResolver, create, extend, validator_for, validate,
)
class TestCreateAndExtend(unittest.TestCase):
    """Tests for ``jsonschema.validators.create`` and ``extend``: class
    attributes, version registration, and extension semantics."""
    def setUp(self):
        # Build a minimal validator class whose single keyword, "smelly",
        # delegates to a MagicMock so calls can be asserted on.
        self.meta_schema = {u"properties" : {u"smelly" : {}}}
        self.smelly = mock.MagicMock()
        self.validators = {u"smelly" : self.smelly}
        self.types = {u"dict" : dict}
        self.Validator = create(
            meta_schema=self.meta_schema,
            validators=self.validators,
            default_types=self.types,
        )
        self.validator_value = 12
        self.schema = {u"smelly" : self.validator_value}
        self.validator = self.Validator(self.schema)
    def test_attrs(self):
        self.assertEqual(self.Validator.VALIDATORS, self.validators)
        self.assertEqual(self.Validator.META_SCHEMA, self.meta_schema)
        self.assertEqual(self.Validator.DEFAULT_TYPES, self.types)
    def test_init(self):
        self.assertEqual(self.validator.schema, self.schema)
    def test_iter_errors(self):
        instance = "hello"
        self.smelly.return_value = []
        self.assertEqual(list(self.validator.iter_errors(instance)), [])
        error = mock.Mock()
        self.smelly.return_value = [error]
        self.assertEqual(list(self.validator.iter_errors(instance)), [error])
        self.smelly.assert_called_with(
            self.validator, self.validator_value, instance, self.schema,
        )
    def test_if_a_version_is_provided_it_is_registered(self):
        with mock.patch("jsonschema.validators.validates") as validates:
            validates.side_effect = lambda version : lambda cls : cls
            Validator = create(meta_schema={u"id" : ""}, version="my version")
        validates.assert_called_once_with("my version")
        self.assertEqual(Validator.__name__, "MyVersionValidator")
    def test_if_a_version_is_not_provided_it_is_not_registered(self):
        with mock.patch("jsonschema.validators.validates") as validates:
            create(meta_schema={u"id" : "id"})
        self.assertFalse(validates.called)
    def test_extend(self):
        # extend() must add the new validator without mutating the original.
        validators = dict(self.Validator.VALIDATORS)
        new = mock.Mock()
        Extended = extend(self.Validator, validators={u"a new one" : new})
        validators.update([(u"a new one", new)])
        self.assertEqual(Extended.VALIDATORS, validators)
        self.assertNotIn(u"a new one", self.Validator.VALIDATORS)
        self.assertEqual(Extended.META_SCHEMA, self.Validator.META_SCHEMA)
        self.assertEqual(Extended.DEFAULT_TYPES, self.Validator.DEFAULT_TYPES)
class TestIterErrors(unittest.TestCase):
    """Tests that ``iter_errors`` yields one error per failed keyword."""
    def setUp(self):
        self.validator = Draft3Validator({})
    def test_iter_errors(self):
        instance = [1, 2]
        schema = {
            u"disallow" : u"array",
            u"enum" : [["a", "b", "c"], ["d", "e", "f"]],
            u"minItems" : 3
        }
        got = (e.message for e in self.validator.iter_errors(instance, schema))
        expected = [
            "%r is disallowed for [1, 2]" % (schema["disallow"],),
            "[1, 2] is too short",
            "[1, 2] is not one of %r" % (schema["enum"],),
        ]
        # Order of yielded errors is unspecified, so compare sorted.
        self.assertEqual(sorted(got), sorted(expected))
    def test_iter_errors_multiple_failures_one_validator(self):
        instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
        schema = {
            u"properties" : {
                "foo" : {u"type" : "string"},
                "bar" : {u"minItems" : 2},
                "baz" : {u"maximum" : 10, u"enum" : [2, 4, 6, 8]},
            }
        }
        errors = list(self.validator.iter_errors(instance, schema))
        self.assertEqual(len(errors), 4)
class TestValidationErrorMessages(unittest.TestCase):
    """Tests pinning the exact human-readable messages of validation errors."""
    def message_for(self, instance, schema, *args, **kwargs):
        # Helper: validate and return the message of the raised error.
        kwargs.setdefault("cls", Draft3Validator)
        with self.assertRaises(ValidationError) as e:
            validate(instance, schema, *args, **kwargs)
        return e.exception.message
    def test_single_type_failure(self):
        message = self.message_for(instance=1, schema={u"type" : u"string"})
        self.assertEqual(message, "1 is not of type %r" % u"string")
    def test_single_type_list_failure(self):
        message = self.message_for(instance=1, schema={u"type" : [u"string"]})
        self.assertEqual(message, "1 is not of type %r" % u"string")
    def test_multiple_type_failure(self):
        types = u"string", u"object"
        message = self.message_for(instance=1, schema={u"type" : list(types)})
        self.assertEqual(message, "1 is not of type %r, %r" % types)
    def test_object_without_title_type_failure(self):
        type = {u"type" : [{u"minimum" : 3}]}
        message = self.message_for(instance=1, schema={u"type" : [type]})
        self.assertEqual(message, "1 is not of type %r" % (type,))
    def test_object_with_name_type_failure(self):
        name = "Foo"
        schema = {u"type" : [{u"name" : name, u"minimum" : 3}]}
        message = self.message_for(instance=1, schema=schema)
        self.assertEqual(message, "1 is not of type %r" % (name,))
    def test_minimum(self):
        message = self.message_for(instance=1, schema={"minimum" : 2})
        self.assertEqual(message, "1 is less than the minimum of 2")
    def test_maximum(self):
        message = self.message_for(instance=1, schema={"maximum" : 0})
        self.assertEqual(message, "1 is greater than the maximum of 0")
    def test_dependencies_failure_has_single_element_not_list(self):
        depend, on = "bar", "foo"
        schema = {u"dependencies" : {depend : on}}
        message = self.message_for({"bar" : 2}, schema)
        self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
    def test_additionalItems_single_failure(self):
        message = self.message_for(
            [2], {u"items" : [], u"additionalItems" : False},
        )
        self.assertIn("(2 was unexpected)", message)
    def test_additionalItems_multiple_failures(self):
        message = self.message_for(
            [1, 2, 3], {u"items" : [], u"additionalItems" : False}
        )
        self.assertIn("(1, 2, 3 were unexpected)", message)
    def test_additionalProperties_single_failure(self):
        additional = "foo"
        schema = {u"additionalProperties" : False}
        message = self.message_for({additional : 2}, schema)
        self.assertIn("(%r was unexpected)" % (additional,), message)
    def test_additionalProperties_multiple_failures(self):
        schema = {u"additionalProperties" : False}
        message = self.message_for(dict.fromkeys(["foo", "bar"]), schema)
        self.assertIn(repr("foo"), message)
        self.assertIn(repr("bar"), message)
        self.assertIn("were unexpected)", message)
    def test_invalid_format_default_message(self):
        checker = FormatChecker(formats=())
        check_fn = mock.Mock(return_value=False)
        checker.checks(u"thing")(check_fn)
        schema = {u"format" : u"thing"}
        message = self.message_for("bla", schema, format_checker=checker)
        self.assertIn(repr("bla"), message)
        self.assertIn(repr("thing"), message)
        self.assertIn("is not a", message)
class TestValidationErrorDetails(unittest.TestCase):
    """Tests pinning the structured attributes of ValidationError: paths,
    schema paths, parent/context links for nested and recursive schemas.

    NOTE(review): some ``$ref`` string literals in this copy of the file are
    truncated (everything after ``#`` was stripped) and will not parse —
    restore them before running.
    """
    def test_anyOf(self):
        instance = 5
        schema = {
            "anyOf": [
                {"minimum": 20},
                {"type": "string"}
            ]
        }
        validator = Draft4Validator(schema)
        errors = list(validator.iter_errors(instance))
        self.assertEqual(len(errors), 1)
        e = errors[0]
        # Top-level error describes the anyOf failure as a whole...
        self.assertEqual(e.validator, "anyOf")
        self.assertEqual(e.validator_value, schema["anyOf"])
        self.assertEqual(e.instance, instance)
        self.assertEqual(e.schema, schema)
        self.assertIsNone(e.parent)
        self.assertEqual(e.path, deque([]))
        self.assertEqual(e.relative_path, deque([]))
        self.assertEqual(e.absolute_path, deque([]))
        self.assertEqual(e.schema_path, deque(["anyOf"]))
        self.assertEqual(e.relative_schema_path, deque(["anyOf"]))
        self.assertEqual(e.absolute_schema_path, deque(["anyOf"]))
        self.assertEqual(len(e.context), 2)
        # ...while e.context holds one child error per failed subschema.
        e1, e2 = sorted_errors(e.context)
        self.assertEqual(e1.validator, "minimum")
        self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"])
        self.assertEqual(e1.instance, instance)
        self.assertEqual(e1.schema, schema["anyOf"][0])
        self.assertIs(e1.parent, e)
        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e1.absolute_path, deque([]))
        self.assertEqual(e1.relative_path, deque([]))
        self.assertEqual(e1.schema_path, deque([0, "minimum"]))
        self.assertEqual(e1.relative_schema_path, deque([0, "minimum"]))
        self.assertEqual(
            e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]),
        )
        self.assertFalse(e1.context)
        self.assertEqual(e2.validator, "type")
        self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"])
        self.assertEqual(e2.instance, instance)
        self.assertEqual(e2.schema, schema["anyOf"][1])
        self.assertIs(e2.parent, e)
        self.assertEqual(e2.path, deque([]))
        self.assertEqual(e2.relative_path, deque([]))
        self.assertEqual(e2.absolute_path, deque([]))
        self.assertEqual(e2.schema_path, deque([1, "type"]))
        self.assertEqual(e2.relative_schema_path, deque([1, "type"]))
        self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"]))
        self.assertEqual(len(e2.context), 0)
    def test_type(self):
        instance = {"foo": 1}
        schema = {
            "type": [
                {"type": "integer"},
                {
                    "type": "object",
                    "properties": {
                        "foo": {"enum": [2]}
                    }
                }
            ]
        }
        validator = Draft3Validator(schema)
        errors = list(validator.iter_errors(instance))
        self.assertEqual(len(errors), 1)
        e = errors[0]
        self.assertEqual(e.validator, "type")
        self.assertEqual(e.validator_value, schema["type"])
        self.assertEqual(e.instance, instance)
        self.assertEqual(e.schema, schema)
        self.assertIsNone(e.parent)
        self.assertEqual(e.path, deque([]))
        self.assertEqual(e.relative_path, deque([]))
        self.assertEqual(e.absolute_path, deque([]))
        self.assertEqual(e.schema_path, deque(["type"]))
        self.assertEqual(e.relative_schema_path, deque(["type"]))
        self.assertEqual(e.absolute_schema_path, deque(["type"]))
        self.assertEqual(len(e.context), 2)
        e1, e2 = sorted_errors(e.context)
        self.assertEqual(e1.validator, "type")
        self.assertEqual(e1.validator_value, schema["type"][0]["type"])
        self.assertEqual(e1.instance, instance)
        self.assertEqual(e1.schema, schema["type"][0])
        self.assertIs(e1.parent, e)
        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e1.relative_path, deque([]))
        self.assertEqual(e1.absolute_path, deque([]))
        self.assertEqual(e1.schema_path, deque([0, "type"]))
        self.assertEqual(e1.relative_schema_path, deque([0, "type"]))
        self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"]))
        self.assertFalse(e1.context)
        self.assertEqual(e2.validator, "enum")
        self.assertEqual(e2.validator_value, [2])
        self.assertEqual(e2.instance, 1)
        self.assertEqual(e2.schema, {u"enum" : [2]})
        self.assertIs(e2.parent, e)
        self.assertEqual(e2.path, deque(["foo"]))
        self.assertEqual(e2.relative_path, deque(["foo"]))
        self.assertEqual(e2.absolute_path, deque(["foo"]))
        self.assertEqual(
            e2.schema_path, deque([1, "properties", "foo", "enum"]),
        )
        self.assertEqual(
            e2.relative_schema_path, deque([1, "properties", "foo", "enum"]),
        )
        self.assertEqual(
            e2.absolute_schema_path,
            deque(["type", 1, "properties", "foo", "enum"]),
        )
        self.assertFalse(e2.context)
    def test_single_nesting(self):
        instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
        schema = {
            "properties" : {
                "foo" : {"type" : "string"},
                "bar" : {"minItems" : 2},
                "baz" : {"maximum" : 10, "enum" : [2, 4, 6, 8]},
            }
        }
        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2, e3, e4 = sorted_errors(errors)
        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["baz"]))
        self.assertEqual(e3.path, deque(["baz"]))
        self.assertEqual(e4.path, deque(["foo"]))
        self.assertEqual(e1.relative_path, deque(["bar"]))
        self.assertEqual(e2.relative_path, deque(["baz"]))
        self.assertEqual(e3.relative_path, deque(["baz"]))
        self.assertEqual(e4.relative_path, deque(["foo"]))
        self.assertEqual(e1.absolute_path, deque(["bar"]))
        self.assertEqual(e2.absolute_path, deque(["baz"]))
        self.assertEqual(e3.absolute_path, deque(["baz"]))
        self.assertEqual(e4.absolute_path, deque(["foo"]))
        self.assertEqual(e1.validator, "minItems")
        self.assertEqual(e2.validator, "enum")
        self.assertEqual(e3.validator, "maximum")
        self.assertEqual(e4.validator, "type")
    def test_multiple_nesting(self):
        instance = [1, {"foo" : 2, "bar" : {"baz" : [1]}}, "quux"]
        schema = {
            "type" : "string",
            "items" : {
                "type" : ["string", "object"],
                "properties" : {
                    "foo" : {"enum" : [1, 3]},
                    "bar" : {
                        "type" : "array",
                        "properties" : {
                            "bar" : {"required" : True},
                            "baz" : {"minItems" : 2},
                        }
                    }
                }
            }
        }
        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2, e3, e4, e5, e6 = sorted_errors(errors)
        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e2.path, deque([0]))
        self.assertEqual(e3.path, deque([1, "bar"]))
        self.assertEqual(e4.path, deque([1, "bar", "bar"]))
        self.assertEqual(e5.path, deque([1, "bar", "baz"]))
        self.assertEqual(e6.path, deque([1, "foo"]))
        self.assertEqual(e1.schema_path, deque(["type"]))
        self.assertEqual(e2.schema_path, deque(["items", "type"]))
        self.assertEqual(
            list(e3.schema_path), ["items", "properties", "bar", "type"],
        )
        self.assertEqual(
            list(e4.schema_path),
            ["items", "properties", "bar", "properties", "bar", "required"],
        )
        self.assertEqual(
            list(e5.schema_path),
            ["items", "properties", "bar", "properties", "baz", "minItems"]
        )
        self.assertEqual(
            list(e6.schema_path), ["items", "properties", "foo", "enum"],
        )
        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "type")
        self.assertEqual(e3.validator, "type")
        self.assertEqual(e4.validator, "required")
        self.assertEqual(e5.validator, "minItems")
        self.assertEqual(e6.validator, "enum")
    def test_recursive(self):
        schema = {
            "definitions": {
                "node": {
                    "anyOf": [{
                        "type": "object",
                        "required": ["name", "children"],
                        "properties": {
                            "name": {
                                "type": "string",
                            },
                            "children": {
                                "type": "object",
                                "patternProperties": {
                                    "^.*$": {
                                        # NOTE(review): the literal below is truncated
                                        # (likely a "#/definitions/node" ref) — restore it.
                                        "$ref": "
                                    },
                                },
                            },
                        },
                    }],
                },
            },
            "type": "object",
            "required": ["root"],
            "properties": {
                # NOTE(review): the "$ref" literal below is truncated as well.
                "root": {"$ref": "
            }
        }
        instance = {
            "root": {
                "name": "root",
                "children": {
                    "a": {
                        "name": "a",
                        "children": {
                            "ab": {
                                "name": "ab",
                            }
                        }
                    },
                },
            },
        }
        validator = Draft4Validator(schema)
        e, = validator.iter_errors(instance)
        self.assertEqual(e.absolute_path, deque(["root"]))
        self.assertEqual(
            e.absolute_schema_path, deque(["properties", "root", "anyOf"]),
        )
        e1, = e.context
        self.assertEqual(e1.absolute_path, deque(["root", "children", "a"]))
        self.assertEqual(
            e1.absolute_schema_path, deque(
                [
                    "properties",
                    "root",
                    "anyOf",
                    0,
                    "properties",
                    "children",
                    "patternProperties",
                    "^.*$",
                    "anyOf",
                ],
            ),
        )
        e2, = e1.context
        self.assertEqual(
            e2.absolute_path, deque(
                ["root", "children", "a", "children", "ab"],
            ),
        )
        self.assertEqual(
            e2.absolute_schema_path, deque(
                [
                    "properties",
                    "root",
                    "anyOf",
                    0,
                    "properties",
                    "children",
                    "patternProperties",
                    "^.*$",
                    "anyOf",
                    0,
                    "properties",
                    "children",
                    "patternProperties",
                    "^.*$",
                    "anyOf"
                ],
            ),
        )
    def test_additionalProperties(self):
        instance = {"bar": "bar", "foo": 2}
        schema = {
            "additionalProperties" : {"type": "integer", "minimum": 5}
        }
        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)
        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["foo"]))
        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")
    def test_patternProperties(self):
        instance = {"bar": 1, "foo": 2}
        schema = {
            "patternProperties" : {
                "bar": {"type": "string"},
                "foo": {"minimum": 5}
            }
        }
        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)
        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["foo"]))
        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")
    def test_additionalItems(self):
        instance = ["foo", 1]
        schema = {
            "items": [],
            "additionalItems" : {"type": "integer", "minimum": 5}
        }
        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)
        self.assertEqual(e1.path, deque([0]))
        self.assertEqual(e2.path, deque([1]))
        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")
    def test_additionalItems_with_items(self):
        instance = ["foo", "bar", 1]
        schema = {
            "items": [{}],
            "additionalItems" : {"type": "integer", "minimum": 5}
        }
        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)
        self.assertEqual(e1.path, deque([1]))
        self.assertEqual(e2.path, deque([2]))
        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")
class ValidatorTestMixin(object):
    """Shared behavior tests mixed into per-draft TestCase subclasses;
    subclasses must set ``validator_class``."""
    def setUp(self):
        self.instance = mock.Mock()
        self.schema = {}
        self.resolver = mock.Mock()
        self.validator = self.validator_class(self.schema)
    def test_valid_instances_are_valid(self):
        errors = iter([])
        with mock.patch.object(
            self.validator, "iter_errors", return_value=errors,
        ):
            self.assertTrue(
                self.validator.is_valid(self.instance, self.schema)
            )
    def test_invalid_instances_are_not_valid(self):
        errors = iter([mock.Mock()])
        with mock.patch.object(
            self.validator, "iter_errors", return_value=errors,
        ):
            self.assertFalse(
                self.validator.is_valid(self.instance, self.schema)
            )
    def test_non_existent_properties_are_ignored(self):
        instance, my_property, my_value = mock.Mock(), mock.Mock(), mock.Mock()
        validate(instance=instance, schema={my_property : my_value})
    def test_it_creates_a_ref_resolver_if_not_provided(self):
        self.assertIsInstance(self.validator.resolver, RefResolver)
    def test_it_delegates_to_a_ref_resolver(self):
        resolver = RefResolver("", {})
        schema = {"$ref" : mock.Mock()}
        with mock.patch.object(resolver, "resolve") as resolve:
            resolve.return_value = "url", {"type": "integer"}
            with self.assertRaises(ValidationError):
                self.validator_class(schema, resolver=resolver).validate(None)
        resolve.assert_called_once_with(schema["$ref"])
    def test_it_delegates_to_a_legacy_ref_resolver(self):
        """
        Legacy RefResolvers support only the context manager form of
        resolution.
        """
        class LegacyRefResolver(object):
            @contextmanager
            def resolving(this, ref):
                self.assertEqual(ref, "the ref")
                yield {"type" : "integer"}
        resolver = LegacyRefResolver()
        schema = {"$ref" : "the ref"}
        with self.assertRaises(ValidationError):
            self.validator_class(schema, resolver=resolver).validate(None)
    def test_is_type_is_true_for_valid_type(self):
        self.assertTrue(self.validator.is_type("foo", "string"))
    def test_is_type_is_false_for_invalid_type(self):
        self.assertFalse(self.validator.is_type("foo", "array"))
    def test_is_type_evades_bool_inheriting_from_int(self):
        # bool subclasses int in Python, but JSON booleans are not numbers.
        self.assertFalse(self.validator.is_type(True, "integer"))
        self.assertFalse(self.validator.is_type(True, "number"))
    def test_is_type_raises_exception_for_unknown_type(self):
        with self.assertRaises(UnknownType):
            self.validator.is_type("foo", object())
class TestDraft3Validator(ValidatorTestMixin, unittest.TestCase):
    """Runs the shared mixin suite against Draft3Validator, plus
    draft-3-specific behavior ("any" type, custom types)."""
    validator_class = Draft3Validator
    def test_is_type_is_true_for_any_type(self):
        self.assertTrue(self.validator.is_valid(mock.Mock(), {"type": "any"}))
    def test_is_type_does_not_evade_bool_if_it_is_being_tested(self):
        self.assertTrue(self.validator.is_type(True, "boolean"))
        self.assertTrue(self.validator.is_valid(True, {"type": "any"}))
    def test_non_string_custom_types(self):
        schema = {'type': [None]}
        cls = self.validator_class(schema, types={None: type(None)})
        cls.validate(None, schema)
class TestDraft4Validator(ValidatorTestMixin, unittest.TestCase):
    """Runs the shared mixin suite against Draft4Validator."""
    validator_class = Draft4Validator
class TestBuiltinFormats(unittest.TestCase):
    """
    The built-in (specification-defined) formats do not raise type errors.
    If an instance or value is not a string, it should be ignored.
    """
# Dynamically generate one test method per registered format checker and
# attach it to TestBuiltinFormats (binding `format` as a default argument to
# avoid the late-binding closure pitfall).
for format in FormatChecker.checkers:
    def test(self, format=format):
        v = Draft4Validator({"format": format}, format_checker=FormatChecker())
        v.validate(123)
    name = "test_{0}_ignores_non_strings".format(format)
    test.__name__ = name
    setattr(TestBuiltinFormats, name, test)
# Remove the loop temporary so it is not collected as a module-level test.
del test
class TestValidatorFor(unittest.TestCase):
    """Tests that ``validator_for`` picks the validator matching $schema.

    NOTE(review): some $schema string literals in this copy appear truncated
    (the trailing ``#"}`` variant was stripped) and will not parse — restore
    them before running.
    """
    def test_draft_3(self):
        schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
        self.assertIs(validator_for(schema), Draft3Validator)
        schema = {"$schema" : "http://json-schema.org/draft-03/schema
        self.assertIs(validator_for(schema), Draft3Validator)
    def test_draft_4(self):
        schema = {"$schema" : "http://json-schema.org/draft-04/schema"}
        self.assertIs(validator_for(schema), Draft4Validator)
        schema = {"$schema" : "http://json-schema.org/draft-04/schema
        self.assertIs(validator_for(schema), Draft4Validator)
    def test_custom_validator(self):
        Validator = create(meta_schema={"id" : "meta schema id"}, version="12")
        schema = {"$schema" : "meta schema id"}
        self.assertIs(validator_for(schema), Validator)
    def test_validator_for_jsonschema_default(self):
        self.assertIs(validator_for({}), Draft4Validator)
    def test_validator_for_custom_default(self):
        self.assertIs(validator_for({}, default=None), None)
class TestValidate(unittest.TestCase):
    """Tests that ``validate`` chooses (and schema-checks with) the right
    validator class.

    NOTE(review): some $schema string literals in this copy appear truncated
    (trailing ``#"}`` stripped) and will not parse — restore them first.
    """
    def test_draft3_validator_is_chosen(self):
        schema = {"$schema" : "http://json-schema.org/draft-03/schema
        with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)
        schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
        with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)
    def test_draft4_validator_is_chosen(self):
        schema = {"$schema" : "http://json-schema.org/draft-04/schema
        with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)
    def test_draft4_validator_is_the_default(self):
        with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
            validate({}, {})
            chk_schema.assert_called_once_with({})
class TestRefResolver(unittest.TestCase):
base_uri = ""
stored_uri = "foo://stored"
stored_schema = {"stored" : "schema"}
def setUp(self):
self.referrer = {}
self.store = {self.stored_uri : self.stored_schema}
self.resolver = RefResolver(self.base_uri, self.referrer, self.store)
def test_it_does_not_retrieve_schema_urls_from_the_network(self):
ref = Draft3Validator.META_SCHEMA["id"]
with mock.patch.object(self.resolver, "resolve_remote") as remote:
with self.resolver.resolving(ref) as resolved:
self.assertEqual(resolved, Draft3Validator.META_SCHEMA)
self.assertFalse(remote.called)
def test_it_resolves_local_refs(self):
ref = "
self.referrer["properties"] = {"foo" : object()}
with self.resolver.resolving(ref) as resolved:
self.assertEqual(resolved, self.referrer["properties"]["foo"])
def test_it_resolves_local_refs_with_id(self):
schema = {"id": "http://bar/schema
resolver = RefResolver.from_schema(schema)
with resolver.resolving("
self.assertEqual(resolved, schema["a"])
with resolver.resolving("http://bar/schema
self.assertEqual(resolved, schema["a"])
def test_it_retrieves_stored_refs(self):
with self.resolver.resolving(self.stored_uri) as resolved:
self.assertIs(resolved, self.stored_schema)
self.resolver.store["cached_ref"] = {"foo" : 12}
with self.resolver.resolving("cached_ref
self.assertEqual(resolved, 12)
def test_it_retrieves_unstored_refs_via_requests(self):
ref = "http://bar
schema = {"baz" : 12}
with mock.patch("jsonschema.validators.requests") as requests:
requests.get.return_value.json.return_value = schema
with self.resolver.resolving(ref) as resolved:
self.assertEqual(resolved, 12)
requests.get.assert_called_once_with("http://bar")
def test_it_retrieves_unstored_refs_via_urlopen(self):
ref = "http://bar
schema = {"baz" : 12}
with mock.patch("jsonschema.validators.requests", None):
with mock.patch("jsonschema.validators.urlopen") as urlopen:
urlopen.return_value.read.return_value = (
json.dumps(schema).encode("utf8"))
with self.resolver.resolving(ref) as resolved:
self.assertEqual(resolved, 12)
urlopen.assert_called_once_with("http://bar")
def test_it_can_construct_a_base_uri_from_a_schema(self):
schema = {"id" : "foo"}
resolver = RefResolver.from_schema(schema)
self.assertEqual(resolver.base_uri, "foo")
self.assertEqual(resolver.resolution_scope, "foo")
with resolver.resolving("") as resolved:
self.assertEqual(resolved, schema)
with resolver.resolving("
self.assertEqual(resolved, schema)
with resolver.resolving("foo") as resolved:
self.assertEqual(resolved, schema)
with resolver.resolving("foo
self.assertEqual(resolved, schema)
    def test_it_can_construct_a_base_uri_from_a_schema_without_id(self):
        # Without an "id", the base URI and scope are the empty string and
        # the empty ref resolves to the schema itself.
        # NOTE(review): the second ref literal below appears truncated in
        # this copy (likely "#") -- confirm against upstream.
        schema = {}
        resolver = RefResolver.from_schema(schema)
        self.assertEqual(resolver.base_uri, "")
        self.assertEqual(resolver.resolution_scope, "")
        with resolver.resolving("") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("
            self.assertEqual(resolved, schema)
def test_custom_uri_scheme_handlers(self):
schema = {"foo": "bar"}
ref = "foo://bar"
foo_handler = mock.Mock(return_value=schema)
resolver = RefResolver("", {}, handlers={"foo": foo_handler})
with resolver.resolving(ref) as resolved:
self.assertEqual(resolved, schema)
foo_handler.assert_called_once_with(ref)
def test_cache_remote_on(self):
ref = "foo://bar"
foo_handler = mock.Mock()
resolver = RefResolver(
"", {}, cache_remote=True, handlers={"foo" : foo_handler},
)
with resolver.resolving(ref):
pass
with resolver.resolving(ref):
pass
foo_handler.assert_called_once_with(ref)
def test_cache_remote_off(self):
ref = "foo://bar"
foo_handler = mock.Mock()
resolver = RefResolver(
"", {}, cache_remote=False, handlers={"foo" : foo_handler},
)
with resolver.resolving(ref):
pass
self.assertEqual(foo_handler.call_count, 1)
def test_if_you_give_it_junk_you_get_a_resolution_error(self):
ref = "foo://bar"
foo_handler = mock.Mock(side_effect=ValueError("Oh no! What's this?"))
resolver = RefResolver("", {}, handlers={"foo" : foo_handler})
with self.assertRaises(RefResolutionError) as err:
with resolver.resolving(ref):
pass
self.assertEqual(str(err.exception), "Oh no! What's this?")
def test_helpful_error_message_on_failed_pop_scope(self):
resolver = RefResolver("", {})
resolver.pop_scope()
with self.assertRaises(RefResolutionError) as exc:
resolver.pop_scope()
self.assertIn("Failed to pop the scope", str(exc.exception))
class UniqueTupleItemsMixin(object):
    """
    Mixin verifying uniqueItems error formatting when tuples count as arrays.

    See https://github.com/Julian/jsonschema/pull/224
    """
    def test_it_properly_formats_an_error_message(self):
        checker = self.validator_class(
            types={"array" : (tuple,)},
            schema={"uniqueItems" : True},
        )
        with self.assertRaises(ValidationError) as ctx:
            checker.validate((1, 1))
        self.assertIn("(1, 1) has non-unique elements", str(ctx.exception))
# Runs the mixin's uniqueItems-formatting test against the draft 4 validator.
class TestDraft4UniqueTupleItems(UniqueTupleItemsMixin, unittest.TestCase):
    validator_class = Draft4Validator
# Runs the mixin's uniqueItems-formatting test against the draft 3 validator.
class TestDraft3UniqueTupleItems(UniqueTupleItemsMixin, unittest.TestCase):
    validator_class = Draft3Validator
def sorted_errors(errors):
    """Return *errors* ordered by stringified path, then schema_path."""
    def sort_key(error):
        path_key = [str(elem) for elem in error.path]
        schema_key = [str(elem) for elem in error.schema_path]
        return (path_key, schema_key)
    return sorted(errors, key=sort_key)
|
data/Piratenfraktion-Berlin/OwnTube/videoportal/BitTornadoABC/BitTornado/BT1/StreamCheck.py
|
from cStringIO import StringIO
from binascii import b2a_hex
from urllib import quote
import Connecter
# Compatibility shim for ancient Pythons (pre-2.3) that lack the True/False
# builtins: referencing True raises NameError there, so define both.
try:
    True
except:
    True = 1
    False = 0
# When True, handshake download/peer ids are printed as they are parsed.
DEBUG = False
# BitTorrent wire-protocol handshake constants (BEP 3).
protocol_name = 'BitTorrent protocol'
option_pattern = chr(0)*8
def toint(s):
    # Interpret byte string `s` as a big-endian unsigned integer, via its
    # hex representation.  (`long` -- this module is Python 2 code.)
    return long(b2a_hex(s), 16)
def tohex(s):
    """Return the uppercase hexadecimal representation of *s*."""
    hexed = b2a_hex(s)
    return hexed.upper()
def make_readable(s):
    """Render *s* for log output: hex if URL-quoting would escape anything,
    otherwise the string wrapped in double quotes; empty input stays empty."""
    if not s:
        return ''
    needs_hex = '%' in quote(s)
    if needs_hex:
        return tohex(s)
    return '"' + s + '"'
# Module-level counter handing out a sequential id to each StreamCheck
# instance so interleaved diagnostics can be told apart.
streamno = 0
class StreamCheck:
    """Passive validator for one BitTorrent peer wire stream (Python 2).

    Feed raw bytes to write(); a state machine walks the handshake and then
    the length-prefixed message framing, printing diagnostics for anything
    malformed.  Each instance gets a sequential id used to tag its output.
    """
    def __init__(self):
        global streamno
        self.no = streamno
        streamno += 1
        self.buffer = StringIO()
        # Parser state: number of bytes the next handler needs, plus the
        # handler itself.  Every handler returns the next (length, handler).
        self.next_len, self.next_func = 1, self.read_header_len
    def read_header_len(self, s):
        # First handshake byte: length of the protocol identifier string.
        if ord(s) != len(protocol_name):
            print self.no, 'BAD HEADER LENGTH'
        return len(protocol_name), self.read_header
    def read_header(self, s):
        # The protocol identifier itself ('BitTorrent protocol').
        if s != protocol_name:
            print self.no, 'BAD HEADER'
        return 8, self.read_reserved
    def read_reserved(self, s):
        # Eight reserved/option bytes; contents ignored here.
        return 20, self.read_download_id
    def read_download_id(self, s):
        # 20-byte torrent info hash.
        if DEBUG:
            print self.no, 'download ID ' + tohex(s)
        return 20, self.read_peer_id
    def read_peer_id(self, s):
        # 20-byte peer id; handshake complete, switch to message framing.
        if DEBUG:
            print self.no, 'peer ID' + make_readable(s)
        return 4, self.read_len
    def read_len(self, s):
        # 4-byte big-endian length prefix for the next message.
        l = toint(s)
        if l > 2 ** 23:
            print self.no, 'BAD LENGTH: '+str(l)+' ('+s+')'
        return l, self.read_message
    def read_message(self, s):
        # A zero-length message is a keepalive; otherwise the first byte is
        # the message type, validated and pretty-printed per type.
        if not s:
            return 4, self.read_len
        m = s[0]
        if ord(m) > 8:
            print self.no, 'BAD MESSAGE: '+str(ord(m))
        if m == Connecter.REQUEST:
            if len(s) != 13:
                print self.no, 'BAD REQUEST SIZE: '+str(len(s))
                return 4, self.read_len
            index = toint(s[1:5])
            begin = toint(s[5:9])
            length = toint(s[9:])
            print self.no, 'Request: '+str(index)+': '+str(begin)+'-'+str(begin)+'+'+str(length)
        elif m == Connecter.CANCEL:
            if len(s) != 13:
                print self.no, 'BAD CANCEL SIZE: '+str(len(s))
                return 4, self.read_len
            index = toint(s[1:5])
            begin = toint(s[5:9])
            length = toint(s[9:])
            print self.no, 'Cancel: '+str(index)+': '+str(begin)+'-'+str(begin)+'+'+str(length)
        elif m == Connecter.PIECE:
            index = toint(s[1:5])
            begin = toint(s[5:9])
            length = len(s)-9
            print self.no, 'Piece: '+str(index)+': '+str(begin)+'-'+str(begin)+'+'+str(length)
        else:
            print self.no, 'Message '+str(ord(m))+' (length '+str(len(s))+')'
        return 4, self.read_len
    def write(self, s):
        # Accumulate incoming bytes; whenever the buffer holds the number of
        # bytes the current handler expects, dispatch them and advance the
        # state machine.  Loops because one chunk may span several messages.
        while 1:
            i = self.next_len - self.buffer.tell()
            if i > len(s):
                self.buffer.write(s)
                return
            self.buffer.write(s[:i])
            s = s[i:]
            m = self.buffer.getvalue()
            self.buffer.reset()
            self.buffer.truncate()
            x = self.next_func(m)
            self.next_len, self.next_func = x
|
data/agoragames/haigha/haigha/message.py
|
'''
Copyright (c) 2011-2015, Agora Games, LLC All rights reserved.
https://github.com/agoragames/haigha/blob/master/LICENSE.txt
'''
class Message(object):
    '''
    Represents an AMQP message: an opaque body plus AMQP properties, with
    optional delivery or return metadata attached by the channel layer.
    (Python 2 code: relies on `unicode`, `__nonzero__` and 'string_escape'.)
    '''
    def __init__(self, body='', delivery_info=None, return_info=None,
                 **properties):
        '''
        :param body: message payload; unicode is auto-encoded (see below)
        :param delivery_info: pass only if message was received via
        basic.deliver or basic.get_ok; MUST be None otherwise; default: None
        :param return_info: pass only if message was returned via basic.return;
        MUST be None otherwise; default: None
        :raises TypeError: if body is not str, unicode or bytearray
        '''
        # Unicode bodies are encoded up front so _body is always raw bytes;
        # the encoding defaults to utf-8 and is recorded in the properties.
        if isinstance(body, unicode):
            if 'content_encoding' not in properties:
                properties['content_encoding'] = 'utf-8'
            body = body.encode(properties['content_encoding'])
        if not isinstance(body, (str, unicode, bytearray)):
            raise TypeError("Invalid message content type %s" % (type(body)))
        self._body = body
        self._delivery_info = delivery_info
        self._return_info = return_info
        self._properties = properties
    @property
    def body(self):
        # Raw message payload (bytes after __init__'s encoding step).
        return self._body
    def __len__(self):
        return len(self._body)
    def __nonzero__(self):
        '''Have to define this because length is defined; an empty-bodied
        Message should still be truthy.'''
        return True
    def __eq__(self, other):
        # Equality compares properties and body only; delivery/return
        # metadata is deliberately excluded.
        if isinstance(other, Message):
            return self._properties == other._properties and \
                self._body == other._body
        return False
    @property
    def delivery_info(self):
        '''delivery_info dict if message was received via basic.deliver or
        basic.get_ok; None otherwise.
        '''
        return self._delivery_info
    @property
    def return_info(self):
        '''return_info dict if message was returned via basic.return; None
        otherwise.
        properties:
            'channel': Channel instance
            'reply_code': reply code (int)
            'reply_text': reply text
            'exchange': exchange name
            'routing_key': routing key
        '''
        return self._return_info
    @property
    def properties(self):
        # AMQP message properties (content_type, headers, etc.).
        return self._properties
    def __str__(self):
        # 'string_escape' is a Python 2-only codec used to keep binary
        # bodies printable in debug output.
        return ("Message[body: %s, delivery_info: %s, return_info: %s, "
                "properties: %s]") %\
            (str(self._body).encode('string_escape'),
             self._delivery_info, self.return_info, self._properties)
|
data/RobotLocomotion/director/src/python/director/doordemo.py
|
import os
import sys
import vtkAll as vtk
import math
import time
import types
import functools
import numpy as np
from director import transformUtils
from director import lcmUtils
from director.timercallback import TimerCallback
from director.asynctaskqueue import AsyncTaskQueue
from director.fieldcontainer import FieldContainer
from director import objectmodel as om
from director import visualization as vis
from director import applogic as app
from director.debugVis import DebugData
from director import ik
from director.ikparameters import IkParameters
from director import ikplanner
from director import ioUtils
from director import affordanceitems
from director.simpletimer import SimpleTimer
from director.utime import getUtime
from director import robotstate
from director import robotplanlistener
from director import segmentation
from director import planplayback
from director.footstepsdriver import FootstepRequestGenerator
from director.tasks.taskuserpanel import TaskUserPanel
from director.tasks.taskuserpanel import ImageBasedAffordanceFit
import director.tasks.robottasks as rt
import director.tasks.taskmanagerwidget as tmw
import drc as lcmdrc
from PythonQt import QtCore, QtGui
class DoorDemo(object):
    def __init__(self, robotModel, footstepPlanner, manipPlanner, ikPlanner, lhandDriver, rhandDriver, atlasDriver, multisenseDriver, affordanceFitFunction, sensorJointController, planPlaybackFunction, showPoseFunction):
        # Door-opening task demo: store collaborator objects and initialize
        # the tunable parameters for handle interaction and walking.
        self.robotModel = robotModel
        self.footstepPlanner = footstepPlanner
        self.manipPlanner = manipPlanner
        self.ikPlanner = ikPlanner
        self.lhandDriver = lhandDriver
        self.rhandDriver = rhandDriver
        self.atlasDriver = atlasDriver
        self.multisenseDriver = multisenseDriver
        self.affordanceFitFunction = affordanceFitFunction
        self.sensorJointController = sensorJointController
        self.planPlaybackFunction = planPlaybackFunction
        self.showPoseFunction = showPoseFunction
        self.graspingHand = 'left'
        # Planning state and behavior flags.
        self.endPose = None
        self.planFromCurrentRobotState = True
        self.visOnly = False
        self.useFootstepPlanner = False
        self.userPromptEnabled = True
        self.constraintSet = None
        self.plans = []
        self.usePinchGrasp = False
        self.pinchDistance = 0.1
        # Affordance frames, filled in by the compute*Frame methods.
        self.doorHandleFrame = None
        self.doorHandleGraspFrame = None
        self.doorHingeFrame = None
        # Handle-interaction tuning (presumably meters / degrees -- confirm).
        self.handleTouchHeight = 0.0
        self.handleTouchDepth = -0.08
        self.handleTouchWidth = 0.06
        self.handleReachAngle = 20
        self.handleTurnHeight = -0.08
        self.handleTurnWidth = 0.01
        self.handleTurnAngle = 60
        self.handleLiftHeight = 0.12
        self.handlePushDepth = 0.0
        self.handlePushAngle = 2
        self.handleOpenDepth = 0.1
        self.handleOpenWidth = 0.4
        # Joint speed limits (deg/s) used for fast vs careful motions.
        self.speedHigh = 60
        self.speedLow = 15
        self.setFootstepThroughDoorParameters()
        self.setChopParametersToDefaults()
def setChopParametersToDefaults(self):
self.preChopDepth = -0.06
self.preChopWidth = -0.08
self.preChopHeight = 0.10
self.chopDistance = -0.15
self.chopSidewaysDistance = 0.03
    def addPlan(self, plan):
        # Append a newly computed manipulation/posture plan to the history.
        self.plans.append(plan)
def computeGraspOrientation(self):
return [180 + self.handleReachAngle, 0, 90]
    def computeGroundFrame(self, robotModel):
        '''
        Given a robot model, returns a vtkTransform at a position between
        the feet, on the ground, with z-axis up and x-axis aligned with the
        robot pelvis x-axis.
        '''
        t1 = robotModel.getLinkFrame('l_foot')
        t2 = robotModel.getLinkFrame('r_foot')
        pelvisT = robotModel.getLinkFrame('pelvis')
        xaxis = [1.0, 0.0, 0.0]
        pelvisT.TransformVector(xaxis, xaxis)
        xaxis = np.array(xaxis)
        zaxis = np.array([0.0, 0.0, 1.0])
        # Orthonormalize: keep world z, project pelvis x into the ground plane.
        yaxis = np.cross(zaxis, xaxis)
        yaxis /= np.linalg.norm(yaxis)
        xaxis = np.cross(yaxis, zaxis)
        stancePosition = (np.array(t2.GetPosition()) + np.array(t1.GetPosition())) / 2.0
        # Fixed sole-to-foot-link offset (presumably meters -- confirm).
        footHeight = 0.0811
        t = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
        t.PostMultiply()
        t.Translate(stancePosition)
        t.Translate([0.0, 0.0, -footHeight])
        return t
    def computeDoorHandleGraspFrame(self):
        # Build the family of frames used to reach/grasp/turn/push the door
        # handle, all derived from the handle axis frame, and keep them in
        # sync with the handle affordance via a FrameSync.  Two pipelines:
        # pinch-grasp (turn the handle) and chop (hook behind it).
        doorSide = 1 if self.graspingHand == 'left' else -1
        graspOrientation = self.computeGraspOrientation()
        self.doorHandleAxisFrame = self.computeDoorHandleAxisFrame()
        # Helper: frame from an offset/turn relative to the handle axis frame.
        def makeFrame(name, offset, turnAngle=0):
            t = transformUtils.frameFromPositionAndRPY(offset, graspOrientation)
            t.PostMultiply()
            t.Concatenate(transformUtils.frameFromPositionAndRPY([0.0, 0.0, 0.0], [-turnAngle, 0, 0]))
            t.Concatenate(transformUtils.copyFrame(self.doorHandleAxisFrame.transform))
            return vis.updateFrame(t, name, parent=self.doorHandleAffordance, visible=False, scale=0.2)
        # Helper: frame from an explicit chain of transforms.
        def makeFrameNew(name, transforms):
            t = transformUtils.concatenateTransforms(transforms)
            return vis.updateFrame(t, name, parent=self.doorHandleAffordance, visible=False, scale=0.2)
        graspToAxisTransform = transformUtils.frameFromPositionAndRPY([0.0, 0.0, 0.0],
                                                                      graspOrientation)
        self.doorHandleGraspFrame = makeFrameNew('door handle grasp frame',
                                                 [graspToAxisTransform,
                                                  self.doorHandleAxisFrame.transform])
        if self.usePinchGrasp:
            # Pinch pipeline: reach, turn about the handle axis, then push
            # about the door hinge (with and without the turn applied).
            reachToGraspTransform = transformUtils.frameFromPositionAndRPY([-doorSide*self.handleTouchWidth,
                                                                            self.handleTouchDepth,
                                                                            -self.handleTouchHeight],
                                                                           [0.0, 0.0, 0.0])
            self.doorHandleReachFrame = makeFrameNew('door handle reach frame', [reachToGraspTransform,
                                                                                 self.doorHandleGraspFrame.transform])
            handleTurnTransform = transformUtils.frameFromPositionAndRPY([0.0, 0.0, 0.0], [-self.handleTurnAngle, 0, 0])
            self.doorHandleTurnFrame = makeFrameNew('door handle turn frame', [reachToGraspTransform,
                                                                               graspToAxisTransform,
                                                                               handleTurnTransform,
                                                                               self.doorHandleAxisFrame.transform])
            handlePushTransform = transformUtils.frameFromPositionAndRPY([0.0, 0.0, 0.0],
                                                                         [0, 0, self.handlePushAngle])
            self.doorHandlePushFrame = makeFrameNew('door handle push frame',
                                                    [self.doorHandleTurnFrame.transform,
                                                     self.doorHingeFrame.transform.GetInverse(),
                                                     handlePushTransform,
                                                     self.doorHingeFrame.transform])
            self.doorHandlePushLiftFrame = makeFrameNew('door handle push lift frame',
                                                        [self.doorHandleReachFrame.transform,
                                                         self.doorHingeFrame.transform.GetInverse(),
                                                         handlePushTransform,
                                                         self.doorHingeFrame.transform])
            self.doorHandlePushLiftAxisFrame = makeFrameNew('door handle push lift axis frame',
                                                            [self.doorHandleAxisFrame.transform,
                                                             self.doorHingeFrame.transform.GetInverse(),
                                                             handlePushTransform,
                                                             self.doorHingeFrame.transform])
            self.doorHandlePushOpenFrame = makeFrame('door handle push open frame', [self.handleOpenDepth, self.handleOpenWidth, self.handleLiftHeight])
            # Tilt the push-open target by a fixed 25 degrees about x.
            t = vtk.vtkTransform()
            t.PostMultiply()
            t.RotateX(25)
            t.Concatenate(self.doorHandlePushOpenFrame.transform)
            self.doorHandlePushOpenFrame.copyFrame(t)
        else:
            # Chop pipeline: an editable reach frame (translation-only
            # widget) plus a pre-chop staging frame behind it.
            reachToAxisTransform = transformUtils.frameFromPositionAndRPY([self.preChopDepth,
                                                                           doorSide*self.preChopWidth,
                                                                           self.preChopHeight],
                                                                          [0, 90, -90])
            obj = om.findObjectByName('door handle reach frame')
            self.doorHandleReachFrame = makeFrameNew('door handle reach frame',
                                                     [reachToAxisTransform, self.doorHandleAxisFrame.transform])
            if not obj:
                obj = self.doorHandleReachFrame
                obj.setProperty('Edit', True)
                obj.setProperty('Visible', True)
                # Disable rotation handles so the user can only translate.
                rep = obj.widget.GetRepresentation()
                rep.SetRotateAxisEnabled(0, False)
                rep.SetRotateAxisEnabled(1, False)
                rep.SetRotateAxisEnabled(2, False)
                obj.widget.HandleRotationEnabledOff()
                obj.setProperty('Edit', False)
            preChopToReachTransform = transformUtils.frameFromPositionAndRPY([0.0,
                                                                              -0.15,
                                                                              0.0],
                                                                             [0, 0, 0])
            self.doorHandlePreChopFrame = makeFrameNew('door handle pre-chop frame',
                                                       [preChopToReachTransform, self.doorHandleReachFrame.transform])
        # Keep derived frames following the handle frame (one-way for the
        # frames marked ignoreIncoming).
        self.doorHandleFrame.frameSync = vis.FrameSync()
        self.doorHandleFrame.frameSync.addFrame(self.doorHandleFrame)
        self.doorHandleFrame.frameSync.addFrame(self.doorHandleAxisFrame)
        self.doorHandleFrame.frameSync.addFrame(self.doorHandleGraspFrame, ignoreIncoming=True)
        self.doorHandleFrame.frameSync.addFrame(self.doorHandleReachFrame, ignoreIncoming=True)
        if self.usePinchGrasp:
            self.doorHandleFrame.frameSync.addFrame(self.doorHandleTurnFrame, ignoreIncoming=True)
            self.doorHandleFrame.frameSync.addFrame(self.doorHandlePushFrame, ignoreIncoming=True)
            self.doorHandleFrame.frameSync.addFrame(self.doorHandlePushLiftFrame, ignoreIncoming=True)
            self.doorHandleFrame.frameSync.addFrame(self.doorHandlePushLiftAxisFrame, ignoreIncoming=True)
            self.doorHandleFrame.frameSync.addFrame(self.doorHandlePushOpenFrame, ignoreIncoming=True)
        else:
            self.doorHandleFrame.frameSync.addFrame(self.doorHandlePreChopFrame, ignoreIncoming=True)
    def computeDoorHandleAxisFrame(self):
        # Frame at the hinge end of the handle: offset half the handle length
        # along y (sign depends on grasping side) from the handle frame.
        handleLength = self.doorHandleAffordance.getProperty('Dimensions')[1]
        doorSide = 1 if self.graspingHand == 'left' else -1
        t = transformUtils.frameFromPositionAndRPY([0.0, doorSide*handleLength/2.0, 0.0], [0, 0, 0])
        t.PostMultiply()
        t.Concatenate(transformUtils.copyFrame(self.doorHandleFrame.transform))
        return vis.updateFrame(t, 'door handle axis frame', parent=self.doorHandleAffordance,
                               visible=False, scale=0.2)
    def computeDoorHingeFrame(self):
        # Frame at the door's hinge edge, derived from the 'door' affordance
        # dimensions; opposite side of the door from the grasping hand.
        doorSide = 1 if self.graspingHand == 'left' else -1
        doorAffordance = om.findObjectByName('door')
        doorDimensions = doorAffordance.getProperty('Dimensions')
        doorDepth = doorDimensions[0]
        doorWidth = doorDimensions[1]
        t = transformUtils.frameFromPositionAndRPY([doorDepth/2, -doorSide*doorWidth/2.0, 0.0], [0, 0, 0])
        t.PostMultiply()
        t.Concatenate(transformUtils.copyFrame(doorAffordance.getChildFrame().transform))
        self.doorHingeFrame = vis.updateFrame(t, 'door hinge frame', parent=doorAffordance,
                                              visible=False, scale=0.2)
        return self.doorHingeFrame
    def computeDoorHandleStanceFrame(self):
        # Derive a robot stance frame on the ground near the handle: project
        # the handle frame to ground level (keeping world z up), then apply
        # a fixed position/yaw offset tuned for reaching the handle.
        graspFrame = self.doorHandleFrame.transform
        groundFrame = self.computeGroundFrame(self.robotModel)
        groundHeight = groundFrame.GetPosition()[2]
        graspPosition = np.array(graspFrame.GetPosition())
        xaxis = [1.0, 0.0, 0.0]
        yaxis = [0.0, 1.0, 0.0]
        zaxis = [0, 0, 1]
        graspFrame.TransformVector(xaxis, xaxis)
        graspFrame.TransformVector(yaxis, yaxis)
        # Re-orthogonalize so z stays world-up.
        yaxis = np.cross(zaxis, xaxis)
        yaxis /= np.linalg.norm(yaxis)
        xaxis = np.cross(yaxis, zaxis)
        graspGroundFrame = transformUtils.getTransformFromAxes(xaxis, yaxis, zaxis)
        graspGroundFrame.PostMultiply()
        graspGroundFrame.Translate(graspPosition[0], graspPosition[1], groundHeight)
        # Hand-tuned stance offset relative to the projected handle frame.
        position = [-0.77, 0.4, 0.0]
        rpy = [0, 0, -20]
        t = transformUtils.frameFromPositionAndRPY(position, rpy)
        t.Concatenate(graspGroundFrame)
        self.doorHandleStanceFrame = vis.updateFrame(t, 'door handle grasp stance', parent=self.doorHandleAffordance, visible=True, scale=0.2)
    def moveRobotToStanceFrame(self):
        # Set the estimated robot state directly to the stance frame
        # (presumably used in vis-only/simulation mode -- confirm): nominal
        # posture, xy from the frame position, yaw from the frame orientation.
        frame = self.doorHandleStanceFrame.transform
        self.sensorJointController.setPose('q_nom')
        stancePosition = frame.GetPosition()
        stanceOrientation = frame.GetOrientation()
        self.sensorJointController.q[:2] = [stancePosition[0], stancePosition[1]]
        self.sensorJointController.q[5] = math.radians(stanceOrientation[2])
        self.sensorJointController.push()
    def planNominal(self):
        # Plan a posture move back to the 'safe nominal' standing pose.
        startPose = self.getPlanningStartPose()
        endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'General', 'safe nominal')
        endPose, info = self.ikPlanner.computeStandPose(endPose)
        newPlan = self.ikPlanner.computePostureGoal(startPose, endPose)
        self.addPlan(newPlan)
    def planPreReach(self):
        # Move both arms to their pre-reach postures (grasping side differs
        # between the pinch and chop pipelines), then re-balance standing.
        ikParameters = IkParameters(usePointwise=False, maxDegreesPerSecond=self.speedHigh)
        nonGraspingHand = 'right' if self.graspingHand == 'left' else 'left'
        startPose = self.getPlanningStartPose()
        if self.usePinchGrasp:
            endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'door',
                                                                  'pre-reach grasping',
                                                                  side=self.graspingHand)
        else:
            endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'door',
                                                                  'pre-reach chop',
                                                                  side=self.graspingHand)
        endPose = self.ikPlanner.getMergedPostureFromDatabase(endPose, 'door',
                                                              'pre-reach non-grasping',
                                                              side=nonGraspingHand)
        endPose, info = self.ikPlanner.computeStandPose(endPose, ikParameters=ikParameters)
        newPlan = self.ikPlanner.computePostureGoal(startPose, endPose, ikParameters=ikParameters)
        self.addPlan(newPlan)
    def planUnReach(self):
        # Retract to the 'pre-reach grasping' posture at the careful speed.
        ikParameters = IkParameters(usePointwise=False, maxDegreesPerSecond=self.speedLow)
        startPose = self.getPlanningStartPose()
        endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'door',
                                                              'pre-reach grasping',
                                                              side=self.graspingHand)
        endPose, info = self.ikPlanner.computeStandPose(endPose, ikParameters=ikParameters)
        newPlan = self.ikPlanner.computePostureGoal(startPose, endPose, ikParameters=ikParameters)
        self.addPlan(newPlan)
    def planTuckArms(self):
        # Tuck both arms via an intermediate posture blended between the
        # stand pose and the database tuck poses (other side first).
        ikParameters = IkParameters(usePointwise=False, maxDegreesPerSecond=self.speedHigh)
        otherSide = 'left' if self.graspingHand == 'right' else 'right'
        startPose = self.getPlanningStartPose()
        standPose, info = self.ikPlanner.computeStandPose(startPose, ikParameters=ikParameters)
        q2 = self.ikPlanner.getMergedPostureFromDatabase(standPose, 'door', 'hand up tuck', side=otherSide)
        a = 0.25
        q2 = (1.0 - a)*np.array(standPose) + a*q2
        q2 = self.ikPlanner.getMergedPostureFromDatabase(q2, 'door', 'hand up tuck', side=self.graspingHand)
        a = 0.75
        q2 = (1.0 - a)*np.array(standPose) + a*q2
        endPose = self.ikPlanner.getMergedPostureFromDatabase(standPose, 'door', 'hand up tuck', side=self.graspingHand)
        endPose = self.ikPlanner.getMergedPostureFromDatabase(endPose, 'door', 'hand up tuck', side=otherSide)
        newPlan = self.ikPlanner.computeMultiPostureGoal([startPose, q2, endPose], ikParameters=ikParameters)
        self.addPlan(newPlan)
    def planTuckArmsPrePush(self):
        # Near-duplicate of planTuckArms with the blend order of the two
        # sides swapped (grasping side first in the intermediate posture).
        ikParameters = IkParameters(usePointwise=False, maxDegreesPerSecond=self.speedHigh)
        otherSide = 'left' if self.graspingHand == 'right' else 'right'
        startPose = self.getPlanningStartPose()
        standPose, info = self.ikPlanner.computeStandPose(startPose, ikParameters=ikParameters)
        q2 = self.ikPlanner.getMergedPostureFromDatabase(standPose, 'door', 'hand up tuck', side=self.graspingHand)
        a = 0.25
        q2 = (1.0 - a)*np.array(standPose) + a*q2
        q2 = self.ikPlanner.getMergedPostureFromDatabase(q2, 'door', 'hand up tuck', side=otherSide)
        a = 0.75
        q2 = (1.0 - a)*np.array(standPose) + a*q2
        endPose = self.ikPlanner.getMergedPostureFromDatabase(standPose, 'door', 'hand up tuck', side=self.graspingHand)
        endPose = self.ikPlanner.getMergedPostureFromDatabase(endPose, 'door', 'hand up tuck', side=otherSide)
        newPlan = self.ikPlanner.computeMultiPostureGoal([startPose, q2, endPose], ikParameters=ikParameters)
        self.addPlan(newPlan)
    def planChop(self, deltaZ=None, deltaY=None, deltaX=None):
        # Plan a straight-line "chop" hand motion; deltas default to the
        # configured chop distances.  A line constraint keeps the palm on
        # the straight path between the start and goal palm positions.
        startPose = self.getPlanningStartPose()
        if deltaZ is None:
            deltaZ = self.chopDistance
        if deltaY is None:
            deltaY = self.chopSidewaysDistance
        if deltaX is None:
            deltaX = 0.0
        linkOffsetFrame = self.ikPlanner.getPalmToHandLink(self.graspingHand)
        handLinkName = self.ikPlanner.getHandLink(self.graspingHand)
        startFrame = self.ikPlanner.getLinkFrameAtPose(handLinkName, startPose)
        # NOTE(review): deltas map to hand-frame axes as [deltaZ, -deltaX,
        # -deltaY] -- axis convention inferred from usage; confirm.
        endToStartTransform = transformUtils.frameFromPositionAndRPY([deltaZ, -deltaX, -deltaY],
                                                                     [0, 0, 0])
        endFrame = transformUtils.concatenateTransforms([endToStartTransform, startFrame]);
        vis.updateFrame(endFrame, 'debug chop', parent=self.doorHandleAffordance, visible=False, scale=0.2)
        palmToWorld1 = transformUtils.concatenateTransforms([linkOffsetFrame, startFrame])
        palmToWorld2 = transformUtils.concatenateTransforms([linkOffsetFrame, endFrame])
        constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand, palmToWorld2)
        constraintSet.nominalPoseName = 'q_start'
        constraintSet.ikParameters = IkParameters(usePointwise=False,
                                                  maxDegreesPerSecond=self.speedLow,
                                                  numberOfAddedKnots=2)
        endPose, info = constraintSet.runIk()
        # Constrain the palm to the line from start to goal.
        motionVector = np.array(palmToWorld2.GetPosition()) - np.array(palmToWorld1.GetPosition())
        motionTargetFrame = transformUtils.getTransformFromOriginAndNormal(np.array(palmToWorld2.GetPosition()), motionVector)
        p = self.ikPlanner.createLinePositionConstraint(handLinkName, linkOffsetFrame, motionTargetFrame, lineAxis=2, bounds=[-np.linalg.norm(motionVector), 0.0], positionTolerance=0.001)
        constraintSet.constraints.append(p)
        plan = constraintSet.runIkTraj()
        self.addPlan(plan)
def stopPushing(self):
startPose = self.getPlanningStartPose
plan = self.robotSystem.ikPlanner.computePostureGoal(startPose, startPose)
self.addPlan(plan)
self.commitManipPlan()
def planReach(self, reachTargetFrame=None, jointSpeedLimit=None):
if reachTargetFrame is None:
reachTargetFrame = self.doorHandleReachFrame
if jointSpeedLimit is None:
jointSpeedLimit = self.speedLow
startPose = self.getPlanningStartPose()
constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand, reachTargetFrame)
constraintSet.nominalPoseName = 'q_start'
constraintSet.ikParameters = IkParameters(usePointwise=False,
maxDegreesPerSecond=self.speedLow,
numberOfAddedKnots=2)
endPose, info = constraintSet.runIk()
linkOffsetFrame = self.ikPlanner.getPalmToHandLink(self.graspingHand)
handLinkName = self.ikPlanner.getHandLink(self.graspingHand)
handToWorld1 = self.ikPlanner.getLinkFrameAtPose(handLinkName, startPose)
handToWorld2 = self.ikPlanner.getLinkFrameAtPose(handLinkName, endPose)
palmToWorld1 = transformUtils.concatenateTransforms([linkOffsetFrame, handToWorld1])
palmToWorld2 = transformUtils.concatenateTransforms([linkOffsetFrame, handToWorld2])
motionVector = np.array(palmToWorld2.GetPosition()) - np.array(palmToWorld1.GetPosition())
motionTargetFrame = transformUtils.getTransformFromOriginAndNormal(np.array(palmToWorld2.GetPosition()), motionVector)
p = self.ikPlanner.createLinePositionConstraint(handLinkName, linkOffsetFrame, motionTargetFrame, lineAxis=2, bounds=[-np.linalg.norm(motionVector), 0.0], positionTolerance=0.001)
constraintSet.constraints.append(p)
plan = constraintSet.runIkTraj()
self.addPlan(plan)
def planPreChop(self):
self.planReach(self.doorHandlePreChopFrame, self.speedHigh)
def createHingeConstraint(self, referenceFrame, axis, linkName, startPose, tspan=[0, 1]):
constraints = []
linkFrame = self.ikPlanner.getLinkFrameAtPose(linkName, startPose)
def addPivotPoint(constraints, pivotPoint):
constraints.append(ik.PositionConstraint())
constraints[-1].linkName = linkName
constraints[-1].referenceFrame = referenceFrame.transform
constraints[-1].lowerBound = np.array(pivotPoint)
constraints[-1].upperBound = np.array(pivotPoint)
pivotPointInWorld = referenceFrame.transform.TransformDoublePoint(pivotPoint)
constraints[-1].pointInLink = linkFrame.GetInverse().TransformDoublePoint(pivotPointInWorld)
constraints[-1].tspan = tspan
addPivotPoint(constraints, [0.0, 0.0, 0.0])
addPivotPoint(constraints, axis)
return constraints
    def planHandleTurn(self, turnAngle=None):
        # Rotate the grasped handle about its axis by turnAngle degrees
        # (default: configured turn angle), constraining the hand to the
        # handle-axis hinge and locking everything except the grasping arm.
        doorSide = 1 if self.graspingHand == 'left' else -1
        if turnAngle is None:
            turnAngle = self.handleTurnAngle
        startPose = self.getPlanningStartPose()
        linkFrame = self.ikPlanner.getLinkFrameAtPose(self.ikPlanner.getHandLink(), startPose)
        finalGraspToReferenceTransfrom = transformUtils.concatenateTransforms(
            [self.ikPlanner.getPalmToHandLink(self.graspingHand), linkFrame,
             self.doorHandleAxisFrame.transform.GetInverse()])
        handleTurnTransform = transformUtils.frameFromPositionAndRPY([0.0, 0.0, 0.0],
                                                                     [doorSide*turnAngle, 0, 0])
        doorHandleTurnFrame = transformUtils.concatenateTransforms([finalGraspToReferenceTransfrom,
                                                                    handleTurnTransform,
                                                                    self.doorHandleAxisFrame.transform])
        vis.updateFrame(doorHandleTurnFrame, 'debug turn', parent=self.doorHandleAffordance, visible=False, scale=0.2)
        constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand,
                                                           doorHandleTurnFrame)
        constraintSet.ikParameters = IkParameters(usePointwise=False,
                                                  maxDegreesPerSecond=self.speedLow,
                                                  numberOfAddedKnots=2)
        constraintSet.nominalPoseName = 'q_start'
        endPose, info = constraintSet.runIk()
        constraints = constraintSet.constraints
        constraints.extend(self.createHingeConstraint(self.doorHandleAxisFrame, [1.0, 0.0, 0.0],
                                                      self.ikPlanner.getHandLink(),
                                                      constraintSet.startPoseName))
        # Lock base, back, feet and the free arm so only the grasping arm moves.
        constraints.append(self.ikPlanner.createLockedBasePostureConstraint(constraintSet.startPoseName))
        constraints.append(self.ikPlanner.createLockedBackPostureConstraint(constraintSet.startPoseName))
        constraints.extend(self.ikPlanner.createFixedFootConstraints(constraintSet.startPoseName))
        constraints.append(self.ikPlanner.createLockedArmPostureConstraint(constraintSet.startPoseName))
        plan = constraintSet.runIkTraj()
        self.addPlan(plan)
def planDoorPushOpen(self):
ikParameters = IkParameters(usePointwise=False, maxDegreesPerSecond=15)
nonGraspingHand = 'right' if self.graspingHand == 'left' else 'left'
startPose = self.getPlanningStartPose()
endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'door', 'smash', side=nonGraspingHand)
newPlan = self.ikPlanner.computePostureGoal(startPose, endPose, ikParameters=ikParameters)
self.addPlan(newPlan)
def planDoorPushOpenTwist(self):
ikParameters = IkParameters(usePointwise=False, maxDegreesPerSecond=60)
nonGraspingHand = 'right' if self.graspingHand == 'left' else 'left'
startPose = self.getPlanningStartPose()
endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'door', 'smash 2', side=nonGraspingHand)
newPlan = self.ikPlanner.computePostureGoal(startPose, endPose, ikParameters=ikParameters)
self.addPlan(newPlan)
    def planHandlePush(self):
        # Swing the grasped handle about the door hinge by the configured
        # push angle, constraining the hand to the hinge's vertical axis and
        # locking base, feet and the free arm.
        doorSide = 1 if self.graspingHand == 'left' else -1
        startPose = self.getPlanningStartPose()
        linkFrame = self.ikPlanner.getLinkFrameAtPose(self.ikPlanner.getHandLink(), startPose)
        finalGraspToReferenceTransfrom = transformUtils.concatenateTransforms(
            [self.ikPlanner.getPalmToHandLink(self.graspingHand), linkFrame,
             self.doorHingeFrame.transform.GetInverse()])
        handlePushTransform = transformUtils.frameFromPositionAndRPY([0.0, 0.0, 0.0],
                                                                     [0, 0, -doorSide*self.handlePushAngle])
        doorHandlePushFrame = transformUtils.concatenateTransforms([finalGraspToReferenceTransfrom,
                                                                    handlePushTransform,
                                                                    self.doorHingeFrame.transform])
        vis.updateFrame(doorHandlePushFrame, 'debug push', parent=self.doorHandleAffordance, visible=False, scale=0.2)
        constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand,
                                                           doorHandlePushFrame)
        constraintSet.ikParameters = IkParameters(usePointwise=False, maxDegreesPerSecond=self.speedLow)
        constraintSet.nominalPoseName = 'q_start'
        endPose, info = constraintSet.runIk()
        constraints = constraintSet.constraints
        constraints.extend(self.createHingeConstraint(self.doorHingeFrame, [0.0, 0.0, 1.0],
                                                      self.ikPlanner.getHandLink(side=self.graspingHand),
                                                      constraintSet.startPoseName))
        constraints.append(self.ikPlanner.createLockedBasePostureConstraint(constraintSet.startPoseName))
        constraints.extend(self.ikPlanner.createFixedFootConstraints(constraintSet.startPoseName))
        constraints.append(self.ikPlanner.createLockedArmPostureConstraint(constraintSet.startPoseName))
        plan = constraintSet.runIkTraj()
        self.addPlan(plan)
def planHandlePushLift(self):
self.planHandleTurn(-self.handleTurnAngle)
def planDoorTouch(self):
ikParameters = IkParameters(usePointwise=False, maxDegreesPerSecond=self.speedHigh)
nonGraspingHand = 'right' if self.graspingHand == 'left' else 'left'
startPose = self.getPlanningStartPose()
endPose = self.ikPlanner.getMergedPostureFromDatabase(startPose, 'door', 'pre-smash', side=nonGraspingHand)
newPlan = self.ikPlanner.computePostureGoal(startPose, endPose, ikParameters=ikParameters)
self.addPlan(newPlan)
    def planHandlePushOpen(self):
        # Move the palm to the push-open target frame (pinch-grasp pipeline),
        # at the careful speed.
        startPose = self.getPlanningStartPose()
        constraintSet = self.ikPlanner.planEndEffectorGoal(startPose, self.graspingHand, self.doorHandlePushOpenFrame)
        constraintSet.ikParameters = IkParameters(usePointwise=False,
                                                  maxDegreesPerSecond=self.speedLow,
                                                  numberOfAddedKnots=2)
        endPose, info = constraintSet.runIk()
        plan = constraintSet.runIkTraj()
        self.addPlan(plan)
def planFootstepsToDoor(self):
startPose = self.getPlanningStartPose()
goalFrame = self.doorHandleStanceFrame.transform
request = self.footstepPlanner.constructFootstepPlanRequest(startPose, goalFrame)
self.footstepPlan = self.footstepPlanner.sendFootstepPlanRequest(request, waitForResponse=True)
    def planFootstepsThroughDoor(self):
        # Footstep plan through the door walk frame, with a narrower nominal
        # step width for the doorway; the result is added as a task item.
        startPose = self.getPlanningStartPose()
        goalFrame = self.doorWalkFrame.transform
        request = self.footstepPlanner.constructFootstepPlanRequest(startPose, goalFrame)
        request.params.nom_step_width = 0.21
        self.footstepPlan = self.footstepPlanner.sendFootstepPlanRequest(request, waitForResponse=True)
        rt._addPlanItem(self.footstepPlan, 'door walk frame footstep plan', rt.FootstepPlanItem)
    def setFootstepThroughDoorParameters(self):
        # Tuned footstep parameters for walking through the doorway; `bias`
        # uniformly shifts the entry/exit lateral foot widths.
        bias = -0.02
        self.doorFootstepParams = FieldContainer(
            leadingFoot = 'right',
            preEntryFootWidth = -0.12 +bias,
            preEntryFootDistance = -0.6,
            entryFootWidth = 0.07 +bias,
            entryFootDistance = -0.26,
            exitFootWidth = -0.08 +bias,
            exitFootDistance = 0.12,
            exitStepDistance = 0.3,
            endStanceWidth = 0.26,
            numberOfExitSteps = 1,
            centerStepDistance = 0.26,
            centerStanceWidth = 0.20,
            centerLeadingFoot = 'right'
        )
    def setTestingFootstepThroughDoorParameters(self):
        # Alternative (testing) parameter set for the through-door walk;
        # note: omits the center* fields present in the default set.
        self.doorFootstepParams = FieldContainer(
            endStanceWidth = 0.26,
            entryFootDistance = -0.26,
            entryFootWidth = 0.12,
            exitFootDistance = 0.12,
            exitFootWidth = -0.12,
            exitStepDistance = 0.3,
            leadingFoot = 'right',
            numberOfExitSteps = 1,
            preEntryFootDistance = -0.55,
            preEntryFootWidth = -0.12
        )
def getRelativeFootstepsThroughDoorWithSway(self):
    """Build the swaying (pre-entry/entry/exit) step sequence through the door.

    Steps are [x, y, yaw] offsets relative to the door ground frame:
    pre-entry, entry, exit, then ``numberOfExitSteps`` alternating-side
    steps, finishing with a mirrored squaring-up step.

    Returns:
        (stepFrames, leadingFoot) where stepFrames come from
        FootstepRequestGenerator.makeStepFrames.
    """
    p = self.doorFootstepParams
    stepFrames = [
        [p.preEntryFootDistance, p.preEntryFootWidth, 0.0],
        [p.entryFootDistance, p.entryFootWidth, 0.0],
        [p.exitFootDistance, p.exitFootWidth, 0.0],
        ]
    for i in xrange(p.numberOfExitSteps):
        # Alternate sides while advancing exitStepDistance per step.
        sign = -1 if (i%2) else 1
        stepFrames.append([p.exitFootDistance + (i+1)*p.exitStepDistance, sign*p.endStanceWidth/2.0, 0.0])
    # Mirror the last step to bring the trailing foot alongside.
    lastStep = list(stepFrames[-1])
    lastStep[1] *= -1
    stepFrames.append(lastStep)
    return FootstepRequestGenerator.makeStepFrames(stepFrames, relativeFrame=self.doorGroundFrame.transform, showFrames=False), p.leadingFoot
def getRelativeFootstepsThroughDoorCentered(self):
    """Build step frames marching straight through the door, centered.

    Steps alternate sides at ``centerStanceWidth`` spacing, advancing
    ``centerStepDistance`` per step and clamped at 1.5 forward; a final
    mirrored squaring-up step is appended, and the last two steps are
    widened to ``endStanceWidth``.

    Returns:
        (stepFrames, leadingFoot) where stepFrames come from
        FootstepRequestGenerator.makeStepFrames relative to the door
        stance frame.
    """
    p = self.doorFootstepParams
    stepDistance = p.centerStepDistance
    stanceWidth = p.centerStanceWidth
    leadingFoot = p.centerLeadingFoot
    stepFrames = []
    for i in xrange(30):
        # BUG FIX: compare string values with ==, not 'is'. Identity of
        # equal strings is an interpreter implementation detail, so the
        # old ``leadingFoot is 'right'`` test could silently pick the
        # wrong leading side.
        sign = -1 if leadingFoot == 'right' else 1
        if i % 2:
            sign = -sign
        stepX = (i+1)*stepDistance
        if stepX > 1.5:
            stepX = 1.5
        stepFrames.append([stepX, sign*stanceWidth/2.0, 0.0])
        # Stop once the clamped forward limit has been reached.
        if stepX == 1.5:
            break
    # Mirror the last step to square up the feet.
    lastStep = list(stepFrames[-1])
    lastStep[1] *= -1
    stepFrames.append(lastStep)
    # Force the final stance to the configured end stance width.
    stepFrames[-1][1] = np.sign(stepFrames[-1][1])*(p.endStanceWidth/2.0)
    stepFrames[-2][1] = np.sign(stepFrames[-2][1])*(p.endStanceWidth/2.0)
    return FootstepRequestGenerator.makeStepFrames(stepFrames, relativeFrame=self.doorHandleStanceFrame.transform, showFrames=False), leadingFoot
def planManualFootstepsTest(self, stepDistance=0.26, stanceWidth=0.26, numberOfSteps=4, leadingFoot='right'):
    """Plan a simple straight-ahead footstep sequence for testing.

    Steps alternate sides starting with *leadingFoot*, advance
    *stepDistance* each, and finish with a mirrored squaring-up step.
    The request is sent to the footstep planner and the response awaited.
    """
    stepFrames = []
    for i in xrange(numberOfSteps):
        # BUG FIX: compare string values with ==, not 'is' -- identity of
        # equal strings is not guaranteed, so ``leadingFoot is 'right'``
        # could mis-select the leading side.
        sign = -1 if leadingFoot == 'right' else 1
        if i % 2:
            sign = -sign
        stepFrames.append([(i+1)*stepDistance, sign*stanceWidth/2.0, 0.0])
    # Mirror the last step to bring the trailing foot alongside.
    lastStep = list(stepFrames[-1])
    lastStep[1] *= -1
    stepFrames.append(lastStep)
    stanceFrame = FootstepRequestGenerator.getRobotStanceFrame(self.robotModel)
    stepFrames = FootstepRequestGenerator.makeStepFrames(stepFrames, relativeFrame=stanceFrame)
    startPose = self.getPlanningStartPose()
    helper = FootstepRequestGenerator(self.footstepPlanner)
    request = helper.makeFootstepRequest(startPose, stepFrames, leadingFoot)
    self.footstepPlanner.sendFootstepPlanRequest(request, waitForResponse=True)
def planFootstepsThroughDoorManual(self):
    """Plan the door walk-through using manually generated (centered) steps.

    Builds step frames with getRelativeFootstepsThroughDoorCentered, sends
    the request, stores the response in ``self.footstepPlan`` and registers
    it with the task runtime.
    """
    startPose = self.getPlanningStartPose()
    stepFrames, leadingFoot = self.getRelativeFootstepsThroughDoorCentered()
    helper = FootstepRequestGenerator(self.footstepPlanner)
    request = helper.makeFootstepRequest(startPose, stepFrames, leadingFoot, numberOfFillSteps=2)
    self.footstepPlan = self.footstepPlanner.sendFootstepPlanRequest(request, waitForResponse=True)
    rt._addPlanItem(self.footstepPlan, 'door walk frame footstep plan', rt.FootstepPlanItem)
def computeWalkingPlan(self):
    """Request a walking plan for the current footstep plan and store it.

    The result is kept in ``self.walkingPlan`` and appended to the plan list.
    """
    startPose = self.getPlanningStartPose()
    self.walkingPlan = self.footstepPlanner.sendWalkingPlanRequest(self.footstepPlan, startPose, waitForResponse=True)
    self.addPlan(self.walkingPlan)
def commitManipPlan(self):
    """Commit the most recently added manipulation plan for execution."""
    self.manipPlanner.commitManipPlan(self.plans[-1])
def fitDoor(self, doorGroundFrame):
    """Re-spawn the door affordance and align it with *doorGroundFrame*.

    Removes any existing affordances, spawns a fresh door, copies the
    fitted ground frame onto it, and hides the door body for clarity.
    """
    om.removeFromObjectModel(om.findObjectByName('affordances'))
    self.spawnDoorAffordance()
    affordanceFrame = om.findObjectByName('door ground frame')
    assert affordanceFrame is not None
    # Moving the ground frame drags the whole door via the frame syncs
    # set up in spawnDoorAffordance.
    affordanceFrame.copyFrame(doorGroundFrame)
    om.findObjectByName('door').setProperty('Visible', False)
def showDoorHandlePoints(self, polyData):
    """Crop *polyData* around the door handle and display it for inspection.

    The cloud is cropped along three line segments (in front of the door
    face, across the handle side of the door, and vertically around the
    handle) and shown in red under the handle affordance.
    """
    doorHandle = om.findObjectByName('door handle')
    door = om.findObjectByName('door')
    doorWidth = door.getProperty('Dimensions')[1]
    doorAxes = transformUtils.getAxesFromTransform(door.getChildFrame().transform)
    doorOrigin = np.array(door.getChildFrame().transform.GetPosition())
    handleAxes = transformUtils.getAxesFromTransform(doorHandle.getChildFrame().transform)
    handleOrigin = np.array(doorHandle.getChildFrame().transform.GetPosition())
    # Handle is on the left half of the door for a left-hand grasp.
    doorSide = 1 if self.graspingHand == 'left' else -1
    polyData = segmentation.cropToLineSegment(polyData, doorOrigin - doorAxes[0]*0.02, doorOrigin - doorAxes[0]*0.1)
    polyData = segmentation.cropToLineSegment(polyData, doorOrigin, doorOrigin + doorAxes[1]*(doorWidth*0.5-0.01)*doorSide)
    polyData = segmentation.cropToLineSegment(polyData, handleOrigin - handleAxes[2]*0.1, handleOrigin + handleAxes[2]*0.1)
    pointsName = 'door handle points'
    existed = om.findObjectByName(pointsName) is not None
    obj = vis.updatePolyData(polyData, pointsName, parent=doorHandle, color=[1,0,0])
    # Only set display properties the first time the object is created.
    if not existed:
        obj.setProperty('Point Size', 10)
def spawnDoorAffordance(self):
    """Spawn the door, handle and door-jam affordances in front of the robot.

    Builds all frames relative to the robot's current ground frame, creates
    box affordances for the door, handle and both jams (dimensions use
    inch-to-meter conversion, 0.0254), then wires FrameSync objects so that
    moving the door ground frame moves everything, and moving the door
    moves the handle. Finally caches the key frames on self.
    """
    groundFrame = self.computeGroundFrame(self.robotModel)
    doorOffsetX = 0.7
    doorOffsetY = 0.0
    # Door ground frame sits doorOffsetX ahead of the robot on the ground.
    doorGroundFrame = transformUtils.frameFromPositionAndRPY([doorOffsetX, 0.0, 0.0], [0.0, 0.0, 0.0])
    doorGroundFrame.PostMultiply()
    doorGroundFrame.Concatenate(groundFrame)
    # Stance frame: where the robot should stand to operate the handle.
    stanceFrame = transformUtils.frameFromPositionAndRPY([0.0, 0.0, 0.0], [0.0, 0.0, 0.0])
    stanceFrame.PostMultiply()
    stanceFrame.Concatenate(groundFrame)
    # Walk frame: goal on the far side of the door.
    doorWalkFrame = transformUtils.frameFromPositionAndRPY([doorOffsetX + 0.6, 0.0, 0.0], [0.0, 0.0, 0.0])
    doorWalkFrame.PostMultiply()
    doorWalkFrame.Concatenate(groundFrame)
    # Nominal US door dimensions, converted from inches to meters.
    doorWidth = 36 * 0.0254
    doorHeight = 81 * 0.0254
    doorDepth = 0.5 * 0.0254
    doorSide = 1 if self.graspingHand == 'left' else -1
    handleHeightFromGround = 35 * 0.0254
    handleDistanceFromEdge = 1.625 * 0.0254
    handleDistanceFromDoor = 1.75 * 0.0254
    handleLength = 4.125 * 0.0254
    handleDepth = 0.25 * 0.0254
    doorJamWidth = 0.5
    doorJamDepth = 4.5 * 0.0254
    handleFrame = transformUtils.frameFromPositionAndRPY([-handleDistanceFromDoor - doorDepth/2.0 - handleDepth/2, doorSide*(doorWidth/2.0 - handleDistanceFromEdge - handleLength/2.0), handleHeightFromGround], [0.0, 0.0, 0.0])
    handleFrame.PostMultiply()
    handleFrame.Concatenate(doorGroundFrame)
    doorFrame = transformUtils.frameFromPositionAndRPY([0.0, 0.0, doorHeight/2.0], [0.0, 0.0, 0.0])
    doorFrame.PostMultiply()
    doorFrame.Concatenate(doorGroundFrame)
    leftDoorJamFrame = transformUtils.frameFromPositionAndRPY([0.0, (doorWidth/2.0 + doorJamWidth/2.0), doorHeight/2.0], [0.0, 0.0, 0.0])
    leftDoorJamFrame.PostMultiply()
    leftDoorJamFrame.Concatenate(doorGroundFrame)
    rightDoorJamFrame = transformUtils.frameFromPositionAndRPY([0.0, -(doorWidth/2.0 + doorJamWidth/2.0), doorHeight/2.0], [0.0, 0.0, 0.0])
    rightDoorJamFrame.PostMultiply()
    rightDoorJamFrame.Concatenate(doorGroundFrame)
    # Create the box affordances from the frames computed above.
    desc = dict(classname='BoxAffordanceItem', Name='door handle',
                pose=transformUtils.poseFromTransform(handleFrame), Dimensions=[handleDepth, handleLength, 0.02], Color=[0.0, 1.0, 0.0])
    handleAffordance = segmentation.affordanceManager.newAffordanceFromDescription(desc)
    desc = dict(classname='BoxAffordanceItem', Name='door',
                pose=transformUtils.poseFromTransform(doorFrame), Dimensions=[doorDepth, doorWidth, doorHeight], Color=[0.5, 0.5, 0.5])
    doorAffordance = segmentation.affordanceManager.newAffordanceFromDescription(desc)
    desc = dict(classname='BoxAffordanceItem', Name='left door jam',
                pose=transformUtils.poseFromTransform(leftDoorJamFrame), Dimensions=[doorJamDepth, doorJamWidth, doorHeight], Color=[0.7, 0.0, 0.0])
    leftDoorJamAffordance = segmentation.affordanceManager.newAffordanceFromDescription(desc)
    desc = dict(classname='BoxAffordanceItem', Name='right door jam',
                pose=transformUtils.poseFromTransform(rightDoorJamFrame), Dimensions=[doorJamDepth, doorJamWidth, doorHeight], Color=[0.7, 0.0, 0.0])
    rightDoorJamAffordance = segmentation.affordanceManager.newAffordanceFromDescription(desc)
    doorGroundFrame = vis.showFrame(doorGroundFrame, 'door ground frame', parent=doorAffordance)
    stanceFrame = vis.showFrame(stanceFrame, 'door stance frame', parent=doorAffordance)
    doorWalkFrame = vis.showFrame(doorWalkFrame, 'door walk frame', visible=False, parent=doorAffordance)
    doorFrame = doorAffordance.getChildFrame()
    handleFrame = handleAffordance.getChildFrame()
    leftDoorJamFrame = leftDoorJamAffordance.getChildFrame()
    rightDoorJamFrame = rightDoorJamAffordance.getChildFrame()
    # Ground frame drives everything else; the others only follow.
    self.doorFrameSync = vis.FrameSync()
    self.doorFrameSync.addFrame(doorGroundFrame)
    self.doorFrameSync.addFrame(stanceFrame, ignoreIncoming=True)
    self.doorFrameSync.addFrame(doorWalkFrame, ignoreIncoming=True)
    self.doorFrameSync.addFrame(doorFrame, ignoreIncoming=True)
    self.doorFrameSync.addFrame(leftDoorJamFrame, ignoreIncoming=True)
    self.doorFrameSync.addFrame(rightDoorJamFrame, ignoreIncoming=True)
    # Door frame drives the handle.
    self.doorHandleFrameSync = vis.FrameSync()
    self.doorHandleFrameSync.addFrame(doorFrame)
    self.doorHandleFrameSync.addFrame(handleFrame, ignoreIncoming=True)
    self.findDoorHandleAffordance()
    self.doorGroundFrame = doorGroundFrame
    self.doorHandleStanceFrame = stanceFrame
    self.doorWalkFrame = doorWalkFrame
def findDoorHandleAffordance(self):
    """Cache the door handle affordance/frame and recompute derived frames."""
    self.doorHandleAffordance = om.findObjectByName('door handle')
    self.doorHandleFrame = self.doorHandleAffordance.getChildFrame()
    self.computeDoorHingeFrame()
    self.computeDoorHandleGraspFrame()
def getEstimatedRobotStatePose(self):
    """Return the latest EST_ROBOT_STATE joint pose as a numpy array."""
    return np.array(self.sensorJointController.getPose('EST_ROBOT_STATE'))
def getPlanningStartPose(self):
    """Return the pose that planning should start from.

    Uses the live estimated robot state when planning from the current
    state or when no previous plan exists; otherwise continues from the
    final posture of the most recently added plan.
    """
    if self.planFromCurrentRobotState:
        return self.getEstimatedRobotStatePose()
    if not self.plans:
        return self.getEstimatedRobotStatePose()
    lastPlan = self.plans[-1]
    return robotstate.convertStateMessageToDrakePose(lastPlan.plan[-1])
class DoorImageFitter(ImageBasedAffordanceFit):
    """Image-click fitting workflow for the door demo.

    The user picks one point in the camera image; the corresponding point
    cloud region is segmented into a door plane and the door affordance is
    re-fit to it.
    """

    def __init__(self, doorDemo):
        ImageBasedAffordanceFit.__init__(self, numberOfPoints=1)
        self.doorDemo = doorDemo

    def fit(self, polyData, points):
        # Fit the door ground frame from the clicked point relative to the
        # robot's current stance, then update the affordance and show the
        # cropped handle points for visual verification.
        stanceFrame = FootstepRequestGenerator.getRobotStanceFrame(self.doorDemo.robotModel)
        doorGroundFrame = segmentation.segmentDoorPlane(polyData, points[0], stanceFrame)
        self.doorDemo.fitDoor(doorGroundFrame)
        self.doorDemo.showDoorHandlePoints(polyData)
class DoorTaskPanel(TaskUserPanel):
    """Task panel UI for the door opening demo.

    Hosts the image-based fitter, manual buttons for each planning step,
    user-tunable properties (hand side, chop geometry), and the automated
    task tree that sequences the full door traversal.
    """

    def __init__(self, doorDemo):
        TaskUserPanel.__init__(self, windowTitle='Door Task')
        self.doorDemo = doorDemo
        self.fitter = DoorImageFitter(self.doorDemo)
        self.initImageView(self.fitter.imageView)
        self.addDefaultProperties()
        self.addButtons()
        self.addTasks()

    def addButtons(self):
        # Manual (operator-triggered) versions of each demo step.
        self.addManualButton('Spawn door', self.doorDemo.spawnDoorAffordance)
        self.addManualSpacer()
        self.addManualButton('Footsteps to door', self.doorDemo.planFootstepsToDoor)
        self.addManualButton('Footsteps through door', self.doorDemo.planFootstepsThroughDoor)
        self.addManualSpacer()
        self.addManualButton('Raise arms', self.doorDemo.planPreReach)
        self.addManualButton('Tuck Arms (pre-push)', self.doorDemo.planTuckArmsPrePush)
        self.addManualButton('Tuck Arms', self.doorDemo.planTuckArms)
        self.addManualSpacer()
        self.addManualButton('Open pinch', self.openPinch)
        self.addManualButton('Close pinch', self.closePinch)
        self.addManualSpacer()
        self.addManualButton('Reach', self.doorDemo.planReach)
        self.addManualButton('Un-reach', self.doorDemo.planUnReach)
        self.addManualSpacer()
        self.addManualButton('Pre-chop out', self.doorDemo.planPreChop)
        self.addManualButton('Chop', self.doorDemo.planChop)
        self.addManualButton('Un-chop', functools.partial(self.doorDemo.planChop, deltaX=-0.1, deltaY=-0.1, deltaZ=0.1))
        self.addManualSpacer()
        self.addManualButton('Turn more', functools.partial(self.doorDemo.planHandleTurn, 10))
        self.addManualButton('Turn less', functools.partial(self.doorDemo.planHandleTurn, -10))
        self.addManualButton('Twist arm', self.doorDemo.planDoorPushOpenTwist)
        self.addManualSpacer()
        self.addManualButton('Commit Manip', self.doorDemo.commitManipPlan)
        self.addManualButton('Stop pushing', self.doorDemo.stopPushing)

    def getSide(self):
        # Lowercase hand name ('left' or 'right') from the enum property.
        return self.params.getPropertyEnumValue('Hand').lower()

    def openPinch(self):
        rt.OpenHand(side=self.getSide().capitalize(), mode='Pinch').run()

    def closePinch(self):
        rt.CloseHand(side=self.getSide().capitalize(), mode='Pinch').run()

    def addDefaultProperties(self):
        # Seed panel properties from the demo's current parameter values.
        self.params.addProperty('Hand', 0, attributes=om.PropertyAttributes(enumNames=['Left', 'Right']))
        self.params.addProperty('Pre-chop width', self.doorDemo.preChopWidth, attributes=om.PropertyAttributes(singleStep=0.01, decimals=3))
        self.params.addProperty('Pre-chop depth', self.doorDemo.preChopDepth, attributes=om.PropertyAttributes(singleStep=0.01, decimals=3))
        self.params.addProperty('Pre-chop height', self.doorDemo.preChopHeight, attributes=om.PropertyAttributes(singleStep=0.01, decimals=3))
        self.params.addProperty('Chop distance', self.doorDemo.chopDistance, attributes=om.PropertyAttributes(singleStep=0.01, decimals=3))
        self.params.addProperty('Chop sideways distance', self.doorDemo.chopSidewaysDistance, attributes=om.PropertyAttributes(singleStep=0.01, decimals=3))
        self._syncProperties()

    def onPropertyChanged(self, propertySet, propertyName):
        # Changing the hand rebuilds the task tree for the new side.
        if propertyName == 'Hand':
            self.taskTree.removeAllTasks()
            self.addTasks()
            self.doorDemo.findDoorHandleAffordance()
        self._syncProperties()

    def _syncProperties(self):
        """Push the panel's property values back onto the demo object."""
        self.doorDemo.graspingHand = self.params.getPropertyEnumValue('Hand').lower()
        self.doorDemo.ikPlanner.reachingSide = self.doorDemo.graspingHand
        # The grasp frame only exists after an affordance has been spawned.
        if hasattr(self.doorDemo, 'doorHandleAffordance'):
            self.doorDemo.computeDoorHandleGraspFrame()
        self.doorDemo.chopDistance = self.params.getProperty('Chop distance')
        self.doorDemo.chopSidewaysDistance = self.params.getProperty('Chop sideways distance')
        self.doorDemo.preChopWidth = self.params.getProperty('Pre-chop width')
        self.doorDemo.preChopDepth = self.params.getProperty('Pre-chop depth')
        self.doorDemo.preChopHeight = self.params.getProperty('Pre-chop height')

    def addTasks(self):
        """Rebuild the automated task tree for the full door traversal."""
        self.folder = None

        # Small helpers to append tasks/functions/groups to the tree.
        def addTask(task, parent=None):
            parent = parent or self.folder
            self.taskTree.onAddTask(task, copy=False, parent=parent)
        def addFunc(func, name, parent=None):
            addTask(rt.CallbackTask(callback=func, name=name), parent=parent)
        def addFolder(name, parent=None):
            self.folder = self.taskTree.addGroup(name, parent=parent)
            return self.folder

        d = self.doorDemo

        self.taskTree.removeAllTasks()
        side = self.params.getPropertyEnumValue('Hand')

        # Preparation: hands closed, door fitted, neck down.
        folder = addFolder('Prep')
        addTask(rt.CloseHand(name='close left hand', side='Left'))
        addTask(rt.CloseHand(name='close right hand', side='Right'))
        addTask(rt.UserPromptTask(name='fit door', message='Please fit and approve door affordance.'))
        addTask(rt.FindAffordance(name='check door affordance', affordanceName='door'))
        addTask(rt.SetNeckPitch(name='set neck position', angle=35))

        # Walk to the door stance, then refit the handle up close.
        folder = addFolder('Walk and refit')
        addTask(rt.RequestFootstepPlan(name='plan walk to door', stanceFrameName='door stance frame'))
        addTask(rt.UserPromptTask(name='approve footsteps', message='Please approve footstep plan.'))
        addTask(rt.CommitFootstepPlan(name='walk to door', planName='door stance frame footstep plan'))
        addTask(rt.WaitForWalkExecution(name='wait for walking'))
        addTask(rt.UserPromptTask(name='fit door', message='Please fit and approve door handle affordance.'))
        addTask(rt.OpenHand(name='open hand', side=side, mode='Pinch'))

        # Standard plan/approve/execute/wait bundle for a manipulation step.
        def addManipTask(name, planFunc, userPrompt=False):
            folder = addFolder(name)
            addFunc(planFunc, name='plan')
            if not userPrompt:
                addTask(rt.CheckPlanInfo(name='check manip plan info'))
            else:
                addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manipulation plan.'))
            addFunc(d.commitManipPlan, name='execute manip plan')
            addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'))

        addManipTask('Raise arms', d.planPreReach, userPrompt=False)
        addManipTask('Raise pushing hand', d.planDoorTouch, userPrompt=False)
        # Two strategies: pinch-grasp the handle and turn it, or chop it.
        if d.usePinchGrasp:
            addManipTask('Reach', d.planReach, userPrompt=True)
            addFunc(self.closePinch, name='Pinch handle')
            addTask(rt.UserPromptTask(name='Approve grasp',
                                      message='Please verify the pinch grasp'))
            addManipTask('Turn', d.planHandleTurn, userPrompt=False)
            addTask(rt.UserPromptTask(name='Approve handle turn',
                                      message='Please verify that the handle has turned'))
        else:
            addFunc(self.doorDemo.setChopParametersToDefaults, name='re-set chop parameters')
            addFunc(self.closePinch, name='Close hand')
            addManipTask('Reach', d.planReach, userPrompt=True)
            addManipTask('Chop', d.planChop, userPrompt=True)
        addManipTask('Push ajar', d.planHandlePush, userPrompt=False)
        addTask(rt.UserPromptTask(name='Approve door position',
                                  message='Please verify that the door is ajar'))
        addManipTask('Push ajar again', d.planHandlePush, userPrompt=False)
        if d.usePinchGrasp:
            addManipTask('Lift', d.planHandlePushLift, userPrompt=False)
            addTask(rt.CloseHand(name='Open hand', side=side, mode='Pinch', amount=0))
        addManipTask('Push open', d.planDoorPushOpen, userPrompt=False)
        addTask(rt.UserPromptTask(name='Approve door position',
                                  message='Please verify that the door is open'))
        addTask(rt.CloseHand(name='Close hand', side=side))
        addManipTask('Tuck Arms', d.planTuckArms, userPrompt=False)
        addTask(rt.CloseHand(name='Close fist', side=side))

        # Walk through the doorway using the manual centered steps.
        folder = addFolder('Walk through door')
        addFunc(d.planFootstepsThroughDoorManual, name='plan walk through door')
        addTask(rt.UserPromptTask(name='approve footsteps', message='Please approve footstep plan.'))
        addTask(rt.CommitFootstepPlan(name='walk to door', planName='door walk frame footstep plan'))
        addTask(rt.WaitForWalkExecution(name='wait for walking'))

        # Return to a safe nominal posture afterwards.
        folder = addFolder('Prep for walking')
        addTask(rt.CloseHand(name='close left hand', side='Left'))
        addTask(rt.CloseHand(name='close right hand', side='Right'))
        addTask(rt.PlanPostureGoal(name='plan walk posture', postureGroup='General', postureName='safe nominal', side='Default'))
        addTask(rt.UserPromptTask(name='approve manip plan', message='Please approve manip plan.'))
        addTask(rt.CommitManipulationPlan(name='execute manip plan', planName='safe nominal posture plan'))
        addTask(rt.WaitForManipulationPlanExecution(name='wait for manip execution'))
|
data/TheGhouls/oct/oct/results/models.py
|
import json
import datetime
from peewee import Proxy, TextField, FloatField, CharField, IntegerField, SqliteDatabase, Model, DateTimeField
# Shared database handle: a peewee Proxy that set_database() later binds
# to a concrete sqlite database.
db = Proxy()
class Result(Model):
    """Peewee model storing one transaction result from a load test run."""
    error = TextField(null=True)            # error message, if the transaction failed
    scriptrun_time = FloatField()           # time spent in the user script
    elapsed = FloatField()                  # total elapsed time for the transaction
    epoch = FloatField()                    # unix timestamp of the transaction
    custom_timers = TextField(null=True)    # JSON-encoded dict of user timers
    turret_name = CharField(default='Noname')

    def to_dict(self):
        """Return a JSON-serializable dict representation of this result.

        ``custom_timers`` is stored as a JSON string and decoded here.
        """
        # BUG FIX: custom_timers is nullable, and json.loads(None) raises
        # TypeError -- decode only when a value is actually present.
        timers = json.loads(self.custom_timers) if self.custom_timers else None
        return {
            'error': self.error,
            'scriptrun_time': self.scriptrun_time,
            'elapsed': self.elapsed,
            'epoch': self.epoch,
            'custom_timers': timers,
            'turret_name': self.turret_name
        }

    class Meta:
        database = db
class Turret(Model):
    """Peewee model describing a turret (load-generation agent)."""
    name = TextField()
    uuid = TextField()
    cannons = IntegerField()    # number of concurrent cannons (virtual users)
    script = TextField()
    rampup = IntegerField()
    status = TextField()
    # BUG FIX: pass the callable, not its result. ``default=datetime.datetime.now()``
    # evaluated once at import time, stamping every new row with the same
    # frozen timestamp; peewee calls a callable default per insert.
    updated_at = DateTimeField(default=datetime.datetime.now)

    def save(self, *args, **kwargs):
        """Refresh ``updated_at`` to the current time on every save."""
        self.updated_at = datetime.datetime.now()
        return super(Turret, self).save(*args, **kwargs)

    def to_dict(self):
        """Return a plain-dict representation of this turret."""
        return {
            'name': self.name,
            'uuid': self.uuid,
            'cannons': self.cannons,
            'script': self.script,
            'rampup': self.rampup,
            'status': self.status,
            'updated_at': self.updated_at
        }

    class Meta:
        database = db
def set_database(db_path, proxy, config):
    """Bind the peewee proxy to a sqlite database.

    :param str db_path: path of the sqlite database file
    :param peewee.Proxy proxy: the peewee proxy to initialize
    :param dict config: configuration dictionary; when ``config['testing']``
        is True, a throwaway database under /tmp is used instead of db_path
    """
    # Tests get an isolated scratch database so real results are untouched.
    use_test_db = config.get('testing') is True
    target_path = '/tmp/results.sqlite' if use_test_db else db_path
    database = SqliteDatabase(target_path, check_same_thread=False)
    proxy.initialize(database)
|
data/AppScale/appscale/AppServer/lib/argparse/argparse.py
|
"""Command-line parsing library
This module is an optparse-inspired command-line parsing library that:
- handles both optional and positional arguments
- produces highly informative usage messages
- supports parsers that dispatch to sub-parsers
The following is a simple usage example that sums integers from the
command-line and writes the result to a file::
parser = argparse.ArgumentParser(
description='sum the integers at the command line')
parser.add_argument(
'integers', metavar='int', nargs='+', type=int,
help='an integer to be summed')
parser.add_argument(
'--log', default=sys.stdout, type=argparse.FileType('w'),
help='the file where the sum should be written')
args = parser.parse_args()
args.log.write('%s' % sum(args.integers))
args.log.close()
The module contains the following public classes:
- ArgumentParser -- The main entry point for command-line parsing. As the
example above shows, the add_argument() method is used to populate
the parser with actions for optional and positional arguments. Then
the parse_args() method is invoked to convert the args at the
command-line into an object with attributes.
- ArgumentError -- The exception raised by ArgumentParser objects when
there are errors with the parser's actions. Errors raised while
parsing the command-line are caught by ArgumentParser and emitted
as command-line messages.
- FileType -- A factory for defining types of files to be created. As the
example above shows, instances of FileType are typically passed as
the type= argument of add_argument() calls.
- Action -- The base class for parser actions. Typically actions are
selected by passing strings like 'store_true' or 'append_const' to
the action= argument of add_argument(). However, for greater
customization of ArgumentParser actions, subclasses of Action may
be defined and passed as the action= argument.
- HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
ArgumentDefaultsHelpFormatter -- Formatter classes which
may be passed as the formatter_class= argument to the
ArgumentParser constructor. HelpFormatter is the default,
RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
not to change the formatting for help text, and
ArgumentDefaultsHelpFormatter adds information about argument defaults
to the help.
All other classes in this module are considered implementation details.
(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
considered public as object names -- the API of the formatter objects is
still considered an implementation detail.)
"""
__version__ = '1.2.1'
# Public names exported by ``from argparse import *``; everything else in
# this module is an implementation detail.
__all__ = [
    'ArgumentParser',
    'ArgumentError',
    'ArgumentTypeError',
    'FileType',
    'HelpFormatter',
    'ArgumentDefaultsHelpFormatter',
    'RawDescriptionHelpFormatter',
    'RawTextHelpFormatter',
    'Namespace',
    'Action',
    'ONE_OR_MORE',
    'OPTIONAL',
    'PARSER',
    'REMAINDER',
    'SUPPRESS',
    'ZERO_OR_MORE',
]
import copy as _copy
import os as _os
import re as _re
import sys as _sys
import textwrap as _textwrap
from gettext import gettext as _
# Compatibility shims for very old Pythons: provide ``set``, ``basestring``
# and ``sorted`` when the builtins are missing.
try:
    set
except NameError:
    # Python < 2.4: fall back to the ``sets`` module.
    from sets import Set as set

try:
    basestring
except NameError:
    # Python 3: ``basestring`` is gone; treat ``str`` as the string base.
    basestring = str

try:
    sorted
except NameError:
    # Python < 2.4: minimal sorted() fallback supporting only ``reverse``.
    def sorted(iterable, reverse=False):
        result = list(iterable)
        result.sort()
        if reverse:
            result.reverse()
        return result
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
# Sentinel marking help/usage entries that must not be displayed.
SUPPRESS = '==SUPPRESS=='

# nargs markers: 0-or-1, 0-or-more, 1-or-more, sub-parser args, and
# "everything remaining" respectively.
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
PARSER = 'A...'
REMAINDER = '...'

# Namespace attribute used to stash arguments that were not recognized.
_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
class _AttributeHolder(object):
"""Abstract base class that provides __repr__.
The __repr__ method returns a string in the format::
ClassName(attr=name, attr=name, ...)
The attributes are determined either by a class-level attribute,
'_kwarg_names', or by inspecting the instance __dict__.
"""
def __repr__(self):
type_name = type(self).__name__
arg_strings = []
for arg in self._get_args():
arg_strings.append(repr(arg))
for name, value in self._get_kwargs():
arg_strings.append('%s=%r' % (name, value))
return '%s(%s)' % (type_name, ', '.join(arg_strings))
def _get_kwargs(self):
return sorted(self.__dict__.items())
def _get_args(self):
return []
def _ensure_value(namespace, name, value):
if getattr(namespace, name, None) is None:
setattr(namespace, name, value)
return getattr(namespace, name)
class HelpFormatter(object):
    """Formatter for generating usage messages and argument help strings.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def __init__(self,
                 prog,
                 indent_increment=2,
                 max_help_position=24,
                 width=None):

        # Default to the terminal width (COLUMNS) minus a 2-column margin,
        # falling back to 78 when COLUMNS is unset or invalid.
        if width is None:
            try:
                width = int(_os.environ['COLUMNS'])
            except (KeyError, ValueError):
                width = 80
            width -= 2

        self._prog = prog
        self._indent_increment = indent_increment
        self._max_help_position = max_help_position
        self._width = width

        # Current indentation state (in columns / levels).
        self._current_indent = 0
        self._level = 0
        # Widest action invocation seen; used to align help columns.
        self._action_max_length = 0

        # Sections form a tree; items are appended to the current section.
        self._root_section = self._Section(self, None)
        self._current_section = self._root_section

        # Precompiled regexes for whitespace collapsing and for squeezing
        # runs of 3+ newlines down to a single blank line.
        self._whitespace_matcher = _re.compile(r'\s+')
        self._long_break_matcher = _re.compile(r'\n\n\n+')
def _indent(self):
    """Push one indentation level."""
    self._level += 1
    self._current_indent += self._indent_increment
def _dedent(self):
    """Pop one indentation level (never below zero)."""
    self._current_indent -= self._indent_increment
    assert self._current_indent >= 0, 'Indent decreased below 0.'
    self._level -= 1
class _Section(object):
    """One (possibly nested) section of help output.

    Items are (callable, args) pairs queued by HelpFormatter._add_item;
    format_help renders them, indenting while a parent exists and adding
    the heading unless it is suppressed or the section is empty.
    """

    def __init__(self, formatter, parent, heading=None):
        self.formatter = formatter
        self.parent = parent
        self.heading = heading
        self.items = []

    def format_help(self):
        # format the indented section
        if self.parent is not None:
            self.formatter._indent()
        join = self.formatter._join_parts
        # BUG FIX: the original ran a bare ``for func, args in self.items:
        # func(*args)`` loop before this comprehension, invoking every item
        # twice and discarding the first results. The redundant pass is
        # removed, matching the equivalent code in later CPython argparse.
        item_help = join([func(*args) for func, args in self.items])
        if self.parent is not None:
            self.formatter._dedent()

        # return nothing if the section was empty
        if not item_help:
            return ''

        # add the heading if the section was non-empty
        if self.heading is not SUPPRESS and self.heading is not None:
            current_indent = self.formatter._current_indent
            heading = '%*s%s:\n' % (current_indent, '', self.heading)
        else:
            heading = ''

        # join the section-initial newline, the heading and the help
        return join(['\n', heading, item_help, '\n'])
def _add_item(self, func, args):
    """Queue (func, args) on the current section; func(*args) renders it later."""
    self._current_section.items.append((func, args))
def start_section(self, heading):
    """Open a new nested section with *heading*; close it with end_section()."""
    self._indent()
    section = self._Section(self, self._current_section, heading)
    # The section renders itself when the parent section is formatted.
    self._add_item(section.format_help, [])
    self._current_section = section
def end_section(self):
    """Close the current section, returning to its parent."""
    self._current_section = self._current_section.parent
    self._dedent()
def add_text(self, text):
    """Queue a free-form text item unless it is suppressed or absent."""
    if text is SUPPRESS or text is None:
        return
    self._add_item(self._format_text, [text])
def add_usage(self, usage, actions, groups, prefix=None):
    """Queue a usage-line item unless usage is suppressed."""
    if usage is SUPPRESS:
        return
    self._add_item(self._format_usage, (usage, actions, groups, prefix))
def add_argument(self, action):
    """Queue a help item for *action* and track the widest invocation seen."""
    if action.help is not SUPPRESS:

        # find all invocations (the action's own plus any subactions')
        get_invocation = self._format_action_invocation
        invocations = [get_invocation(action)]
        for subaction in self._iter_indented_subactions(action):
            invocations.append(get_invocation(subaction))

        # update the maximum item length, used to align the help column
        invocation_length = max([len(s) for s in invocations])
        action_length = invocation_length + self._current_indent
        self._action_max_length = max(self._action_max_length,
                                      action_length)

        # add the item to the list
        self._add_item(self._format_action, [action])
def add_arguments(self, actions):
    """Queue a help item for every action in *actions*."""
    for single_action in actions:
        self.add_argument(single_action)
def format_help(self):
    """Render the root section, collapsing long blank runs."""
    rendered = self._root_section.format_help()
    if not rendered:
        return rendered
    # Squeeze runs of blank lines and normalize the trailing newline.
    rendered = self._long_break_matcher.sub('\n\n', rendered)
    return rendered.strip('\n') + '\n'
def _join_parts(self, part_strings):
    """Concatenate parts, skipping empty strings and SUPPRESS markers."""
    kept = [piece for piece in part_strings
            if piece and piece is not SUPPRESS]
    return ''.join(kept)
def _format_usage(self, usage, actions, groups, prefix):
    """Render the usage line, auto-generating and wrapping it when needed.

    *usage* may be an explicit format string (with %(prog)s), None with no
    actions (just the program name), or None with actions (generated from
    the optionals/positionals and wrapped to the available width).
    """
    if prefix is None:
        prefix = _('usage: ')

    # if usage is specified, use that
    if usage is not None:
        usage = usage % dict(prog=self._prog)

    # if no optionals or positionals are available, usage is just prog
    elif usage is None and not actions:
        usage = '%(prog)s' % dict(prog=self._prog)

    # if optionals and positionals are available, calculate usage
    elif usage is None:
        prog = '%(prog)s' % dict(prog=self._prog)

        # split optionals from positionals
        optionals = []
        positionals = []
        for action in actions:
            if action.option_strings:
                optionals.append(action)
            else:
                positionals.append(action)

        # build full usage string
        format = self._format_actions_usage
        action_usage = format(optionals + positionals, groups)
        usage = ' '.join([s for s in [prog, action_usage] if s])

        # wrap the usage parts if it's too long
        text_width = self._width - self._current_indent
        if len(prefix) + len(usage) > text_width:

            # break usage into wrappable parts: bracketed groups or words
            part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
            opt_usage = format(optionals, groups)
            pos_usage = format(positionals, groups)
            opt_parts = _re.findall(part_regexp, opt_usage)
            pos_parts = _re.findall(part_regexp, pos_usage)
            assert ' '.join(opt_parts) == opt_usage
            assert ' '.join(pos_parts) == pos_usage

            # helper for greedily wrapping lines to text_width
            def get_lines(parts, indent, prefix=None):
                lines = []
                line = []
                if prefix is not None:
                    line_len = len(prefix) - 1
                else:
                    line_len = len(indent) - 1
                for part in parts:
                    if line_len + 1 + len(part) > text_width:
                        lines.append(indent + ' '.join(line))
                        line = []
                        line_len = len(indent) - 1
                    line.append(part)
                    line_len += len(part) + 1
                if line:
                    lines.append(indent + ' '.join(line))
                if prefix is not None:
                    lines[0] = lines[0][len(indent):]
                return lines

            # if prog is short, follow it with optionals or positionals
            if len(prefix) + len(prog) <= 0.75 * text_width:
                indent = ' ' * (len(prefix) + len(prog) + 1)
                if opt_parts:
                    lines = get_lines([prog] + opt_parts, indent, prefix)
                    lines.extend(get_lines(pos_parts, indent))
                elif pos_parts:
                    lines = get_lines([prog] + pos_parts, indent, prefix)
                else:
                    lines = [prog]

            # if prog is long, put it on its own line
            else:
                indent = ' ' * len(prefix)
                parts = opt_parts + pos_parts
                lines = get_lines(parts, indent)
                if len(lines) > 1:
                    lines = []
                    lines.extend(get_lines(opt_parts, indent))
                    lines.extend(get_lines(pos_parts, indent))
                lines = [prog] + lines

            # join lines into usage
            usage = '\n'.join(lines)

    # prefix with 'usage:'
    return '%s%s\n\n' % (prefix, usage)
def _format_actions_usage(self, actions, groups):
    """Render the usage fragment for *actions*, honoring mutex *groups*.

    Group members get wrapped in '[...]' (optional) or '(...)' (required)
    with '|' separators, via positional inserts spliced in afterwards;
    stray spaces/empty brackets are cleaned up with regexes at the end.
    """
    # find group indices and identify actions in groups
    group_actions = set()
    inserts = {}
    for group in groups:
        try:
            start = actions.index(group._group_actions[0])
        except ValueError:
            continue
        else:
            # only treat the group specially if its actions are contiguous
            end = start + len(group._group_actions)
            if actions[start:end] == group._group_actions:
                for action in group._group_actions:
                    group_actions.add(action)
                if not group.required:
                    if start in inserts:
                        inserts[start] += ' ['
                    else:
                        inserts[start] = '['
                    inserts[end] = ']'
                else:
                    if start in inserts:
                        inserts[start] += ' ('
                    else:
                        inserts[start] = '('
                    inserts[end] = ')'
                for i in range(start + 1, end):
                    inserts[i] = '|'

    # collect all actions format strings
    parts = []
    for i, action in enumerate(actions):

        # suppressed arguments are marked with None; remove a '|'
        # separator that would now dangle next to them
        if action.help is SUPPRESS:
            parts.append(None)
            if inserts.get(i) == '|':
                inserts.pop(i)
            elif inserts.get(i + 1) == '|':
                inserts.pop(i + 1)

        # produce all arg strings
        elif not action.option_strings:
            part = self._format_args(action, action.dest)

            # if it's in a group, strip the outer '[]'
            if action in group_actions:
                if part[0] == '[' and part[-1] == ']':
                    part = part[1:-1]

            parts.append(part)

        # produce the first way to invoke the option in brackets
        else:
            option_string = action.option_strings[0]

            # if the Optional doesn't take a value, the format is: -s or --long
            if action.nargs == 0:
                part = '%s' % option_string

            # if the Optional takes a value, format is: -s ARGS or --long ARGS
            else:
                default = action.dest.upper()
                args_string = self._format_args(action, default)
                part = '%s %s' % (option_string, args_string)

            # make it look optional if it's not required or in a group
            if not action.required and action not in group_actions:
                part = '[%s]' % part

            parts.append(part)

    # insert things at the necessary indices (reverse order so earlier
    # indices are still valid after each splice)
    for i in sorted(inserts, reverse=True):
        parts[i:i] = [inserts[i]]

    # join all the action items with spaces
    text = ' '.join([item for item in parts if item is not None])

    # clean up separators for mutually exclusive groups
    open = r'[\[(]'
    close = r'[\])]'
    text = _re.sub(r'(%s) ' % open, r'\1', text)
    text = _re.sub(r' (%s)' % close, r'\1', text)
    text = _re.sub(r'%s *%s' % (open, close), r'', text)
    text = _re.sub(r'\(([^|]*)\)', r'\1', text)
    text = text.strip()

    # return the text
    return text
def _format_text(self, text):
    """Expand %(prog)s in *text* and wrap it to the current width."""
    if '%(prog)' in text:
        text = text % dict(prog=self._prog)
    available = self._width - self._current_indent
    pad = ' ' * self._current_indent
    return self._fill_text(text, available, pad) + '\n\n'
def _format_action(self, action):
    """Render one action's invocation plus its (wrapped, aligned) help text.

    Short invocations get the help on the same line at help_position;
    long ones push the help onto the following lines. Subactions are
    rendered recursively with extra indentation.
    """
    # determine the required width and the entry label
    help_position = min(self._action_max_length + 2,
                        self._max_help_position)
    help_width = self._width - help_position
    action_width = help_position - self._current_indent - 2
    action_header = self._format_action_invocation(action)

    # no help; start on same line and add a final newline
    if not action.help:
        tup = self._current_indent, '', action_header
        action_header = '%*s%s\n' % tup

    # short action name; start on the same line and pad two spaces
    elif len(action_header) <= action_width:
        tup = self._current_indent, '', action_width, action_header
        action_header = '%*s%-*s  ' % tup
        indent_first = 0

    # long action name; start on the next line
    else:
        tup = self._current_indent, '', action_header
        action_header = '%*s%s\n' % tup
        indent_first = help_position

    # collect the pieces of the action help
    parts = [action_header]

    # if there was help for the action, add lines of help text
    if action.help:
        help_text = self._expand_help(action)
        help_lines = self._split_lines(help_text, help_width)
        parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
        for line in help_lines[1:]:
            parts.append('%*s%s\n' % (help_position, '', line))

    # or add a newline if the description doesn't end with one
    elif not action_header.endswith('\n'):
        parts.append('\n')

    # if there are any sub-actions, add their help as well
    for subaction in self._iter_indented_subactions(action):
        parts.append(self._format_action(subaction))

    # return a single string
    return self._join_parts(parts)
def _format_action_invocation(self, action):
    """Render how an action is invoked: its metavar, or its option strings.

    Positionals show just the metavar; optionals show every option string,
    each followed by the args metavar when the option takes a value
    (e.g. '-s, --long ARGS').
    """
    if not action.option_strings:
        metavar, = self._metavar_formatter(action, action.dest)(1)
        return metavar

    else:
        parts = []

        # if the Optional doesn't take a value, format is: -s, --long
        if action.nargs == 0:
            parts.extend(action.option_strings)

        # if the Optional takes a value, format is: -s ARGS, --long ARGS
        else:
            default = action.dest.upper()
            args_string = self._format_args(action, default)
            for option_string in action.option_strings:
                parts.append('%s %s' % (option_string, args_string))

        return ', '.join(parts)
def _metavar_formatter(self, action, default_metavar):
if action.metavar is not None:
result = action.metavar
elif action.choices is not None:
choice_strs = [str(choice) for choice in action.choices]
result = '{%s}' % ','.join(choice_strs)
else:
result = default_metavar
def format(tuple_size):
if isinstance(result, tuple):
return result
else:
return (result, ) * tuple_size
return format
def _format_args(self, action, default_metavar):
    """Render the argument placeholder(s) for an action according to
    its nargs setting, e.g. 'X', '[X]', 'X [X ...]' or 'X X X'."""
    get_metavar = self._metavar_formatter(action, default_metavar)
    if action.nargs is None:
        # default: exactly one argument
        result = '%s' % get_metavar(1)
    elif action.nargs == OPTIONAL:
        result = '[%s]' % get_metavar(1)
    elif action.nargs == ZERO_OR_MORE:
        result = '[%s [%s ...]]' % get_metavar(2)
    elif action.nargs == ONE_OR_MORE:
        result = '%s [%s ...]' % get_metavar(2)
    elif action.nargs == REMAINDER:
        result = '...'
    elif action.nargs == PARSER:
        # sub-command name followed by its own arguments
        result = '%s ...' % get_metavar(1)
    else:
        # an integer nargs: repeat the metavar that many times
        formats = ['%s' for _ in range(action.nargs)]
        result = ' '.join(formats) % get_metavar(action.nargs)
    return result
def _expand_help(self, action):
    """Interpolate %(...)s placeholders in the help text using the
    action's attributes plus the program name."""
    params = dict(vars(action), prog=self._prog)
    # drop suppressed entries entirely
    for key in list(params):
        if params[key] is SUPPRESS:
            del params[key]
    # show types/classes by name rather than by repr
    for key in list(params):
        if hasattr(params[key], '__name__'):
            params[key] = params[key].__name__
    choices = params.get('choices')
    if choices is not None:
        params['choices'] = ', '.join(str(choice) for choice in choices)
    return self._get_help_string(action) % params
def _iter_indented_subactions(self, action):
    """Yield the action's sub-actions (if it has any), bumping the
    indentation level around them; yield nothing otherwise."""
    get_subactions = getattr(action, '_get_subactions', None)
    if get_subactions is None:
        return
    self._indent()
    for subaction in get_subactions():
        yield subaction
    self._dedent()
def _split_lines(self, text, width):
    """Collapse runs of whitespace, then wrap to *width* columns."""
    collapsed = self._whitespace_matcher.sub(' ', text).strip()
    return _textwrap.wrap(collapsed, width)
def _fill_text(self, text, width, indent):
    """Collapse runs of whitespace, then fill to *width* with *indent*."""
    collapsed = self._whitespace_matcher.sub(' ', text).strip()
    return _textwrap.fill(collapsed, width,
                          initial_indent=indent,
                          subsequent_indent=indent)
def _get_help_string(self, action):
    # Hook for subclasses (e.g. ArgumentDefaultsHelpFormatter) to
    # augment the raw help text before %-interpolation.
    return action.help
class RawDescriptionHelpFormatter(HelpFormatter):
    """Help message formatter which retains any formatting in descriptions.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _fill_text(self, text, width, indent):
        # keep the author's own line breaks; only prepend the indent
        return ''.join(indent + line for line in text.splitlines(True))
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
    """Help message formatter which retains formatting of all help text.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _split_lines(self, text, width):
        # no re-wrapping at all: honour the author's explicit newlines
        return text.splitlines()
class ArgumentDefaultsHelpFormatter(HelpFormatter):
    """Help message formatter which adds default values to argument help.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _get_help_string(self, action):
        """Return the action's help text, appending ' (default: ...)' when
        the action has a visible default and the text does not already
        mention it."""
        help = action.help
        if help is None:
            # Bug fix: the original did "'%(default)' not in action.help",
            # which raises TypeError for actions with help=None.  Treat a
            # missing help string as empty (same fix adopted by later
            # CPython versions of argparse).
            help = ''
        if '%(default)' not in help:
            if action.default is not SUPPRESS:
                # only append for optionals, or positionals that may
                # legitimately be absent ('?' / '*')
                defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]
                if action.option_strings or action.nargs in defaulting_nargs:
                    help += ' (default: %(default)s)'
        return help
def _get_action_name(argument):
if argument is None:
return None
elif argument.option_strings:
return '/'.join(argument.option_strings)
elif argument.metavar not in (None, SUPPRESS):
return argument.metavar
elif argument.dest not in (None, SUPPRESS):
return argument.dest
else:
return None
class ArgumentError(Exception):
    """An error from creating or using an argument (optional or positional).

    The string value of this exception is the message, augmented with
    information about the argument that caused it.
    """

    def __init__(self, argument, message):
        # resolve the offending argument to a display name up front
        self.argument_name = _get_action_name(argument)
        self.message = message

    def __str__(self):
        if self.argument_name is None:
            return '%(message)s' % dict(message=self.message)
        return 'argument %(argument_name)s: %(message)s' % dict(
            message=self.message,
            argument_name=self.argument_name)
class ArgumentTypeError(Exception):
    """An error from trying to convert a command line string to a type.

    Raised by type= callables; the parser turns it into a user-facing
    error message for the offending argument."""
    pass
class Action(_AttributeHolder):
    """Information about how to convert command line strings to Python objects.

    Action objects are used by an ArgumentParser to represent the information
    needed to parse a single argument from one or more strings from the
    command line. The keyword arguments to the Action constructor are also
    all attributes of Action instances.

    Keyword Arguments:

        - option_strings -- A list of command-line option strings which
            should be associated with this action.

        - dest -- The name of the attribute to hold the created object(s)

        - nargs -- The number of command-line arguments that should be
            consumed. By default, one argument will be consumed and a single
            value will be produced.  Other values include:
                - N (an integer) consumes N arguments (and produces a list)
                - '?' consumes zero or one arguments
                - '*' consumes zero or more arguments (and produces a list)
                - '+' consumes one or more arguments (and produces a list)
            Note that the difference between the default and nargs=1 is that
            with the default, a single value will be produced, while with
            nargs=1, a list containing a single value will be produced.

        - const -- The value to be produced if the option is specified and the
            option uses an action that takes no values.

        - default -- The value to be produced if the option is not specified.

        - type -- The type which the command-line arguments should be converted
            to, should be one of 'string', 'int', 'float', 'complex' or a
            callable object that accepts a single string argument. If None,
            'string' is assumed.

        - choices -- A container of values that should be allowed. If not None,
            after a command-line argument has been converted to the appropriate
            type, an exception will be raised if it is not a member of this
            collection.

        - required -- True if the action must always be specified at the
            command line. This is only meaningful for optional command-line
            arguments.

        - help -- The help string describing the argument.

        - metavar -- The name to be used for the option's argument with the
            help string. If None, the 'dest' value will be used as the name.
    """

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        self.option_strings = option_strings
        self.dest = dest
        self.nargs = nargs
        self.const = const
        self.default = default
        self.type = type
        self.choices = choices
        self.required = required
        self.help = help
        self.metavar = metavar

    def _get_kwargs(self):
        # attributes shown by _AttributeHolder's repr; note that
        # 'required' is deliberately omitted from the listing
        names = [
            'option_strings',
            'dest',
            'nargs',
            'const',
            'default',
            'type',
            'choices',
            'help',
            'metavar',
        ]
        return [(name, getattr(self, name)) for name in names]

    def __call__(self, parser, namespace, values, option_string=None):
        """Subclasses must override to apply *values* to *namespace*."""
        raise NotImplementedError(_('.__call__() not defined'))
class _StoreAction(Action):
    """Default action: store the converted value(s) on the namespace."""

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # nargs=0 would store nothing; point users at the flag actions
        if nargs == 0:
            raise ValueError('nargs for store actions must be > 0; if you '
                             'have nothing to store, actions such as store '
                             'true or store const may be more appropriate')
        # const only makes sense with nargs='?' (used when the option is
        # given without a value)
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_StoreAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=nargs,
            const=const,
            default=default,
            type=type,
            choices=choices,
            required=required,
            help=help,
            metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
class _StoreConstAction(Action):
    """Store a fixed constant when the option is seen (nargs is 0)."""

    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        # NOTE: metavar is accepted but not forwarded; with nargs=0 there
        # is no argument placeholder to display
        super(_StoreConstAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            const=const,
            default=default,
            required=required,
            help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, self.const)
class _StoreTrueAction(_StoreConstAction):
    """store_const specialised to True (default False)."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=False,
                 required=False,
                 help=None):
        super(_StoreTrueAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            const=True,
            default=default,
            required=required,
            help=help)
class _StoreFalseAction(_StoreConstAction):
    """store_const specialised to False (default True)."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=True,
                 required=False,
                 help=None):
        super(_StoreFalseAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            const=False,
            default=default,
            required=required,
            help=help)
class _AppendAction(Action):
    """Append each occurrence's value to a list on the namespace."""

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # nargs=0 would append nothing useful
        if nargs == 0:
            raise ValueError('nargs for append actions must be > 0; if arg '
                             'strings are not supplying the value to append, '
                             'the append const action may be more appropriate')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_AppendAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=nargs,
            const=const,
            default=default,
            type=type,
            choices=choices,
            required=required,
            help=help,
            metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # copy so a shared default list isn't mutated across parses
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(values)
        setattr(namespace, self.dest, items)
class _AppendConstAction(Action):
    """Append a fixed constant each time the option is seen (nargs is 0)."""

    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        super(_AppendConstAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            const=const,
            default=default,
            required=required,
            help=help,
            metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # copy so a shared default list isn't mutated across parses
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(self.const)
        setattr(namespace, self.dest, items)
class _CountAction(Action):
    """Count occurrences of the option, e.g. -vvv -> 3."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=None,
                 required=False,
                 help=None):
        super(_CountAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
            default=default,
            required=required,
            help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        # missing/None attribute starts counting from 0
        new_count = _ensure_value(namespace, self.dest, 0) + 1
        setattr(namespace, self.dest, new_count)
class _HelpAction(Action):
    """Print the parser's help and exit immediately."""

    def __init__(self,
                 option_strings,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help=None):
        super(_HelpAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            default=default,
            nargs=0,
            help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        parser.print_help()
        parser.exit()
class _VersionAction(Action):
    """Print version information and exit immediately."""

    def __init__(self,
                 option_strings,
                 version=None,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help="show program's version number and exit"):
        super(_VersionAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            default=default,
            nargs=0,
            help=help)
        self.version = version

    def __call__(self, parser, namespace, values, option_string=None):
        version = self.version
        if version is None:
            # fall back to the (deprecated) parser-level version attribute
            version = parser.version
        formatter = parser._get_formatter()
        formatter.add_text(version)
        parser.exit(message=formatter.format_help())
class _SubParsersAction(Action):
    """Action behind add_subparsers(): dispatches the remaining command
    line to the sub-parser named by the first positional value."""

    class _ChoicesPseudoAction(Action):
        # lightweight stand-in so each sub-command's name/help pair can be
        # listed in the parent parser's help output

        def __init__(self, name, help):
            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
            sup.__init__(option_strings=[], dest=name, help=help)

    def __init__(self,
                 option_strings,
                 prog,
                 parser_class,
                 dest=SUPPRESS,
                 help=None,
                 metavar=None):
        self._prog_prefix = prog
        self._parser_class = parser_class
        # choices aliases this map, so registered parsers are validated
        self._name_parser_map = {}
        self._choices_actions = []
        super(_SubParsersAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=PARSER,
            choices=self._name_parser_map,
            help=help,
            metavar=metavar)

    def add_parser(self, name, **kwargs):
        """Create, register and return a sub-parser for *name*."""
        # set prog from the existing prefix
        if kwargs.get('prog') is None:
            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)

        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            choice_action = self._ChoicesPseudoAction(name, help)
            self._choices_actions.append(choice_action)

        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser
        return parser

    def _get_subactions(self):
        # hook used by HelpFormatter._iter_indented_subactions
        return self._choices_actions

    def __call__(self, parser, namespace, values, option_string=None):
        parser_name = values[0]
        arg_strings = values[1:]

        # set the parser name if requested
        if self.dest is not SUPPRESS:
            setattr(namespace, self.dest, parser_name)

        # select the parser
        try:
            parser = self._name_parser_map[parser_name]
        except KeyError:
            tup = parser_name, ', '.join(self._name_parser_map)
            msg = _('unknown parser %r (choices: %s)' % tup)
            raise ArgumentError(self, msg)

        # parse the remaining arguments into the same namespace; anything
        # the sub-parser could not recognise is stashed so the parent
        # parser can report it
        namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
        if arg_strings:
            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
class FileType(object):
    """Factory for creating file object types

    Instances of FileType are typically passed as type= arguments to the
    ArgumentParser add_argument() method.

    Keyword Arguments:
        - mode -- A string indicating how the file is to be opened. Accepts the
            same values as the builtin open() function.
        - bufsize -- The file's desired buffer size. Accepts the same values as
            the builtin open() function.
    """

    def __init__(self, mode='r', bufsize=None):
        self._mode = mode
        self._bufsize = bufsize

    def __call__(self, string):
        # the special argument '-' means stdin for reading modes and
        # stdout for writing modes
        if string == '-':
            if 'r' in self._mode:
                return _sys.stdin
            if 'w' in self._mode:
                return _sys.stdout
            msg = _('argument "-" with mode %r' % self._mode)
            raise ValueError(msg)

        # any other string is treated as a path; a falsy bufsize falls
        # back to open()'s default buffering
        if self._bufsize:
            return open(string, self._mode, self._bufsize)
        return open(string, self._mode)

    def __repr__(self):
        shown = [repr(arg)
                 for arg in (self._mode, self._bufsize)
                 if arg is not None]
        return '%s(%s)' % (type(self).__name__, ', '.join(shown))
class Namespace(_AttributeHolder):
    """Simple object for storing attributes.

    Implements equality by attribute names and values, and provides a simple
    string representation.
    """

    def __init__(self, **kwargs):
        for name in kwargs:
            setattr(self, name, kwargs[name])

    # mutable attribute bag: explicitly unhashable
    __hash__ = None

    def __eq__(self, other):
        # Bug fix: comparing against a non-Namespace object used to raise
        # TypeError from vars(other); return NotImplemented instead so
        # Python falls back to the other operand (matches later CPython).
        if not isinstance(other, Namespace):
            return NotImplemented
        return vars(self) == vars(other)

    def __ne__(self, other):
        if not isinstance(other, Namespace):
            return NotImplemented
        return not (self == other)

    def __contains__(self, key):
        return key in self.__dict__
class _ActionsContainer(object):
    """Shared machinery for ArgumentParser and _ArgumentGroup: action
    registries, add_argument(), defaults, and option-conflict handling."""

    def __init__(self,
                 description,
                 prefix_chars,
                 argument_default,
                 conflict_handler):
        super(_ActionsContainer, self).__init__()

        self.description = description
        self.argument_default = argument_default
        self.prefix_chars = prefix_chars
        self.conflict_handler = conflict_handler

        # registries map (registry_name, key) -> object; 'action' maps the
        # action= keyword values to Action subclasses
        self._registries = {}
        self.register('action', None, _StoreAction)
        self.register('action', 'store', _StoreAction)
        self.register('action', 'store_const', _StoreConstAction)
        self.register('action', 'store_true', _StoreTrueAction)
        self.register('action', 'store_false', _StoreFalseAction)
        self.register('action', 'append', _AppendAction)
        self.register('action', 'append_const', _AppendConstAction)
        self.register('action', 'count', _CountAction)
        self.register('action', 'help', _HelpAction)
        self.register('action', 'version', _VersionAction)
        self.register('action', 'parsers', _SubParsersAction)

        # raise an exception if the conflict handler is invalid
        self._get_handler()

        # action storage
        self._actions = []
        self._option_string_actions = {}

        # groups
        self._action_groups = []
        self._mutually_exclusive_groups = []

        # defaults storage
        self._defaults = {}

        # determines whether an "option" looks like a negative number
        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')

        # whether or not there are any optionals that look like negative
        # numbers -- uses a list so it can be shared and edited
        self._has_negative_number_optionals = []

    def register(self, registry_name, value, object):
        """Register *object* under (registry_name, value)."""
        registry = self._registries.setdefault(registry_name, {})
        registry[value] = object

    def _registry_get(self, registry_name, value, default=None):
        return self._registries[registry_name].get(value, default)

    def set_defaults(self, **kwargs):
        """Set parser-level defaults, also updating existing actions."""
        self._defaults.update(kwargs)

        # if these defaults match any existing arguments, replace the
        # previous default on the matching actions
        for action in self._actions:
            if action.dest in kwargs:
                action.default = kwargs[action.dest]

    def get_default(self, dest):
        """Return the effective default for *dest* (action first, then
        parser-level defaults)."""
        for action in self._actions:
            if action.dest == dest and action.default is not None:
                return action.default
        return self._defaults.get(dest, None)

    def add_argument(self, *args, **kwargs):
        """
        add_argument(dest, ..., name=value, ...)
        add_argument(option_string, option_string, ..., name=value, ...)
        """

        # if no positional args are supplied or only one is supplied and
        # it doesn't look like an option string, parse a positional
        # argument
        chars = self.prefix_chars
        if not args or len(args) == 1 and args[0][0] not in chars:
            if args and 'dest' in kwargs:
                raise ValueError('dest supplied twice for positional argument')
            kwargs = self._get_positional_kwargs(*args, **kwargs)

        # otherwise, we're adding an optional argument
        else:
            kwargs = self._get_optional_kwargs(*args, **kwargs)

        # if no default was supplied, use the parser-level default
        if 'default' not in kwargs:
            dest = kwargs['dest']
            if dest in self._defaults:
                kwargs['default'] = self._defaults[dest]
            elif self.argument_default is not None:
                kwargs['default'] = self.argument_default

        # create the action object, validating action and type lookups
        action_class = self._pop_action_class(kwargs)
        if not _callable(action_class):
            raise ValueError('unknown action "%s"' % action_class)
        action = action_class(**kwargs)

        # raise an error if the action type is not callable
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            raise ValueError('%r is not callable' % type_func)

        return self._add_action(action)

    def add_argument_group(self, *args, **kwargs):
        group = _ArgumentGroup(self, *args, **kwargs)
        self._action_groups.append(group)
        return group

    def add_mutually_exclusive_group(self, **kwargs):
        group = _MutuallyExclusiveGroup(self, **kwargs)
        self._mutually_exclusive_groups.append(group)
        return group

    def _add_action(self, action):
        # resolve any conflicts before registering
        self._check_conflict(action)

        # add to the list of actions
        self._actions.append(action)
        action.container = self

        # index the action by any option strings it has
        for option_string in action.option_strings:
            self._option_string_actions[option_string] = action

        # record negative-number-like optionals (e.g. '-1') so parsing can
        # distinguish them from negative-number arguments
        for option_string in action.option_strings:
            if self._negative_number_matcher.match(option_string):
                if not self._has_negative_number_optionals:
                    self._has_negative_number_optionals.append(True)

        # return the created action
        return action

    def _remove_action(self, action):
        self._actions.remove(action)

    def _add_container_actions(self, container):
        """Copy all actions/groups from *container* (used by parents=)."""
        # collect groups by titles
        title_group_map = {}
        for group in self._action_groups:
            if group.title in title_group_map:
                msg = _('cannot merge actions - two groups are named %r')
                raise ValueError(msg % (group.title))
            title_group_map[group.title] = group

        # map each action to its group
        group_map = {}
        for group in container._action_groups:

            # if a group with the same title exists, reuse it; otherwise
            # create a matching group here
            if group.title not in title_group_map:
                title_group_map[group.title] = self.add_argument_group(
                    title=group.title,
                    description=group.description,
                    conflict_handler=group.conflict_handler)

            # map the actions to their new group
            for action in group._group_actions:
                group_map[action] = title_group_map[group.title]

        # recreate the container's mutually exclusive groups
        for group in container._mutually_exclusive_groups:
            mutex_group = self.add_mutually_exclusive_group(
                required=group.required)

            # map the actions to their new mutex group
            for action in group._group_actions:
                group_map[action] = mutex_group

        # add container's actions to this container or their mapped group
        for action in container._actions:
            group_map.get(action, self)._add_action(action)

    def _get_positional_kwargs(self, dest, **kwargs):
        # 'required' is an invalid argument for positionals
        if 'required' in kwargs:
            msg = _("'required' is an invalid argument for positionals")
            raise TypeError(msg)

        # mark positional arguments as required if at least one is
        # always required
        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
            kwargs['required'] = True
        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
            kwargs['required'] = True

        # return the keyword arguments with no option strings
        return dict(kwargs, dest=dest, option_strings=[])

    def _get_optional_kwargs(self, *args, **kwargs):
        # determine short and long option strings
        option_strings = []
        long_option_strings = []
        for option_string in args:
            # error on strings that don't start with an appropriate prefix
            if not option_string[0] in self.prefix_chars:
                msg = _('invalid option string %r: '
                        'must start with a character %r')
                tup = option_string, self.prefix_chars
                raise ValueError(msg % tup)

            # strings starting with two prefix characters are long options
            option_strings.append(option_string)
            if option_string[0] in self.prefix_chars:
                if len(option_string) > 1:
                    if option_string[1] in self.prefix_chars:
                        long_option_strings.append(option_string)

        # infer dest, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
        dest = kwargs.pop('dest', None)
        if dest is None:
            if long_option_strings:
                dest_option_string = long_option_strings[0]
            else:
                dest_option_string = option_strings[0]
            dest = dest_option_string.lstrip(self.prefix_chars)
            if not dest:
                msg = _('dest= is required for options like %r')
                raise ValueError(msg % option_string)
            dest = dest.replace('-', '_')

        # return the updated keyword arguments
        return dict(kwargs, dest=dest, option_strings=option_strings)

    def _pop_action_class(self, kwargs, default=None):
        # resolve action= (string, class, or None) via the registry
        action = kwargs.pop('action', default)
        return self._registry_get('action', action, action)

    def _get_handler(self):
        # determine function from conflict handler string
        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
        try:
            return getattr(self, handler_func_name)
        except AttributeError:
            msg = _('invalid conflict_resolution value: %r')
            raise ValueError(msg % self.conflict_handler)

    def _check_conflict(self, action):
        # find all options that conflict with this option
        confl_optionals = []
        for option_string in action.option_strings:
            if option_string in self._option_string_actions:
                confl_optional = self._option_string_actions[option_string]
                confl_optionals.append((option_string, confl_optional))

        # resolve any conflicts via the configured handler
        if confl_optionals:
            conflict_handler = self._get_handler()
            conflict_handler(action, confl_optionals)

    def _handle_conflict_error(self, action, conflicting_actions):
        # conflict_handler='error': refuse duplicate option strings
        message = _('conflicting option string(s): %s')
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
        raise ArgumentError(action, message % conflict_string)

    def _handle_conflict_resolve(self, action, conflicting_actions):
        # conflict_handler='resolve': strip the option string from the
        # older action, removing the action entirely if it has none left
        for option_string, action in conflicting_actions:

            # remove all conflicting options
            action.option_strings.remove(option_string)
            self._option_string_actions.pop(option_string, None)

            # if the option now has no option string, remove it from the
            # container holding it
            if not action.option_strings:
                action.container._remove_action(action)
class _ArgumentGroup(_ActionsContainer):
    """A titled grouping of actions for help display.  Shares its action
    storage with the owning container, so arguments added to the group
    are also visible to the parent parser."""

    def __init__(self, container, title=None, description=None, **kwargs):
        # inherit container settings unless explicitly overridden
        update = kwargs.setdefault
        update('conflict_handler', container.conflict_handler)
        update('prefix_chars', container.prefix_chars)
        update('argument_default', container.argument_default)
        super_init = super(_ArgumentGroup, self).__init__
        super_init(description=description, **kwargs)

        # group attributes
        self.title = title
        self._group_actions = []

        # share most attributes with the container -- these are aliases,
        # not copies, so mutations propagate both ways
        self._registries = container._registries
        self._actions = container._actions
        self._option_string_actions = container._option_string_actions
        self._defaults = container._defaults
        self._has_negative_number_optionals = \
            container._has_negative_number_optionals

    def _add_action(self, action):
        action = super(_ArgumentGroup, self)._add_action(action)
        self._group_actions.append(action)
        return action

    def _remove_action(self, action):
        super(_ArgumentGroup, self)._remove_action(action)
        self._group_actions.remove(action)
class _MutuallyExclusiveGroup(_ArgumentGroup):
    """Group whose member options may not be used together; actions are
    registered on the owning container, not on the group itself."""

    def __init__(self, container, required=False):
        super(_MutuallyExclusiveGroup, self).__init__(container)
        self.required = required
        self._container = container

    def _add_action(self, action):
        # a required action could never be combined with its siblings
        if action.required:
            msg = _('mutually exclusive arguments must be optional')
            raise ValueError(msg)
        action = self._container._add_action(action)
        self._group_actions.append(action)
        return action

    def _remove_action(self, action):
        self._container._remove_action(action)
        self._group_actions.remove(action)
class ArgumentParser(_AttributeHolder, _ActionsContainer):
"""Object for parsing command line strings into Python objects.
Keyword Arguments:
- prog -- The name of the program (default: sys.argv[0])
- usage -- A usage message (default: auto-generated from arguments)
- description -- A description of what the program does
- epilog -- Text following the argument descriptions
- parents -- Parsers whose arguments should be copied into this one
- formatter_class -- HelpFormatter class for printing help messages
- prefix_chars -- Characters that prefix optional arguments
- fromfile_prefix_chars -- Characters that prefix files containing
additional arguments
- argument_default -- The default value for all arguments
- conflict_handler -- String indicating how to handle conflicts
- add_help -- Add a -h/-help option
"""
def __init__(self,
             prog=None,
             usage=None,
             description=None,
             epilog=None,
             version=None,
             parents=[],
             formatter_class=HelpFormatter,
             prefix_chars='-',
             fromfile_prefix_chars=None,
             argument_default=None,
             conflict_handler='error',
             add_help=True):
    """Build the parser.

    NOTE: 'parents=[]' is a shared mutable default; it is only iterated
    here and never mutated, so it is safe in practice.
    """
    # version= is deprecated in favour of an explicit 'version' action
    if version is not None:
        import warnings
        warnings.warn(
            """The "version" argument to ArgumentParser is deprecated. """
            """Please use """
            """"add_argument(..., action='version', version="N", ...)" """
            """instead""", DeprecationWarning)

    superinit = super(ArgumentParser, self).__init__
    superinit(description=description,
              prefix_chars=prefix_chars,
              argument_default=argument_default,
              conflict_handler=conflict_handler)

    # default setting for prog
    if prog is None:
        prog = _os.path.basename(_sys.argv[0])

    self.prog = prog
    self.usage = usage
    self.epilog = epilog
    self.version = version
    self.formatter_class = formatter_class
    self.fromfile_prefix_chars = fromfile_prefix_chars
    self.add_help = add_help

    add_group = self.add_argument_group
    self._positionals = add_group(_('positional arguments'))
    self._optionals = add_group(_('optional arguments'))
    self._subparsers = None

    # register the identity converter as the default type=
    def identity(string):
        return string
    self.register('type', None, identity)

    # add help and version arguments if necessary, preferring '-' as the
    # prefix when it is among the allowed prefix characters
    if '-' in prefix_chars:
        default_prefix = '-'
    else:
        default_prefix = prefix_chars[0]
    if self.add_help:
        self.add_argument(
            default_prefix+'h', default_prefix*2+'help',
            action='help', default=SUPPRESS,
            help=_('show this help message and exit'))
    if self.version:
        self.add_argument(
            default_prefix+'v', default_prefix*2+'version',
            action='version', default=SUPPRESS,
            version=self.version,
            help=_("show program's version number and exit"))

    # add parent arguments and defaults
    for parent in parents:
        self._add_container_actions(parent)
        try:
            defaults = parent._defaults
        except AttributeError:
            pass
        else:
            self._defaults.update(defaults)
def _get_kwargs(self):
names = [
'prog',
'usage',
'description',
'version',
'formatter_class',
'conflict_handler',
'add_help',
]
return [(name, getattr(self, name)) for name in names]
def add_subparsers(self, **kwargs):
    """Register and return the _SubParsersAction used for sub-commands;
    only one subparsers argument is allowed per parser."""
    if self._subparsers is not None:
        self.error(_('cannot have multiple subparser arguments'))

    # sub-parsers use the same parser class by default
    kwargs.setdefault('parser_class', type(self))

    if 'title' in kwargs or 'description' in kwargs:
        title = _(kwargs.pop('title', 'subcommands'))
        description = _(kwargs.pop('description', None))
        self._subparsers = self.add_argument_group(title, description)
    else:
        self._subparsers = self._positionals

    # prog defaults to the usage message of this parser, skipping
    # optional arguments and with no "usage:" prefix
    if kwargs.get('prog') is None:
        formatter = self._get_formatter()
        positionals = self._get_positional_actions()
        groups = self._mutually_exclusive_groups
        formatter.add_usage(self.usage, positionals, groups, '')
        kwargs['prog'] = formatter.format_help().strip()

    # create the parsers action and add it to the positionals list
    parsers_class = self._pop_action_class(kwargs, 'parsers')
    action = parsers_class(option_strings=[], **kwargs)
    self._subparsers._add_action(action)

    # return the created parsers action
    return action
def _add_action(self, action):
if action.option_strings:
self._optionals._add_action(action)
else:
self._positionals._add_action(action)
return action
def _get_optional_actions(self):
return [action
for action in self._actions
if action.option_strings]
def _get_positional_actions(self):
return [action
for action in self._actions
if not action.option_strings]
def parse_args(self, args=None, namespace=None):
    """Fully parse the arguments; any unrecognized strings are an error."""
    namespace, leftovers = self.parse_known_args(args, namespace)
    if leftovers:
        self.error(_('unrecognized arguments: %s') % ' '.join(leftovers))
    return namespace
def parse_known_args(self, args=None, namespace=None):
    """Like parse_args(), but returns (namespace, extra_args) instead of
    erroring out when unrecognized arguments remain."""
    # args default to the system args
    if args is None:
        args = _sys.argv[1:]

    # default Namespace built from parser defaults
    if namespace is None:
        namespace = Namespace()

    # add any action defaults that aren't already present
    for action in self._actions:
        if action.dest is not SUPPRESS:
            if not hasattr(namespace, action.dest):
                if action.default is not SUPPRESS:
                    default = action.default
                    # string defaults get run through the action's type
                    # converter lazily here (Python 2: basestring covers
                    # both str and unicode)
                    if isinstance(action.default, basestring):
                        default = self._get_value(action, default)
                    setattr(namespace, action.dest, default)

    # add any parser defaults that aren't already present
    for dest in self._defaults:
        if not hasattr(namespace, dest):
            setattr(namespace, dest, self._defaults[dest])

    # parse the arguments and exit if there are any errors
    try:
        namespace, args = self._parse_known_args(args, namespace)
        # merge extras stashed by sub-parsers into our own extras list
        if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
            args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
            delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
        return namespace, args
    except ArgumentError:
        err = _sys.exc_info()[1]
        self.error(str(err))
def _parse_known_args(self, arg_strings, namespace):
    """Core parsing loop: classify each arg string as option ('O') or
    plain argument ('A'), then alternately consume optionals and
    positionals against that pattern."""
    # replace arg strings that are actually references to files
    if self.fromfile_prefix_chars is not None:
        arg_strings = self._read_args_from_files(arg_strings)

    # map all mutually exclusive arguments to the other arguments
    # they can't occur with
    action_conflicts = {}
    for mutex_group in self._mutually_exclusive_groups:
        group_actions = mutex_group._group_actions
        for i, mutex_action in enumerate(mutex_group._group_actions):
            conflicts = action_conflicts.setdefault(mutex_action, [])
            conflicts.extend(group_actions[:i])
            conflicts.extend(group_actions[i + 1:])

    # find all option indices, and determine the arg_string_pattern
    # which has an 'O' if there is an option at an index,
    # an 'A' if there is an argument, or a '-' if there is a '--'
    option_string_indices = {}
    arg_string_pattern_parts = []
    arg_strings_iter = iter(arg_strings)
    for i, arg_string in enumerate(arg_strings_iter):

        # all args after -- are non-options
        if arg_string == '--':
            arg_string_pattern_parts.append('-')
            for arg_string in arg_strings_iter:
                arg_string_pattern_parts.append('A')

        # otherwise, add the arg to the arg strings
        # and note whether or not its an option string
        else:
            option_tuple = self._parse_optional(arg_string)
            if option_tuple is None:
                pattern = 'A'
            else:
                option_string_indices[i] = option_tuple
                pattern = 'O'
            arg_string_pattern_parts.append(pattern)

    # join the pieces together to form the pattern
    arg_strings_pattern = ''.join(arg_string_pattern_parts)

    # converts arg strings to the appropriate and then takes the action
    seen_actions = set()
    seen_non_default_actions = set()

    def take_action(action, argument_strings, option_string=None):
        seen_actions.add(action)
        argument_values = self._get_values(action, argument_strings)

        # error if this argument is not allowed with other previously
        # seen arguments, assuming that actions that use the default
        # value don't really count as "present"
        if argument_values is not action.default:
            seen_non_default_actions.add(action)
            for conflict_action in action_conflicts.get(action, []):
                if conflict_action in seen_non_default_actions:
                    msg = _('not allowed with argument %s')
                    action_name = _get_action_name(conflict_action)
                    raise ArgumentError(action, msg % action_name)

        # take the action if we didn't receive a SUPPRESS value
        # (e.g. from a default)
        if argument_values is not SUPPRESS:
            action(self, namespace, argument_values, option_string)

    # function to convert arg_strings into an optional action
    def consume_optional(start_index):

        # get the optional identified at this index
        option_tuple = option_string_indices[start_index]
        action, option_string, explicit_arg = option_tuple

        # identify additional optionals in the same arg string
        # (e.g. -xyz is the same as -x -y -z if no args are required)
        match_argument = self._match_argument
        action_tuples = []
        while True:

            # if we found no optional action, skip it
            if action is None:
                extras.append(arg_strings[start_index])
                return start_index + 1

            # if there is an explicit argument, try to match the
            # optional's string arguments to only this
            if explicit_arg is not None:
                arg_count = match_argument(action, 'A')

                # if the action is a single-dash option and takes no
                # arguments, try to parse more single-dash options out
                # of the tail of the option string
                chars = self.prefix_chars
                if arg_count == 0 and option_string[1] not in chars:
                    action_tuples.append((action, [], option_string))
                    char = option_string[0]
                    option_string = char + explicit_arg[0]
                    new_explicit_arg = explicit_arg[1:] or None
                    optionals_map = self._option_string_actions
                    if option_string in optionals_map:
                        action = optionals_map[option_string]
                        explicit_arg = new_explicit_arg
                    else:
                        msg = _('ignored explicit argument %r')
                        raise ArgumentError(action, msg % explicit_arg)

                # if the action expects exactly one argument, we've
                # successfully matched the option; exit the loop
                elif arg_count == 1:
                    stop = start_index + 1
                    args = [explicit_arg]
                    action_tuples.append((action, args, option_string))
                    break

                # error if a double-dash option did not use the
                # explicit argument
                else:
                    msg = _('ignored explicit argument %r')
                    raise ArgumentError(action, msg % explicit_arg)

            # if there is no explicit argument, try to match the
            # optional's string arguments with the following strings
            # if successful, exit the loop
            else:
                start = start_index + 1
                selected_patterns = arg_strings_pattern[start:]
                arg_count = match_argument(action, selected_patterns)
                stop = start + arg_count
                args = arg_strings[start:stop]
                action_tuples.append((action, args, option_string))
                break

        # add the Optional to the list and return the index at which
        # the Optional's string args stopped
        assert action_tuples
        for action, args, option_string in action_tuples:
            take_action(action, args, option_string)
        return stop

    # the list of Positionals left to be parsed; this is modified
    # by consume_positionals()
    positionals = self._get_positional_actions()

    # function to convert arg_strings into positional actions
    def consume_positionals(start_index):
        # match as many Positionals as possible
        match_partial = self._match_arguments_partial
        selected_pattern = arg_strings_pattern[start_index:]
        arg_counts = match_partial(positionals, selected_pattern)

        # slice off the appropriate arg strings for each Positional
        # and add the Positional and its args to the list
        for action, arg_count in zip(positionals, arg_counts):
            args = arg_strings[start_index: start_index + arg_count]
            start_index += arg_count
            take_action(action, args)

        # slice off the Positionals that we just parsed and return the
        # index at which the Positionals' string args stopped
        positionals[:] = positionals[len(arg_counts):]
        return start_index

    # consume Positionals and Optionals alternately, until we have
    # passed the last option string
    extras = []
    start_index = 0
    if option_string_indices:
        max_option_string_index = max(option_string_indices)
    else:
        max_option_string_index = -1
    while start_index <= max_option_string_index:

        # consume any Positionals preceding the next option
        next_option_string_index = min([
            index
            for index in option_string_indices
            if index >= start_index])
        if start_index != next_option_string_index:
            positionals_end_index = consume_positionals(start_index)

            # only try to parse the next optional if we didn't consume
            # the option string during the positionals parsing
            if positionals_end_index > start_index:
                start_index = positionals_end_index
                continue
            else:
                start_index = positionals_end_index

        # if we consumed all the positionals we could and we're not
        # at the index of an option string, there were extra arguments
        if start_index not in option_string_indices:
            strings = arg_strings[start_index:next_option_string_index]
            extras.extend(strings)
            start_index = next_option_string_index

        # consume the next optional and any immediately following it
        start_index = consume_optional(start_index)

    # consume any positionals following the last Optional
    stop_index = consume_positionals(start_index)

    # if we didn't use all the Positional objects, there were too few
    # arg strings supplied
    extras.extend(arg_strings[stop_index:])
    if positionals:
        self.error(_('too few arguments'))

    # make sure all required actions were present
    for action in self._actions:
        if action.required:
            if action not in seen_actions:
                name = _get_action_name(action)
                self.error(_('argument %s is required') % name)

    # make sure all required groups had one option present
    for group in self._mutually_exclusive_groups:
        if group.required:
            for action in group._group_actions:
                if action in seen_non_default_actions:
                    break

            # if no actions were used, report the error
            else:
                names = [_get_action_name(action)
                         for action in group._group_actions
                         if action.help is not SUPPRESS]
                msg = _('one of the arguments %s is required')
                self.error(msg % ' '.join(names))

    # return the updated namespace and the extra arguments
    return namespace, extras
    def _read_args_from_files(self, arg_strings):
        """Expand @file-style arguments.

        Any argument starting with a character from
        ``self.fromfile_prefix_chars`` is replaced by the arguments read
        from the named file (one per line by default); expansion is
        recursive, so files may reference further files.
        """
        new_arg_strings = []
        for arg_string in arg_strings:
            # plain argument: pass it through untouched
            if arg_string[0] not in self.fromfile_prefix_chars:
                new_arg_strings.append(arg_string)
            else:
                try:
                    # strip the prefix char to get the file name
                    args_file = open(arg_string[1:])
                    try:
                        arg_strings = []
                        for arg_line in args_file.read().splitlines():
                            # subclasses may override line-splitting policy
                            for arg in self.convert_arg_line_to_args(arg_line):
                                arg_strings.append(arg)
                        # recurse: the file may itself contain @file args
                        arg_strings = self._read_args_from_files(arg_strings)
                        new_arg_strings.extend(arg_strings)
                    finally:
                        args_file.close()
                except IOError:
                    # Py2-compatible way of getting the active exception
                    err = _sys.exc_info()[1]
                    self.error(str(err))
        return new_arg_strings
def convert_arg_line_to_args(self, arg_line):
return [arg_line]
    def _match_argument(self, action, arg_strings_pattern):
        """Return how many argument strings the action consumes.

        Matches the action's nargs regex against the 'A'/'O' pattern of
        remaining argument strings; raises ArgumentError with a message
        appropriate to the nargs value if nothing matches.
        """
        nargs_pattern = self._get_nargs_pattern(action)
        match = _re.match(nargs_pattern, arg_strings_pattern)
        # raise an exception if we weren't able to find a match
        if match is None:
            nargs_errors = {
                None: _('expected one argument'),
                OPTIONAL: _('expected at most one argument'),
                ONE_OR_MORE: _('expected at least one argument'),
            }
            # integer nargs falls through to the generic message
            default = _('expected %s argument(s)') % action.nargs
            msg = nargs_errors.get(action.nargs, default)
            raise ArgumentError(action, msg)
        # the length of the matched group is the number of strings consumed
        return len(match.group(1))
    def _match_arguments_partial(self, actions, arg_strings_pattern):
        """Match as many of the given positional actions as possible.

        Tries progressively smaller prefixes of ``actions`` until their
        combined nargs pattern matches the start of the strings pattern.
        Returns a list of per-action string counts (possibly empty).
        """
        result = []
        # progressively shorten the actions list until a prefix matches
        for i in range(len(actions), 0, -1):
            actions_slice = actions[:i]
            pattern = ''.join([self._get_nargs_pattern(action)
                               for action in actions_slice])
            match = _re.match(pattern, arg_strings_pattern)
            if match is not None:
                # one regex group per action: its length = strings consumed
                result.extend([len(string) for string in match.groups()])
                break
        return result
    def _parse_optional(self, arg_string):
        """Classify ``arg_string`` as an optional argument.

        Returns None if the string should be treated as a positional;
        otherwise returns a tuple ``(action, option_string, explicit_arg)``.
        A tuple whose action is None means "looks like an option but is
        unknown to this parser".
        """
        # an empty string was meant to be a positional
        if not arg_string:
            return None
        # no prefix character means it's a positional
        if not arg_string[0] in self.prefix_chars:
            return None
        # exact match with a registered option string
        if arg_string in self._option_string_actions:
            action = self._option_string_actions[arg_string]
            return action, arg_string, None
        # a lone prefix character ('-') is treated as a positional
        if len(arg_string) == 1:
            return None
        # '--opt=value' form: look up the part before the '='
        if '=' in arg_string:
            option_string, explicit_arg = arg_string.split('=', 1)
            if option_string in self._option_string_actions:
                action = self._option_string_actions[option_string]
                return action, option_string, explicit_arg
        # try abbreviated / combined-short-option interpretations
        option_tuples = self._get_option_tuples(arg_string)
        # multiple matches means the abbreviation was ambiguous
        if len(option_tuples) > 1:
            options = ', '.join([option_string
                for action, option_string, explicit_arg in option_tuples])
            tup = arg_string, options
            self.error(_('ambiguous option: %s could match %s') % tup)
        # exactly one match: that interpretation wins
        elif len(option_tuples) == 1:
            option_tuple, = option_tuples
            return option_tuple
        # things that look like negative numbers are positionals, unless
        # the parser has options that look like negative numbers too
        if self._negative_number_matcher.match(arg_string):
            if not self._has_negative_number_optionals:
                return None
        # a string with a space in it must be a positional
        if ' ' in arg_string:
            return None
        # looks like an option but is unknown: let the caller report it
        return None, arg_string, None
    def _get_option_tuples(self, option_string):
        """Return all plausible (action, option_string, explicit_arg)
        interpretations of an unrecognized option string.

        Handles abbreviated long options ('--foo' matching '--foobar')
        and short options with an attached value ('-xVALUE').
        """
        result = []
        # option strings starting with two prefix characters are only split at
        # the '='; everything before it may be an abbreviation
        chars = self.prefix_chars
        if option_string[0] in chars and option_string[1] in chars:
            if '=' in option_string:
                option_prefix, explicit_arg = option_string.split('=', 1)
            else:
                option_prefix = option_string
                explicit_arg = None
            for option_string in self._option_string_actions:
                if option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)
        # single-prefix options can be abbreviations ('-fo' for '-foo')
        # or a short option with the value glued on ('-xVALUE')
        elif option_string[0] in chars and option_string[1] not in chars:
            option_prefix = option_string
            explicit_arg = None
            short_option_prefix = option_string[:2]
            short_explicit_arg = option_string[2:]
            for option_string in self._option_string_actions:
                if option_string == short_option_prefix:
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, short_explicit_arg
                    result.append(tup)
                elif option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)
        # shouldn't ever get here if the caller filtered correctly
        else:
            self.error(_('unexpected option string: %s') % option_string)
        # return the collected option tuples
        return result
    def _get_nargs_pattern(self, action):
        """Build the regex fragment that matches this action's arguments.

        In the pattern language, 'A' is an argument string, 'O' an option
        string, and '-' a '--' separator.
        """
        nargs = action.nargs
        # the default (None) is assumed to be a single argument
        if nargs is None:
            nargs_pattern = '(-*A-*)'
        # allow zero or one arguments
        elif nargs == OPTIONAL:
            nargs_pattern = '(-*A?-*)'
        # allow zero or more arguments
        elif nargs == ZERO_OR_MORE:
            nargs_pattern = '(-*[A-]*)'
        # allow one or more arguments
        elif nargs == ONE_OR_MORE:
            nargs_pattern = '(-*A[A-]*)'
        # allow any number of options or arguments
        elif nargs == REMAINDER:
            nargs_pattern = '([-AO]*)'
        # allow one argument followed by any number of options or arguments
        elif nargs == PARSER:
            nargs_pattern = '(-*A[-AO]*)'
        # an integer nargs means exactly that many 'A's
        else:
            nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
        # optionals never consume '--' separators
        if action.option_strings:
            nargs_pattern = nargs_pattern.replace('-*', '')
            nargs_pattern = nargs_pattern.replace('-', '')
        return nargs_pattern
    def _get_values(self, action, arg_strings):
        """Convert matched argument strings into the action's value,
        applying type conversion and choices validation per nargs."""
        # for everything but PARSER/REMAINDER args, strip out '--'
        if action.nargs not in [PARSER, REMAINDER]:
            arg_strings = [s for s in arg_strings if s != '--']
        # optional argument with no value: use const (for optionals)
        # or default (for positionals)
        if not arg_strings and action.nargs == OPTIONAL:
            if action.option_strings:
                value = action.const
            else:
                value = action.default
            # a string default still goes through the type converter
            if isinstance(value, basestring):
                value = self._get_value(action, value)
            self._check_value(action, value)
        # ZERO_OR_MORE positional with no strings: use the default if any
        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
              not action.option_strings):
            if action.default is not None:
                value = action.default
            else:
                value = arg_strings
            self._check_value(action, value)
        # single argument (or OPTIONAL with one string): a scalar value
        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
            arg_string, = arg_strings
            value = self._get_value(action, arg_string)
            self._check_value(action, value)
        # REMAINDER: convert all, no choices checking
        elif action.nargs == REMAINDER:
            value = [self._get_value(action, v) for v in arg_strings]
        # PARSER: convert all, but only the first (the command) is validated
        elif action.nargs == PARSER:
            value = [self._get_value(action, v) for v in arg_strings]
            self._check_value(action, value[0])
        # everything else: a validated list of converted values
        else:
            value = [self._get_value(action, v) for v in arg_strings]
            for v in value:
                self._check_value(action, v)
        # return the converted value
        return value
def _get_value(self, action, arg_string):
type_func = self._registry_get('type', action.type, action.type)
if not _callable(type_func):
msg = _('%r is not callable')
raise ArgumentError(action, msg % type_func)
try:
result = type_func(arg_string)
except ArgumentTypeError:
name = getattr(action.type, '__name__', repr(action.type))
msg = str(_sys.exc_info()[1])
raise ArgumentError(action, msg)
except (TypeError, ValueError):
name = getattr(action.type, '__name__', repr(action.type))
msg = _('invalid %s value: %r')
raise ArgumentError(action, msg % (name, arg_string))
return result
def _check_value(self, action, value):
if action.choices is not None and value not in action.choices:
tup = value, ', '.join(map(repr, action.choices))
msg = _('invalid choice: %r (choose from %s)') % tup
raise ArgumentError(action, msg)
    def format_usage(self):
        """Return the usage string for this parser."""
        formatter = self._get_formatter()
        formatter.add_usage(self.usage, self._actions,
                            self._mutually_exclusive_groups)
        return formatter.format_help()
    def format_help(self):
        """Return the full help message: usage, description, one section
        per argument group, then the epilog."""
        formatter = self._get_formatter()
        # usage
        formatter.add_usage(self.usage, self._actions,
                            self._mutually_exclusive_groups)
        # description
        formatter.add_text(self.description)
        # positionals, optionals and user-defined groups
        for action_group in self._action_groups:
            formatter.start_section(action_group.title)
            formatter.add_text(action_group.description)
            formatter.add_arguments(action_group._group_actions)
            formatter.end_section()
        # epilog
        formatter.add_text(self.epilog)
        # determine help from format above
        return formatter.format_help()
    def format_version(self):
        """Deprecated: return the formatted version string.

        Kept for backward compatibility; use an 'action="version"'
        argument instead.
        """
        import warnings
        warnings.warn(
            'The format_version method is deprecated -- the "version" '
            'argument to ArgumentParser is no longer supported.',
            DeprecationWarning)
        formatter = self._get_formatter()
        formatter.add_text(self.version)
        return formatter.format_help()
    def _get_formatter(self):
        """Instantiate this parser's help formatter, bound to its prog name."""
        return self.formatter_class(prog=self.prog)
def print_usage(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_usage(), file)
def print_help(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_help(), file)
    def print_version(self, file=None):
        """Deprecated: write the version string to *file*."""
        import warnings
        warnings.warn(
            'The print_version method is deprecated -- the "version" '
            'argument to ArgumentParser is no longer supported.',
            DeprecationWarning)
        self._print_message(self.format_version(), file)
def _print_message(self, message, file=None):
if message:
if file is None:
file = _sys.stderr
file.write(message)
    def exit(self, status=0, message=None):
        """Terminate the process with *status*, first writing *message*
        (if any) to stderr."""
        if message:
            self._print_message(message, _sys.stderr)
        _sys.exit(status)
    def error(self, message):
        """error(message: string)
        Prints a usage message incorporating the message to stderr and
        exits.
        If you override this in a subclass, it should not return -- it
        should either exit or raise an exception.
        """
        self.print_usage(_sys.stderr)
        # exit status 2 is the conventional "command line usage error"
        self.exit(2, _('%s: error: %s\n') % (self.prog, message))
|
data/JeremyOT/Toto/toto/events.py
|
'''Toto's event framework is used to allow external events to affect client requests, or to run scheduled tasks
after a specified signal is received. It can be used to send messages to active requests, even between multiple
server processes. The event framework can also be used outside of Toto to send messages to running Toto servers.
'''
import cPickle as pickle
from threading import Thread
from collections import deque
from tornado.web import *
from tornado.ioloop import IOLoop
from traceback import format_exc
from tornado.options import options
import zmq
import logging
import zlib
from random import choice, shuffle
class EventManager():
  '''Instances will listen on ``address`` for incoming events.

  Events are dicts of the form ``{'name': ..., 'args': ...}``, pickled and
  zlib-compressed, and exchanged over ZeroMQ PUSH/PULL sockets, so any
  process (not only Toto servers) can send or receive them.
  '''

  def __init__(self, address=None):
    self.__handlers = {}            # event name -> set of handler tuples
    self.address = address
    self.__zmq_context = zmq.Context()
    self.__remote_servers = {}      # address -> connected PUSH socket
    self.__thread = None            # background receive thread (lazy)
    self.__queued_servers = deque() # round-robin order for non-broadcast sends

  def register_server(self, address):
    '''Add a server located at ``address``. This server will now be included in the
    recipient list whenever ``send()`` is called.
    '''
    if address in self.__remote_servers:
      # Bug fix: the address was previously passed as a second positional
      # argument to Exception instead of being interpolated into the message.
      raise Exception('Server already registered: %s' % address)
    socket = self.__zmq_context.socket(zmq.PUSH)
    socket.connect(address)
    self.__remote_servers[address] = socket
    self.refresh_server_queue()

  def remove_server(self, address):
    '''Remove the server located at ``address`` from the recipient list for all
    future calls to ``send()``.
    '''
    # raises KeyError if the address was never registered
    del self.__remote_servers[address]
    self.refresh_server_queue()

  def remove_all_servers(self):
    '''Clear the recipient list for all future calls to ``send``.
    '''
    self.__remote_servers.clear()
    self.refresh_server_queue()

  def refresh_server_queue(self):
    '''Reload and shuffle the registered server queue used for round-robin load
    balancing of non-broadcast events.
    '''
    self.__queued_servers.clear()
    self.__queued_servers.extend(self.__remote_servers.itervalues())
    # shuffle so different EventManager instances don't all start the
    # round-robin at the same server
    shuffle(self.__queued_servers)

  def register_handler(self, event_name, event_handler, run_on_main_loop=False, request_handler=None, persist=False):
    '''Register ``event_handler`` to run when ``event_name`` is received. Handlers are meant to respond to
    a single event matching ``event_name`` only. If ``run_on_main_loop`` is ``True`` the handler will be executed
    on Tornado's main ``IOLoop`` (required if the handler will write to a response stream). If ``request_handler``
    is set, ``event_handler`` will not fire once ``request_handler`` has finished. Set ``persist`` to ``True``
    to automatically requeue ``event_handler`` each time it is executed.

    Returns a signature that can be passed to ``remove_handler``.
    '''
    if not event_name in self.__handlers:
      self.__handlers[event_name] = set()
    # tuple layout: (callable, run_on_main_loop, request_handler, persist)
    handler_tuple = (event_handler, run_on_main_loop, request_handler, persist)
    self.__handlers[event_name].add(handler_tuple)
    return (event_name, handler_tuple)

  def remove_handler(self, handler_sig):
    '''Disable and remove the handler matching ``handler_sig``.
    '''
    self.__handlers[handler_sig[0]].discard(handler_sig[1])

  def start_listening(self):
    '''Starts listening for incoming events on ``EventManager.address``.

    The receive loop runs on a daemon thread; calling this more than once
    is a no-op.
    '''
    if self.__thread:
      return
    def receive():
      # each thread needs its own zmq context/socket
      context = zmq.Context()
      socket = context.socket(zmq.PULL)
      socket.bind(self.address)
      while True:
        event = pickle.loads(zlib.decompress(socket.recv()))
        event_name = event['name']
        event_args = event['args']
        if event_name in self.__handlers:
          handlers = self.__handlers[event_name]
          # iterate over a copy so non-persistent handlers can be removed
          for handler in list(handlers):
            if not handler[3]:
              handlers.remove(handler)
            try:
              # skip handlers whose request has already finished
              if handler[2] and handler[2]._finished:
                continue
              if handler[1]:
                # bind the current handler so the IOLoop callback doesn't
                # close over the loop variable
                (lambda h: IOLoop.instance().add_callback(lambda: h[0](event_args)))(handler)
              else:
                handler[0](event_args)
            except Exception as e:
              # a failing handler must not kill the receive loop
              logging.error(format_exc())
    self.__thread = Thread(target=receive)
    self.__thread.daemon = True
    self.__thread.start()

  def send_to_server(self, address, event_name, event_args):
    '''Send a message with ``event_name`` and ``event_args`` only
    to the server listening at ``address``. ``address`` must have
    previously been passed to ``register_server``. This is more
    efficient than ``send`` if you only intent to send the event
    to a single server and know the address in advance.
    '''
    event = {'name': event_name, 'args': event_args}
    event_data = zlib.compress(pickle.dumps(event))
    self.__remote_servers[address].send(event_data)

  def send(self, event_name, event_args, broadcast=True):
    '''Send a message with ``event_name`` and ``event_args`` to
    all servers previously registered with ``register_server()``.
    If ``broadcast`` is false, the event will be sent to only
    a single server. Non-broadcast events are round-robin load
    balanced between registered servers.
    '''
    if not self.__remote_servers:
      return
    event = {'name': event_name, 'args': event_args}
    event_data = zlib.compress(pickle.dumps(event))
    if not broadcast:
      # rotate after sending so the next call hits the next server
      self.__queued_servers[0].send(event_data)
      self.__queued_servers.rotate(-1)
      return
    for socket in self.__queued_servers:
      socket.send(event_data)

  @classmethod
  def instance(cls):
    '''Returns the shared instance of ``EventManager``, instantiating on the first call.
    '''
    if not hasattr(cls, '_instance'):
      cls._instance = cls()
    return cls._instance
|
data/NathanEpstein/Dora/Dora/__init__.py
|
from .main import Dora
|
data/OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/mpiwrap.py
|
import os
import sys
import numpy
from contextlib import contextmanager
def _redirect_streams(to_fd):
    """Redirect stdout/stderr to the given file descriptor.
    Based on: http://eli.thegreenplace.net/2015/redirecting-all-kinds-of-stdout-in-python/
    """
    # remember the OS-level descriptors currently backing the streams
    original_stdout_fd = sys.stdout.fileno()
    original_stderr_fd = sys.stderr.fileno()
    # close (and flush) the Python-level file objects first
    sys.stdout.close()
    sys.stderr.close()
    # re-point the original descriptors at to_fd at the OS level
    os.dup2(to_fd, original_stdout_fd)
    os.dup2(to_fd, original_stderr_fd)
    # re-wrap the redirected descriptors as unbuffered binary streams
    sys.stdout = os.fdopen(original_stdout_fd, 'wb', 0)
    sys.stderr = os.fdopen(original_stderr_fd, 'wb', 0)
def use_proc_files():
    """Redirect this MPI rank's stdout/stderr into a '<rank>.out' file.

    No-op when MPI is not available.
    """
    if MPI is not None:
        rank = MPI.COMM_WORLD.rank
        sname = "%s.out" % rank
        ofile = open(sname, 'wb')
        # the file object is deliberately kept open for the process lifetime
        _redirect_streams(ofile.fileno())
def under_mpirun():
    """Return True if we're being executed under mpirun."""
    # mpirun/mpiexec announce themselves via environment variables
    # (Open MPI uses OMPI_COMM_*, MPICH-style launchers use MPIR_*).
    return any(name.startswith(('OMPI_COMM', 'MPIR_'))
               for name in os.environ.keys())
class PETSc(object):
    # Lazy stand-in for the petsc4py ``PETSc`` module: the real import is
    # deferred to first use so petsc4py is only required when needed.
    def __init__(self):
        self.needs_ksp = False  # set externally when a KSP solver is required
        self._PETSc = None      # cached petsc4py.PETSc module once imported
    @property
    def installed(self):
        # Import petsc4py on demand and report whether it is available.
        try:
            if self._PETSc is None:
                PETSc = _import_petsc()
                # NOTE(review): petsc4py is removed from sys.modules here,
                # presumably to allow a clean re-import later -- confirm.
                del sys.modules['petsc4py']
                self._PETSc = PETSc
            return True
        except ImportError:
            self._PETSc = None
            return False
    def __getattr__(self, name):
        # delegate all other attribute access to the real petsc4py module
        if self.installed:
            return getattr(self._PETSc, name)
        raise AttributeError(name)
def create_petsc_vec(comm, arr):
    """Wrap ``arr`` in a PETSc vector on ``comm``.

    Returns None when not running under MPI (and no KSP solver is
    needed), when petsc4py is unavailable, or when ``comm`` is the
    null communicator.
    """
    if under_mpirun() or PETSc.needs_ksp:
        if PETSc.installed and (MPI is None or comm != MPI.COMM_NULL):
            return PETSc.Vec().createWithArray(arr, comm=comm)
    return None
def _import_petsc():
    """Import and return the petsc4py ``PETSc`` module (may raise ImportError)."""
    import petsc4py
    from petsc4py import PETSc
    return PETSc
# Module-level environment detection: under mpirun the real MPI and PETSc
# modules are bound; otherwise MPI is None and PETSc is the lazy proxy above.
if under_mpirun():
    from mpi4py import MPI
    # rebind the name PETSc to the real petsc4py module
    PETSc = _import_petsc()
    # mark it installed so callers can test PETSc.installed uniformly
    PETSc.installed = True
    COMM_NULL = MPI.COMM_NULL
else:
    MPI = None
    COMM_NULL = None
    # serial run: use the lazy proxy instance
    PETSc = PETSc()
class MPI_info(object):
    """Per-object record of MPI resources: requested CPU range and communicator."""
    def __init__(self):
        # (min, max) number of CPUs this object wants
        self.requested_cpus = (1, 1)
        # communicator; COMM_NULL until one is assigned
        self.comm = COMM_NULL
    @property
    def size(self):
        """Number of ranks in our communicator (1 when serial)."""
        if MPI and self.comm != COMM_NULL:
            return self.comm.size
        return 1
    @property
    def rank(self):
        """Our rank in the communicator; -1 if we hold the null
        communicator under MPI, 0 when running serially."""
        if MPI:
            if self.comm != COMM_NULL:
                return self.comm.rank
            else:
                return -1
        return 0
def get_norm(vec, order=None):
    """Either do a distributed norm or a local numpy
    norm depending on whether we're running under MPI.
    vec: VecWrapper
        Returns the norm of this vector
    order: int, float, string (see numpy.linalg.norm)
        Order of the norm (ignored in MPI)
    """
    if MPI:
        # distributed case: assemble first, then PETSc computes the norm
        vec.petsc_vec.assemble()
        return vec.petsc_vec.norm()
    else:
        return numpy.linalg.norm(vec.array, ord=order)
idx_arr_type = PETSc.IntType if MPI else 'i'
def make_idx_array(start, end):
    """ Return an index vector of the right int type for
    parallel or serial computation.
    """
    # idx_arr_type is PETSc.IntType under MPI, plain 'i' otherwise
    return numpy.arange(start, end, dtype=idx_arr_type)
def to_idx_array(idxs):
    """ Return an index vector of the right int type for
    parallel or serial computation.
    """
    # converts an arbitrary index sequence, unlike make_idx_array's range
    return numpy.array(idxs, dtype=idx_arr_type)
def evenly_distrib_idxs(num_divisions, arr_size):
    """Given a number of divisions and the size of an array, chop the array up
    into pieces according to number of divisions, keeping the distribution
    of entries as even as possible. Returns a tuple of
    (sizes, offsets), where sizes and offsets contain values for all
    divisions.
    """
    # Explicit floor division: behaves identically under Python 2's '/',
    # but stays correct under ``from __future__ import division`` / Python 3.
    base = arr_size // num_divisions
    leftover = arr_size % num_divisions
    sizes = numpy.ones(num_divisions, dtype="int") * base
    # spread the remainder over the first ``leftover`` divisions
    sizes[:leftover] += 1
    offsets = numpy.zeros(num_divisions, dtype="int")
    # each offset is the cumulative size of the preceding divisions
    offsets[1:] = numpy.cumsum(sizes)[:-1]
    return sizes, offsets
@contextmanager
def MPIContext():
    """Wrap this around code that you want to globally fail if it fails
    on any MPI process in MPI_WORLD.
    """
    try:
        yield
    except:
        exc_type, exc_val, exc_tb = sys.exc_info()
        if exc_val is not None:
            fail = True
        else:
            fail = False
        # let every rank learn whether any rank failed
        fails = MPI.COMM_WORLD.allgather(fail)
        if fail or not any(fails):
            # re-raise the local exception with its original traceback
            # (Python 2 three-argument raise syntax)
            raise exc_type, exc_val, exc_tb
        else:
            # we succeeded locally but another rank failed
            for i,f in enumerate(fails):
                if f:
                    raise RuntimeError("a test failed in (at least) rank %d" % i)
# Opt-in per-rank output files, enabled via the USE_PROC_FILES env var.
if os.environ.get('USE_PROC_FILES'):
    use_proc_files()
|
data/Locu/djoauth2/docs/conf.py
|
# Sphinx configuration for the DJOAuth2 documentation.
import sys, os
# make the package (one level up) importable for autodoc
sys.path.insert(0, os.path.abspath('..'))
import local_settings
# -- General configuration -----------------------------------------------
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'DJOAuth2'
copyright = u'(see the license file)'
html_show_copyright = False
# version info is taken straight from the package
import djoauth2
version = djoauth2.__version__
release = djoauth2.__version__
exclude_patterns = ['_build']
pygments_style = 'sphinx'
# -- HTML output ---------------------------------------------------------
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'DJOAuth2doc'
# -- LaTeX output --------------------------------------------------------
latex_elements = {
}
latex_documents = [
  ('index', 'DJOAuth2.tex', u'DJOAuth2 Documentation',
   u'Peter Downs', 'manual'),
]
# -- Manual page output --------------------------------------------------
man_pages = [
    ('index', 'djoauth2', u'DJOAuth2 Documentation',
     [u'Peter Downs'], 1)
]
# -- Texinfo output ------------------------------------------------------
texinfo_documents = [
  ('index', 'DJOAuth2', u'DJOAuth2 Documentation',
   u'Peter Downs', 'DJOAuth2', 'One line description of project.',
   'Miscellaneous'),
]
# -- Epub output ---------------------------------------------------------
epub_title = u'DJOAuth2'
epub_author = u'Peter Downs'
epub_publisher = u'Peter Downs'
epub_copyright = u'2013, Peter Downs'
|
data/Yubico/yubikey-neo-manager/neoman/view/neo.py
|
import os
from PySide import QtGui, QtCore
from functools import partial
from neoman import messages as m
from neoman.storage import settings
from neoman.exc import ModeSwitchError
from neoman.model.neo import YubiKeyNeo
from neoman.model.applet import Applet
from neoman.model.modes import MODE
from neoman.view.tabs import TabWidgetWithAbout
U2F_URL = "http://www.yubico.com/products/yubikey-hardware/yubikey-neo/" \
+ "yubikey-neo-u2f/"
def get_text(*args, **kwargs):
    """Wrapper around QInputDialog.getText that suppresses the context
    help ('?') button by forcing a minimal window-flag set."""
    flags = (
        QtCore.Qt.WindowTitleHint |
        QtCore.Qt.WindowSystemMenuHint
    )
    kwargs['flags'] = flags
    return QtGui.QInputDialog.getText(*args, **kwargs)
class NeoPage(TabWidgetWithAbout):
    """Tab container for a single YubiKey NEO.

    Shows the settings (and, in dev mode, installed apps) tabs for a
    supported device, or a single 'unsupported' tab otherwise.
    """
    # re-emitted internally so child tabs can track the current device
    _neo = QtCore.Signal(YubiKeyNeo)
    # emitted when the user drills into an applet
    applet = QtCore.Signal(Applet)
    def __init__(self):
        super(NeoPage, self).__init__()
        self._tabs = []          # (widget, title) pairs, kept for re-adding
        self._supported = True
        self._unsupported_tab = UnsupportedTab()
        settings_tab = SettingsTab()
        self._neo.connect(settings_tab.set_neo)
        self.addTab(settings_tab, m.settings)
        # the apps tab is only available in developer mode
        if QtCore.QCoreApplication.instance().devmode:
            apps = AppsTab(self, 1)
            self._neo.connect(apps.set_neo)
            apps.applet.connect(self._set_applet)
            self.addTab(apps, m.installed_apps)
    def addTab(self, tab, title):
        # record the tab so setNeo can rebuild the tab bar later
        self._tabs.append((tab, title))
        if self._supported:
            super(NeoPage, self).addTab(tab, title)
    @QtCore.Slot(YubiKeyNeo)
    def setNeo(self, neo):
        """Switch the page to show ``neo``, rebuilding the visible tabs."""
        self._supported = neo and neo.supported
        self.clear()
        if self._supported:
            for (tab, title) in self._tabs:
                super(NeoPage, self).addTab(tab, title)
        else:
            super(NeoPage, self).addTab(self._unsupported_tab, m.settings)
        self._neo.emit(neo)
    @QtCore.Slot(Applet)
    def _set_applet(self, applet):
        # forward the child tab's applet selection to our own signal
        self.applet.emit(applet)
class UnsupportedTab(QtGui.QWidget):
    """Placeholder tab shown when the connected device is not supported."""
    def __init__(self):
        super(UnsupportedTab, self).__init__()
        layout = QtGui.QVBoxLayout()
        layout.addWidget(QtGui.QLabel(m.unsupported_device))
        self.setLayout(layout)
class SettingsTab(QtGui.QWidget):
    """Device settings tab: name, serial, firmware, U2F support and the
    transport-mode switcher."""
    def __init__(self):
        super(SettingsTab, self).__init__()
        self._neo = None  # currently displayed YubiKeyNeo (set via set_neo)
        self._name = QtGui.QLabel()
        self._serial = QtGui.QLabel()
        self._firmware = QtGui.QLabel()
        self._u2f = QtGui.QLabel()
        self._u2f.setOpenExternalLinks(True)
        layout = QtGui.QVBoxLayout()
        name_row = QtGui.QHBoxLayout()
        name_row.addWidget(self._name)
        self._name_btn = QtGui.QPushButton(m.change_name)
        self._name_btn.clicked.connect(self.change_name)
        name_row.addWidget(self._name_btn)
        details_row = QtGui.QHBoxLayout()
        details_row.addWidget(self._serial)
        details_row.addWidget(self._firmware)
        self._u2f_row = QtGui.QHBoxLayout()
        self._u2f_row.addWidget(QtGui.QLabel())
        self._u2f_row.addWidget(self._u2f)
        layout.addLayout(name_row)
        layout.addLayout(details_row)
        layout.addLayout(self._u2f_row)
        # NOTE(review): this button is created but never added to the
        # layout, so "manage keys" is not reachable from the UI -- confirm.
        button = QtGui.QPushButton(m.manage_keys)
        button.clicked.connect(self.manage_keys)
        self._mode_btn = QtGui.QPushButton(m.change_mode)
        self._mode_btn.clicked.connect(self.change_mode)
        layout.addWidget(self._mode_btn)
        self._mode_note = QtGui.QLabel(m.note_1 % m.mode_note)
        self._mode_note.setWordWrap(True)
        layout.addWidget(self._mode_note)
        layout.addStretch()
        self.setLayout(layout)
    @QtCore.Slot(YubiKeyNeo)
    def set_neo(self, neo):
        """Refresh all labels/buttons for the newly selected device."""
        self._neo = neo
        if not neo:
            return
        # renaming requires a serial number to key the stored name on
        self._name_btn.setDisabled(neo.serial is None)
        self._name.setText(m.name_1 % neo.name)
        self._serial.setText(m.serial_1 % neo.serial)
        # version (0, 0, 0) means the firmware version is unknown
        show_firmware = neo.version != (0, 0, 0)
        self._u2f_row.setDirection(
            QtGui.QBoxLayout.LeftToRight if show_firmware else
            QtGui.QBoxLayout.RightToLeft)
        self._firmware.setVisible(show_firmware)
        self._firmware.setText(m.firmware_1 % '.'.join(map(str, neo.version)))
        # allowed_modes[2] is the U2F capability flag
        if neo.allowed_modes[2]:
            self._u2f.setText(m.u2f_1 % m.u2f_supported)
        else:
            self._u2f.setText(m.u2f_1 % m.u2f_not_supported_1 % U2F_URL)
        self._mode_btn.setText(m.change_mode_1 % MODE.name_for_mode(neo.mode))
        self._mode_note.setVisible(neo.version < (4, 1, 0))
    def change_name(self):
        """Prompt for and apply a new device name."""
        name, ok = get_text(self, m.name, m.change_name_desc,
                            text=self._neo.name)
        if ok:
            self._neo.name = name
            self._name.setText(m.name_1 % name)
    def manage_keys(self):
        # placeholder implementation (Python 2 print statement)
        print m.manage_keys
    def change_mode(self):
        """Run the mode dialog and apply the chosen transport mode."""
        mode = ModeDialog.change_mode(self._neo, self)
        if mode is not None:
            try:
                self._neo.set_mode(mode)
            except ModeSwitchError:
                QtGui.QMessageBox.critical(self, m.mode_error,
                                           m.mode_error_desc)
                return
            self._mode_btn.setText(m.change_mode_1 % MODE.name_for_mode(mode))
            # the new mode only takes effect after a replug; block with a
            # dialog that auto-closes when the device is removed
            remove_dialog = QtGui.QMessageBox(self)
            remove_dialog.setWindowTitle(m.change_mode)
            remove_dialog.setIcon(QtGui.QMessageBox.Information)
            remove_dialog.setText(m.remove_device)
            remove_dialog.setStandardButtons(QtGui.QMessageBox.NoButton)
            self._neo.removed.connect(remove_dialog.accept)
            remove_dialog.exec_()
class ModeDialog(QtGui.QDialog):
    """Dialog with OTP/CCID/U2F checkboxes for choosing a transport mode."""
    def __init__(self, neo, parent=None):
        super(ModeDialog, self).__init__(parent)
        # drop the context-help ('?') title-bar button
        self.setWindowFlags(self.windowFlags()
                            ^ QtCore.Qt.WindowContextHelpButtonHint)
        layout = QtGui.QVBoxLayout()
        layout.addWidget(QtGui.QLabel(m.change_mode_desc))
        boxes = QtGui.QHBoxLayout()
        self._otp = QtGui.QCheckBox(m.otp)
        self._otp.clicked.connect(self._state_changed)
        boxes.addWidget(self._otp)
        self._ccid = QtGui.QCheckBox(m.ccid)
        self._ccid.clicked.connect(self._state_changed)
        boxes.addWidget(self._ccid)
        self._u2f = QtGui.QCheckBox(m.u2f)
        self._u2f.clicked.connect(self._state_changed)
        boxes.addWidget(self._u2f)
        layout.addLayout(boxes)
        buttons = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok |
                                         QtGui.QDialogButtonBox.Cancel)
        buttons.accepted.connect(self.accept)
        buttons.rejected.connect(self.reject)
        self._ok = buttons.button(QtGui.QDialogButtonBox.Ok)
        layout.addWidget(buttons)
        self.setWindowTitle(m.change_mode)
        self.setLayout(layout)
        # hide/disable checkboxes for transports this device can't do
        allowed = neo.allowed_modes
        self._otp.setEnabled(allowed[0])
        self._otp.setVisible(allowed[0])
        self._ccid.setEnabled(allowed[1])
        self._ccid.setVisible(allowed[1])
        self._u2f.setEnabled(allowed[2])
        self._u2f.setVisible(allowed[2])
        self.mode = neo.mode
    def _state_changed(self):
        # at least one transport must remain selected to accept
        self._ok.setDisabled(not any(self.flags))
    @property
    def flags(self):
        # (otp, ccid, u2f) booleans reflecting the checkbox states
        return self._otp.isChecked(), self._ccid.isChecked(), \
            self._u2f.isChecked()
    @property
    def mode(self):
        # numeric mode derived from the checkbox flags
        return MODE.mode_for_flags(*self.flags)
    @mode.setter
    def mode(self, value):
        otp, ccid, u2f, touch_eject = MODE.flags_for_mode(value)
        # never check a box for a transport the device doesn't allow
        self._otp.setChecked(otp and self._otp.isEnabled())
        self._ccid.setChecked(ccid and self._ccid.isEnabled())
        self._u2f.setChecked(u2f and self._u2f.isEnabled())
    @classmethod
    def change_mode(cls, neo, parent=None):
        """Run the dialog; return the chosen mode or None if cancelled."""
        dialog = cls(neo, parent)
        if dialog.exec_():
            mode = dialog.mode
            # pre-3.3 firmware uses 0x82 for CCID with touch-eject
            legacy = neo.version < (3, 3, 0)
            if legacy and mode == 2:
                mode = 0x82
            return mode
        else:
            return None
class AppsTab(QtGui.QWidget):
    """Developer-mode tab listing installed applets and allowing .cap installs."""
    applet = QtCore.Signal(Applet)
    def __init__(self, parent, index):
        super(AppsTab, self).__init__()
        self.parent = parent
        self.index = index  # our position in the parent tab widget
        layout = QtGui.QVBoxLayout()
        self._apps = []
        self._apps_list = QtGui.QListView()
        self._apps_list.setModel(QtGui.QStringListModel([]))
        self._apps_list.setEditTriggers(QtGui.QListView.NoEditTriggers)
        self._apps_list.doubleClicked.connect(self.open_app)
        layout.addWidget(self._apps_list)
        self._install_cap_btn = QtGui.QPushButton(m.install_cap)
        self._install_cap_btn.clicked.connect(self.install_cap)
        layout.addWidget(self._install_cap_btn)
        layout.addStretch()
        self.setLayout(layout)
        # lazily (re)load the app list when this tab becomes active
        parent.currentChanged.connect(self.tab_changed)
    def install_cap(self):
        """Pick a .cap file and install it on the device in the background."""
        path = settings.value('filepicker/path', None)
        (cap, _) = QtGui.QFileDialog.getOpenFileName(self, m.select_cap,
                                                     path, "*.cap")
        if not cap:
            return
        # remember the directory for the next file picker invocation
        settings.setValue('filepicker/path', os.path.dirname(cap))
        worker = QtCore.QCoreApplication.instance().worker
        self._cap = os.path.basename(cap)
        worker.post(m.installing, partial(self._neo.install_app, cap),
                    self.install_done)
    @QtCore.Slot(object)
    def install_done(self, status):
        """Background-install callback; a truthy status means failure."""
        if status:
            print status
            QtGui.QMessageBox.warning(self, m.error_installing,
                                      m.error_installing_1 % self._cap)
        # refresh the applet list either way
        self.set_neo(self._neo)
    def open_app(self, index):
        """Emit the applet whose AID is embedded in the clicked row text."""
        readable = index.data()
        # rows are formatted "name (aid)"; extract the aid
        aid = readable[readable.rindex('(') + 1:readable.rindex(')')]
        appletmanager = QtCore.QCoreApplication.instance().appletmanager
        self.applet.emit(appletmanager.get_applet(aid))
    def tab_changed(self, index):
        """When our tab is selected, unlock the device (prompting for the
        key if needed) and refresh the applet list."""
        if index != self.index:
            return
        try:
            while self._neo.locked:
                try:
                    self._neo.unlock()
                except Exception as e:
                    # wrong/missing key: discard it and prompt the user
                    del self._neo.key
                    print e
                    pw, ok = get_text(self, m.key_required, m.key_required_desc)
                    if not ok:
                        # user cancelled: bounce back to the first tab
                        self.parent.setCurrentIndex(0)
                        return
                    self._neo.key = pw
            appletmanager = QtCore.QCoreApplication.instance().appletmanager
            # drop applets the manager doesn't know about
            self._apps = filter(None, map(appletmanager.get_applet,
                                          self._neo.list_apps()))
            self._apps_list.model().setStringList(
                map(lambda app: "%s (%s)" % (app.name, app.aid), self._apps))
        except AttributeError:
            # no device currently set
            pass
    @QtCore.Slot(YubiKeyNeo)
    def set_neo(self, neo):
        """Track the current device and refresh the applet list if possible."""
        self._neo = neo
        # applet management needs the CCID transport
        if not neo or not neo.has_ccid:
            self.parent.setTabEnabled(self.index, False)
            self.parent.setTabToolTip(self.index, m.requires_ccid)
            return
        self.parent.setTabEnabled(self.index, True)
        self.parent.setTabToolTip(self.index, None)
        if neo.locked:
            try:
                neo.unlock()
            except:
                # can't unlock without user interaction; wait for tab_changed
                return
        appletmanager = QtCore.QCoreApplication.instance().appletmanager
        self._apps = filter(None, map(appletmanager.get_applet,
                                      neo.list_apps()))
        self._apps_list.model().setStringList(
            map(lambda app: "%s (%s)" % (app.name, app.aid), self._apps))
|
data/Yelp/pyleus/pyleus/cli/build.py
|
"""Logic for building a jar from a pyleus topology directory.
Other modules should only call build_topology_jar passing it the configurations
object. The caller function should handle PyleusError exceptions.
"""
from __future__ import absolute_import
import glob
import logging
import os
import re
import shutil
import tempfile
import yaml
import zipfile
from pyleus import __version__
from pyleus.cli.topology_spec import TopologySpec
from pyleus.cli.virtualenv_proxy import VirtualenvProxy
from pyleus.compat import StringIO
from pyleus.storm.component import DESCRIBE_OPT
from pyleus.exception import InvalidTopologyError
from pyleus.exception import JarError
from pyleus.utils import expand_path
RESOURCES_PATH = "resources"
YAML_FILENAME = "pyleus_topology.yaml"
DEFAULT_REQUIREMENTS_FILENAME = "requirements.txt"
VIRTUALENV_NAME = "pyleus_venv"
log = logging.getLogger(__name__)
def _open_jar(base_jar):
"""Open the base jar file."""
if not os.path.exists(base_jar):
raise JarError("Base jar not found")
if not zipfile.is_zipfile(base_jar):
raise JarError("Base jar is not a jar file")
zip_file = zipfile.ZipFile(base_jar, "r")
return zip_file
def _zip_dir(src, arc):
"""Build a zip archive from the specified src.
Note: If the archive already exists, files will be simply
added to it, but the original archive will not be replaced.
"""
src_re = re.compile(src + "/*")
for root, dirs, files in os.walk(src):
prefix = re.sub(src_re, "", root)
for f in files:
arc.write(os.path.join(root, f), os.path.join(prefix, f),
zipfile.ZIP_DEFLATED)
def _pack_jar(tmp_dir, output_jar):
    """Build a jar from the temporary directory."""
    archive = zipfile.ZipFile(output_jar, "w")
    try:
        _zip_dir(tmp_dir, archive)
    finally:
        # always close so the central directory is written even on error
        archive.close()
def _validate_venv(topology_dir, venv):
"""Ensure that VIRTUALENV does not exist inside the directory"""
if os.path.exists(venv):
raise InvalidTopologyError("Topology directory must not contain a "
"file named {0}".format(venv))
def _path_contained_by(containing_path, path):
"""Return whether path is a subpath of containing_path"""
real_containing_path = os.path.join(os.path.realpath(containing_path), '')
real_path = os.path.realpath(path)
common_prefix = os.path.commonprefix([real_containing_path, real_path])
return common_prefix == real_containing_path
def _remove_pyleus_base_jar(venv):
    """Remove the Pyleus base jar from the virtualenv since it's redundant and
    takes up space. See PYLEUS-74.
    This function verifies that base_jar_path is actually inside the virtualenv
    before removing it. If the user is using --system-site-packages and has
    pyleus installed on their system, base_jar_path is actually outside the
    virtualenv, and we don't want to attempt its removal in that case.
    """
    # ask the venv's own pyleus where its base jar lives
    base_jar_path = venv.execute_module("pyleus._base_jar",
                                        cwd=venv.path).strip()
    if _path_contained_by(venv.path, base_jar_path):
        os.remove(base_jar_path)
def _set_up_virtualenv(venv_name, tmp_dir, req,
                       include_packages, system_site_packages,
                       pypi_index_url, python_interpreter, verbose):
    """Create a virtualenv with the specified options and the default packages
    specified in configuration. Then run `pip install -r [requirements file]`.

    Returns the VirtualenvProxy for the created environment.
    """
    venv = VirtualenvProxy(
        os.path.join(tmp_dir, venv_name),
        system_site_packages=system_site_packages,
        pypi_index_url=pypi_index_url,
        python_interpreter=python_interpreter,
        verbose=verbose
    )
    # pin pyleus to the exact version doing the build
    packages = ["pyleus=={0}".format(__version__)]
    if include_packages is not None:
        packages += include_packages
    for package in packages:
        venv.install_package(package)
    # req may be None when the topology ships no requirements file
    if req is not None:
        venv.install_from_requirements(req)
    # the base jar inside the venv is redundant; drop it to save space
    _remove_pyleus_base_jar(venv)
    return venv
def _assemble_full_topology_yaml(spec, venv, resources_dir):
    """Assemble a full version of the topology yaml file given by the user
    adding to it the information coming from the python source files.

    Returns the assembled yaml document as a string.
    """
    for component in spec.topology:
        if component.type == "python":
            log.debug('Assemble component module: {0}'.format(component.module))
            # run the component module with --describe to get its spec
            description = venv.execute_module(module=component.module,
                                              args=[DESCRIBE_OPT],
                                              cwd=resources_dir)
            # NOTE(review): yaml.load without a Loader can construct
            # arbitrary objects; the input comes from the user's own
            # modules here, but yaml.safe_load would be safer.
            module_spec = yaml.load(description)
            component.update_from_module(module_spec)
    spec.verify_groupings()
    new_yaml = StringIO()
    yaml.dump(spec.asdict(), new_yaml)
    return new_yaml.getvalue()
def _content_to_copy(src, exclude):
"""Return a set of top-level content to copy, excluding exact matches
from the exclude list.
"""
content = set(glob.glob(os.path.join(src, "*")))
content -= set(exclude)
return content
def _copy_dir_content(src, dst, exclude):
"""Copy the content of a directory excluding the yaml file
and requirements file.
This functions is used instead of shutil.copytree() because
the latter always creates a top level directory, while only
the content need to be copied in this case.
"""
content = _content_to_copy(src, exclude)
for t in content:
if os.path.isdir(t):
shutil.copytree(t, os.path.join(dst, os.path.basename(t)),
symlinks=True)
else:
shutil.copy2(t, dst)
def _create_pyleus_jar(original_topology_spec, topology_dir, base_jar,
                       output_jar, zip_file, tmp_dir, include_packages,
                       system_site_packages, pypi_index_url, verbose):
    """Coordinate the creation of the topology JAR:

    - Validate the topology
    - Extract the base JAR into a temporary directory
    - Copy all source files into the directory
    - If using virtualenv, create it and install dependencies
    - Re-pack the temporary directory into the final JAR

    NOTE(review): `base_jar` is unused here — the caller already opened it
    as `zip_file`; confirm whether the parameter can be dropped.
    """
    requirements_filename = original_topology_spec.requirements_filename
    if not requirements_filename:
        requirements_filename = DEFAULT_REQUIREMENTS_FILENAME
    python_interpreter = original_topology_spec.python_interpreter
    venv = os.path.join(topology_dir, VIRTUALENV_NAME)
    req = os.path.join(topology_dir, requirements_filename)
    if not os.path.isfile(req):
        # No requirements file shipped with this topology.
        req = None
    _validate_venv(topology_dir, venv)
    # The extracted base JAR becomes the working skeleton of the output.
    zip_file.extractall(tmp_dir)
    resources_dir = os.path.join(tmp_dir, RESOURCES_PATH)
    os.mkdir(resources_dir)
    # Copy the topology sources, leaving out any pre-existing venv,
    # the requirements file and a previously built output jar.
    _copy_dir_content(
        src=topology_dir,
        dst=resources_dir,
        exclude=[venv, req, output_jar],
    )
    # `venv` is rebound from path string to the VirtualenvProxy here.
    venv = _set_up_virtualenv(
        venv_name=VIRTUALENV_NAME,
        tmp_dir=resources_dir,
        req=req,
        include_packages=include_packages,
        system_site_packages=system_site_packages,
        pypi_index_url=pypi_index_url,
        python_interpreter=python_interpreter,
        verbose=verbose)
    new_yaml = _assemble_full_topology_yaml(
        spec=original_topology_spec,
        venv=venv,
        resources_dir=resources_dir)
    jar_yaml = os.path.join(resources_dir, YAML_FILENAME)
    with open(jar_yaml, 'w') as f:
        f.write(new_yaml)
    _pack_jar(tmp_dir, output_jar)
def _build_output_path(output_arg, topology_name):
    """Return the absolute path of the output jar file.

    Falls back to "<topology name>.jar" when no explicit output path was
    given on the command line.
    """
    if output_arg is None:
        return expand_path(topology_name + ".jar")
    return expand_path(output_arg)
def parse_original_topology(topology_path):
    """Load the user's topology YAML file into a TopologySpec."""
    with open(topology_path) as yaml_file:
        return TopologySpec(yaml.load(yaml_file))
def build_topology_jar(configs):
    """Parse command-line arguments and invoke _create_pyleus_jar()"""
    topology_path = expand_path(configs.topology_path)
    topology_dir = expand_path(os.path.dirname(topology_path))
    base_jar = expand_path(configs.base_jar)
    original_topology_spec = parse_original_topology(topology_path)
    output_jar = _build_output_path(configs.output_jar,
                                    original_topology_spec.name)
    include_packages = None
    if configs.include_packages is not None:
        # Space-separated list of extra pip packages to install in the venv.
        include_packages = configs.include_packages.split(" ")
    zip_file = _open_jar(base_jar)
    try:
        tmp_dir = tempfile.mkdtemp()
        try:
            _create_pyleus_jar(
                original_topology_spec=original_topology_spec,
                topology_dir=topology_dir,
                base_jar=base_jar,
                output_jar=output_jar,
                zip_file=zip_file,
                tmp_dir=tmp_dir,
                include_packages=include_packages,
                system_site_packages=configs.system_site_packages,
                pypi_index_url=configs.pypi_index_url,
                verbose=configs.verbose,
            )
        finally:
            # Always clean up the scratch directory, even on failure.
            shutil.rmtree(tmp_dir)
    finally:
        zip_file.close()
|
data/IanLewis/kay/kay/utils/jinja2utils/compiler.py
|
"""
gaefy.jinja2.compiler
~~~~~~~~~~~~~~~~~~~~~
Helper functions to parse Jinja2 templates and store them as Python code.
The compiled templates can be loaded using gaefy.jinja2.code_loaders,
avoiding all the parsing process.
To compile a whole dir:
from jinja2 import Environment
from gaefy.jinja2.compiler import compile_dir
env = Environment(extensions=['jinja2.ext.i18n'])
src_path = '/path/to/templates'
dst_path = '/path/to/templates_compiled'
compile_dir(env, src_path, dst_path)
:copyright: 2009 by tipfy.org.
:license: BSD, see LICENSE.txt for more details.
"""
import re
import sys
from os import path, listdir, mkdir
def compile_file(env, src_path, dst_path, encoding='utf-8', base_dir=''):
"""Compiles a Jinja2 template to python code.
Params:
`env`: a Jinja2 Environment instance.
`src_path`: path to the source file.
`dst_path`: path to the destination file.
`encoding`: template encoding.
`base_dir`: the base path to be removed from the compiled template
filename.
"""
src_file = file(src_path, 'r')
try:
source = src_file.read().decode(encoding)
except Exception, e:
sys.stderr.write("Failed compiling %s. Perhaps you can check the character"
" set of this file.\n" % src_path)
raise
src_file.close()
name = src_path.replace(base_dir, '')
raw = env.compile(source, name=name, filename=name, raw=True)
dst_file = open(dst_path, 'wb')
dst_file.write(raw)
dst_file.close()
def compile_dir(env, src_path, dst_path, pattern=r'^[^\.].*\..*[^~]$',
                encoding='utf-8', base_dir=None,
                negative_pattern=r'^.*\.swp$'):
    """Compiles a directory of Jinja2 templates to python code.

    Params:
      `env`: a Jinja2 Environment instance.
      `src_path`: path to the source directory.
      `dst_path`: path to the destination directory.
      `encoding`: template encoding.
      `pattern`: a regular expression to match template file names.
      `base_dir`: the base path to be removed from the compiled template
      filename. Defaults to `src_path` itself when None.
      `negative_pattern`: a regular expression of file names to skip
      (by default, editor swap files).
    """
    if base_dir is None:
        base_dir = src_path
    for filename in listdir(src_path):
        if filename.startswith("."):
            # Skip hidden files and directories.
            continue
        src_name = path.join(src_path, filename)
        dst_name = path.join(dst_path, filename)
        if path.isdir(src_name):
            if not path.isdir(dst_name):
                mkdir(dst_name)
            # Recurse, keeping the original base_dir for name stripping.
            compile_dir(env, src_name, dst_name, encoding=encoding,
                        base_dir=base_dir)
        elif path.isfile(src_name) and re.match(pattern, filename) and \
                not re.match(negative_pattern, filename):
            compile_file(env, src_name, dst_name, encoding=encoding,
                         base_dir=base_dir)
|
data/OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/api.py
|
"""
.. _`openmdao.lib.casehandler.api.py`:
A central place to access all of the OpenMDAO case recorders, case
iterators, and case filters in the standard library.
"""
from openmdao.lib.casehandlers.caseset import CaseArray, CaseSet, caseiter_to_caseset
from openmdao.lib.casehandlers.csvcase import CSVCaseIterator, CSVCaseRecorder
from openmdao.lib.casehandlers.dbcase import DBCaseIterator, DBCaseRecorder, \
case_db_to_dict
from openmdao.lib.casehandlers.dumpcase import DumpCaseRecorder
from openmdao.lib.casehandlers.jsoncase import JSONCaseRecorder, \
BSONCaseRecorder, verify_json
from openmdao.lib.casehandlers.listcase import ListCaseRecorder, \
ListCaseIterator
from openmdao.lib.casehandlers.caseset import CaseArray, CaseSet, \
caseiter_to_caseset
from openmdao.lib.casehandlers.filters import SequenceCaseFilter, \
SliceCaseFilter, ExprCaseFilter
from openmdao.lib.casehandlers.query import CaseDataset
from openmdao.lib.casehandlers.csv_post_processor import caseset_query_to_csv
from openmdao.lib.casehandlers.dump_post_processor import caseset_query_dump
from openmdao.lib.casehandlers.html_post_processor import caseset_query_to_html
# The HDF5-backed case handlers depend on extra packages; tolerate their
# absence so the rest of the API stays importable.
try:
    from openmdao.lib.casehandlers.query_hdf5 import CaseDatasetHDF5
    from openmdao.lib.casehandlers.hdf5case import HDF5CaseRecorder
except ImportError:
    pass
|
data/Lispython/human_curl/debug.py
|
"""
human_curl.debug
~~~~~~~~~~~~~~~~~~~~~~~~~~
Debuggging tests for human_curl
:copyright: (c) 2011 by Alexandr Lispython (alex@obout.ru).
:license: BSD, see LICENSE for more details.
"""
import logging
from .tests import *
# Verbose logging for debug runs: DEBUG level, with module, line number,
# process id and thread id in every record (StreamHandler defaults to
# stderr).
logger = logging.getLogger("human_curl")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
formatter = logging.Formatter("%(levelname)s %(asctime)s %(module)s [%(lineno)d] %(process)d %(thread)d | %(message)s ")
handler.setFormatter(formatter)
logger.addHandler(handler)
|
data/PressLabs/zipa/examples/iterator_filter.py
|
from zipa import api_github_com as github
# List the "django" GitHub org's repositories, newest first.  The dict
# used as an index is presumably translated by zipa into URL query
# parameters — verify against the zipa docs.  Python 2 print statement.
repos = github.orgs.django.repos
for repo in repos[{'sort': 'created', 'direction': 'desc'}]:
    print repo.name
|
data/Relrin/aiorest-ws/aiorest_ws/status.py
|
"""
WebSocket status codes and functions for work with them.
For more details check the link below:
https://tools.ietf.org/html/rfc6455
"""
# Public API of this module.
__all__ = (
    'WS_NORMAL', 'WS_GOING_AWAY', 'WS_PROTOCOL_ERROR',
    'WS_DATA_CANNOT_ACCEPT', 'WS_RESERVED', 'WS_NO_STATUS_CODE',
    'WS_CLOSED_ABNORMALLY', 'WS_MESSAGE_NOT_CONSISTENT',
    'WS_MESSAGE_VIOLATE_POLICY', 'WS_MESSAGE_TOO_BIG',
    'WS_SERVER_DIDNT_RETURN_EXTENSIONS', 'WS_UNEXPECTED_CONDITION',
    'WS_FAILURE_TLS',
    'is_not_used', 'is_reserved', 'is_library', 'is_private',
)
# WebSocket close status codes (see RFC 6455 referenced in the module
# docstring).  Names mirror the RFC's close reasons.
WS_NORMAL = 1000
WS_GOING_AWAY = 1001
WS_PROTOCOL_ERROR = 1002
WS_DATA_CANNOT_ACCEPT = 1003
WS_RESERVED = 1004
WS_NO_STATUS_CODE = 1005
WS_CLOSED_ABNORMALLY = 1006
WS_MESSAGE_NOT_CONSISTENT = 1007
WS_MESSAGE_VIOLATE_POLICY = 1008
WS_MESSAGE_TOO_BIG = 1009
WS_SERVER_DIDNT_RETURN_EXTENSIONS = 1010
WS_UNEXPECTED_CONDITION = 1011
WS_FAILURE_TLS = 1015
def is_not_used(code):
    """Return True when ``code`` lies in the unused range (0-999).

    :param code: integer value.
    """
    return code >= 0 and code <= 999
def is_reserved(code):
    """Return True when ``code`` lies in the reserved range (1000-2999).

    :param code: integer value.
    """
    return code >= 1000 and code <= 2999
def is_library(code):
    """Return True when ``code`` lies in the library range (3000-3999).

    :param code: integer value.
    """
    return code >= 3000 and code <= 3999
def is_private(code):
    """Return True when ``code`` lies in the private-use range (4000-4999).

    :param code: integer value.
    """
    return code >= 4000 and code <= 4999
|
data/Littel-Laboratory/homes-dataset-tools/imageKit/train_imagenet.py
|
"""Example code of learning a large scale convnet from ILSVRC2012 dataset.
Prerequisite: To run this example, crop the center of ILSVRC2012 training and
validation images and scale them to 256x256, and make two lists of space-
separated CSV whose first column is full path to image and second column is
zero-origin label (this format is same as that used by Caffe's ImageDataLayer).
"""
from __future__ import print_function
import argparse
import datetime
import json
import multiprocessing
import os
import random
import sys
import threading
import time
import numpy as np
from PIL import Image
import six
import six.moves.cPickle as pickle
from six.moves import queue
import chainer
from chainer import computational_graph
from chainer import cuda
from chainer import optimizers
from chainer import serializers
# Command-line interface: dataset list files, architecture choice and
# training hyper-parameters.
parser = argparse.ArgumentParser(
    description='Learning convnet from ILSVRC2012 dataset')
parser.add_argument('train', help='Path to training image-label list file')
parser.add_argument('val', help='Path to validation image-label list file')
parser.add_argument('--mean', '-m', default='mean.npy',
                    help='Path to the mean file (computed by compute_mean.py)')
parser.add_argument('--arch', '-a', default='nin',
                    help='Convnet architecture \
                    (nin, alex, alexbn, googlenet, googlenetbn)')
parser.add_argument('--batchsize', '-B', type=int, default=32,
                    help='Learning minibatch size')
parser.add_argument('--val_batchsize', '-b', type=int, default=250,
                    help='Validation minibatch size')
parser.add_argument('--epoch', '-E', default=10, type=int,
                    help='Number of epochs to learn')
parser.add_argument('--gpu', '-g', default=-1, type=int,
                    help='GPU ID (negative value indicates CPU)')
parser.add_argument('--loaderjob', '-j', default=20, type=int,
                    help='Number of parallel data loading processes')
parser.add_argument('--root', '-r', default='.',
                    help='Root directory path of image files')
parser.add_argument('--out', '-o', default='model',
                    help='Path to save model on each validation')
parser.add_argument('--outstate', '-s', default='state',
                    help='Path to save optimizer state on each validation')
parser.add_argument('--initmodel', default='',
                    help='Initialize the model from given file')
parser.add_argument('--resume', default='',
                    help='Resume the optimization from snapshot')
args = parser.parse_args()
if args.gpu >= 0:
    cuda.check_cuda_available()
# Array module switch: CuPy on GPU, NumPy on CPU.
xp = cuda.cupy if args.gpu >= 0 else np
# The validation sweep logic assumes the 50000 validation images divide
# evenly into minibatches (see log_result's val_count == 50000 check).
assert 50000 % args.val_batchsize == 0
def load_image_list(path, root):
    """Parse a space-separated "image_path label" list file.

    Returns a list of (joined_image_path, int32_label) tuples; each image
    path from the file is joined onto `root`.  Extra whitespace-separated
    columns after the first two are ignored, as before.
    """
    tuples = []
    # FIX: use a context manager so the file handle is closed promptly
    # (the original `for line in open(path)` leaked it).
    with open(path) as list_file:
        for line in list_file:
            pair = line.strip().split()
            tuples.append((os.path.join(root, pair[0]), np.int32(pair[1])))
    return tuples
train_list = load_image_list(args.train, args.root)
val_list = load_image_list(args.val, args.root)
# Pickled mean image (see compute_mean.py help text above); subtracted
# from every crop in read_image().
mean_image = pickle.load(open(args.mean, 'rb'))
# Select the network architecture by name; each arch lives in its own
# sibling module.
if args.arch == 'nin':
    import nin
    model = nin.NIN()
elif args.arch == 'alex':
    import alex
    model = alex.Alex()
elif args.arch == 'alexbn':
    import alexbn
    model = alexbn.AlexBN()
elif args.arch == 'googlenet':
    import googlenet
    model = googlenet.GoogLeNet()
elif args.arch == 'googlenetbn':
    import googlenetbn
    model = googlenetbn.GoogLeNetBN()
else:
    raise ValueError('Invalid architecture name')
if args.gpu >= 0:
    cuda.get_device(args.gpu).use()
    model.to_gpu()
optimizer = optimizers.MomentumSGD(lr=0.01, momentum=0.9)
optimizer.setup(model)
# Optionally warm-start from a saved model / optimizer snapshot.
if args.initmodel:
    print('Load model from', args.initmodel)
    serializers.load_hdf5(args.initmodel, model)
if args.resume:
    print('Load optimizer state from', args.resume)
    serializers.load_hdf5(args.resume, optimizer)
# Inter-thread queues: data_q feeds minibatches to the trainer (maxsize=1
# throttles the producer); res_q carries results to the logger thread.
data_q = queue.Queue(maxsize=1)
res_q = queue.Queue()
# Margin available for cropping the 256x256 inputs down to model.insize.
cropwidth = 256 - model.insize
def read_image(path, center=False, flip=False):
    """Load an image, crop it to model.insize, subtract the mean, scale.

    `center`: take the center crop instead of a random one.
    `flip`: mirror horizontally with probability 1/2.
    Reads module globals `cropwidth`, `mean_image` and `model`.
    """
    image = np.asarray(Image.open(path)).transpose(2, 0, 1)
    if center:
        # FIX: floor division keeps the offset an int under Python 3;
        # true division yields a float and breaks the slicing below.
        top = left = cropwidth // 2
    else:
        top = random.randint(0, cropwidth - 1)
        left = random.randint(0, cropwidth - 1)
    bottom = model.insize + top
    right = model.insize + left
    image = image[:, top:bottom, left:right].astype(np.float32)
    image -= mean_image[:, top:bottom, left:right]
    image /= 255
    if flip and random.randint(0, 1) == 0:
        return image[:, :, ::-1]
    else:
        return image
def feed_data():
    """Producer thread: stream minibatches into data_q.

    Emits the marker strings 'train' / 'val' to switch the consumer's
    mode, (x_batch, y_batch) ndarray pairs for the data itself, and 'end'
    when all epochs are done.  Images are decoded in parallel via a
    multiprocessing pool; every 1000 training batches a full validation
    sweep is interleaved.
    """
    i = 0
    count = 0
    x_batch = np.ndarray(
        (args.batchsize, 3, model.insize, model.insize), dtype=np.float32)
    y_batch = np.ndarray((args.batchsize,), dtype=np.int32)
    val_x_batch = np.ndarray(
        (args.val_batchsize, 3, model.insize, model.insize), dtype=np.float32)
    val_y_batch = np.ndarray((args.val_batchsize,), dtype=np.int32)
    batch_pool = [None] * args.batchsize
    val_batch_pool = [None] * args.val_batchsize
    pool = multiprocessing.Pool(args.loaderjob)
    data_q.put('train')
    for epoch in six.moves.range(1, 1 + args.epoch):
        print('epoch', epoch, file=sys.stderr)
        print('learning rate', optimizer.lr, file=sys.stderr)
        perm = np.random.permutation(len(train_list))
        for idx in perm:
            path, label = train_list[idx]
            # Training images: random crop + random flip, loaded async.
            batch_pool[i] = pool.apply_async(read_image, (path, False, True))
            y_batch[i] = label
            i += 1
            if i == args.batchsize:
                for j, x in enumerate(batch_pool):
                    x_batch[j] = x.get()
                # copy() so the consumer owns stable buffers while these
                # arrays are refilled for the next batch.
                data_q.put((x_batch.copy(), y_batch.copy()))
                i = 0
                count += 1
                if count % 1000 == 0:
                    data_q.put('val')
                    j = 0
                    for path, label in val_list:
                        # Validation images: center crop, no flip.
                        val_batch_pool[j] = pool.apply_async(
                            read_image, (path, True, False))
                        val_y_batch[j] = label
                        j += 1
                        if j == args.val_batchsize:
                            for k, x in enumerate(val_batch_pool):
                                val_x_batch[k] = x.get()
                            data_q.put((val_x_batch.copy(), val_y_batch.copy()))
                            j = 0
                    data_q.put('train')
        # Exponential learning-rate decay once per epoch.
        optimizer.lr *= 0.97
    pool.close()
    pool.join()
    data_q.put('end')
def log_result():
    """Consumer thread: aggregate and report results from res_q.

    Understands the marker strings 'train' / 'val' / 'end'; everything
    else is a (loss, accuracy) pair.  Live progress goes to stderr; JSON
    summaries (mean loss/error) go to stdout every 1000 training updates
    and after each full 50000-image validation sweep.
    """
    train_count = 0
    train_cur_loss = 0
    train_cur_accuracy = 0
    begin_at = time.time()
    val_begin_at = None
    while True:
        result = res_q.get()
        if result == 'end':
            print(file=sys.stderr)
            break
        elif result == 'train':
            print(file=sys.stderr)
            train = True
            if val_begin_at is not None:
                # Exclude time spent validating from the training
                # throughput measurement.
                begin_at += time.time() - val_begin_at
                val_begin_at = None
            continue
        elif result == 'val':
            print(file=sys.stderr)
            train = False
            val_count = val_loss = val_accuracy = 0
            val_begin_at = time.time()
            continue
        loss, accuracy = result
        if train:
            train_count += 1
            duration = time.time() - begin_at
            throughput = train_count * args.batchsize / duration
            sys.stderr.write(
                '\rtrain {} updates ({} samples) time: {} ({} images/sec)'
                .format(train_count, train_count * args.batchsize,
                        datetime.timedelta(seconds=duration), throughput))
            train_cur_loss += loss
            train_cur_accuracy += accuracy
            if train_count % 1000 == 0:
                # Rolling mean over the last 1000 updates.
                mean_loss = train_cur_loss / 1000
                mean_error = 1 - train_cur_accuracy / 1000
                print(file=sys.stderr)
                print(json.dumps({'type': 'train', 'iteration': train_count,
                                  'error': mean_error, 'loss': mean_loss}))
                sys.stdout.flush()
                train_cur_loss = 0
                train_cur_accuracy = 0
        else:
            val_count += args.val_batchsize
            duration = time.time() - val_begin_at
            throughput = val_count / duration
            sys.stderr.write(
                '\rval {} batches ({} samples) time: {} ({} images/sec)'
                .format(val_count / args.val_batchsize, val_count,
                        datetime.timedelta(seconds=duration), throughput))
            val_loss += loss
            val_accuracy += accuracy
            if val_count == 50000:
                # Per-batch averages were accumulated; rescale to a mean
                # over all 50000 validation images.
                mean_loss = val_loss * args.val_batchsize / 50000
                mean_error = 1 - val_accuracy * args.val_batchsize / 50000
                print(file=sys.stderr)
                print(json.dumps({'type': 'val', 'iteration': train_count,
                                  'error': mean_error, 'loss': mean_loss}))
                sys.stdout.flush()
def train_loop():
    """Main-thread consumer: pull minibatches from data_q and train.

    Marker strings flip the model between train and eval mode; 'val'
    additionally snapshots the model and optimizer state.  Results
    (loss, accuracy) are forwarded to res_q for log_result().
    """
    graph_generated = False
    while True:
        while data_q.empty():
            time.sleep(0.1)
        inp = data_q.get()
        if inp == 'end':
            res_q.put('end')
            break
        elif inp == 'train':
            res_q.put('train')
            model.train = True
            continue
        elif inp == 'val':
            res_q.put('val')
            # Snapshot before switching to evaluation.
            serializers.save_hdf5(args.out, model)
            serializers.save_hdf5(args.outstate, optimizer)
            model.train = False
            continue
        volatile = 'off' if model.train else 'on'
        x = chainer.Variable(xp.asarray(inp[0]), volatile=volatile)
        t = chainer.Variable(xp.asarray(inp[1]), volatile=volatile)
        if model.train:
            optimizer.update(model, x, t)
            if not graph_generated:
                # Dump the computational graph once, after the first update.
                with open('graph.dot', 'w') as o:
                    o.write(computational_graph.build_computational_graph(
                        (model.loss,)).dump())
                print('generated graph', file=sys.stderr)
                graph_generated = True
        else:
            model(x, t)
        res_q.put((float(model.loss.data), float(model.accuracy.data)))
        del x, t
# Wire the pipeline together: the feeder thread produces batches, the
# main thread trains (train_loop), and the logger thread reports results.
feeder = threading.Thread(target=feed_data)
feeder.daemon = True
feeder.start()
logger = threading.Thread(target=log_result)
logger.daemon = True
logger.start()
train_loop()
feeder.join()
logger.join()
# Save the final model and optimizer state.
serializers.save_hdf5(args.out, model)
serializers.save_hdf5(args.outstate, optimizer)
|
data/WatchPeopleCode/WatchPeopleCode/wpc/forms.py
|
from wpc.models import Subscriber, Streamer, YoutubeChannel, YoutubeStream
from wpc.flask_utils import get_or_create
from utils import youtube_video_id
from flask_wtf import Form
from wtforms import StringField, SubmitField, validators, TextAreaField
from wtforms.validators import ValidationError
from flask.ext.login import current_user
from urlparse import urlparse
import re
def validate_email_unique(form, field):
    """WTForms validator: reject email addresses already subscribed."""
    already_known = Subscriber.query.filter_by(email=field.data).first()
    if already_known is not None:
        raise ValidationError('This email is already in the database.')
class SubscribeForm(Form):
    """Newsletter sign-up form; rejects duplicate subscriber emails."""
    email = StringField("Email address", [validators.DataRequired(), validators.Email(), validate_email_unique])
    submit_button = SubmitField('Subscribe!')
class GLMSubscribeForm(Form):
    """Sign-up form variant without the uniqueness check."""
    email = StringField("Email address", [validators.DataRequired(), validators.Email()])
    submit_button = SubmitField('Subscribe')
class DashboardEmailForm(Form):
    """Dashboard form for updating a streamer's contact email."""
    email = StringField("Email address", [validators.DataRequired(), validators.Email()])
    submit_button = SubmitField('Update')
    def prepopulate(self, streamer):
        # Pre-fill with the email of the streamer's linked Subscriber row,
        # when one exists.
        if streamer.as_subscriber:
            self.email.data = streamer.as_subscriber.email
class DashboardAddVideoForm(Form):
    """Dashboard form for adding a YouTube video to the archive."""
    link = StringField("YouTube link", [validators.DataRequired()])
    submit_button = SubmitField('Add video to the archive')
    def validate_link(form, field):
        # WTForms inline validator for the `link` field.
        ytid = youtube_video_id(field.data)
        if not ytid:
            raise ValidationError("Invalid YouTube URL")
        # Reject videos already claimed by another streamer.
        existing_stream = YoutubeStream.query.filter_by(ytid=ytid).first()
        if existing_stream and existing_stream.streamer:
            raise ValidationError("This video is already added by {}".format(existing_stream.streamer.reddit_username))
class IdeaForm(Form):
    """Free-form stream idea submission form."""
    description = TextAreaField("Streamers need your ideas. What kind of streams would you like to see here?", [validators.DataRequired()])
    submit_button = SubmitField('Submit your idea')
class EditStreamTitleForm(Form):
    """Form for editing a stream's title (max 200 characters)."""
    title = StringField("Title", [validators.Length(max=200)])
    submit_button = SubmitField('Submit')
class RtmpRedirectForm(Form):
    """Form with three RTMP redirect target fields.

    FIX: the field labels were truncated in this copy of the source
    (everything from '#' onward was lost, leaving unterminated string
    literals).  Restored here as "RTMP Redirect #N" to match the
    rtmp_redirect_N attribute names — confirm against upstream.
    """
    rtmp_redirect_1 = StringField("RTMP Redirect #1")
    rtmp_redirect_2 = StringField("RTMP Redirect #2")
    rtmp_redirect_3 = StringField("RTMP Redirect #3")
    submit_button = SubmitField('Save')
    def prepopulate(self, streamer):
        # Copy rtmp_redirect_1..3 from the streamer model into the form.
        for rid in xrange(1, 4):
            attrname = 'rtmp_redirect_{}'.format(rid)
            getattr(self, attrname).data = getattr(streamer, attrname)
class EditStreamerInfoForm(Form):
    """Streamer profile form: YouTube/Twitch channels and free-form info.

    The channel fields accept either a bare channel name/id or a full
    URL; the *_extract helpers normalize the input and return None when
    it is invalid.
    """
    youtube_channel = StringField("YouTube channel", [validators.Length(max=100)])
    twitch_channel = StringField("Twitch channel", [validators.Length(max=100)])
    info = TextAreaField("Info", [validators.Length(max=5000)])
    submit_button = SubmitField('Submit')
    def twitch_channel_extract(self):
        """Return the normalized Twitch channel name, or None if invalid.

        Examples:
            - channel_name
            - https://www.twitch.tv/channel_name
            - something_wrong?!twitch.tv/channel_name
        """
        string = self.twitch_channel.data.strip()
        position = string.find('twitch.tv')
        if position != -1:
            path = urlparse(string[position:]).path.split('/')
            if len(path) < 2:
                return None
            string = path[1]
        # Twitch names: word characters only, at most 25 chars.
        return string if len(string) <= 25 and re.match(r'\w*$', string) else None
    def youtube_channel_extract(self):
        """Return the normalized YouTube channel id, or None if invalid.

        Examples:
            - UCJAVLOqT6Mgn_YD5lAxxkUA
            - https://www.youtube.com/channel/UCJAVLOqT6Mgn_YD5lAxxkUA
            - something_wrong}[youtube.com/channel/UCJAVLOqT6Mgn_YD5lAxxkUA
        """
        string = self.youtube_channel.data.strip()
        position = string.find('youtube.com')
        if position != -1:
            path = urlparse(string[position:]).path.split('/')
            if len(path) < 3 or path[1] != "channel":
                return None
            else:
                string = path[2]
        # Channel ids are exactly 24 chars; empty string clears the field.
        return string if len(string) == 24 and re.match(r'[\w-]*$', string) or string == '' else None
    def validate_youtube_channel(form, field):
        yc = form.youtube_channel_extract()
        if yc is None:
            raise ValidationError("This field should contain a valid YouTube channel.")
        streamer = get_or_create(YoutubeChannel, channel_id=yc).streamer
        if streamer and streamer.checked and streamer != current_user:
            raise ValidationError("There is another user with this channel. If it is your channel, please message about that to /r/WatchPeoplecode moderators.")
    # FIX: was misspelled `validate_twith_channel`; WTForms only invokes
    # inline validators named validate_<fieldname>, so the misspelled
    # validator never ran for the `twitch_channel` field.
    def validate_twitch_channel(form, field):
        tc = form.twitch_channel_extract()
        if tc is None:
            raise ValidationError('This field should contain a valid Twitch channel.')
        streamer = Streamer.query.filter_by(twitch_channel=tc).first()
        if streamer and streamer.checked and streamer != current_user:
            raise ValidationError("There is another user with this channel. If it is your channel, please message about that to /r/WatchPeoplecode moderators.")
class SearchForm(Form):
    """Video archive search box."""
    query = StringField("Search")
    search_button = SubmitField('Search video archive')
|
data/Jackeriss/Email_My_PC/shell/demos/servers/column_provider.py
|
import sys, os, stat
import pythoncom
from win32com.shell import shell, shellcon
import commctrl
import winerror
from win32com.server.util import wrap
from pywintypes import IID
IPersist_Methods = ["GetClassID"]
IColumnProvider_Methods = IPersist_Methods + \
["Initialize", "GetColumnInfo", "GetItemData"]
class ColumnProvider:
    """Sample shell column provider (Python 2 / pywin32 demo).

    Adds two Explorer detail columns showing the sizes of a .py file's
    compiled .pyc and .pyo siblings.
    """
    _reg_progid_ = "Python.ShellExtension.ColumnProvider"
    _reg_desc_ = "Python Sample Shell Extension (Column Provider)"
    _reg_clsid_ = IID("{0F14101A-E05E-4070-BD54-83DFA58C3D68}")
    _com_interfaces_ = [pythoncom.IID_IPersist,
                        shell.IID_IColumnProvider,
                        ]
    _public_methods_ = IColumnProvider_Methods
    def GetClassID(self):
        # IPersist: report our CLSID.
        return self._reg_clsid_
    def Initialize(self, colInit):
        flags, reserved, name = colInit
        print "ColumnProvider initializing for file", name
    def GetColumnInfo(self, index):
        # Describe column `index`; return None past the last column.
        if index in [0,1]:
            if index==0:
                ext = ".pyc"
            else:
                ext = ".pyo"
            title = ext + " size"
            description = "Size of compiled %s file" % ext
            # Column id = (our CLSID, column index).
            col_id = (self._reg_clsid_,
                      index)
            col_info = (
                col_id,
                pythoncom.VT_I4,
                commctrl.LVCFMT_RIGHT,
                20,
                shellcon.SHCOLSTATE_TYPE_INT | \
                shellcon.SHCOLSTATE_SECONDARYUI,
                title,
                description)
            return col_info
        return None
    def GetItemData(self, colid, colData):
        # Return the value of column `colid` for the item in `colData`.
        fmt_id, pid = colid
        # NOTE(review): no-op comparison below — probably intended as
        # `assert fmt_id == self._reg_clsid_`; confirm before changing.
        fmt_id==self._reg_clsid_
        flags, attr, reserved, ext, name = colData
        if ext.lower() not in [".py", ".pyw"]:
            return None
        if pid==0:
            ext = ".pyc"
        else:
            ext = ".pyo"
        check_file = os.path.splitext(name)[0] + ext
        try:
            st = os.stat(check_file)
            return st[stat.ST_SIZE]
        except OSError:
            # Compiled sibling does not exist; show no value.
            return None
def DllRegisterServer():
    """Register the provider's CLSID under the shell's ColumnHandlers key."""
    import _winreg
    key = _winreg.CreateKey(_winreg.HKEY_CLASSES_ROOT,
                            "Folder\\ShellEx\\ColumnHandlers\\" + \
                            str(ColumnProvider._reg_clsid_ ))
    _winreg.SetValueEx(key, None, 0, _winreg.REG_SZ, ColumnProvider._reg_desc_)
    print ColumnProvider._reg_desc_, "registration complete."
def DllUnregisterServer():
    """Remove the ColumnHandlers registration; tolerate it being absent."""
    import _winreg
    try:
        key = _winreg.DeleteKey(_winreg.HKEY_CLASSES_ROOT,
                                "Folder\\ShellEx\\ColumnHandlers\\" + \
                                str(ColumnProvider._reg_clsid_) )
    except WindowsError, details:
        import errno
        # Already unregistered is fine; re-raise anything else.
        if details.errno != errno.ENOENT:
            raise
    print ColumnProvider._reg_desc_, "unregistration complete."
# Register/unregister the COM server when run as a script.
if __name__=='__main__':
    from win32com.server import register
    register.UseCommandLine(ColumnProvider,
                            finalize_register = DllRegisterServer,
                            finalize_unregister = DllUnregisterServer)
|
data/TheTorProject/gettor/process_tweets.py
|
import sys
import logging
import gettor.twitter
def main():
    """Configure file logging and run the GetTor Twitter bot.

    Known bot error categories get their own log lines; anything else is
    reported as unexpected.
    """
    logging.basicConfig(
        format='[%(levelname)s] %(asctime)s - %(message)s',
        datefmt="%Y-%m-%d",
        filename='/home/ilv/Proyectos/tor/gettor/log/process_tweets.log',
        level='DEBUG',
    )
    logging.debug("Starting bot")
    try:
        gettor.twitter.TwitterBot().start()
    except gettor.twitter.ConfigError as e:
        logging.error("Configuration error: %s" % str(e))
    except gettor.twitter.InternalError as e:
        logging.error("Core module not working: %s" % str(e))
    except Exception as e:
        logging.error("Unexpected error: %s" % str(e))

if __name__ == '__main__':
    main()
|
data/QuantEcon/QuantEcon.py/quantecon/models/__init__.py
|
raise ImportError("The code previously contained in the quantecon.models subpackage has been migrated to the QuantEcon.applications (https://github.com/QuantEcon/QuantEcon.applications) repo")
|
data/HearthSim/python-unitypack/unitypack/engine/renderer.py
|
from enum import IntEnum
from .component import Component
from .object import field
class ReflectionProbeUsage(IntEnum):
    # Serialized values of a renderer's m_ReflectionProbeUsage field.
    Off = 0
    BlendProbes = 1
    BlendProbesAndSkybox = 2
    Simple = 3
class ShadowCastingMode(IntEnum):
    # Serialized values of a renderer's m_CastShadows field.
    Off = 0
    On = 1
    TwoSided = 2
    ShadowsOnly = 3
class Renderer(Component):
    """Base renderer component.

    Each class attribute maps a serialized "m_*" field name to a Python
    attribute via the engine `field` helper; a second argument, when
    present, is the type the raw value is converted to.
    """
    enabled = field("m_Enabled", bool)
    lightmap_index = field("m_LightmapIndex")
    materials = field("m_Materials")
    probe_anchor = field("m_ProbeAnchor")
    receive_shadows = field("m_ReceiveShadows", bool)
    reflection_probe_usage = field("m_ReflectionProbeUsage", ReflectionProbeUsage)
    shadow_casting_mode = field("m_CastShadows", ShadowCastingMode)
    sorting_layer_id = field("m_SortingLayerID")
    sorting_order = field("m_SortingOrder")
    use_light_probes = field("m_UseLightProbes", bool)
    lightmap_index_dynamic = field("m_LightmapIndexDynamic")
    lightmap_tiling_offset = field("m_LightmapTilingOffset")
    lightmap_tiling_offset_dynamic = field("m_LightmapTilingOffsetDynamic")
    static_batch_root = field("m_StaticBatchRoot")
    subset_indices = field("m_SubsetIndices")
    @property
    def material(self):
        # Convenience accessor for the first material.
        return self.materials[0]
class ParticleSystemRenderMode(IntEnum):
    # Serialized values of a particle system renderer's m_RenderMode field.
    Billboard = 0
    Stretch = 1
    HorizontalBillboard = 2
    VerticalBillboard = 3
    Mesh = 4
class ParticleSystemSortMode(IntEnum):
    # Serialized values of a particle system renderer's m_SortMode field.
    # None_ avoids shadowing the builtin None.
    None_ = 0
    Distance = 1
    OldestInFront = 2
    YoungestInFront = 3
class MeshRenderer(Component):
    """Mesh renderer component; no extra serialized fields of its own."""
    pass
class ParticleRenderer(Renderer):
    """Legacy particle renderer fields, on top of the base Renderer set."""
    camera_velocity_scale = field("m_CameraVelocityScale")
    length_scale = field("m_LengthScale")
    max_particle_size = field("m_MaxParticleSize")
    velocity_scale = field("m_VelocityScale")
    stretch_particles = field("m_StretchParticles")
    uv_animation = field("UV Animation")
class ParticleSystemRenderer(Renderer):
    """Particle system renderer fields, on top of the base Renderer set."""
    camera_velocity_scale = field("m_CameraVelocityScale")
    length_scale = field("m_LengthScale")
    max_particle_size = field("m_MaxParticleSize")
    # Up to four meshes are serialized for mesh-mode rendering.
    mesh = field("m_Mesh")
    mesh1 = field("m_Mesh1")
    mesh2 = field("m_Mesh2")
    mesh3 = field("m_Mesh3")
    normal_direction = field("m_NormalDirection")
    render_mode = field("m_RenderMode", ParticleSystemRenderMode)
    sort_mode = field("m_SortMode", ParticleSystemSortMode)
    sorting_fudge = field("m_SortingFudge")
    velocity_scale = field("m_VelocityScale")
|
data/Sage-Bionetworks/synapsePythonClient/synapseclient/team.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .dict_object import DictObject
class UserProfile(DictObject):
    """Dict-backed wrapper around a Synapse user profile record."""
    def __init__(self, **kwargs):
        super(UserProfile, self).__init__(kwargs)
class UserGroupHeader(DictObject):
    """Dict-backed wrapper around a Synapse user/group header record."""
    def __init__(self, **kwargs):
        super(UserGroupHeader, self).__init__(kwargs)
class Team(DictObject):
    """
    Represent a `Synapse Team <http://rest.synapse.org/org/sagebionetworks/repo/model/Team.html>`_. User definable fields are:

    :param icon: fileHandleId for icon image of the Team
    :param description: A short description of this Team.
    :param name: The name of the Team.
    :param canPublicJoin: true for teams which members can join without an invitation or approval
    """
    def __init__(self, **kwargs):
        super(Team, self).__init__(kwargs)
    # REST endpoint helpers; presumably consumed by the client's generic
    # CRUD machinery — confirm against synapseclient usage.
    @classmethod
    def getURI(cls, id):
        # Fetch a team by id.
        return '/team/%s' %id
    def postURI(self):
        return '/team'
    def putURI(self):
        return '/team'
    def deleteURI(self):
        return '/team/%s' %self.id
    def getACLURI(self):
        # Access-control list for this team.
        return '/team/%s/acl' %self.id
    def putACLURI(self):
        return '/team/acl'
class TeamMember(DictObject):
    """Membership record linking a user to a Team.

    A supplied 'member' dict is wrapped in a UserGroupHeader.
    """
    def __init__(self, **kwargs):
        if 'member' in kwargs:
            kwargs['member'] = UserGroupHeader(**kwargs['member'])
        super(TeamMember, self).__init__(kwargs)
|
data/SteefH/django-pagination-plus/setup.py
|
from setuptools import setup, find_packages
import os
def read(fname):
    """Return the text of `fname`, resolved relative to this setup.py.

    FIX: use a context manager so the file handle is closed promptly
    (the original left it open).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Long description for PyPI comes straight from the README.
README = read('README.rst')
setup(
    name = "django-pagination-plus",
    packages = find_packages(),
    version = "0.0.3",
    author = "Stefan van der Haven",
    author_email = "stefan@steeffie.net",
    url = "https://github.com/SteefH/django-pagination-plus",
    description = "Utilities for pagination in Django templates",
    long_description = README,
    classifiers = [
        'Development Status :: 4 - Beta',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP',
    ],
    keywords = ['django', 'pagination'],
)
|
data/adieu/allbuttonspressed/minicms/context_processors.py
|
from django.conf import settings
def cms(request):
    """Django context processor exposing site identity settings.

    `request` is unused but required by the context-processor signature.
    """
    context = {
        'site_name': settings.SITE_NAME,
        'site_copyright': settings.SITE_COPYRIGHT,
    }
    return context
|
data/PyTables/PyTables/tables/node.py
|
"""PyTables nodes."""
from __future__ import absolute_import
import warnings
import functools
from .registry import class_name_dict, class_id_dict
from .exceptions import (ClosedNodeError, NodeError, UndoRedoWarning,
PerformanceWarning)
from .path import join_path, split_path, isvisiblepath
from .utils import lazyattr
from .undoredo import move_to_shadow
from .attributeset import AttributeSet, NotLoggedAttributeSet
import six
__docformat__ = 'reStructuredText'
"""The format of documentation strings in this module."""
def _closedrepr(oldmethod):
"""Decorate string representation method to handle closed nodes.
If the node is closed, a string like this is returned::
<closed MODULE.CLASS at ADDRESS>
instead of calling `oldmethod` and returning its result.
"""
@functools.wraps(oldmethod)
def newmethod(self):
if not self._v_isopen:
cmod = self.__class__.__module__
cname = self.__class__.__name__
addr = hex(id(self))
return '<closed %s.%s at %s>' % (cmod, cname, addr)
return oldmethod(self)
return newmethod
class MetaNode(type):
    """Node metaclass.

    This metaclass ensures that their instance classes get registered
    into several dictionaries (namely the `tables.utils.class_name_dict`
    class name dictionary and the `tables.utils.class_id_dict` class
    identifier dictionary).

    It also adds sanity checks to some methods:

      * Check that the node is open when calling string representation
        and provide a default string if so.
    """
    def __new__(class_, name, bases, dict_):
        # Wrap __str__/__repr__ so closed nodes render a placeholder
        # instead of touching torn-down state.
        for mname in ['__str__', '__repr__']:
            if mname in dict_:
                dict_[mname] = _closedrepr(dict_[mname])
        return type.__new__(class_, name, bases, dict_)
    def __init__(class_, name, bases, dict_):
        super(MetaNode, class_).__init__(name, bases, dict_)
        # Register the class under its Python name.
        class_name_dict[class_.__name__] = class_
        # Register the class under its HDF5 class id, but only when the
        # id is its own (not merely inherited from a base class).
        cid = getattr(class_, '_c_classid', None)
        if cid is not None:
            for base in bases:
                pcid = getattr(base, '_c_classid', None)
                if pcid == cid:
                    break
            else:
                class_id_dict[cid] = class_
class Node(six.with_metaclass(MetaNode, object)):
"""Abstract base class for all PyTables nodes.
This is the base class for *all* nodes in a PyTables hierarchy. It is an
abstract class, i.e. it may not be directly instantiated; however, every
node in the hierarchy is an instance of this class.
A PyTables node is always hosted in a PyTables *file*, under a *parent
group*, at a certain *depth* in the node hierarchy. A node knows its own
*name* in the parent group and its own *path name* in the file.
All the previous information is location-dependent, i.e. it may change when
moving or renaming a node in the hierarchy. A node also has
location-independent information, such as its *HDF5 object identifier* and
its *attribute set*.
This class gathers the operations and attributes (both location-dependent
and independent) which are common to all PyTables nodes, whatever their
type is. Nonetheless, due to natural naming restrictions, the names of all
of these members start with a reserved prefix (see the Group class
in :ref:`GroupClassDescr`).
Sub-classes with no children (e.g. *leaf nodes*) may define new methods,
attributes and properties to avoid natural naming restrictions. For
instance, _v_attrs may be shortened to attrs and _f_rename to
rename. However, the original methods and attributes should still be
available.
.. rubric:: Node attributes
.. attribute:: _v_depth
The depth of this node in the tree (an non-negative integer value).
.. attribute:: _v_file
The hosting File instance (see :ref:`FileClassDescr`).
.. attribute:: _v_name
The name of this node in its parent group (a string).
.. attribute:: _v_pathname
The path of this node in the tree (a string).
.. attribute:: _v_objectid
A node identifier (may change from run to run).
.. versionchanged:: 3.0
The *_v_objectID* attribute has been renamed into *_v_object_id*.
"""
_AttributeSet = AttributeSet
def _g_getparent(self):
"The parent :class:`Group` instance"
(parentpath, nodename) = split_path(self._v_pathname)
return self._v_file._get_node(parentpath)
_v_parent = property(_g_getparent)
@lazyattr
def _v_attrs(self):
"""The associated `AttributeSet` instance.
See Also
--------
tables.attributeset.AttributeSet : container for the HDF5 attributes
"""
return self._AttributeSet(self)
def _g_gettitle(self):
"A description of this node. A shorthand for TITLE attribute."
if hasattr(self._v_attrs, 'TITLE'):
return self._v_attrs.TITLE
else:
return ''
def _g_settitle(self, title):
self._v_attrs.TITLE = title
_v_title = property(_g_gettitle, _g_settitle)
_v_isopen = False
"""Whehter this node is open or not."""
    def __init__(self, parentnode, name, _log=True):
        # `parentnode` may be a SoftLink; dereference it so the node
        # hangs from the real group the link points to.
        if isinstance(parentnode, class_name_dict['SoftLink']):
            parentnode = parentnode.dereference()
        self._v_file = None
        """The hosting File instance (see :ref:`FileClassDescr`)."""
        self._v_isopen = False
        """Whether this node is open or not."""
        self._v_pathname = None
        """The path of this node in the tree (a string)."""
        self._v_name = None
        """The name of this node in its parent group (a string)."""
        self._v_depth = None
        """The depth of this node in the tree (an non-negative integer value).
        """
        self._v_maxtreedepth = parentnode._v_file.params['MAX_TREE_DEPTH']
        """Maximum tree depth before warning the user.
        .. versionchanged:: 3.0
           Renamed into *_v_maxtreedepth* from *_v_maxTreeDepth*.
        """
        self._v__deleting = False
        """Is the node being deleted?"""
        self._v_objectid = None
        """A node identifier (may change from run to run).
        .. versionchanged:: 3.0
           The *_v_objectID* attribute has been renamed into *_v_objectid*.
        """
        # ``_v_new`` is set by the concrete subclass before calling us:
        # True when creating a brand-new node, False when opening one.
        validate = new = self._v_new
        self._g_check_group(parentnode)
        parentnode._g_check_open()
        file_ = parentnode._v_file
        # Creating a node requires a writable file.
        if new:
            file_._check_writable()
        if new:
            # Only new nodes need to be referenced in the parent; opened
            # nodes are already known by their parent group.
            parentnode._g_refnode(self, name, validate)
        self._g_set_location(parentnode, name)
        try:
            # Low-level (hdf5extension) set-up, then create/open the
            # actual HDF5 object and remember its identifier.
            self._g_new(parentnode, name, init=True)
            if new:
                self._v_objectid = self._g_create()
            else:
                self._v_objectid = self._g_open()
            # Log creation for undo/redo only when requested and enabled.
            if new and _log and file_.is_undo_enabled():
                self._g_log_create()
            # Allow subclasses to run extra operations after init.
            self._g_post_init_hook()
        except:
            # NOTE: bare except is deliberate here — any failure must
            # close the node to undo every registration made so far,
            # then re-raise; relying on __del__ would leave the node in
            # an inconsistent state while the exception propagates.
            self._f_close()
            raise
def _g_log_create(self):
self._v_file._log('CREATE', self._v_pathname)
    def __del__(self):
        # Finalizer: nothing to do for already-closed nodes.
        if not self._v_isopen:
            return
        # Mark the node as being deleted so cache/registry cleanup in
        # _g_del_location() is skipped (see self._v__deleting checks).
        self._v__deleting = True
        try:
            # Ask the node manager to drop us without complaining about
            # unregistered nodes (we may already be partially evicted).
            node_manager = self._v_file._node_manager
            node_manager.drop_node(self, check_unregistered=False)
        finally:
            # drop_node() may or may not have closed us; make sure.
            if self._v_isopen:
                self._v__deleting = True
                self._f_close()
def _g_pre_kill_hook(self):
"""Code to be called before killing the node."""
pass
def _g_create(self):
"""Create a new HDF5 node and return its object identifier."""
raise NotImplementedError
def _g_open(self):
"""Open an existing HDF5 node and return its object identifier."""
raise NotImplementedError
def _g_check_open(self):
"""Check that the node is open.
If the node is closed, a `ClosedNodeError` is raised.
"""
if not self._v_isopen:
raise ClosedNodeError("the node object is closed")
assert self._v_file.isopen, "found an open node in a closed file"
    def _g_set_location(self, parentnode, name):
        """Set location-dependent attributes.
        Sets the location-dependent attributes of this node to reflect
        that it is placed under the specified `parentnode`, with the
        specified `name`.
        This also triggers the insertion of file references to this
        node. If the maximum recommended tree depth is exceeded, a
        `PerformanceWarning` is issued.
        """
        file_ = parentnode._v_file
        parentdepth = parentnode._v_depth
        self._v_file = file_
        self._v_isopen = True
        root_uep = file_.root_uep
        if name.startswith(root_uep):
            # `name` is an absolute path under the root user entry point
            # (only possible directly under the root, hence the assert);
            # strip the non-'/' UEP prefix to get the in-tree pathname.
            assert parentdepth == 0
            if root_uep == "/":
                self._v_pathname = name
            else:
                self._v_pathname = name[len(root_uep):]
            _, self._v_name = split_path(name)
            self._v_depth = name.count("/") - root_uep.count("/") + 1
        else:
            # Plain child name: build the pathname from the parent's.
            self._v_name = name
            self._v_pathname = join_path(parentnode._v_pathname, name)
            self._v_depth = parentdepth + 1
        if parentdepth >= self._v_maxtreedepth:
            warnings.warn("""\
node ``%s`` is exceeding the recommended maximum depth (%d);\
be ready to see PyTables asking for *lots* of memory and possibly slow I/O"""
                          % (self._v_pathname, self._v_maxtreedepth),
                          PerformanceWarning)
        # Register in the node cache (the root node is never cached).
        if self._v_pathname != '/':
            file_._node_manager.cache_node(self, self._v_pathname)
    def _g_update_location(self, newparentpath):
        """Update location-dependent attributes.
        Updates location data when an ancestor node has changed its
        location in the hierarchy to `newparentpath`.  In fact, this
        method is expected to be called by an ancestor of this node.
        This also triggers the update of file references to this node.
        If the maximum recommended node depth is exceeded, a
        `PerformanceWarning` is issued.  This warning is assured to be
        unique.
        """
        oldpath = self._v_pathname
        newpath = join_path(newparentpath, self._v_name)
        # Depth equals the number of '/' separators in the full path.
        newdepth = newpath.count('/')
        self._v_pathname = newpath
        self._v_depth = newdepth
        if newdepth > self._v_maxtreedepth:
            warnings.warn("""\
moved descendent node is exceeding the recommended maximum depth (%d);\
be ready to see PyTables asking for *lots* of memory and possibly slow I/O"""
                          % (self._v_maxtreedepth,), PerformanceWarning)
        # Re-key the cached node under its new path.
        node_manager = self._v_file._node_manager
        node_manager.rename_node(oldpath, newpath)
        # Tell dependent objects about the new location of this node.
        self._g_update_dependent()
def _g_del_location(self):
"""Clear location-dependent attributes.
This also triggers the removal of file references to this node.
"""
node_manager = self._v_file._node_manager
pathname = self._v_pathname
if not self._v__deleting:
node_manager.drop_from_cache(pathname)
node_manager.registry.pop(pathname, None)
self._v_file = None
self._v_isopen = False
self._v_pathname = None
self._v_name = None
self._v_depth = None
def _g_post_init_hook(self):
"""Code to be run after node creation and before creation logging."""
pass
def _g_update_dependent(self):
"""Update dependent objects after a location change.
All dependent objects (but not nodes!) referencing this node
must be updated here.
"""
if '_v_attrs' in self.__dict__:
self._v_attrs._g_update_node_location(self)
def _f_close(self):
"""Close this node in the tree.
This releases all resources held by the node, so it should not
be used again. On nodes with data, it may be flushed to disk.
You should not need to close nodes manually because they are
automatically opened/closed when they are loaded/evicted from
the integrated LRU cache.
"""
if not self._v_isopen:
return
myDict = self.__dict__
if '_v_attrs' in myDict:
self._v_attrs._g_close()
self._g_del_location()
myDict.clear()
self._v_isopen = False
def _g_remove(self, recursive, force):
"""Remove this node from the hierarchy.
If the node has children, recursive removal must be stated by
giving `recursive` a true value; otherwise, a `NodeError` will
be raised.
If `force` is set to true, the node will be removed no matter it
has children or not (useful for deleting hard links).
It does not log the change.
"""
parent = self._v_parent
parent._g_unrefnode(self._v_name)
self._f_close()
self._g_delete(parent)
def _f_remove(self, recursive=False, force=False):
"""Remove this node from the hierarchy.
If the node has children, recursive removal must be stated by giving
recursive a true value; otherwise, a NodeError will be raised.
If the node is a link to a Group object, and you are sure that you want
to delete it, you can do this by setting the force flag to true.
"""
self._g_check_open()
file_ = self._v_file
file_._check_writable()
if file_.is_undo_enabled():
self._g_remove_and_log(recursive, force)
else:
self._g_remove(recursive, force)
def _g_remove_and_log(self, recursive, force):
file_ = self._v_file
oldpathname = self._v_pathname
file_._log('REMOVE', oldpathname)
move_to_shadow(file_, oldpathname)
    def _g_move(self, newparent, newname):
        """Move this node in the hierarchy.
        Moves the node into the given `newparent`, with the given
        `newname`.
        It does not log the change.
        """
        oldparent = self._v_parent
        oldname = self._v_name
        oldpathname = self._v_pathname
        # Order matters below:
        # (a) re-reference in the hierarchy (new parent first),
        newparent._g_refnode(self, newname)
        oldparent._g_unrefnode(oldname)
        # (b) refresh location-dependent state,
        self._g_del_location()
        self._g_set_location(newparent, newname)
        # (c) move the underlying HDF5 object itself.
        self._g_new(newparent, self._v_name, init=False)
        self._v_parent._g_move_node(oldparent._v_objectid, oldname,
                                    newparent._v_objectid, newname,
                                    oldpathname, self._v_pathname)
        # Tell dependent objects (e.g. the attribute set) about the new
        # location of this node.
        self._g_update_dependent()
def _f_rename(self, newname, overwrite=False):
"""Rename this node in place.
Changes the name of a node to *newname* (a string). If a node with the
same newname already exists and overwrite is true, recursively remove
it before renaming.
"""
self._f_move(newname=newname, overwrite=overwrite)
    def _f_move(self, newparent=None, newname=None,
                overwrite=False, createparents=False):
        """Move or rename this node.
        Moves a node into a new parent group, or changes the name of the
        node. newparent can be a Group object (see :ref:`GroupClassDescr`) or a
        pathname in string form. If it is not specified or None, the current
        parent group is chosen as the new parent.  newname must be a string
        with a new name. If it is not specified or None, the current name is
        chosen as the new name. If createparents is true, the needed groups for
        the given new parent group path to exist will be created.
        Moving a node across databases is not allowed, nor it is moving a node
        *into* itself. These result in a NodeError. However, moving a node
        *over* itself is allowed and simply does nothing. Moving over another
        existing node is similarly not allowed, unless the optional overwrite
        argument is true, in which case that node is recursively removed before
        moving.
        Usually, only the first argument will be used, effectively moving the
        node to a new location without changing its name.  Using only the
        second argument is equivalent to renaming the node in place.
        """
        self._g_check_open()
        file_ = self._v_file
        oldparent = self._v_parent
        oldname = self._v_name
        if newparent is None and newname is None:
            raise NodeError("you should specify at least "
                            "a ``newparent`` or a ``newname`` parameter")
        if newparent is None:
            newparent = oldparent
        if newname is None:
            newname = oldname
        # `newparent` is either a node (has `_v_file`) or a path string
        # (has `startswith`); duck-typed on those attributes.
        if hasattr(newparent, '_v_file'):
            newfile = newparent._v_file
            newpath = newparent._v_pathname
        elif hasattr(newparent, 'startswith'):
            newfile = file_
            newpath = newparent
        else:
            raise TypeError("new parent is not a node nor a path: %r"
                            % (newparent,))
        if newfile is not file_:
            raise NodeError("nodes can not be moved across databases; "
                            "please make a copy of the node")
        file_._check_writable()
        oldpath = oldparent._v_pathname
        # Moving a node over itself is a no-op.
        if newpath == oldpath and newname == oldname:
            return
        # Moving into one's own subtree is forbidden.
        self._g_check_not_contains(newpath)
        # Note that the previous checks allow us to go ahead and create
        # the parent path if needed.  `newparent` is used from now on.
        newparent = file_._get_or_create_path(newparent, createparents)
        self._g_check_group(newparent)
        self._g_maybe_remove(newparent, newname, overwrite)
        oldpathname = self._v_pathname
        self._g_move(newparent, newname)
        # Log the change.
        if file_.is_undo_enabled():
            self._g_log_move(oldpathname)
def _g_log_move(self, oldpathname):
self._v_file._log('MOVE', oldpathname, self._v_pathname)
def _g_copy(self, newparent, newname, recursive, _log=True, **kwargs):
"""Copy this node and return the new one.
Creates and returns a copy of the node in the given `newparent`,
with the given `newname`. If `recursive` copy is stated, all
descendents are copied as well. Additional keyword argumens may
affect the way that the copy is made. Unknown arguments must be
ignored. On recursive copies, all keyword arguments must be
passed on to the children invocation of this method.
If `_log` is false, the change is not logged. This is *only*
intended to be used by ``_g_copy_as_child()`` as a means of
optimising sub-tree copies.
"""
raise NotImplementedError
def _g_copy_as_child(self, newparent, **kwargs):
"""Copy this node as a child of another group.
Copies just this node into `newparent`, not recursing children
nor overwriting nodes nor logging the copy. This is intended to
be used when copying whole sub-trees.
"""
return self._g_copy(newparent, self._v_name,
recursive=False, _log=False, **kwargs)
    def _f_copy(self, newparent=None, newname=None,
                overwrite=False, recursive=False, createparents=False,
                **kwargs):
        """Copy this node and return the new node.
        Creates and returns a copy of the node, maybe in a different place in
        the hierarchy. newparent can be a Group object (see
        :ref:`GroupClassDescr`) or a pathname in string form. If it is not
        specified or None, the current parent group is chosen as the new
        parent.  newname must be a string with a new name. If it is not
        specified or None, the current name is chosen as the new name. If
        recursive copy is stated, all descendants are copied as well. If
        createparents is true, the needed groups for the given new parent group
        path to exist will be created.
        Copying a node across databases is supported but can not be
        undone. Copying a node over itself is not allowed, nor it is
        recursively copying a node into itself. These result in a
        NodeError. Copying over another existing node is similarly not allowed,
        unless the optional overwrite argument is true, in which case that node
        is recursively removed before copying.
        Additional keyword arguments may be passed to customize the copying
        process. For instance, title and filters may be changed, user
        attributes may be or may not be copied, data may be sub-sampled, stats
        may be collected, etc. See the documentation for the particular node
        type.
        Using only the first argument is equivalent to copying the node to a
        new location without changing its name. Using only the second argument
        is equivalent to making a copy of the node in the same group.
        """
        self._g_check_open()
        srcfile = self._v_file
        srcparent = self._v_parent
        srcname = self._v_name
        dstparent = newparent
        dstname = newname
        # Set default arguments: same parent and/or same name as source.
        if dstparent is None and dstname is None:
            raise NodeError("you should specify at least "
                            "a ``newparent`` or a ``newname`` parameter")
        if dstparent is None:
            dstparent = srcparent
        if dstname is None:
            dstname = srcname
        # `dstparent` is either a node (has `_v_file`) or a path string
        # (has `startswith`); duck-typed on those attributes.
        if hasattr(dstparent, '_v_file'):
            dstfile = dstparent._v_file
            dstpath = dstparent._v_pathname
        elif hasattr(dstparent, 'startswith'):
            dstfile = srcfile
            dstpath = dstparent
        else:
            raise TypeError("new parent is not a node nor a path: %r"
                            % (dstparent,))
        if dstfile is srcfile:
            # Copying over itself is forbidden; so is recursive copying
            # into one's own subtree (only possible within one file).
            srcpath = srcparent._v_pathname
            if dstpath == srcpath and dstname == srcname:
                raise NodeError(
                    "source and destination nodes are the same node: ``%s``"
                    % self._v_pathname)
            if recursive:
                self._g_check_not_contains(dstpath)
        # Note that the previous checks allow us to go ahead and create
        # the parent path if needed.  `dstparent` is used from now on.
        dstparent = srcfile._get_or_create_path(dstparent, createparents)
        self._g_check_group(dstparent)
        # Cross-database copies can not be undone; warn about it.
        if dstfile is not srcfile and srcfile.is_undo_enabled():
            warnings.warn("copying across databases can not be undone "
                          "nor redone from this database",
                          UndoRedoWarning)
        self._g_maybe_remove(dstparent, dstname, overwrite)
        return self._g_copy(dstparent, dstname, recursive, **kwargs)
def _f_isvisible(self):
"""Is this node visible?"""
self._g_check_open()
return isvisiblepath(self._v_pathname)
def _g_check_group(self, node):
if not isinstance(node, class_name_dict['Node']):
raise TypeError("new parent is not a registered node: %s"
% node._v_pathname)
if not isinstance(node, class_name_dict['Group']):
raise TypeError("new parent node ``%s`` is not a group"
% node._v_pathname)
def _g_check_not_contains(self, pathname):
mypathname = self._v_pathname
if (mypathname == '/'
or pathname == mypathname
or pathname.startswith(mypathname + '/')):
raise NodeError("can not move or recursively copy node ``%s`` "
"into itself" % mypathname)
    def _g_maybe_remove(self, parent, name, overwrite):
        # If `parent` already has a child called `name`, either remove
        # it recursively (when `overwrite` is true) or refuse with a
        # NodeError that points the user at the ``overwrite`` argument.
        if name in parent:
            if not overwrite:
                raise NodeError("""\
destination group ``%s`` already has a node named ``%s``; \
you may want to use the ``overwrite`` argument""" % (parent._v_pathname, name))
            parent._f_get_child(name)._f_remove(True)
def _g_check_name(self, name):
"""Check validity of name for this particular kind of node.
This is invoked once the standard HDF5 and natural naming checks
have successfully passed.
"""
if name.startswith('_i_'):
raise ValueError(
"node name starts with reserved prefix ``_i_``: %s" % name)
def _f_getattr(self, name):
"""Get a PyTables attribute from this node.
If the named attribute does not exist, an AttributeError is
raised.
"""
return getattr(self._v_attrs, name)
def _f_setattr(self, name, value):
"""Set a PyTables attribute for this node.
If the node already has a large number of attributes, a
PerformanceWarning is issued.
"""
setattr(self._v_attrs, name, value)
def _f_delattr(self, name):
"""Delete a PyTables attribute from this node.
If the named attribute does not exist, an AttributeError is
raised.
"""
delattr(self._v_attrs, name)
class NotLoggedMixin:
    """Mix-in that disables undo/redo logging for a node class.

    Uses the non-logging attribute set and turns the logging hooks of
    `Node` into no-ops, so creations, moves and removals of such nodes
    never reach the undo log.
    """
    _AttributeSet = NotLoggedAttributeSet

    def _g_log_create(self):
        """Creation is not logged."""

    def _g_log_move(self, oldpathname):
        """Moves are not logged."""

    def _g_remove_and_log(self, recursive, force):
        # Remove directly, bypassing the shadow/undo machinery.
        self._g_remove(recursive, force)
|
data/acil-bwh/SlicerCIP/Scripted/attic/PicasaSnap/gdata/geo/data.py
|
"""Contains the data classes of the Geography Extension"""
__author__ = 'j.s@google.com (Jeff Scudder)'
import atom.core
# XML qname templates for the GeoRSS, GML and W3C WGS84 namespaces.
GEORSS_TEMPLATE = '{http://www.georss.org/georss/}%s'
GML_TEMPLATE = '{http://www.opengis.net/gml/}%s'
# W3C geo vocabulary namespace ends with '#' (wgs84_pos#), unlike the
# other two; the previous value was truncated mid-string.
GEO_TEMPLATE = '{http://www.w3.org/2003/01/geo/wgs84_pos#}%s'
class GeoLat(atom.core.XmlElement):
    """Describes a W3C latitude."""
    # Serialized as the <geo:lat> element of the W3C WGS84 vocabulary.
    _qname = GEO_TEMPLATE % 'lat'
class GeoLong(atom.core.XmlElement):
    """Describes a W3C longitude."""
    # Serialized as the <geo:long> element of the W3C WGS84 vocabulary.
    _qname = GEO_TEMPLATE % 'long'
class GeoRssBox(atom.core.XmlElement):
    """Describes a geographical region."""
    # Serialized as the <georss:box> element.
    _qname = GEORSS_TEMPLATE % 'box'
class GeoRssPoint(atom.core.XmlElement):
    """Describes a geographical location."""
    # Serialized as the <georss:point> element.
    _qname = GEORSS_TEMPLATE % 'point'
class GmlLowerCorner(atom.core.XmlElement):
    """Describes a lower corner of a region."""
    # Serialized as the <gml:lowerCorner> element.
    _qname = GML_TEMPLATE % 'lowerCorner'
class GmlPos(atom.core.XmlElement):
    """Describes a latitude and longitude."""
    # Serialized as the <gml:pos> element.
    _qname = GML_TEMPLATE % 'pos'
class GmlPoint(atom.core.XmlElement):
    """Describes a particular geographical point."""
    # Serialized as <gml:Point>; carries a single <gml:pos> child.
    _qname = GML_TEMPLATE % 'Point'
    pos = GmlPos
class GmlUpperCorner(atom.core.XmlElement):
    """Describes an upper corner of a region."""
    # Serialized as the <gml:upperCorner> element.
    _qname = GML_TEMPLATE % 'upperCorner'
class GmlEnvelope(atom.core.XmlElement):
    """Describes a Gml geographical region."""
    # Serialized as <gml:Envelope>, bounded by its two corner children.
    _qname = GML_TEMPLATE % 'Envelope'
    lower_corner = GmlLowerCorner
    upper_corner = GmlUpperCorner
class GeoRssWhere(atom.core.XmlElement):
    """Describes a geographical location or region."""
    # Serialized as <georss:where>; may contain a GML point or envelope.
    _qname = GEORSS_TEMPLATE % 'where'
    Point = GmlPoint
    Envelope = GmlEnvelope
class W3CPoint(atom.core.XmlElement):
    """Describes a W3C geographical location."""
    # Serialized as <geo:Point> with <geo:long>/<geo:lat> children.
    # NOTE: the attribute name ``long`` shadows the builtin, but it is
    # kept for backward compatibility with existing callers.
    _qname = GEO_TEMPLATE % 'Point'
    long = GeoLong
    lat = GeoLat
|
data/Yubico/python-pyhsm/test/test_soft_hsm.py
|
import sys
import unittest
import pyhsm
import test_common
class TestSoftHSM(test_common.YHSM_TestCase):
    """
    Tests that the pure-software AES-CCM implementation in
    pyhsm.soft_hsm matches the hardware YubiHSM's AEAD operations.

    NOTE: Python 2 only — relies on ``str.decode('hex')`` and byte
    strings being plain ``str``.
    """
    def setUp(self):
        test_common.YHSM_TestCase.setUp(self)
        # Fixed 6-byte nonce shared by all AES-CCM operations below.
        self.nonce = "4d4d4d4d4d4d".decode('hex')
        self.key = "A" * 16
    def test_aes_CCM_encrypt_decrypt(self):
        """ Test decrypting encrypted data. """
        key = chr(0x09) * 16
        key_handle = 1
        plaintext = "foo".ljust(16, chr(0x0))
        ct = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, plaintext, decrypt = False)
        pt = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, ct, decrypt = True)
        # Round-trip must restore the original plaintext.
        self.assertEquals(plaintext, pt)
    def test_aes_CCM_wrong_key(self):
        """ Test decrypting encrypted data with wrong key. """
        key = chr(0x09) * 16
        key_handle = 1
        plaintext = "foo".ljust(16, chr(0x0))
        ct = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, plaintext, decrypt = False)
        # Flip the key; CCM authentication must now fail.
        key = chr(0x08) * 16
        self.assertRaises(pyhsm.exception.YHSM_Error, pyhsm.soft_hsm.aesCCM,
                          key, key_handle, self.nonce, ct, decrypt = True)
    def test_aes_CCM_wrong_key_handle(self):
        """ Test decrypting encrypted data with wrong key_handle. """
        key = chr(0x09) * 16
        key_handle = 1
        plaintext = "foo".ljust(16, chr(0x0))
        ct = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, plaintext, decrypt = False)
        # The key handle is bound into the CCM computation, so changing
        # it must also make decryption fail.
        key_handle = 2
        self.assertRaises(pyhsm.exception.YHSM_Error, pyhsm.soft_hsm.aesCCM,
                          key, key_handle, self.nonce, ct, decrypt = True)
    def test_soft_simple_aead_generation(self):
        """ Test soft_hsm simple AEAD generation. """
        key_handle = 0x2000
        plaintext = 'foo'.ljust(16, chr(0x0))
        key = str("2000" * 16).decode('hex')
        ct = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, plaintext, decrypt = False)
        # Compare the software AEAD with the one the HSM produces.
        aead = self.hsm.generate_aead_simple(self.nonce, key_handle, plaintext)
        ct = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, plaintext, decrypt = False)
        self.assertEquals(aead.data, ct)
        pt = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, ct, decrypt = True)
        self.assertEquals(plaintext, pt)
    def test_soft_generate_long_aead(self):
        """ Test soft_hsm generation of long AEAD. """
        key_handle = 0x2000
        plaintext = 'A' * 64
        key = str("2000" * 16).decode('hex')
        ct = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, plaintext, decrypt = False)
        aead = self.hsm.generate_aead_simple(self.nonce, key_handle, plaintext)
        self.assertEquals(aead.data, ct)
        pt = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, ct, decrypt = True)
        self.assertEquals(plaintext, pt)
    def test_soft_generate_yubikey_secrets_aead(self):
        """ Test soft_hsm generation of YubiKey secrets AEAD. """
        key_handle = 0x2000
        # 22 bytes = YubiKey secrets blob (16-byte key + 6-byte uid).
        plaintext = 'A' * 22
        key = str("2000" * 16).decode('hex')
        ct = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, plaintext, decrypt = False)
        aead = self.hsm.generate_aead_simple(self.nonce, key_handle, plaintext)
        self.assertEquals(aead.data, ct)
        pt = pyhsm.soft_hsm.aesCCM(key, key_handle, self.nonce, ct, decrypt = True)
        self.assertEquals(plaintext, pt)
|
data/PaloAltoNetworks/SplunkforPaloAltoNetworks/bin/panContentPack.py
|
"""Update app and threat lookup files
About this script
-----------------
Pulls the latest app and threat information from a firewall
or Panorama and outputs it as search results. This can be leveraged
to update the app_list.csv and threat_list.csv files
in the Palo Alto Networks Add-On (TA).
Example usage in Splunk searchbar:
Update app list:
| pancontentpack 10.5.5.5 apps
Update threat list:
| pancontentpack 10.5.5.5 threats
Where 10.5.5.5 is the ip of a firewall or Panorama.
"""
import sys
import os
import traceback
libpath = os.path.dirname(os.path.abspath(__file__))
sys.path[:0] = [os.path.join(libpath, 'lib')]
import common
import environment
import xmltodict
from collections import OrderedDict
logger = common.logging.getLogger().getChild('updateAppsThreats')
try:
import splunk.Intersplunk
import splunk.entity as entity
except ImportError as e:
logger.error("Unable to import Splunk libraries. Run command with Splunk python:"
" $SPLUNK_HOME/bin/splunk cmd python %s" % __file__)
sys.exit(3)
libpath = os.path.dirname(os.path.abspath(__file__))
sys.path[:0] = [os.path.join(libpath, 'lib')]
sys.path[:0] = [os.path.join(libpath, 'lib', 'pan-python', 'lib')]
sys.path[:0] = [os.path.join(libpath, 'lib', 'pandevice')]
try:
import pandevice.base
import pan.xapi
except ImportError:
print "Unable to import libraries. Please run command from app's bin directory where the script is located."
exit(3)
from common import log
def usage():
    # Abort the search command with a usage message (does not return).
    common.exit_with_error("Usage: | pancontentpack <firewall/Panorama IP> <apps|threats>")
def parse_apps(apps_xml):
    """Convert the firewall's predefined-application XML into a list of
    OrderedDicts suitable for Splunk search results (app_list.csv).

    Raises KeyError when the XML lacks the expected structure; any
    per-app parsing error aborts the command via exit_with_error().
    Python 2 only (uses ``basestring``).
    """
    obj = xmltodict.parse(apps_xml)
    try:
        apps = obj['response']['result']['application']['entry']
    except KeyError as e:
        logger.error("Unable to parse app xml from firewall")
        raise e
    csv_apps = []
    for app in apps:
        a = OrderedDict()
        try:
            a['app'] = app['@name']
            a['app:category'] = app.get('category', "")
            a['app:subcategory'] = app.get('subcategory', "")
            a['app:technology'] = app.get('technology', "")
            a['app:risk'] = app['risk']
            a['app:evasive'] = app['evasive-behavior']
            a['app:excessive_bandwidth'] = app['consume-big-bandwidth']
            a['app:used_by_malware'] = app['used-by-malware']
            a['app:able_to_transfer_file'] = app['able-to-transfer-file']
            a['app:has_known_vulnerability'] = app['has-known-vulnerability']
            a['app:tunnels_other_application'] = app['tunnel-other-application']
            if a['app:tunnels_other_application'] != u"yes" and a['app:tunnels_other_application'] != u"no":
                # xmltodict returns a dict when the element carries XML
                # attributes; the element's text lives under '#text'.
                # (Restored: this subscript was truncated before.)
                a['app:tunnels_other_application'] = a['app:tunnels_other_application']['#text']
            a['app:prone_to_misuse'] = app['prone-to-misuse']
            a['app:pervasive_use'] = app['pervasive-use']
            a['app:is_saas'] = app.get('is-saas', "no")
            a['app:default_ports'] = ""
            try:
                default = app['default']
                if isinstance(default, list):
                    # Several default blocks: keep only the first port list.
                    for d in default:
                        a['app:default_ports'] = d['port']['member']
                        break
                else:
                    a['app:default_ports'] = default['port']['member']
            except KeyError:
                # App defines no default ports; leave the field empty.
                pass
            else:
                if not isinstance(a['app:default_ports'], basestring):
                    a['app:default_ports'] = "|".join(a['app:default_ports'])
        except Exception as e:
            logger.error("Error parsing app: %s" % app['@name'])
            logger.error(traceback.format_exc())
            common.exit_with_error(str(e))
        # Normalize every value to a plain string for CSV output.
        for key in a:
            a[key] = str(a[key])
        csv_apps.append(a)
    logger.info("Found %s apps" % len(csv_apps))
    return csv_apps
def parse_threats(threats_xml):
    # Convert the firewall's predefined-threats XML into a list of
    # OrderedDicts suitable for Splunk search results (threat_list.csv).
    # Python 2 only (uses ``basestring``).  Raises KeyError when the
    # XML lacks the expected structure.
    obj = xmltodict.parse(threats_xml)
    try:
        # Spyware (phone-home) and vulnerability signatures are merged
        # into a single list.
        phone_home = obj['response']['result']['threats']['phone-home']['entry']
        vulnerability = obj['response']['result']['threats']['vulnerability']['entry']
        threats = phone_home + vulnerability
    except KeyError as e:
        logger.error("Unable to parse threat xml from firewall")
        raise e
    csv_threats = []
    for threat in threats:
        a = OrderedDict()
        try:
            a['threat_id'] = threat['@name']
            a['threat:name'] = threat['threatname']
            a['threat:category'] = threat['category']
            a['threat:severity'] = threat['severity']
            a['threat:cve'] = threat.get('cve', None)
            if a['threat:cve'] is not None:
                # One or more CVE ids; join multiples with ", ".
                a['threat:cve'] = threat['cve']['member']
                if not isinstance(a['threat:cve'], basestring):
                    a['threat:cve'] = ", ".join(a['threat:cve'])
            else:
                a['threat:cve'] = ""
        except KeyError as e:
            logger.error("Error parsing app: %s" % threat['@name'])
            raise e
        # Normalize every value to a plain string for CSV output.
        for key in a:
            a[key] = str(a[key])
        csv_threats.append(a)
    logger.info("Found %s threats" % len(csv_threats))
    return csv_threats
def main():
    # Entry point for the `| pancontentpack <host> <apps|threats>`
    # Splunk search command.
    args, kwargs = splunk.Intersplunk.getKeywordsAndOptions()
    debug = common.check_debug(kwargs)
    # Exactly two positional args are required: device IP and mode.
    if len(args) < 2:
        logger.error("pancontentpack: Wrong number of arguments: %s, expected 2.\n" % len(args))
        usage()
    if args[1] == "apps":
        logger.info("Getting apps from content pack on Palo Alto Networks device at %s..." % args[0])
    elif args[1] == "threats":
        logger.info("Getting threats from content pack on Palo Alto Networks device at %s..." % args[0])
    else:
        usage()
    results, unused1, settings = splunk.Intersplunk.getOrganizedResults()
    # The session key authorizes retrieval of stored credentials.
    sessionKey = settings['sessionKey']
    log(debug, "Begin get API key")
    apikey = common.apikey(sessionKey, args[0], debug)
    device = pandevice.base.PanDevice(args[0], api_key=apikey)
    try:
        # Pull the predefined content from the device and convert it to
        # rows for Splunk.
        if args[1] == "apps":
            device.xapi.get("/config/predefined/application")
            app_xml = device.xapi.xml_document
            csv = parse_apps(app_xml)
        else:
            device.xapi.get("/config/predefined/threats")
            threat_xml = device.xapi.xml_document
            csv = parse_threats(threat_xml)
    except pan.xapi.PanXapiError as e:
        common.exit_with_error(str(e))
    splunk.Intersplunk.outputResults(csv)
if __name__ == "__main__":
    main()
|
data/OpenKMIP/PyKMIP/kmip/tests/unit/pie/objects/test_opaque_object.py
|
import binascii
import testtools
from kmip.core import enums
from kmip.pie.objects import ManagedObject, OpaqueObject
from kmip.pie import sqltypes
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
class TestOpaqueObject(testtools.TestCase):
"""
Test suite for OpaqueObject.
"""
def setUp(self):
super(TestOpaqueObject, self).setUp()
self.bytes_a = (
b'\x53\x65\x63\x72\x65\x74\x50\x61\x73\x73\x77\x6F\x72\x64')
self.bytes_b = (
b'\x53\x65\x63\x72\x65\x74\x50\x61\x73\x73\x77\x6F\x72\x65')
self.engine = create_engine('sqlite:///:memory:', echo=True)
sqltypes.Base.metadata.create_all(self.engine)
def tearDown(self):
super(TestOpaqueObject, self).tearDown()
def test_init(self):
"""
Test that a OpaqueObject object can be instantiated.
"""
obj = OpaqueObject(
self.bytes_a, enums.OpaqueDataType.NONE)
self.assertEqual(obj.value, self.bytes_a)
self.assertEqual(obj.opaque_type, enums.OpaqueDataType.NONE)
self.assertEqual(obj.names, ['Opaque Object'])
def test_init_with_args(self):
"""
Test that a OpaqueObject object can be instantiated with all arguments.
"""
obj = OpaqueObject(
self.bytes_a,
enums.OpaqueDataType.NONE,
name='Test Opaque Object')
self.assertEqual(obj.value, self.bytes_a)
self.assertEqual(obj.opaque_type, enums.OpaqueDataType.NONE)
self.assertEqual(obj.names, ['Test Opaque Object'])
def test_get_object_type(self):
"""
Test that the object type can be retrieved from the OpaqueObject.
"""
expected = enums.ObjectType.OPAQUE_DATA
obj = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
observed = obj.object_type
self.assertEqual(expected, observed)
def test_validate_on_invalid_value(self):
"""
Test that a TypeError is raised when an invalid value is used to
construct a OpaqueObject.
"""
args = (0, enums.OpaqueDataType.NONE)
self.assertRaises(TypeError, OpaqueObject, *args)
def test_validate_on_invalid_data_type(self):
"""
Test that a TypeError is raised when an invalid data type is used to
construct a OpaqueObject.
"""
args = (self.bytes_a, 'invalid')
self.assertRaises(TypeError, OpaqueObject, *args)
def test_validate_on_invalid_name(self):
"""
Test that a TypeError is raised when an invalid name value is used to
construct a OpaqueObject.
"""
args = (self.bytes_a, enums.OpaqueDataType.NONE)
kwargs = {'name': 0}
self.assertRaises(TypeError, OpaqueObject, *args, **kwargs)
def test_repr(self):
"""
Test that repr can be applied to a OpaqueObject.
"""
obj = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
args = "value={0}, opaque_type={1}".format(
binascii.hexlify(self.bytes_a), enums.OpaqueDataType.NONE)
expected = "OpaqueObject({0})".format(args)
observed = repr(obj)
self.assertEqual(expected, observed)
def test_str(self):
    """
    Test that str can be applied to an OpaqueObject.
    """
    obj = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    # str is just the hex encoding of the raw value.
    expected = str(binascii.hexlify(self.bytes_a))
    observed = str(obj)
    self.assertEqual(expected, observed)
def test_equal_on_equal(self):
    """
    Test that the equality operator returns True when comparing two
    OpaqueObject objects with the same data.
    """
    a = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    b = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    # Equality must be symmetric.
    self.assertTrue(a == b)
    self.assertTrue(b == a)
def test_equal_on_not_equal_value(self):
    """
    Test that the equality operator returns False when comparing two
    OpaqueObject objects with different data.
    """
    # Objects differ only in their byte values.
    a = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    b = OpaqueObject(self.bytes_b, enums.OpaqueDataType.NONE)
    self.assertFalse(a == b)
    self.assertFalse(b == a)
def test_equal_on_not_equal_data_type(self):
    """
    Test that the equality operator returns False when comparing two
    OpaqueObject objects with different opaque data types.
    """
    a = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    b = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    # Force a data-type mismatch after construction.
    b.opaque_type = "invalid"
    self.assertFalse(a == b)
    self.assertFalse(b == a)
def test_equal_on_type_mismatch(self):
    """
    Test that the equality operator returns False when comparing an
    OpaqueObject object to a non-OpaqueObject object.
    """
    a = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    b = "invalid"
    self.assertFalse(a == b)
    self.assertFalse(b == a)
def test_not_equal_on_equal(self):
    """
    Test that the inequality operator returns False when comparing
    two OpaqueObject objects with the same internal data.
    """
    a = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    b = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    # Inequality must be symmetric.
    self.assertFalse(a != b)
    self.assertFalse(b != a)
def test_not_equal_on_not_equal_value(self):
    """
    Test that the inequality operator returns True when comparing two
    OpaqueObject objects with different data.
    """
    a = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    b = OpaqueObject(self.bytes_b, enums.OpaqueDataType.NONE)
    self.assertTrue(a != b)
    self.assertTrue(b != a)
def test_not_equal_on_not_equal_data_type(self):
    """
    Test that the inequality operator returns True when comparing two
    OpaqueObject objects with different opaque data types.
    """
    a = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    b = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    # Force a data-type mismatch after construction.
    b.opaque_type = "invalid"
    self.assertTrue(a != b)
    self.assertTrue(b != a)
def test_not_equal_on_type_mismatch(self):
    """
    Test that the inequality operator returns True when comparing an
    OpaqueObject object to a non-OpaqueObject object.
    """
    a = OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)
    b = "invalid"
    self.assertTrue(a != b)
    self.assertTrue(b != a)
def test_save(self):
    """
    Test that the object can be saved using SQLAlchemy. This will add it
    to the database and verify that no exceptions are thrown.
    """
    # NOTE(review): the original docstring also claimed the unique
    # identifier is checked, but no such assertion exists below.
    obj = OpaqueObject(
        self.bytes_a, enums.OpaqueDataType.NONE)
    Session = sessionmaker(bind=self.engine)
    session = Session()
    session.add(obj)
    session.commit()
def test_get(self):
    """
    Test that the object can be saved and then retrieved using SQLAlchemy.
    This adds it to the database and then retrieves it by ID and verifies
    some of the attributes.
    """
    test_name = 'bowser'
    obj = OpaqueObject(
        self.bytes_a, enums.OpaqueDataType.NONE, name=test_name)
    Session = sessionmaker(bind=self.engine)
    session = Session()
    session.add(obj)
    session.commit()
    # Use a fresh session so the object is loaded from the database.
    session = Session()
    get_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    session.commit()
    self.assertEqual(1, len(get_obj.names))
    self.assertEqual([test_name], get_obj.names)
    self.assertEqual(self.bytes_a, get_obj.value)
    self.assertEqual(enums.ObjectType.OPAQUE_DATA, get_obj.object_type)
    self.assertEqual(enums.OpaqueDataType.NONE, get_obj.opaque_type)
def test_add_multiple_names(self):
    """
    Test that multiple names can be added to a managed object. This
    verifies a few properties. First this verifies that names can be added
    using simple strings. It also verifies that the index for each
    subsequent string is set accordingly. Finally this tests that the names
    can be saved and retrieved from the database.
    """
    expected_names = ['bowser', 'frumpy', 'big fat cat']
    obj = OpaqueObject(
        self.bytes_a, enums.OpaqueDataType.NONE, name=expected_names[0])
    obj.names.append(expected_names[1])
    obj.names.append(expected_names[2])
    # assertEquals is a deprecated alias of assertEqual; use the
    # canonical spelling throughout.
    self.assertEqual(3, obj.name_index)
    expected_mo_names = list()
    for i, name in enumerate(expected_names):
        expected_mo_names.append(sqltypes.ManagedObjectName(name, i))
    self.assertEqual(expected_mo_names, obj._names)
    Session = sessionmaker(bind=self.engine)
    session = Session()
    session.add(obj)
    session.commit()
    session = Session()
    get_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    session.commit()
    self.assertEqual(expected_mo_names, get_obj._names)
def test_remove_name(self):
    """
    Tests that a name can be removed from the list of names. This will
    verify that the list of names is correct. It will verify that updating
    this object removes the name from the database.
    """
    names = ['bowser', 'frumpy', 'big fat cat']
    remove_index = 1
    obj = OpaqueObject(
        self.bytes_a, enums.OpaqueDataType.NONE, name=names[0])
    obj.names.append(names[1])
    obj.names.append(names[2])
    obj.names.pop(remove_index)
    # assertEquals is a deprecated alias of assertEqual.
    # Removing a name does not decrement the index counter.
    self.assertEqual(3, obj.name_index)
    expected_names = list()
    expected_mo_names = list()
    for i, name in enumerate(names):
        if i != remove_index:
            expected_names.append(name)
            expected_mo_names.append(sqltypes.ManagedObjectName(name, i))
    self.assertEqual(expected_names, obj.names)
    self.assertEqual(expected_mo_names, obj._names)
    Session = sessionmaker(bind=self.engine)
    session = Session()
    session.add(obj)
    session.commit()
    session = Session()
    get_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    session.commit()
    self.assertEqual(expected_names, get_obj.names)
    self.assertEqual(expected_mo_names, get_obj._names)
def test_remove_and_add_name(self):
    """
    Tests that names can be removed from the list of names and more added.
    This will verify that the list of names is correct. It will verify that
    updating this object removes the name from the database. It will verify
    that the indices for the removed names are not reused.
    """
    names = ['bowser', 'frumpy', 'big fat cat']
    obj = OpaqueObject(
        self.bytes_a, enums.OpaqueDataType.NONE, name=names[0])
    obj.names.append(names[1])
    obj.names.append(names[2])
    obj.names.pop()
    obj.names.pop()
    obj.names.append('dog')
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(4, obj.name_index)
    # 'dog' gets index 3: indices 1 and 2 were consumed and not reused.
    expected_names = ['bowser', 'dog']
    expected_mo_names = list()
    expected_mo_names.append(sqltypes.ManagedObjectName(expected_names[0],
                                                        0))
    expected_mo_names.append(sqltypes.ManagedObjectName(expected_names[1],
                                                        3))
    self.assertEqual(expected_names, obj.names)
    self.assertEqual(expected_mo_names, obj._names)
    Session = sessionmaker(bind=self.engine)
    session = Session()
    session.add(obj)
    session.commit()
    session = Session()
    get_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    session.commit()
    self.assertEqual(expected_names, get_obj.names)
    self.assertEqual(expected_mo_names, get_obj._names)
def test_update_with_add_name(self):
    """
    Tests that an OpaqueObject already stored in the database can be
    updated. This will store an OpaqueObject in the database. It will add a
    name to it in one session, and then retrieve it in another session to
    verify that it has all of the correct names.
    This test and the subsequent test_update_* methods are different than
    the name tests above because these are updating objects already stored
    in the database. This tests will simulate what happens when the KMIP
    client calls an add attribute method.
    """
    first_name = 'bowser'
    obj = OpaqueObject(
        self.bytes_a, enums.OpaqueDataType.NONE, name=first_name)
    Session = sessionmaker(bind=self.engine)
    session = Session()
    session.add(obj)
    session.commit()
    added_name = 'frumpy'
    expected_names = [first_name, added_name]
    expected_mo_names = list()
    for i, name in enumerate(expected_names):
        expected_mo_names.append(sqltypes.ManagedObjectName(name, i))
    # Mutate the persisted object in a second session.
    session = Session()
    update_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    update_obj.names.append(added_name)
    session.commit()
    session = Session()
    get_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    session.commit()
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(expected_names, get_obj.names)
    self.assertEqual(expected_mo_names, get_obj._names)
def test_update_with_remove_name(self):
    """
    Tests that an OpaqueObject already stored in the database can be
    updated. This will store an OpaqueObject in the database. It will
    remove a name from it in one session, and then retrieve it in another
    session to verify that it has all of the correct names.
    """
    names = ['bowser', 'frumpy', 'big fat cat']
    remove_index = 1
    obj = OpaqueObject(
        self.bytes_a, enums.OpaqueDataType.NONE, name=names[0])
    obj.names.append(names[1])
    obj.names.append(names[2])
    Session = sessionmaker(bind=self.engine)
    session = Session()
    session.add(obj)
    session.commit()
    expected_names = list()
    expected_mo_names = list()
    for i, name in enumerate(names):
        if i != remove_index:
            expected_names.append(name)
            expected_mo_names.append(sqltypes.ManagedObjectName(name, i))
    # Remove the name from the persisted object in a second session.
    session = Session()
    update_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    update_obj.names.pop(remove_index)
    session.commit()
    session = Session()
    get_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    session.commit()
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(expected_names, get_obj.names)
    self.assertEqual(expected_mo_names, get_obj._names)
def test_update_with_remove_and_add_name(self):
    """
    Tests that an OpaqueObject already stored in the database can be
    updated. This will store an OpaqueObject in the database. It will
    remove a name and add another one to it in one session, and then
    retrieve it in another session to verify that it has all of the correct
    names. This simulates multiple operation being sent for the same
    object.
    """
    names = ['bowser', 'frumpy', 'big fat cat']
    obj = OpaqueObject(
        self.bytes_a, enums.OpaqueDataType.NONE, name=names[0])
    obj.names.append(names[1])
    obj.names.append(names[2])
    Session = sessionmaker(bind=self.engine)
    session = Session()
    session.add(obj)
    session.commit()
    session = Session()
    update_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    update_obj.names.pop()
    update_obj.names.pop()
    update_obj.names.append('dog')
    session.commit()
    # 'dog' gets index 3: removed indices are never reused.
    expected_names = ['bowser', 'dog']
    expected_mo_names = list()
    expected_mo_names.append(sqltypes.ManagedObjectName(expected_names[0],
                                                        0))
    expected_mo_names.append(sqltypes.ManagedObjectName(expected_names[1],
                                                        3))
    session = Session()
    get_obj = session.query(OpaqueObject).filter(
        ManagedObject.unique_identifier == obj.unique_identifier
    ).one()
    session.commit()
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(expected_names, get_obj.names)
    self.assertEqual(expected_mo_names, get_obj._names)
|
data/VisTrails/VisTrails/examples/vtk_examples/VisualizationAlgorithms/probeComb.py
|
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Read PLOT3D structured-grid CFD data (combustor dataset).
pl3d = vtk.vtkPLOT3DReader()
pl3d.SetXYZFileName(VTK_DATA_ROOT + "/Data/combxyz.bin")
pl3d.SetQFileName(VTK_DATA_ROOT + "/Data/combq.bin")
pl3d.SetScalarFunctionNumber(100)
pl3d.SetVectorFunctionNumber(202)
pl3d.Update()
# Base probe surface: a 50x50 plane, transformed three times into the
# grid to act as sampling planes.
plane = vtk.vtkPlaneSource()
plane.SetResolution(50, 50)
# First probe plane (with a black outline actor).
transP1 = vtk.vtkTransform()
transP1.Translate(3.7, 0.0, 28.37)
transP1.Scale(5, 5, 5)
transP1.RotateY(90)
tpd1 = vtk.vtkTransformPolyDataFilter()
tpd1.SetInputConnection(plane.GetOutputPort())
tpd1.SetTransform(transP1)
outTpd1 = vtk.vtkOutlineFilter()
outTpd1.SetInputConnection(tpd1.GetOutputPort())
mapTpd1 = vtk.vtkPolyDataMapper()
mapTpd1.SetInputConnection(outTpd1.GetOutputPort())
tpd1Actor = vtk.vtkActor()
tpd1Actor.SetMapper(mapTpd1)
tpd1Actor.GetProperty().SetColor(0, 0, 0)
# Second probe plane.
transP2 = vtk.vtkTransform()
transP2.Translate(9.2, 0.0, 31.20)
transP2.Scale(5, 5, 5)
transP2.RotateY(90)
tpd2 = vtk.vtkTransformPolyDataFilter()
tpd2.SetInputConnection(plane.GetOutputPort())
tpd2.SetTransform(transP2)
outTpd2 = vtk.vtkOutlineFilter()
outTpd2.SetInputConnection(tpd2.GetOutputPort())
mapTpd2 = vtk.vtkPolyDataMapper()
mapTpd2.SetInputConnection(outTpd2.GetOutputPort())
tpd2Actor = vtk.vtkActor()
tpd2Actor.SetMapper(mapTpd2)
tpd2Actor.GetProperty().SetColor(0, 0, 0)
# Third probe plane.
transP3 = vtk.vtkTransform()
transP3.Translate(13.27, 0.0, 33.30)
transP3.Scale(5, 5, 5)
transP3.RotateY(90)
tpd3 = vtk.vtkTransformPolyDataFilter()
tpd3.SetInputConnection(plane.GetOutputPort())
tpd3.SetTransform(transP3)
outTpd3 = vtk.vtkOutlineFilter()
outTpd3.SetInputConnection(tpd3.GetOutputPort())
mapTpd3 = vtk.vtkPolyDataMapper()
mapTpd3.SetInputConnection(outTpd3.GetOutputPort())
tpd3Actor = vtk.vtkActor()
tpd3Actor.SetMapper(mapTpd3)
tpd3Actor.GetProperty().SetColor(0, 0, 0)
# Combine the three planes and probe the CFD grid through them.
# NOTE(review): AddInput/SetSource are VTK 5-era APIs; this script
# predates the VTK 6 pipeline changes.
appendF = vtk.vtkAppendPolyData()
appendF.AddInput(tpd1.GetOutput())
appendF.AddInput(tpd2.GetOutput())
appendF.AddInput(tpd3.GetOutput())
probe = vtk.vtkProbeFilter()
probe.SetInputConnection(appendF.GetOutputPort())
probe.SetSource(pl3d.GetOutput())
# Contour the probed scalars over the full scalar range.
contour = vtk.vtkContourFilter()
contour.SetInputConnection(probe.GetOutputPort())
contour.GenerateValues(50, pl3d.GetOutput().GetScalarRange())
contourMapper = vtk.vtkPolyDataMapper()
contourMapper.SetInputConnection(contour.GetOutputPort())
contourMapper.SetScalarRange(pl3d.GetOutput().GetScalarRange())
planeActor = vtk.vtkActor()
planeActor.SetMapper(contourMapper)
# Black outline of the structured grid for context.
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputConnection(pl3d.GetOutputPort())
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineActor.GetProperty().SetColor(0, 0, 0)
# Standard rendering setup: renderer, window, interactor.
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
ren.AddActor(outlineActor)
ren.AddActor(planeActor)
ren.AddActor(tpd1Actor)
ren.AddActor(tpd2Actor)
ren.AddActor(tpd3Actor)
ren.SetBackground(1, 1, 1)
renWin.SetSize(400, 400)
ren.ResetCamera()
# Fixed camera framing the combustor.
cam1 = ren.GetActiveCamera()
cam1.SetClippingRange(3.95297, 50)
cam1.SetFocalPoint(8.88908, 0.595038, 29.3342)
cam1.SetPosition(-12.3332, 31.7479, 41.2387)
cam1.SetViewUp(0.060772, -0.319905, 0.945498)
iren.Initialize()
renWin.Render()
iren.Start()
|
data/OpenMDM/OpenMDM/public_gate/templatetags/app_filters.py
|
from django import template
register = template.Library()
@register.filter(name='get_item')
def get_item(dictionary, key):
    """Template filter: look up *key* on *dictionary*.

    Bug fix: the original used ``getattr(dictionary, key)``, which
    raises AttributeError for a real dict (dict keys are not
    attributes) even though the filter is named and documented as item
    access.  Item access is tried first; attribute access is kept as a
    fallback so any existing object-based callers keep working.
    """
    try:
        return dictionary[key]
    except (TypeError, KeyError, IndexError):
        return getattr(dictionary, key)
|
data/STIXProject/python-stix/stix/test/common/information_source_test.py
|
import unittest
from stix.test import EntityTestCase
from stix.test.common import structured_text_tests
from stix.common import InformationSource
class InformationSourceTests(EntityTestCase, unittest.TestCase):
    """Round-trip test data for a fully-populated InformationSource."""
    klass = InformationSource
    _full_dict = {
        'description': "An amazing source",
        'identity': {
            'name': "Spiderman",
        },
        'roles': [
            {
                'value': 'Initial Author',
                'xsi:type': 'stixVocabs:InformationSourceRoleVocab-1.0'
            },
            {
                'value': 'Transformer/Translator',
                'xsi:type': 'stixVocabs:InformationSourceRoleVocab-1.0'
            }
        ],
        'contributing_sources': {
            'sources': [
                {
                    'identity': {
                        'name': "Batman",
                    },
                    # NOTE(review): the original description literals were
                    # truncated in this copy of the file ("'Source");
                    # reconstructed minimally -- confirm against upstream.
                    'description': 'Source #1',
                },
                {
                    'identity': {
                        'name': "Superman",
                    },
                    'description': 'Source #2',
                }
            ]
        },
        # Bug fix: 'references' appeared twice in this dict literal; the
        # first occurrence was silently overwritten, so only the final
        # (two-entry) value is kept.
        'time': {
            'start_time': "2010-11-12T01:02:03",
            'end_time': "2013-12-11T03:02:01",
            'produced_time': "2013-12-11T03:02:01",
            'received_time': "2013-12-11T03:02:01",
        },
        'tools': [
            {
                'name': "Web",
                'description': "Superwebs",
            },
            {
                'name': "Tubes",
                'description': "Supertubes",
            },
        ],
        'references': [
            'http://example.com',
            'http://example.com'
        ]
    }
class InformationSourceMultiDescTests(EntityTestCase, unittest.TestCase):
    """Round-trip test for an InformationSource whose description is a
    structured-text list (multiple descriptions)."""
    klass = InformationSource
    # Reuse the shared multi-description fixture from the
    # structured-text tests.
    _full_dict = {
        'description': structured_text_tests.StructuredTextListTests._full_dict
    }
if __name__ == "__main__":
unittest.main()
|
data/RHInception/jsonstats/JsonStats/Utils.py
|
import fnmatch
import os
import os.path
import re
import sys
try:
import json
except:
import simplejson as json
def dump_sorted_json_string(input, **kwargs):
    """
    Serialize *input* to a JSON string with all dictionary keys sorted.

    * `input` - arbitrary Python datastructure.
    * `**kwargs` - extra keyword arguments forwarded verbatim to
      :func:`json.dumps` (for example a custom ``cls`` encoder).

    Items are separated by ``','`` and keys from values by ``': '``.
    """
    compact_separators = (',', ': ')
    return json.dumps(
        input,
        sort_keys=True,
        separators=compact_separators,
        **kwargs
    )
def load_extra_plugins(pathspec):
    """
    Load extra fact plugins from user-specified directories.

    `pathspec` - either a single directory path or a colon-separated
    list of directories.

    Returns the combined list of plugin module names that were loaded.
    """
    loaded = []
    for directory in pathspec.split(':'):
        loaded.extend(_load_plugins_from_dir(directory))
    return loaded
def _load_plugins_from_dir(path):
"""
Load plugins from a given path
"""
plugins = []
loaded_plugins = []
full_path = os.path.expanduser(path)
try:
filtered = fnmatch.filter(os.listdir(path), '*.py')
plugins.extend(filtered)
except OSError:
pass
else:
sys.path.insert(1, path)
for plugin in plugins:
match = re.search('(?P<name>.*)(?P<ext>\.py$)', plugin)
if match:
try:
__import__(match.group('name'), globals(), locals(), [], -1)
except:
pass
else:
loaded_plugins.append(match.group('name'))
return loaded_plugins
|
data/adlibre/Adlibre-DMS/adlibre_dms/apps/browser/forms.py
|
"""
Module: DMS Browser Django Forms
Project: Adlibre DMS
Copyright: Adlibre Pty Ltd 2011
License: See LICENSE for license information
"""
from django import forms
class UploadForm(forms.Form):
    """Single-file upload form used by the DMS browser views."""
    # 40-character-wide native file input widget.
    file = forms.FileField(widget=forms.FileInput(attrs={'size':40}))
|
data/PMEAL/OpenPNM/OpenPNM/Geometry/models/throat_vector.py
|
r"""
===============================================================================
Submodule -- throat_vector
===============================================================================
"""
import scipy as _sp
def pore_to_pore(geometry, **kwargs):
    r"""
    Calculates throat vector as straight path between connected pores.

    Parameters
    ----------
    geometry : OpenPNM Geometry object
        The object containing the geometrical properties of the throats

    Returns
    -------
    Array of unit vectors, one per throat in *geometry*.

    Notes
    -----
    There is an important implicit assumption here: the positive direction is
    taken as the direction from the pore with the lower index to the higher.
    This corresponds to the pores in the 1st and 2nd columns of the
    'throat.conns' array as stored on the network.
    """
    network = geometry._net
    throats = network.throats(geometry.name)
    # Pore pairs per throat, kept as (N, 2) by flatten=False.
    pores = network.find_connected_pores(throats, flatten=False)
    C0 = network['pore.coords'][pores, 0]
    C1 = network['pore.coords'][pores, 1]
    V = C1 - C0
    # Euclidean length of each throat vector, used for normalization.
    L = _sp.array(_sp.sqrt(_sp.sum(V[:, :]**2, axis=1)), ndmin=1)
    value = V/_sp.array(L, ndmin=2).T
    return value
|
data/JoelBender/bacpypes/py27/bacpypes/vlan.py
|
"""
Virtual Local Area Network
"""
import random
from copy import deepcopy
from .errors import ConfigurationError
from .debugging import ModuleLogger, bacpypes_debugging
from .core import deferred
from .pdu import Address
from .comm import Server
_debug = 0
_log = ModuleLogger(globals())
@bacpypes_debugging
class Network:
    """
    A virtual LAN: a collection of Node objects that exchange PDUs.
    Broadcast and unicast delivery are simulated, with optional random
    packet loss controlled by dropPercent (0.0 - 100.0).
    """

    def __init__(self, dropPercent=0.0):
        if _debug: Network._debug("__init__ dropPercent=%r", dropPercent)
        self.nodes = []
        self.dropPercent = dropPercent

    def add_node(self, node):
        """ Add a node to this network, let the node know which network it's on. """
        if _debug: Network._debug("add_node %r", node)
        self.nodes.append(node)
        node.lan = self

    def remove_node(self, node):
        """ Remove a node from this network. """
        if _debug: Network._debug("remove_node %r", node)
        self.nodes.remove(node)
        node.lan = None

    def process_pdu(self, pdu):
        """ Process a PDU by sending a copy to each node as dictated by the
        addressing and if a node is promiscuous.
        """
        if _debug: Network._debug("process_pdu %r", pdu)
        # Simulated lossy medium: drop the PDU dropPercent% of the time.
        if self.dropPercent != 0.0:
            if (random.random() * 100.0) < self.dropPercent:
                if _debug: Network._debug("    - packet dropped")
                return
        if not pdu.pduDestination or not isinstance(pdu.pduDestination, Address):
            raise RuntimeError("invalid destination address")
        elif pdu.pduDestination.addrType == Address.localBroadcastAddr:
            # Broadcast: deliver to everyone except the sender.
            for n in self.nodes:
                if (pdu.pduSource != n.address):
                    n.response(deepcopy(pdu))
        elif pdu.pduDestination.addrType == Address.localStationAddr:
            # Unicast: deliver to the addressee and any promiscuous node.
            for n in self.nodes:
                if n.promiscuous or (pdu.pduDestination == n.address):
                    n.response(deepcopy(pdu))
        else:
            raise RuntimeError("invalid destination address type")

    def __len__(self):
        """ Simple way to determine the number of nodes in the network. """
        if _debug: Network._debug("__len__")
        return len(self.nodes)
@bacpypes_debugging
class Node(Server):
    """
    An endpoint on a virtual Network.  A promiscuous node receives all
    unicast traffic; a spoofing node may send with a foreign source
    address.
    """

    def __init__(self, addr, lan=None, promiscuous=False, spoofing=False, sid=None):
        if _debug:
            Node._debug("__init__ %r lan=%r promiscuous=%r spoofing=%r sid=%r",
                addr, lan, promiscuous, spoofing, sid
                )
        Server.__init__(self, sid)
        if not isinstance(addr, Address):
            raise TypeError("addr must be an address")
        self.lan = None
        self.address = addr
        # bind to a LAN if one was provided
        if lan:
            self.bind(lan)
        self.promiscuous = promiscuous
        self.spoofing = spoofing

    def bind(self, lan):
        """bind to a LAN."""
        if _debug: Node._debug("bind %r", lan)
        lan.add_node(self)

    def indication(self, pdu):
        """Send a message."""
        if _debug: Node._debug("indication %r", pdu)
        # make sure we're connected
        if not self.lan:
            raise ConfigurationError("unbound node")
        # fill in the source address unless spoofing is allowed
        if pdu.pduSource is None:
            pdu.pduSource = self.address
        elif (not self.spoofing) and (pdu.pduSource != self.address):
            raise RuntimeError("spoofing address conflict")
        # deliver asynchronously via the core event loop
        deferred(self.lan.process_pdu, pdu)
|
data/LEAF-BoiseState/SPEED/Module05/PenmanMonteithEx.py
|
"""
Spyder Editor
This is a temporary script file.
"""
from math import *
def AirDensity(RH, Tc, P=101.2):
    """
    Moist-air density (kg/m^3) from humidity, temperature and pressure.

    * RH -- relative humidity as a fraction (0-1) -- assumed; TODO confirm
    * Tc -- air temperature in degrees C
    * P  -- pressure in kPa (converted to Pa internally)

    Bug fix: the original called ``SatVapor``, which does not exist in
    this module; the saturation vapour pressure helper is
    ``SatVaporPress``.
    """
    Rd = 286.9  # gas constant for dry air, J/(kg K)
    # Specific humidity from vapour pressure.
    q = 0.622*(RH*SatVaporPress(Tc))/P
    # Virtual temperature accounts for the lighter water vapour.
    Tv = (Tc + 273.15)*(1.0 + 0.61*q)
    P *= 1000.0  # kPa -> Pa
    rho_a = P/(Rd*Tv)
    return rho_a
def PsychConst(P, cP=1.013, lambda_v=2.26e3):
    """
    Psychrometric constant gamma = cP * P / (0.622 * lambda_v).

    * P -- air pressure
    * cP -- specific heat of air
    * lambda_v -- latent heat of vaporization
    """
    heat_term = cP*P
    return heat_term/(0.622*lambda_v)
def SatVaporPress(Tc):
    """
    Saturation vapour pressure at temperature Tc (deg C), Tetens-form
    exponential: 0.61 * exp(17.27*Tc / (237.3 + Tc)).
    """
    exponent = 17.27*Tc/(237.3 + Tc)
    return 0.61*exp(exponent)
def SlopeSatVaporPress(Tc):
    """
    Slope of the saturation vapour pressure curve at Tc (deg C):
    4098 * e_sat(Tc) / (237.3 + Tc)^2.
    """
    denominator = (237.3 + Tc)**2
    return 4098.0*SatVaporPress(Tc)/denominator
def AeroReist(um, zm, z0, d, zmp=None):
    """
    Aerodynamic resistance from a log-profile formulation.

    * um -- wind speed at height zm
    * zm -- measurement height
    * z0 -- roughness length
    * d  -- zero-plane displacement height
    * zmp -- humidity measurement height; defaults to zm

    Bug fix: the original signature was ``zmp=zm``, which raises a
    NameError -- a default value cannot reference another parameter.
    ``None`` is used as a sentinel and replaced with ``zm`` at call time.
    """
    if zmp is None:
        zmp = zm
    k = 0.4  # von Karman constant
    r_a = 1.0/(k**2*um)*log((zm - d)/z0)*log((zmp-d)/(z0/10.0))
    return r_a
def SurfResist(g0, S, D, Tc, SM, SM0):
    """
    Surface (canopy) resistance as the reciprocal of a Jarvis-type
    product of environmental conductance factors.

    Bug fix: the original called ``Gee_C()`` (capital C), but the helper
    defined in this module is ``Gee_c``.

    NOTE(review): ``Gee_M`` requires ``K_M1`` and ``K_M2`` arguments with
    no defaults, so ``Gee_M(SM, SM0)`` below still raises TypeError; the
    intended coefficient values are not recoverable from this file --
    confirm with the author.
    """
    g_c = Gee_c()
    g_R = Gee_R(S)
    g_D = Gee_D(D)
    g_T = Gee_T(Tc + 273.15)
    g_M = Gee_M(SM, SM0)
    # Total conductance is the product of all limiting factors.
    g_s = g0*g_c*g_R*g_D*g_T*g_M
    r_s = 1.0/g_s
    return r_s
def Gee_c():
    """CO2 conductance factor; fixed at unity in this model."""
    return 1.0
def Gee_R(S, K_R=200.0):
    """
    Solar-radiation conductance factor; approaches 1 as S approaches
    1000.
    """
    numerator = S*(1000.0 + K_R)
    denominator = 1000.0*(S+K_R)
    return numerator/denominator
def Gee_D(D, K_D1=-0.307, K_D2=0.019):
    """
    Vapour-pressure-deficit conductance factor, quadratic in D;
    equals 1 when D is zero.
    """
    vpd_factor = 1.0 + K_D1*D + K_D2*D**2
    return vpd_factor
def Gee_T(TK, TL=273.0, TH=313.0, T0=293.0):
    """
    Temperature conductance factor (Jarvis form): 1.0 at the optimum
    T0, falling to 0 at the low (TL) and high (TH) limits.  TK is in
    kelvin.
    """
    alpha_T = (TH - T0)/(T0 - TL)
    numerator = (TK - TL)*(TH - TK)**alpha_T
    denominator = (T0 - TL)*(TH - T0)**alpha_T
    return numerator/denominator
def Gee_M(SM, SM0, K_M1, K_M2):
    """
    Soil-moisture conductance factor: 1 - K_M1 * exp(K_M2*(SM - SM0)).
    """
    deficit_term = K_M1*exp(K_M2*(SM - SM0))
    return 1.0 - deficit_term
def PenmanMonteithPET(Tc, RH, Rn, S, SM, um, z0, d, g0, SM0, P=101.2, zm=2.0):
    """
    Penman-Monteith latent heat flux (potential evapotranspiration).

    * Tc -- air temperature, deg C
    * RH -- relative humidity (fraction, assumed -- TODO confirm)
    * Rn -- net radiation
    * S  -- solar radiation (for the Gee_R conductance factor)
    * SM, SM0 -- soil moisture and its reference value
    * um -- wind speed at height zm; z0, d -- roughness/displacement
    * g0 -- maximum surface conductance
    * P  -- pressure in kPa; zm -- measurement height
    """
    cP = 1.013  # specific heat of air
    rho_a = AirDensity(RH, Tc, P)
    # Vapour pressure deficit from relative humidity.
    D = (1.0 - RH)*SatVaporPress(Tc)
    delta = SlopeSatVaporPress(Tc)
    gamma = PsychConst(P)
    r_a = AeroReist(um, zm, z0, d)
    r_s = SurfResist(g0, S, D, Tc, SM, SM0)
    # Classic Penman-Monteith combination equation.
    LE = (delta*Rn + (rho_a*cP*D)/r_a)/(delta + gamma*(1.0 + r_s/r_a))
    return LE
|
data/Yelp/pyleus/tests/cli/build_test.py
|
import glob
import os
import shutil
import zipfile
import pytest
from pyleus import __version__
from pyleus import exception
from pyleus.cli import build
from pyleus.testing import mock
class TestBuild(object):
    """Unit tests for pyleus.cli.build helpers (jar handling, virtualenv
    setup, and topology directory packaging), using mocks throughout."""

    @mock.patch.object(os.path, 'exists', autospec=True)
    def test__open_jar_jarfile_not_found(self, mock_exists):
        # A missing jar path must raise JarError.
        mock_exists.return_value = False
        with pytest.raises(exception.JarError):
            build._open_jar("foo")
        mock_exists.assert_called_once_with("foo")

    @mock.patch.object(os.path, 'exists', autospec=True)
    @mock.patch.object(zipfile, 'is_zipfile', autospec=True)
    def test__open_jar_not_jarfile(self, mock_is_zipfile, mock_exists):
        # An existing file that is not a zip archive must raise JarError.
        mock_exists.return_value = True
        mock_is_zipfile.return_value = False
        with pytest.raises(exception.JarError):
            build._open_jar("foo")
        mock_is_zipfile.assert_called_once_with("foo")

    @mock.patch.object(os, 'walk', autospec=True)
    def test__zip_dir(self, mock_walk):
        # Archive paths must be relative to the zipped root.
        mock_arc = mock.Mock(autospec=True)
        mock_walk.return_value = [
            ("foo", ["bar"], ["baz"]),
            ("foo/bar", [], ["qux"])
        ]
        build._zip_dir("foo", mock_arc)
        mock_walk.assert_any_call("foo")
        expected = [
            mock.call("foo/baz", "baz", zipfile.ZIP_DEFLATED),
            mock.call("foo/bar/qux", "bar/qux", zipfile.ZIP_DEFLATED),
        ]
        mock_arc.write.assert_has_calls(expected)

    @mock.patch.object(zipfile, 'ZipFile', autospec=True)
    @mock.patch.object(build, '_zip_dir', autospec=True)
    def test__pack_jar(self, mock_zip_dir, mock_zipfile):
        build._pack_jar("foo", "bar")
        mock_zipfile.assert_called_once_with("bar", "w")
        mock_zip_dir.assert_called_once_with("foo", mock_zipfile.return_value)

    @mock.patch.object(os.path, 'exists', autospec=True)
    def test__validate_venv_dir_contains_venv(self, mock_exists):
        # A pre-existing venv directory inside the topology dir is an error.
        mock_exists.return_value = True
        with pytest.raises(exception.InvalidTopologyError):
            build._validate_venv("foo", "foo/bar_venv")
        mock_exists.assert_called_once_with("foo/bar_venv")

    @mock.patch.object(build, '_remove_pyleus_base_jar', autospec=True)
    @mock.patch.object(build, 'VirtualenvProxy', autospec=True)
    def test__set_up_virtualenv_with_requirements(self, mock_venv,
                                                  mock_remove_base_jar):
        venv = mock_venv.return_value
        build._set_up_virtualenv(
            venv_name="foo",
            tmp_dir="bar",
            req="baz.txt",
            include_packages=["fruit", "ninja==7.7.7"],
            system_site_packages=True,
            pypi_index_url="http://pypi-ninja.ninjacorp.com/simple",
            python_interpreter="python2.7",
            verbose=False)
        # pyleus itself (pinned to this version) is always installed first.
        expected_install = [
            mock.call("pyleus=={0}".format(__version__)),
            mock.call("fruit"),
            mock.call("ninja==7.7.7")
        ]
        venv.install_package.assert_has_calls(expected_install)
        venv.install_from_requirements.assert_called_once_with("baz.txt")
        mock_remove_base_jar.assert_called_once_with(venv)

    @mock.patch.object(build, '_remove_pyleus_base_jar', autospec=True)
    @mock.patch.object(build, 'VirtualenvProxy', autospec=True)
    def test__set_up_virtualenv_without_requirements(self, mock_venv,
                                                     mock_remove_base_jar):
        venv = mock_venv.return_value
        build._set_up_virtualenv(
            venv_name="foo",
            tmp_dir="bar",
            req=None,
            include_packages=["fruit", "ninja==7.7.7"],
            system_site_packages=True,
            pypi_index_url="http://pypi-ninja.ninjacorp.com/simple",
            python_interpreter="python2.7",
            verbose=False)
        expected_install = [
            mock.call("pyleus=={0}".format(__version__)),
            mock.call("fruit"),
            mock.call("ninja==7.7.7")
        ]
        venv.install_package.assert_has_calls(expected_install)
        # req=None means no requirements file should be processed.
        assert venv.install_from_requirements.call_count == 0
        mock_remove_base_jar.assert_called_once_with(venv)

    @mock.patch.object(glob, 'glob', autospec=True)
    def test__content_to_copy(self, mock_glob):
        mock_glob.return_value = ["foo/good1.mkv", "foo/good2.bat",
                                  "foo/bad1.txt", "foo/bad2.jar"]
        content = build._content_to_copy("foo", ["foo/bad1.txt",
                                                 "foo/bad2.jar"])
        mock_glob.assert_called_once_with("foo/*")
        assert content == set(["foo/good1.mkv", "foo/good2.bat"])

    @mock.patch.object(build, '_content_to_copy', autospec=True)
    @mock.patch.object(os.path, 'isdir', autospec=True)
    @mock.patch.object(shutil, 'copytree', autospec=True)
    @mock.patch.object(shutil, 'copy2', autospec=True)
    def test__copy_dir_content(
            self, mock_copy2, mock_copytree, mock_isdir, mock_cont_to_copy):
        # First entry is a directory (copytree), second a file (copy2).
        mock_cont_to_copy.return_value = ["foo/ham", "foo/honey"]
        mock_isdir.side_effect = iter([True, False])
        build._copy_dir_content(src="foo", dst="bar", exclude=[])
        mock_cont_to_copy.assert_called_once_with("foo", [])
        expected = [mock.call("foo/ham"), mock.call("foo/honey")]
        mock_isdir.assert_has_calls(expected)
        mock_copytree.assert_called_once_with(
            "foo/ham", "bar/ham", symlinks=True)
        mock_copy2.assert_called_once_with("foo/honey", "bar")

    # NOTE(review): "otuput" is a typo for "output" in this test name;
    # harmless for pytest discovery, left unchanged here.
    @mock.patch.object(build, 'expand_path', autospec=True)
    def test__build_otuput_path(self, mock_ex_path):
        build._build_output_path("foo", "bar")
        mock_ex_path.assert_called_with("foo")
        build._build_output_path(None, "bar")
        mock_ex_path.assert_called_with("bar.jar")

    def test__path_contained_by(self):
        # Paths are normalized before the containment check.
        p1 = '/foo//bar/baz/../stuff/'
        p2 = '/a/b/c/d/../../../../foo/bar/stufff'
        p3 = '/a/b/c/d/../../../../foo/bar/stuff/11'
        assert not build._path_contained_by(p1, p2)
        assert build._path_contained_by(p1, p3)

    def test__remove_pyleus_base_jar(self):
        """Remove the base jar if it is inside the virtualenv"""
        mock_venv_path = "/path/to/venv"
        def mock_execute_module(module, cwd):
            return "/path/to/venv/inside.jar"
        mock_venv = mock.Mock(
            path=mock_venv_path,
            execute_module=mock_execute_module,
        )
        with mock.patch.object(os, 'remove') as mock_remove:
            build._remove_pyleus_base_jar(mock_venv)
        mock_remove.assert_called_once_with("/path/to/venv/inside.jar")

    def test__remove_pyleus_base_jar_no_remove(self):
        """Do not remove the base jar if it is outside the virtualenv"""
        mock_venv_path = "/path/to/venv"
        def mock_execute_module(module, cwd):
            return "/foo/bar/outside.jar"
        mock_venv = mock.Mock(
            path=mock_venv_path,
            execute_module=mock_execute_module,
        )
        with mock.patch.object(os, 'remove') as mock_remove:
            build._remove_pyleus_base_jar(mock_venv)
        assert not mock_remove.called
|
data/Juniper/OpenClos/jnpr/openclos/tests/unit/test_report.py
|
'''
Created on Sep 9, 2014
@author: moloyc
'''
import unittest
import os
from jnpr.openclos.report import ResourceAllocationReport, L2Report, L3Report
from test_dao import InMemoryDao
class Test(unittest.TestCase):
    """Report-generation tests (L2/L3) backed by an in-memory database."""

    def setUp(self):
        '''Creates with in-memory DB'''
        # Minimal OpenClos configuration; sqlite:/// with no path is an
        # in-memory database.
        self.__conf = {}
        self.__conf['outputDir'] = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'out')
        self.__conf['dbUrl'] = 'sqlite:///'
        self.__conf['writeConfigInFile'] = 'false'
        self.__conf['logLevel'] = {
            'fabric' : 'INFO',
            'reporting' : 'INFO',
            'ztp' : 'INFO',
            'rest' : 'INFO',
            'writer' : 'INFO',
            'devicePlugin' : 'INFO',
            'trapd' : 'INFO',
            'dao' : 'INFO'
        }
        self.__conf['DOT'] = {'ranksep' : '5 equally', 'colors': ['red', 'green', 'blue']}
        # Port layouts per supported device family.
        self.__conf['deviceFamily'] = {
            "qfx5100-24q-2p": {
                "ports": 'et-0/0/[0-23]'
            },
            "qfx5100-48s-6q": {
                "uplinkPorts": 'et-0/0/[48-53]',
                "downlinkPorts": 'xe-0/0/[0-47]'
            }
        }
        self._dao = InMemoryDao.getInstance()

    def tearDown(self):
        # Drop the singleton DAO so each test starts with a fresh DB.
        self._dao = None
        InMemoryDao._destroy()

    '''
    def testGetInterconnectAllocation(self):
        from test_model import createPod
        pod = createPod("test", self.session)
        pod.allocatedInterConnectBlock = '1.2.3.4/24'
        pod.interConnectPrefix = '1.2.0.0/24'
        interconnectAllocation = self.report.getInterconnectAllocation("test")
        self.assertEqual('1.2.0.0/24', interconnectAllocation['block'])
        self.assertEqual('1.2.3.4/24', interconnectAllocation['allocated'])
    def testGetInterconnectAllocationNoPod(self):
        interconnectAllocation = self.report.getInterconnectAllocation("test")
        self.assertEqual({}, interconnectAllocation)
    '''

    def testGenerateL2Report(self):
        # Smoke test: report generation must not raise.
        l2Report = L2Report(self.__conf, self._dao)
        from test_model import createPod
        with self._dao.getReadSession() as session:
            pod = createPod("test", session)
            l2Report.generateReport(pod.id, True, False)

    def testGenerateL3Report(self):
        # Smoke test: report generation must not raise.
        l3Report = L3Report(self.__conf, self._dao)
        from test_model import createPod
        with self._dao.getReadSession() as session:
            pod = createPod("test", session)
            l3Report.generateReport(pod.id, True, False)
if __name__ == "__main__":
unittest.main()
|
data/NikolayRag/typeTodo/PyMySQL/pymysql/tests/test_nextset.py
|
import unittest2
from pymysql.tests import base
from pymysql import util
class TestNextset(base.PyMySQLTestCase):
    """Tests for cursor.nextset() behaviour with multi-statement queries."""

    def setUp(self):
        super(TestNextset, self).setUp()
        # Reuse the first configured test connection.
        self.con = self.connections[0]

    def test_nextset(self):
        cur = self.con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        # Advance to the second result set; truthy while one remains.
        r = cur.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur))
        # No further result sets.
        self.assertIsNone(cur.nextset())

    def test_skip_nextset(self):
        # A new execute() must discard any unread result sets.
        cur = self.con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        cur.execute("SELECT 42")
        self.assertEqual([(42,)], list(cur))

    def test_ok_and_next(self):
        # A statement with no rows (commit) still counts as a result set.
        cur = self.con.cursor()
        cur.execute("SELECT 1; commit; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        self.assertTrue(cur.nextset())
        self.assertTrue(cur.nextset())
        self.assertEqual([(2,)], list(cur))
        self.assertFalse(bool(cur.nextset()))

    @unittest2.expectedFailure
    def test_multi_cursor(self):
        # Known limitation: two cursors on one connection interleaving
        # multi-statement results is expected to fail.
        cur1 = self.con.cursor()
        cur2 = self.con.cursor()
        cur1.execute("SELECT 1; SELECT 2;")
        cur2.execute("SELECT 42")
        self.assertEqual([(1,)], list(cur1))
        self.assertEqual([(42,)], list(cur2))
        r = cur1.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur1))
        self.assertIsNone(cur1.nextset())

    def test_multi_statement_warnings(self):
        # Regression test: fetching warnings across multiple statements
        # must not raise TypeError.
        cursor = self.con.cursor()
        try:
            cursor.execute('DROP TABLE IF EXISTS a; '
                           'DROP TABLE IF EXISTS b;')
        except TypeError:
            self.fail()
|
data/SheffieldML/GPy/GPy/plotting/matplot_dep/maps.py
|
import numpy as np
try:
from matplotlib import pyplot as pb
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
try:
__IPYTHON__
pb.ion()
except NameError:
pass
except:
pass
import re
def plot(shape_records, facecolor='w', edgecolor='k', linewidths=.5, ax=None, xlims=None, ylims=None):
    """
    Plot the geometry of a shapefile

    :param shape_records: geometry and attributes list
    :type shape_records: ShapeRecord object (output of a shapeRecords() method)
    :param facecolor: color to be used to fill in polygons
    :param edgecolor: color to be used for lines
    :param ax: axes to plot on.
    :type ax: axes handle
    """
    if ax is None:
        ax = pb.figure().add_subplot(111)
    for record in shape_records:
        vertices = np.vstack(record.shape.points)
        # Part offsets delimit individual rings; append the total point count
        # so every slice below has an end index.
        offsets = list(record.shape.parts) + [vertices.shape[0]]
        rings = [Polygon(vertices[offsets[k]:offsets[k + 1]])
                 for k in range(len(record.shape.parts))]
        ax.add_collection(PatchCollection(rings, facecolor=facecolor,
                                          edgecolor=edgecolor, linewidths=linewidths))
    # Overall extent from every record's bbox ([xmin, ymin, xmax, ymax]),
    # unless the caller supplied explicit limits.
    bboxes = np.vstack([record.shape.bbox for record in shape_records])
    minx, miny = np.min(bboxes[:, :2], 0)
    maxx, maxy = np.max(bboxes[:, 2:], 0)
    if xlims is not None:
        minx, maxx = xlims
    if ylims is not None:
        miny, maxy = ylims
    ax.set_xlim(minx, maxx)
    ax.set_ylim(miny, maxy)
def string_match(sf, regex, field=2):
    """
    Return the geometry and attributes of a shapefile whose fields match a regular expression given

    :param sf: shapefile
    :type sf: shapefile object
    :param regex: regular expression to match
    :type regex: string
    :param field: index of the record field matched against the regex
    :type field: integer
    :returns: (index, shape_records) — matching positions and ShapeRecords
    """
    index = []
    shape_records = []
    # enumerate() yields (position, record) directly — no rec[0]/rec[1] indexing.
    for i, rec in enumerate(sf.shapeRecords()):
        if re.search(regex, rec.record[field]) is not None:
            index.append(i)
            shape_records.append(rec)
    return index, shape_records
def bbox_match(sf, bbox, inside_only=True):
    """
    Return the geometry and attributes of a shapefile that lie within (or intersect) a bounding box

    :param sf: shapefile
    :type sf: shapefile object
    :param bbox: bounding box
    :type bbox: list of floats [x_min, y_min, x_max, y_max]
    :param inside_only: True if the objects returned are those that lie within
        the bbox and False if the objects returned are any that intersect the bbox
    :type inside_only: Boolean
    :returns: (index, shape_records)
    """
    A, B, C, D = bbox
    index = []
    shape_records = []
    # enumerate() yields (position, record) directly — no rec[0]/rec[1] indexing.
    for i, rec in enumerate(sf.shapeRecords()):
        a, b, c, d = rec.shape.bbox
        if inside_only:
            # Record bbox fully contained in the query bbox.
            if A <= a and B <= b and C >= c and D >= d:
                index.append(i)
                shape_records.append(rec)
        else:
            # Corner/edge containment tests, kept exactly as originally
            # written. NOTE(review): a standard interval-overlap test would
            # be simpler — confirm these eight cases cover every intended
            # intersection before simplifying.
            cond1 = A <= a and B <= b and C >= a and D >= b
            cond2 = A <= c and B <= d and C >= c and D >= d
            cond3 = A <= a and D >= d and C >= a and B <= d
            cond4 = A <= c and D >= b and C >= c and B <= b
            cond5 = a <= C and b <= B and d >= D
            cond6 = c <= A and b <= B and d >= D
            cond7 = d <= B and a <= A and c >= C
            cond8 = b <= D and a <= A and c >= C
            if cond1 or cond2 or cond3 or cond4 or cond5 or cond6 or cond7 or cond8:
                index.append(i)
                shape_records.append(rec)
    return index, shape_records
def plot_bbox(sf, bbox, inside_only=True):
    """
    Plot the geometry of a shapefile within a bbox

    :param sf: shapefile
    :type sf: shapefile object
    :param bbox: bounding box
    :type bbox: list of floats [x_min, y_min, x_max, y_max]
    :param inside_only: True if the objects returned are those that lie within
        the bbox and False if the objects returned are any that intersect the bbox
    :type inside_only: Boolean
    """
    # Fix: dropped the unused `A, B, C, D = bbox` unpacking the original did.
    index, shape_records = bbox_match(sf, bbox, inside_only)
    # bbox is [x_min, y_min, x_max, y_max]; pass the axis limits straight through.
    plot(shape_records, xlims=[bbox[0], bbox[2]], ylims=[bbox[1], bbox[3]])
def plot_string_match(sf, regex, field, **kwargs):
    """
    Plot the geometry of a shapefile whose fields match a regular expression given

    :param sf: shapefile
    :type sf: shapefile object
    :param regex: regular expression to match
    :type regex: string
    :param field: field number to be matched with the regex
    :type field: integer
    """
    # Only the matching records are needed here; the indices are discarded.
    _, matches = string_match(sf, regex, field)
    plot(matches, **kwargs)
def new_shape_string(sf,name,regex,field=2,type=None):
    """Write a new shapefile containing only records whose *field* matches *regex*.

    NOTE(review): ``type`` shadows the builtin and, despite defaulting to
    shapefile.POINT, is never passed to the Writer — the output always uses
    the source's shapeType and line geometry (``newshp.line``). Confirm intended.
    """
    import shapefile
    if type is None:
        type = shapefile.POINT
    newshp = shapefile.Writer(shapeType = sf.shapeType)
    newshp.autoBalance = 1
    index,shape_records = string_match(sf,regex,field)
    # NOTE(review): `index` holds matching *record* positions, yet it is used
    # here to select *field* descriptors — verify these index spaces coincide.
    _fi = [sf.fields[j] for j in index]
    for f in _fi:
        newshp.field(name=f[0],fieldType=f[1],size=f[2],decimal=f[3])
    _shre = shape_records
    for sr in _shre:
        # Copy each record's points as a single part and emit line geometry.
        _points = []
        _parts = []
        for point in sr.shape.points:
            _points.append(point)
        _parts.append(_points)
        newshp.line(parts=_parts)
        # Appends directly to the Writer's record list instead of .record();
        # relies on pyshp internals.
        newshp.records.append(sr.record)
        print(len(sr.record))
    newshp.save(name)
    print(index)
def apply_bbox(sf, ax):
    """
    Use bbox as xlim and ylim in ax
    """
    # Shapefile bbox layout is [x_min, y_min, x_max, y_max].
    x_min, y_min, x_max, y_max = sf.bbox
    ax.set_xlim((x_min, x_max))
    ax.set_ylim((y_min, y_max))
|
data/SamyPesse/glass.py/examples/foursquare/app.py
|
from flask import request, session, render_template, redirect, url_for
import glass
import foursquare
import config
app = glass.Application(
client_id=config.GOOGLE_CLIENT_ID,
client_secret=config.GOOGLE_CLIENT_SECRET,
scopes=config.GOOGLE_SCOPES,
template_folder="templates",
static_url_path='/static',
static_folder='static')
app.web.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'
FOURSQUARE_TOKENS = {}
def foursquare_client():
    """Build an unauthenticated Foursquare API client from app config."""
    client = foursquare.Foursquare(
        client_id=config.FOURSQUARE_CLIENT_ID,
        client_secret=config.FOURSQUARE_CLIENT_SECRET,
        redirect_uri=config.FOURSQUARE_CLIENT_REDIRECT)
    return client
@app.web.route("/")
def index():
    # Landing page; rendered unauthenticated before any OAuth handshake.
    return render_template("index.html", auth=False)
@app.subscriptions.login
def login(user):
    # Glass OAuth completed: keep the Mirror API token in the web session,
    # then chain straight into the Foursquare OAuth flow.
    print "google user: %s" % (user.token)
    session['token'] = user.token
    return redirect("/foursquare/authorize")
@app.subscriptions.location
def change_location(user):
    # Mirror API location-update callback: search Foursquare for venues near
    # the reported position and post the first hit to the user's timeline.
    location = user.location()
    llat = location.get('latitude')
    llong = location.get('longitude')
    # NOTE(review): raises KeyError if this user never completed the
    # Foursquare OAuth flow (no entry in the in-memory FOURSQUARE_TOKENS).
    client = foursquare.Foursquare(access_token=FOURSQUARE_TOKENS[user.token])
    venues = client.venues.search(params={'ll': llat+','+llong, 'llAcc': location.get('accuracy')})
    if len(venues['venues']) > 0:
        user.timeline.post_template("venue.html", venue=venues['venues'][0], llat=llat, llong=llong)
@app.web.route("/foursquare/authorize")
def foursquare_authorize():
    """Kick off the Foursquare OAuth dance by redirecting to its auth URL."""
    return redirect(foursquare_client().oauth.auth_url())
@app.web.route("/foursquare/callback")
def foursquare_callback():
    # OAuth redirect endpoint: exchange the code for a Foursquare token,
    # remember it for this Glass user, and greet them on their timeline.
    code = request.args.get('code', None)
    client = foursquare_client()
    # Bail out unless Foursquare sent a code and the Glass login happened first.
    if code is None or not 'token' in session:
        return render_template("index.html", auth=False)
    access_token = client.oauth.get_token(code)
    # Keyed by Glass token so change_location() can look it up later.
    # NOTE(review): in-memory map — tokens are lost on restart; confirm OK.
    FOURSQUARE_TOKENS[session['token']] = access_token
    client.set_access_token(access_token)
    user = client.users()
    username = user['user']['firstName']
    print "foursquare user: %s" % (access_token), username
    userglass = glass.User(app=app, token=session['token'])
    userglass.timeline.post(text="Welcome %s!" % username)
    return render_template("index.html", auth=True)
if __name__ == '__main__':
    # Development entry point; bind address and port come from config.
    print "Starting application at %s:%i" % (config.HOST, config.PORT)
    app.run(port=config.PORT, host=config.HOST)
|
data/SublimeText/VintageEx/tests/test_global.py
|
import unittest
from vex.parsers.g_cmd import GlobalLexer
class TestGlobalLexer(unittest.TestCase):
    """Tests for GlobalLexer.parse of :g-style /pattern/command strings.

    NOTE(review): the string literals below are visibly truncated in this copy
    of the file (unterminated raw strings such as r'/foo/p and open list
    literals such as ['foo', 'p). It looks like everything from a '#'
    character onward was stripped during export — restore the original
    literals from upstream before running these tests.
    """
    def setUp(self):
        self.lexer = GlobalLexer()
    def testCanMatchFullPattern(self):
        actual = self.lexer.parse(r'/foo/p
        self.assertEqual(actual, ['foo', 'p
    def testCanMatchEmtpySearch(self):
        actual = self.lexer.parse(r'//p
        self.assertEqual(actual, ['', 'p
    def testCanEscapeCharactersInSearchPattern(self):
        actual = self.lexer.parse(r'/\/foo\//p
        self.assertEqual(actual, ['/foo/', 'p
    def testCanEscapeBackSlashes(self):
        actual = self.lexer.parse(r'/\\/p
        self.assertEqual(actual, ['\\', 'p
if __name__ == '__main__':
unittest.main()
|
data/VisTrails/VisTrails/contrib/pc3/info/ipaw/pc3/LoadSql.py
|
class LoadSql(object):
    """DDL strings for the PC3 provenance-challenge Pan-STARRS load tables.

    Pure constants — no behavior. Throughout the schemas, -999 (or '-999' /
    '28881231') appears to be the sentinel for "value not supplied";
    column names clashing with SQL keywords (`objID`, `dec`) are backticked.
    """

    # Per-detection photometry/astrometry rows; composite PK (objID, detectID).
    CREATE_DETECTION_TABLE = \
    """
    CREATE TABLE P2Detection(
        `objID` bigint NOT NULL,
        detectID bigint NOT NULL,
        ippObjID bigint NOT NULL,
        ippDetectID bigint NOT NULL,
        filterID smallint NOT NULL,
        imageID bigint NOT NULL,
        obsTime float NOT NULL DEFAULT -999,
        xPos real NOT NULL DEFAULT -999,
        yPos real NOT NULL DEFAULT -999,
        xPosErr real NOT NULL DEFAULT -999,
        yPosErr real NOT NULL DEFAULT -999,
        instFlux real NOT NULL DEFAULT -999,
        instFluxErr real NOT NULL DEFAULT -999,
        psfWidMajor real NOT NULL DEFAULT -999,
        psfWidMinor real NOT NULL DEFAULT -999,
        psfTheta real NOT NULL DEFAULT -999,
        psfLikelihood real NOT NULL DEFAULT -999,
        psfCf real NOT NULL DEFAULT -999,
        infoFlag int NOT NULL DEFAULT -999,
        htmID float NOT NULL DEFAULT -999,
        zoneID float NOT NULL DEFAULT -999,
        assocDate date NOT NULL DEFAULT '28881231',
        modNum smallint NOT NULL DEFAULT 0,
        ra float NOT NULL,
        `dec` float NOT NULL,
        raErr real NOT NULL DEFAULT 0,
        decErr real NOT NULL DEFAULT 0,
        cx float NOT NULL DEFAULT -999,
        cy float NOT NULL DEFAULT -999,
        cz float NOT NULL DEFAULT -999,
        peakFlux real NOT NULL DEFAULT -999,
        calMag real NOT NULL DEFAULT -999,
        calMagErr real NOT NULL DEFAULT -999,
        calFlux real NOT NULL DEFAULT -999,
        calFluxErr real NOT NULL DEFAULT -999,
        calColor real NOT NULL DEFAULT -999,
        calColorErr real NOT NULL DEFAULT -999,
        sky real NOT NULL DEFAULT -999,
        skyErr real NOT NULL DEFAULT -999,
        sgSep real NOT NULL DEFAULT -999,
        dataRelease smallint NOT NULL,
        CONSTRAINT PK_P2Detection_objID_detectID PRIMARY KEY
        (
            `objID`,
            detectID
        ))
    """

    # Per-exposure (frame) metadata; one row per telescope exposure.
    CREATE_FRAME_META_TABLE = \
    """
    CREATE TABLE P2FrameMeta(
        frameID int NOT NULL PRIMARY KEY,
        surveyID smallint NOT NULL,
        filterID smallint NOT NULL,
        cameraID smallint NOT NULL,
        telescopeID smallint NOT NULL,
        analysisVer smallint NOT NULL,
        p1Recip smallint NOT NULL DEFAULT -999,
        p2Recip smallint NOT NULL DEFAULT -999,
        p3Recip smallint NOT NULL DEFAULT -999,
        nP2Images smallint NOT NULL DEFAULT -999,
        astroScat real NOT NULL DEFAULT -999,
        photoScat real NOT NULL DEFAULT -999,
        nAstRef int NOT NULL DEFAULT -999,
        nPhoRef int NOT NULL DEFAULT -999,
        expStart float NOT NULL DEFAULT -999,
        expTime real NOT NULL DEFAULT -999,
        airmass real NOT NULL DEFAULT -999,
        raBore float NOT NULL DEFAULT -999,
        decBore float NOT NULL DEFAULT -999
    )
    """

    # Per-CCD-image metadata, including WCS projection coefficients.
    CREATE_IMAGE_META_TABLE = \
    """
    CREATE TABLE P2ImageMeta(
        imageID bigint NOT NULL PRIMARY KEY,
        frameID int NOT NULL,
        ccdID smallint NOT NULL,
        photoCalID int NOT NULL,
        filterID smallint NOT NULL,
        bias real NOT NULL DEFAULT -999,
        biasScat real NOT NULL DEFAULT -999,
        sky real NOT NULL DEFAULT -999,
        skyScat real NOT NULL DEFAULT -999,
        nDetect int NOT NULL DEFAULT -999,
        magSat real NOT NULL DEFAULT -999,
        completMag real NOT NULL DEFAULT -999,
        astroScat real NOT NULL DEFAULT -999,
        photoScat real NOT NULL DEFAULT -999,
        nAstRef int NOT NULL DEFAULT -999,
        nPhoRef int NOT NULL DEFAULT -999,
        nx smallint NOT NULL DEFAULT -999,
        ny smallint NOT NULL DEFAULT -999,
        psfFwhm real NOT NULL DEFAULT -999,
        psfModelID int NOT NULL DEFAULT -999,
        psfSigMajor real NOT NULL DEFAULT -999,
        psfSigMinor real NOT NULL DEFAULT -999,
        psfTheta real NOT NULL DEFAULT -999,
        psfExtra1 real NOT NULL DEFAULT -999,
        psfExtra2 real NOT NULL DEFAULT -999,
        apResid real NOT NULL DEFAULT -999,
        dapResid real NOT NULL DEFAULT -999,
        detectorID smallint NOT NULL DEFAULT -999,
        qaFlags int NOT NULL DEFAULT -999,
        detrend1 bigint NOT NULL DEFAULT -999,
        detrend2 bigint NOT NULL DEFAULT -999,
        detrend3 bigint NOT NULL DEFAULT -999,
        detrend4 bigint NOT NULL DEFAULT -999,
        detrend5 bigint NOT NULL DEFAULT -999,
        detrend6 bigint NOT NULL DEFAULT -999,
        detrend7 bigint NOT NULL DEFAULT -999,
        detrend8 bigint NOT NULL DEFAULT -999,
        photoZero real NOT NULL DEFAULT -999,
        photoColor real NOT NULL DEFAULT -999,
        projection1 varchar(8000) NOT NULL DEFAULT '-999',
        projection2 varchar(8000) NOT NULL DEFAULT '-999',
        crval1 float NOT NULL DEFAULT -999,
        crval2 float NOT NULL DEFAULT -999,
        crpix1 float NOT NULL DEFAULT -999,
        crpix2 float NOT NULL DEFAULT -999,
        pc001001 float NOT NULL DEFAULT -999,
        pc001002 float NOT NULL DEFAULT -999,
        pc002001 float NOT NULL DEFAULT -999,
        pc002002 float NOT NULL DEFAULT -999,
        polyOrder int NOT NULL DEFAULT -999,
        pca1x3y0 float NOT NULL DEFAULT -999,
        pca1x2y1 float NOT NULL DEFAULT -999,
        pca1x1y2 float NOT NULL DEFAULT -999,
        pca1x0y3 float NOT NULL DEFAULT -999,
        pca1x2y0 float NOT NULL DEFAULT -999,
        pca1x1y1 float NOT NULL DEFAULT -999,
        pca1x0y2 float NOT NULL DEFAULT -999,
        pca2x3y0 float NOT NULL DEFAULT -999,
        pca2x2y1 float NOT NULL DEFAULT -999,
        pca2x1y2 float NOT NULL DEFAULT -999,
        pca2x0y3 float NOT NULL DEFAULT -999,
        pca2x2y0 float NOT NULL DEFAULT -999,
        pca2x1y1 float NOT NULL DEFAULT -999,
        pca2x0y2 float NOT NULL DEFAULT -999
    )
    """
|
data/adaptivdesign/django-sellmo/sellmo/apps/product/__init__.py
|
from sellmo.core.registry import Module
# Registry wiring for the product app: bind the internal implementation
# packages to their public dotted paths.
# NOTE(review): Module.imports presumably defers the actual import — confirm
# against sellmo.core.registry.
ModelsModule = Module.imports('%s.internal.models' % __name__)
IndexesModule = Module.imports('%s.internal.indexes' % __name__)
# Public handles exposed as <pkg>.models and <pkg>.indexes.
models = ModelsModule('%s.models' % __name__)
indexes = IndexesModule('%s.indexes' % __name__)
# Standard Django app-config hook.
default_app_config = '%s.apps.DefaultConfig' % __name__
|
data/Ramblurr/yubi-goog/test.py
|
import unittest
import binascii
import struct
import yubi_goog
class TestYubiGoog(unittest.TestCase):
    """Check secret decoding and TOTP generation against RFC 6238 vectors."""

    def setUp(self):
        self.google_secret = "n xu7 v4s qp6 njs gj5"
        self.test_secret = binascii.hexlify('12345678901234567890'.encode('ascii'))
        # RFC 6238 reference vectors, truncated to 6-digit OTPs.
        self.test_vectors = [{'time': 1111111111, 'otp': '050471'},
                             {'time': 1234567890, 'otp': '005924'},
                             {'time': 2000000000, 'otp': '279037'}]

    def test_decode_secret(self):
        decoded = yubi_goog.decode_secret(self.google_secret).upper()
        self.assertEqual(decoded, "6DE9FAF2507F9A99193D".encode('ascii'))

    def test_totp(self):
        for vector in self.test_vectors:
            # 30-second time step, packed as a big-endian 64-bit counter.
            counter = struct.pack('>q', int(int(vector['time']) / 30))
            self.assertEqual(yubi_goog.totp(self.test_secret, counter),
                             vector['otp'])
if __name__ == '__main__':
unittest.main()
|
data/SuperCowPowers/workbench/workbench/workers/mem_connscan.py
|
''' Memory Image ConnScan worker. This worker utilizes the Rekall Memory Forensic Framework.
See Google Github: http://github.com/google/rekall
All credit for good stuff goes to them, all credit for bad stuff goes to us. :)
'''
import os
import hashlib
import pprint
import collections
from rekall_adapter.rekall_adapter import RekallAdapter
class MemoryImageConnScan(object):
    ''' This worker computes connscan-data for memory image files.

    Runs the sample through Rekall's `connscan` plugin and groups the rows it
    emits into per-table lists on self.output.
    '''

    # Workbench worker contract: input_data keys this worker requires.
    dependencies = ['sample']

    def __init__(self):
        ''' Initialization '''
        self.plugin_name = 'connscan'
        self.current_table_name = 'connscan'
        # defaultdict(list) so rows can be appended without pre-creating tables.
        self.output = {'tables': collections.defaultdict(list)}
        self.column_map = {}

    def execute(self, input_data):
        ''' Run the connscan plugin over input_data and return collected tables. '''
        adapter = RekallAdapter()
        adapter.set_plugin_name(self.plugin_name)
        rekall_output = adapter.execute(input_data)
        for line in rekall_output:
            # Rekall emits typed messages: m=metadata, s=section header,
            # t=table header (column map), r=row. Anything else is logged.
            if line['type'] == 'm':
                self.output['meta'] = line['data']
            elif line['type'] == 's':
                self.current_table_name = line['data']['name'][1]
            elif line['type'] == 't':
                self.column_map = {item['cname']: item['name'] if 'name' in item else item['cname'] for item in line['data']}
            elif line['type'] == 'r':
                row = RekallAdapter.process_row(line['data'], self.column_map)
                self.output['tables'][self.current_table_name].append(row)
            else:
                # Fix: parenthesized single-argument print — identical output
                # under Python 2, and valid under Python 3 (the original used
                # a Python-2-only print statement).
                print('Note: Ignoring rekall message of type %s: %s' % (line['type'], line['data']))
        return self.output
import pytest
@pytest.mark.xfail
def test():
    ''' mem_connscan.py: Test

    Integration test: needs a running workbench server on tcp://127.0.0.1:4242
    and downloads a ~exemplar memory image over the network if missing.
    '''
    import zerorpc
    workbench = zerorpc.Client(timeout=300, heartbeat=60)
    workbench.connect("tcp://127.0.0.1:4242")
    # Ensure the exemplar memory image exists locally (network side effect).
    data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../data/memory_images/exemplar4.vmem')
    if not os.path.isfile(data_path):
        print 'Not finding exemplar4.mem... Downloading now...'
        import urllib
        urllib.urlretrieve('http://s3-us-west-2.amazonaws.com/workbench-data/memory_images/exemplar4.vmem', data_path)
        if not os.path.isfile(data_path):
            print 'Downloading failed, try it manually...'
            print 'wget http://s3-us-west-2.amazonaws.com/workbench-data/memory_images/exemplar4.vmem'
            exit(1)
    # Anything under ~100KB is a failed or corrupt download.
    if os.stat(data_path).st_size < 100000:
        data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../data/memory_images/exemplar4.vmem')
        with open(data_path, 'rb') as mem_file:
            print 'Corrupt memory image: %s' % mem_file.read()[:500]
        print 'Downloading failed, try it manually...'
        print 'wget http://s3-us-west-2.amazonaws.com/workbench-data/memory_images/exemplar4.vmem'
        exit(1)
    data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../data/memory_images/exemplar4.vmem')
    with open(data_path, 'rb') as mem_file:
        raw_bytes = mem_file.read()
    md5 = hashlib.md5(raw_bytes).hexdigest()
    if not workbench.has_sample(md5):
        md5 = workbench.store_sample(open(data_path, 'rb').read(), 'exemplar4.vmem', 'mem')
    # Unit test: run the worker in-process against the raw bytes.
    worker = MemoryImageConnScan()
    output = worker.execute({'sample':{'raw_bytes':raw_bytes}})
    print '\n<<< Unit Test >>>'
    print 'Meta: %s' % output['meta']
    for name, table in output['tables'].iteritems():
        print '\nTable: %s' % name
        pprint.pprint(table)
    assert 'Error' not in output
    # Server test: ask the workbench server to run the same worker by md5.
    output = workbench.work_request('mem_connscan', md5)['mem_connscan']
    print '\n<<< Server Test >>>'
    print 'Meta: %s' % output['meta']
    for name, table in output['tables'].iteritems():
        print '\nTable: %s' % name
        pprint.pprint(table)
    assert 'Error' not in output
|
data/SalesforceEng/Providence/Empire/bugsystems/jira/JiraAPI.py
|
'''
Copyright (c) 2015, Salesforce.com, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
"""
JiraAPI - Sends JQL (Jira Query Language) requests to a Jira instance and returns the results
"""
import datetime
import json
import os
import ssl
import urllib

import requests
__copyright__ = "2015 Salesforce.com, Inc"
__status__ = "Prototype"
class JiraAPI(object):
def __init__(self, server, credentials):
super(JiraAPI, self).__init__()
self.server = server
self.credentials = credentials
self.verify = None
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
if (self.server == "jira.exacttarget.com:8443"):
self.verify = os.path.join(__location__, 'et-jira-certs.pem')
def fetchCommitDetails(self, url):
r = requests.get(url, auth=self.auth(), verify=self.verify);
if r.headers['x-ratelimit-remaining']:
remaining_requests = int(r.headers['x-ratelimit-remaining'])
if (remaining_requests == 0):
self._no_more_requests_until = datetime.datetime.fromtimestamp(float(r.headers['x-ratelimit-reset']));
return None
if(r.ok):
return r.json()
return None
def jql(self, query, offset=None):
verbose = False
resource_name = "search"
url = "https://%s/rest/api/latest/%s" % (self.server, urllib.quote(resource_name))
params = {"jql":query}
if offset is not None:
params["startAt"] = offset
r = requests.get(url, params=params, headers={ "Authorization":self.credentials.authorizationHeaderValue() }, verify=self.verify)
if(r.ok):
results = r.json()
return results
else:
print "Request failed: ", r.status_code
print r.text
return None
if __name__ == '__main__':
    # Smoke test against a live instance.
    # NOTE(review): JiraAPI.__init__ takes (server, credentials), but this call
    # passes three arguments (server_url, user, password) — it would raise a
    # TypeError. Wrap user/password in the expected credentials object.
    # Also: the file handle opened below is never closed.
    usercredentials_jsonfile = "bugsystems-Jira-usercreds.json"
    user_creds_data = open(usercredentials_jsonfile)
    user_creds = json.load(user_creds_data)
    user = user_creds["user"]
    password = user_creds["token"]
    server_url = 'https://pardot.atlassian.net'
    jira = JiraAPI(server_url, user, password)
    results = jira.jql('issuetype = Bug AND labels = trust AND status in (Accepted, "In Progress", Reopened, QA, "Needs Documentation", "On Hold", "QA Confirmed", Backlog, "Under Consideration", Investigation, "Define User Requirements", "Interaction Design", "Ready for Engineering", "UX Review", Done, "In Review", Blocked)')
|
data/Microsoft/ApplicationInsights-Python/tests/applicationinsights_tests/exception_tests/__init__.py
|
from . import TestEnable
|
data/StackStorm/st2/st2actions/tests/unit/test_mistral_v2_rerun.py
|
import copy
import uuid
import mock
import six
import yaml
from mistralclient.api.v2 import executions
from mistralclient.api.v2 import tasks
from mistralclient.api.v2 import workbooks
from mistralclient.api.v2 import workflows
from oslo_config import cfg
import st2tests.config as tests_config
tests_config.parse_args()
cfg.CONF.set_override('retry_exp_msec', 100, group='mistral')
cfg.CONF.set_override('retry_exp_max_msec', 200, group='mistral')
cfg.CONF.set_override('retry_stop_max_msec', 200, group='mistral')
import st2common.bootstrap.runnersregistrar as runners_registrar
from st2actions.runners.localrunner import LocalShellRunner
from st2actions.runners.mistral.v2 import MistralRunner
from st2common.constants import action as action_constants
from st2common.models.api.action import ActionAPI
from st2common.models.db.liveaction import LiveActionDB
from st2common.persistence.action import Action
from st2common.persistence.liveaction import LiveAction
from st2common.services import action as action_service
from st2common.transport.liveaction import LiveActionPublisher
from st2common.transport.publishers import CUDPublisher
from st2tests import DbTestCase
from st2tests.fixturesloader import FixturesLoader
from tests.unit.base import MockLiveActionPublisher
# Fixture packs: workflow definitions plus the action metadata registering them.
TEST_FIXTURES = {
    'workflows': [
        'workflow_v2.yaml',
        'workbook_v2_many_workflows.yaml'
    ],
    'actions': [
        'workflow_v2.yaml',
        'workbook_v2_many_workflows.yaml',
        'local.yaml'
    ]
}
PACK = 'generic'
LOADER = FixturesLoader()
FIXTURES = LOADER.load_fixtures(fixtures_pack=PACK, fixtures_dict=TEST_FIXTURES)
# --- Workbook (WB1) fixtures: main workflow with an errored subflow task. ---
WB1_YAML_FILE_NAME = TEST_FIXTURES['workflows'][1]
WB1_YAML_FILE_PATH = LOADER.get_fixture_file_path_abs(PACK, 'workflows', WB1_YAML_FILE_NAME)
WB1_SPEC = FIXTURES['workflows'][WB1_YAML_FILE_NAME]
WB1_YAML = yaml.safe_dump(WB1_SPEC, default_flow_style=False)
WB1_NAME = '%s.%s' % (PACK, WB1_YAML_FILE_NAME.replace('.yaml', ''))
WB1 = workbooks.Workbook(None, {'name': WB1_NAME, 'definition': WB1_YAML})
WB1_MAIN_EXEC = {'id': str(uuid.uuid4()), 'state': 'RUNNING'}
WB1_MAIN_EXEC['workflow_name'] = WB1_NAME + '.main'
WB1_MAIN_EXEC_ERRORED = copy.deepcopy(WB1_MAIN_EXEC)
WB1_MAIN_EXEC_ERRORED['state'] = 'ERROR'
WB1_MAIN_TASK1 = {'id': str(uuid.uuid4()), 'name': 'greet', 'state': 'ERROR'}
WB1_MAIN_TASKS = [tasks.Task(None, WB1_MAIN_TASK1)]
WB1_MAIN_TASK_ID = WB1_MAIN_TASK1['id']
# Subflow execution is tied back to the errored main task by task_execution_id.
WB1_SUB1_EXEC = {'id': str(uuid.uuid4()), 'state': 'RUNNING', 'task_execution_id': WB1_MAIN_TASK_ID}
WB1_SUB1_EXEC['workflow_name'] = WB1_NAME + '.subflow1'
WB1_SUB1_EXEC_ERRORED = copy.deepcopy(WB1_SUB1_EXEC)
WB1_SUB1_EXEC_ERRORED['state'] = 'ERROR'
WB1_SUB1_TASK1 = {'id': str(uuid.uuid4()), 'name': 'say-greeting', 'state': 'SUCCESS'}
WB1_SUB1_TASK2 = {'id': str(uuid.uuid4()), 'name': 'say-friend', 'state': 'ERROR'}
WB1_SUB1_TASKS = [tasks.Task(None, WB1_SUB1_TASK1), tasks.Task(None, WB1_SUB1_TASK2)]
# --- Single-workflow (WF1) fixtures: errored execution with two tasks. ---
WF1_YAML_FILE_NAME = TEST_FIXTURES['workflows'][0]
WF1_YAML_FILE_PATH = LOADER.get_fixture_file_path_abs(PACK, 'workflows', WF1_YAML_FILE_NAME)
WF1_SPEC = FIXTURES['workflows'][WF1_YAML_FILE_NAME]
WF1_YAML = yaml.safe_dump(WF1_SPEC, default_flow_style=False)
WF1_NAME = '%s.%s' % (PACK, WF1_YAML_FILE_NAME.replace('.yaml', ''))
WF1 = workflows.Workflow(None, {'name': WF1_NAME, 'definition': WF1_YAML})
WF1_EXEC = {'id': str(uuid.uuid4()), 'state': 'ERROR', 'workflow_name': WF1_NAME}
WF1_EXEC_NOT_RERUNABLE = copy.deepcopy(WF1_EXEC)
# PAUSED is not a rerunable state; used by the negative-path test below.
WF1_EXEC_NOT_RERUNABLE['state'] = 'PAUSED'
WF1_TASK1 = {'id': str(uuid.uuid4()), 'name': 'say-greeting', 'state': 'SUCCESS'}
WF1_TASK2 = {'id': str(uuid.uuid4()), 'name': 'say-friend', 'state': 'SUCCESS'}
WF1_TASKS = [tasks.Task(None, WF1_TASK1), tasks.Task(None, WF1_TASK2)]
ACTION_PARAMS = {'friend': 'Rocky'}
NON_EMPTY_RESULT = 'non-empty'
@mock.patch.object(LocalShellRunner, 'run', mock.
MagicMock(return_value=(action_constants.LIVEACTION_STATUS_SUCCEEDED,
NON_EMPTY_RESULT, None)))
@mock.patch.object(CUDPublisher, 'publish_update', mock.MagicMock(return_value=None))
@mock.patch.object(CUDPublisher, 'publish_create',
mock.MagicMock(side_effect=MockLiveActionPublisher.publish_create))
@mock.patch.object(LiveActionPublisher, 'publish_state',
mock.MagicMock(side_effect=MockLiveActionPublisher.publish_state))
class MistralRunnerTest(DbTestCase):
    @classmethod
    def setUpClass(cls):
        """Register runner types and persist every fixture action once per class."""
        super(MistralRunnerTest, cls).setUpClass()
        runners_registrar.register_runner_types()
        # Store each fixture action so tests can request it by ref.
        for _, fixture in six.iteritems(FIXTURES['actions']):
            instance = ActionAPI(**fixture)
            Action.add_or_update(ActionAPI.to_model(instance))
    def setUp(self):
        super(MistralRunnerTest, self).setUp()
        # Point the runner at a local auth endpoint for each test.
        cfg.CONF.set_override('api_url', 'http://0.0.0.0:9101', group='auth')
    # Mocks: empty workflow listing forces (re)creation; execution create
    # returns the errored WF1 execution; MistralRunner.resume is stubbed so
    # we can assert how the re-run request is routed.
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workflows.WorkflowManager, 'create',
        mock.MagicMock(return_value=[WF1]))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
    @mock.patch.object(
        MistralRunner, 'resume',
        mock.MagicMock(
            return_value=(action_constants.LIVEACTION_STATUS_RUNNING,
                          {'tasks': []},
                          {'execution_id': str(uuid.uuid4())})
        )
    )
    def test_resume_option(self):
        """A 're-run' context with task names routes through MistralRunner.resume."""
        MistralRunner.entry_point = mock.PropertyMock(return_value=WF1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)
        self.assertFalse(MistralRunner.resume.called)
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['x']
            }
        }
        liveaction2 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_RUNNING)
        # Without an explicit 'reset' list, tasks default to reset=False.
        task_specs = {
            'x': {
                'reset': False
            }
        }
        MistralRunner.resume.assert_called_with(ex_ref=execution1, task_specs=task_specs)
    # Same mock stack as test_resume_option; exercises the optional 'reset' list.
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workflows.WorkflowManager, 'create',
        mock.MagicMock(return_value=[WF1]))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
    @mock.patch.object(
        MistralRunner, 'resume',
        mock.MagicMock(
            return_value=(action_constants.LIVEACTION_STATUS_RUNNING,
                          {'tasks': []},
                          {'execution_id': str(uuid.uuid4())})
        )
    )
    def test_resume_option_reset_tasks(self):
        """Tasks named in 're-run'.reset are resumed with reset=True."""
        MistralRunner.entry_point = mock.PropertyMock(return_value=WF1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)
        self.assertFalse(MistralRunner.resume.called)
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['x', 'y'],
                'reset': ['y']
            }
        }
        liveaction2 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_RUNNING)
        # Only 'y' was listed in 'reset', so only it carries reset=True.
        task_specs = {
            'x': {
                'reset': False
            },
            'y': {
                'reset': True
            }
        }
        MistralRunner.resume.assert_called_with(ex_ref=execution1, task_specs=task_specs)
    # Mocks return a PAUSED execution — not a rerunable state — so the
    # re-run request must fail rather than reach MistralRunner.resume.
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workflows.WorkflowManager, 'create',
        mock.MagicMock(return_value=[WF1]))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC_NOT_RERUNABLE)))
    @mock.patch.object(
        executions.ExecutionManager, 'get',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC_NOT_RERUNABLE)))
    @mock.patch.object(
        tasks.TaskManager, 'list',
        mock.MagicMock(return_value=WF1_TASKS))
    def test_resume_workflow_not_in_rerunable_state(self):
        """Re-running a PAUSED execution fails with a 'not rerunable' error."""
        MistralRunner.entry_point = mock.PropertyMock(return_value=WF1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['say-friend']
            }
        }
        liveaction2 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_FAILED)
        self.assertIn('not in a rerunable state', liveaction2.result.get('error'))
    # Execution is errored (rerunable) but the named task is SUCCESS, so no
    # rerunable task can be identified for it.
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workflows.WorkflowManager, 'create',
        mock.MagicMock(return_value=[WF1]))
    @mock.patch.object(
        executions.ExecutionManager, 'list',
        mock.MagicMock(return_value=[executions.Execution(None, WF1_EXEC)]))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
    @mock.patch.object(
        executions.ExecutionManager, 'get',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
    @mock.patch.object(
        tasks.TaskManager, 'list',
        mock.MagicMock(return_value=WF1_TASKS))
    def test_resume_tasks_not_in_rerunable_state(self):
        """Re-running a task that already succeeded fails with an identification error."""
        MistralRunner.entry_point = mock.PropertyMock(return_value=WF1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['say-friend']
            }
        }
        liveaction2 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_FAILED)
        self.assertIn('Unable to identify rerunable', liveaction2.result.get('error'))
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workflows.WorkflowManager, 'create',
        mock.MagicMock(return_value=[WF1]))
    @mock.patch.object(
        executions.ExecutionManager, 'list',
        mock.MagicMock(return_value=[executions.Execution(None, WF1_EXEC)]))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
    @mock.patch.object(
        executions.ExecutionManager, 'get',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
    @mock.patch.object(
        tasks.TaskManager, 'list',
        mock.MagicMock(return_value=WF1_TASKS))
    def test_resume_unidentified_tasks(self):
        # Request a re-run of a task name ('x') that does not exist in the
        # mocked task list; the request must fail with an
        # "Unable to identify" error.
        MistralRunner.entry_point = mock.PropertyMock(return_value=WF1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['x']
            }
        }
        liveaction2 = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_FAILED)
        self.assertIn('Unable to identify', liveaction2.result.get('error'))
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workbooks.WorkbookManager, 'create',
        mock.MagicMock(return_value=WB1))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC)))
    @mock.patch.object(
        executions.ExecutionManager, 'get',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC_ERRORED)))
    @mock.patch.object(
        executions.ExecutionManager, 'list',
        mock.MagicMock(
            return_value=[
                executions.Execution(None, WB1_MAIN_EXEC_ERRORED),
                executions.Execution(None, WB1_SUB1_EXEC_ERRORED)]))
    @mock.patch.object(
        tasks.TaskManager, 'list',
        # side_effect: first call returns the main workflow's tasks, the
        # second call the subworkflow's tasks -- call order matters here.
        mock.MagicMock(side_effect=[WB1_MAIN_TASKS, WB1_SUB1_TASKS]))
    @mock.patch.object(
        tasks.TaskManager, 'rerun',
        mock.MagicMock(return_value=None))
    def test_resume_subworkflow_task(self):
        # Re-run a task addressed inside a subworkflow ('greet.say-friend')
        # of an errored workbook execution; the request should go through
        # (status RUNNING) and TaskManager.rerun should be invoked with the
        # subworkflow task id and reset=False.
        MistralRunner.entry_point = mock.PropertyMock(return_value=WB1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['greet.say-friend']
            }
        }
        liveaction2 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_RUNNING)
        # Environment st2 is expected to pass down to mistral on rerun.
        expected_env = {
            'st2_liveaction_id': str(liveaction2.id),
            'st2_execution_id': str(execution2.id),
            '__actions': {
                'st2.action': {
                    'st2_context': {
                        'endpoint': 'http://0.0.0.0:9101/v1/actionexecutions',
                        'notify': {},
                        'parent': {
                            're-run': context['re-run'],
                            'execution_id': str(execution2.id)
                        },
                        'skip_notify_tasks': []
                    }
                }
            },
            'st2_action_api_url': 'http://0.0.0.0:9101/v1'
        }
        tasks.TaskManager.rerun.assert_called_with(
            WB1_SUB1_TASK2['id'],
            reset=False,
            env=expected_env
        )
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workbooks.WorkbookManager, 'create',
        mock.MagicMock(return_value=WB1))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC)))
    @mock.patch.object(
        executions.ExecutionManager, 'get',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC_ERRORED)))
    @mock.patch.object(
        executions.ExecutionManager, 'list',
        mock.MagicMock(
            return_value=[
                executions.Execution(None, WB1_MAIN_EXEC_ERRORED),
                executions.Execution(None, WB1_SUB1_EXEC_ERRORED)]))
    @mock.patch.object(
        tasks.TaskManager, 'list',
        mock.MagicMock(side_effect=[WB1_MAIN_TASKS, WB1_SUB1_TASKS]))
    def test_resume_unidentified_subworkflow_task(self):
        # Re-run names a subworkflow task ('greet.x') that is not in the
        # mocked subworkflow task list; the request must fail.
        MistralRunner.entry_point = mock.PropertyMock(return_value=WB1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['greet.x']
            }
        }
        liveaction2 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_FAILED)
        self.assertIn('Unable to identify', liveaction2.result.get('error'))
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workbooks.WorkbookManager, 'create',
        mock.MagicMock(return_value=WB1))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC)))
    @mock.patch.object(
        executions.ExecutionManager, 'get',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC_ERRORED)))
    @mock.patch.object(
        executions.ExecutionManager, 'list',
        mock.MagicMock(
            return_value=[
                executions.Execution(None, WB1_MAIN_EXEC_ERRORED),
                executions.Execution(None, WB1_SUB1_EXEC_ERRORED)]))
    @mock.patch.object(
        tasks.TaskManager, 'list',
        mock.MagicMock(side_effect=[WB1_MAIN_TASKS, WB1_SUB1_TASKS]))
    @mock.patch.object(
        tasks.TaskManager, 'rerun',
        mock.MagicMock(return_value=None))
    def test_resume_and_reset_subworkflow_task(self):
        # Same as test_resume_subworkflow_task, but the task is also listed
        # under 'reset', so TaskManager.rerun must be called with reset=True.
        MistralRunner.entry_point = mock.PropertyMock(return_value=WB1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['greet.say-friend'],
                'reset': ['greet.say-friend']
            }
        }
        liveaction2 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))
        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_RUNNING)
        # Environment st2 is expected to pass down to mistral on rerun.
        expected_env = {
            'st2_liveaction_id': str(liveaction2.id),
            'st2_execution_id': str(execution2.id),
            '__actions': {
                'st2.action': {
                    'st2_context': {
                        'endpoint': 'http://0.0.0.0:9101/v1/actionexecutions',
                        'notify': {},
                        'parent': {
                            're-run': context['re-run'],
                            'execution_id': str(execution2.id)
                        },
                        'skip_notify_tasks': []
                    }
                }
            },
            'st2_action_api_url': 'http://0.0.0.0:9101/v1'
        }
        tasks.TaskManager.rerun.assert_called_with(
            WB1_SUB1_TASK2['id'],
            reset=True,
            env=expected_env
        )
|
data/Jintin/andle/install.py
|
"""Install helper: run ``python setup.py install`` under sudo."""
import os  # noqa: F401 -- kept for backward compatibility
import subprocess
import sys

if __name__ == "__main__":
    # subprocess.call with an argument list avoids handing a shell string to
    # os.system (no shell-injection surface), and its return value surfaces
    # the installer's exit status, which was previously discarded.
    sys.exit(subprocess.call(["sudo", "python", "setup.py", "install"]))
|
data/PythonProgramming/Beginning-Game-Development-with-Python-and-Pygame/gameobjects/__init__.py
|
# Submodules exported by ``from gameobjects import *``.
__all__ = [
    'vector2',
    'vector3',
    'util',
    'sphere',
    'matrix44',
    'color',
    'gametime',
    'grid'
]
# Package version string (major.minor.patch).
__version__ = "0.0.3"
|
data/QuantEcon/QuantEcon.py/quantecon/tests/test_robustlq.py
|
"""
Author: Chase Coleman
Filename: test_lqcontrol
Tests for lqcontrol.py file
"""
import sys
import os
import unittest
import numpy as np
from scipy.linalg import LinAlgError
from numpy.testing import assert_allclose
from quantecon.lqcontrol import LQ
from quantecon.robustlq import RBLQ
class TestRBLQControl(unittest.TestCase):
    """Cross-checks between RBLQ's robust rule, its iterative variant and LQ."""

    def setUp(self):
        # Model primitives for the example problem used by all tests.
        a_0 = 100
        a_1 = 0.5
        rho = 0.9
        sigma_d = 0.05
        beta = 0.95
        c = 2
        gamma = 50.0
        theta = 0.002
        ac = (a_0 - c) / 2.0

        # Payoff matrix, negated so the problem reads as a minimization.
        R = np.array([[0, ac, 0],
                      [ac, -a_1, 0.5],
                      [0., 0.5, 0]])
        R = -R
        Q = gamma / 2

        # State transition, control loading and shock loading matrices.
        A = np.array([[1., 0., 0.],
                      [0., 1., 0.],
                      [0., 0., rho]])
        B = np.array([[0.],
                      [1.],
                      [0.]])
        C = np.array([[0.],
                      [0.],
                      [sigma_d]])

        self.rblq_test = RBLQ(Q, R, A, B, C, beta, theta)
        self.lq_test = LQ(Q, R, A, B, C, beta)

        # Precompute the robust rule shared by the test methods.
        self.Fr, self.Kr, self.Pr = self.rblq_test.robust_rule()

    def tearDown(self):
        del self.rblq_test

    def test_robust_rule_vs_simple(self):
        """robust_rule and robust_rule_simple should agree to rtol=1e-4."""
        rblq = self.rblq_test
        Fr, Kr, Pr = self.Fr, self.Kr, self.Pr

        Fs, Ks, Ps = rblq.robust_rule_simple(P_init=Pr, tol=1e-12)

        assert_allclose(Fr, Fs, rtol=1e-4)
        assert_allclose(Kr, Ks, rtol=1e-4)
        assert_allclose(Pr, Ps, rtol=1e-4)

    def test_f2k_and_k2f(self):
        """F_to_K and K_to_F should be mutually consistent."""
        rblq = self.rblq_test
        Fr, Kr, Pr = self.Fr, self.Kr, self.Pr

        K_f2k, P_f2k = rblq.F_to_K(Fr)
        F_k2f, P_k2f = rblq.K_to_F(Kr)

        assert_allclose(K_f2k, Kr, rtol=1e-4)
        assert_allclose(F_k2f, Fr, rtol=1e-4)
        assert_allclose(P_f2k, P_k2f, rtol=1e-4)

    def test_evaluate_F(self):
        """evaluate_F at the robust rule should reproduce K and P."""
        rblq = self.rblq_test
        Fr, Kr, Pr = self.Fr, self.Kr, self.Pr

        Kf, Pf, df, Of, of = rblq.evaluate_F(Fr)

        assert_allclose(Pf, Pr)
        assert_allclose(Kf, Kr)
if __name__ == '__main__':
    # Allow running this module directly, with verbose output on stderr.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestRBLQControl)
    unittest.TextTestRunner(verbosity=2, stream=sys.stderr).run(suite)
|
data/OpenKMIP/PyKMIP/kmip/demos/pie/register_opaque_object.py
|
import logging
import sys
from kmip.core import enums
from kmip.demos import utils
from kmip.pie import client
from kmip.pie import objects
if __name__ == '__main__':
    logger = utils.build_console_logger(logging.INFO)

    # Parse the CLI and pick up the KMIP configuration profile to use.
    parser = utils.build_cli_parser()
    opts, args = parser.parse_args(sys.argv[1:])
    config = opts.config

    # Opaque payload: the raw bytes of "SecretPassword".
    value = b'\x53\x65\x63\x72\x65\x74\x50\x61\x73\x73\x77\x6F\x72\x64'
    opaque_type = enums.OpaqueDataType.NONE
    name = 'Demo Opaque Object'

    obj = objects.OpaqueObject(value, opaque_type, name)

    # Bind the context manager to `kmip_client` instead of reusing the name
    # `client`, which previously shadowed the imported `kmip.pie.client`
    # module inside the `with` block.
    with client.ProxyKmipClient(config=config) as kmip_client:
        try:
            uid = kmip_client.register(obj)
            logger.info("Successfully registered opaque object with ID: "
                        "{0}".format(uid))
        except Exception as e:
            logger.error(e)
|
data/Netflix-Skunkworks/zerotodocker/genie/2.1.0/example/run_pig_job_2.py
|
import genie2.client.wrapper
import genie2.model.ClusterCriteria
import genie2.model.Job
import genie2.model.FileAttachment
import time
# Genie client against the local gateway. RetryPolicy: retry transient
# failures up to 8 times, map 404s to None instead of raising, and never
# retry 4xx responses.
genie = genie2.client.wrapper.Genie2("http://localhost:8080/genie",
                                     genie2.client.wrapper.RetryPolicy(
                                         tries=8, none_on_404=True, no_retry_http_codes=range(400, 500)
                                     ))

# Describe the Pig job to submit.
job = genie2.model.Job.Job()
job.name = "GenieDockerExamplePigJob2"
job.user = "root"
job.version = "0.14.0"

# Cluster selection: any adhoc YARN cluster.
job.clusterCriterias = list()
cluster_criteria = genie2.model.ClusterCriteria.ClusterCriteria()
criteria = set()
criteria.add("sched:adhoc")
criteria.add("type:yarn")
cluster_criteria.tags = criteria
job.clusterCriterias.append(cluster_criteria)

# Command selection: a Pig command.
command_criteria = set()
command_criteria.add("type:pig")
job.commandCriteria = command_criteria

# Script and jar shipped with the Pig tutorial.
job.fileDependencies = \
    "file:///apps/genie/pig/0.14.0/tutorial/script2-hadoop.pig,file:///apps/genie/pig/0.14.0/tutorial/tutorial.jar"
job.commandArgs = "script2-hadoop.pig"

# Submit, then poll every 10s until the job reaches a terminal state.
job = genie.submitJob(job)
while job.status != "SUCCEEDED" and job.status != "KILLED" and job.status != "FAILED":
    print "Job " + job.id + " is " + job.status
    time.sleep(10)
    job = genie.getJob(job.id)
print "Job " + job.id + " finished with status " + job.status
|
data/Net-ng/kansha/kansha/events.py
|
class EventHandlerMixIn(object):
    """
    Mix-in that implements:

    - `emit_event`, to emit an event;
    - `handle_event`, a callback for comp.on_answer if comp is expected to
      emit events.

    `handle_event` first calls `on_event(comp, event)` on `self` (if such a
    method exists), then always bubbles the event up. It returns the value of
    `on_event` if that value is truthy, otherwise whatever the upper levels
    returned.
    """

    def emit_event(self, comp, kind, data=None):
        # Build an event of the requested kind with self as the emitter and
        # hand it straight to the component for answering.
        return comp.answer(kind(data, source=[self]))

    def handle_event(self, comp, event):
        # Give the local handler, when present, first crack at the event.
        handler = getattr(self, 'on_event', None)
        result = handler(comp, event) if handler else None
        # Record ourselves as a relay, then bubble the event upward.
        event.append(self)
        bubbled = comp.answer(event)
        return result or bubbled
class Event(object):
    """Base event. Can be derived."""

    def __init__(self, data, source=None):
        """
        `data` is a payload specific to the kind of event.
        `source` is a list of component business objects. The first one is the
        emitter. Each traversed component must append itself with `append`
        (see below).

        BUG FIX: the default used to be ``source=[]``, a single mutable list
        shared by every Event created without an explicit source; because
        `append` mutates `self._source` in place, relays leaked from one such
        event into all the others. Use None as the default instead.
        """
        self._source = source if source is not None else []
        self.data = data

    @property
    def source(self):
        # Return a shallow copy so callers cannot mutate the relay chain.
        # list() instead of list.copy() also keeps this Python 2 compatible.
        return list(self._source)

    @property
    def emitter(self):
        # The component that originally emitted the event.
        return self._source[0]

    @property
    def last_relay(self):
        # The most recent component the event bubbled through.
        return self._source[-1]

    def is_(self, kind):
        """Exact type check (subclasses do not match)."""
        return type(self) is kind

    def is_kind_of(self, kind):
        """Inheritance-aware type check."""
        return isinstance(self, kind)

    def append(self, relay):
        """Record `relay` as having forwarded this event."""
        self._source.append(relay)

    def cast_as(self, sub_kind):
        """Re-wrap this event's payload and source chain as `sub_kind`."""
        return sub_kind(self.data, self._source)
# ---------------------------------------------------------------------------
# Concrete event kinds. Each subclass only specializes the meaning of `data`;
# behavior lives entirely in the Event base class.
# ---------------------------------------------------------------------------

class ColumnDeleted(Event):
    """
    The user clicked on the 'delete column' action.
    `data` is the column component (component.Component).
    """
    pass


class CardClicked(Event):
    """
    The user clicked on a card.
    `data` is the card component (component.Component)
    """
    pass


class PopinClosed(Event):
    """
    The Popin has closed.
    `data` is the component.Component containing the Popin.
    """
    pass


class CardEditorClosed(PopinClosed):
    """
    In the particular case when the Popin contains the card editor.
    `data` is the component.Component containing the Popin.
    """
    pass


class CardArchived(Event):
    """
    The user clicked on the `Delete` button in the card editor.
    No payload.
    """
    pass


class SearchIndexUpdated(Event):
    """
    Some operations have been committed on the search index.
    No payload.
    """
    pass


class CardDisplayed(Event):
    """
    A card has just been (re-)displayed on the board (default form).
    No payload.
    """
    pass


class BoardAccessChanged(Event):
    """
    The access conditions to the board changed.
    """


class BoardDeleted(BoardAccessChanged):
    """
    The board has been (or is about to be) deleted.
    No payload.
    """


class BoardArchived(BoardAccessChanged):
    """
    The board has been archived.
    No payload.
    """


class BoardRestored(BoardAccessChanged):
    """
    The board has been restored from archive.
    """


class BoardLeft(BoardAccessChanged):
    """
    The user has left the board.
    No payload.
    """


class ParentTitleNeeded(Event):
    """The emitter needs context from parent in the form of a title string."""
    pass


class NewTemplateRequested(Event):
    """
    The user requested that a new template is created from the emitter.
    Payload is tuple (template_title, template_description, shared_flag).
    The receiver returns a new Template on success.
    """
    pass
|
data/OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/test/test_hasconstraints.py
|
import numpy as np
import unittest
from openmdao.main.api import Assembly, Component, Driver, set_as_top
from openmdao.main.datatypes.api import Float, Array
from openmdao.main.hasconstraints import HasConstraints, HasEqConstraints, \
HasIneqConstraints, Constraint, Has2SidedConstraints
from openmdao.main.interfaces import IHas2SidedConstraints, implements
from openmdao.main.pseudocomp import SimpleEQConPComp, SimpleEQ0PComp
from openmdao.main.test.simpledriver import SimpleDriver
from openmdao.test.execcomp import ExecComp
from openmdao.units.units import PhysicalQuantity
from openmdao.util.decorators import add_delegate
from openmdao.util.testutil import assert_rel_error
# Minimal Driver subclasses, one per constraint-delegate mix-in under test.

@add_delegate(HasConstraints)
class MyDriver(Driver):
    pass


@add_delegate(HasEqConstraints)
class MyEqDriver(Driver):
    pass


@add_delegate(HasIneqConstraints)
class MyInEqDriver(Driver):
    pass


@add_delegate(HasConstraints, Has2SidedConstraints)
class My2SDriver(Driver):
    # Advertises support for double-sided (low < expr < high) constraints.
    implements(IHas2SidedConstraints)
class SimpleUnits(Component):
    """Like Simple, but unit-aware: inputs in inches, outputs in feet."""

    a = Float(iotype='in', units='inch')
    b = Float(iotype='in', units='inch')
    c = Float(iotype='out', units='ft')
    d = Float(iotype='out', units='ft')
    arr = Array([1.,2.,3.], iotype='in', units='inch')
    arr_out = Array([1.,2.,3.], iotype='out', units='ft')

    def __init__(self):
        super(SimpleUnits, self).__init__()
        # Default values used by the tests.
        self.a = 1
        self.b = 2
        self.c = 3
        self.d = -1

    def execute(self):
        # Sum and difference are computed in inches, then converted to feet.
        self.c = PhysicalQuantity(self.a + self.b, 'inch').in_units_of('ft').value
        self.d = PhysicalQuantity(self.a - self.b, 'inch').in_units_of('ft').value
class Simple(Component):
    """Toy component: outputs the sum (c) and difference (d) of two inputs."""

    a = Float(iotype='in')
    b = Float(iotype='in')
    c = Float(iotype='out')
    d = Float(iotype='out')

    def __init__(self):
        super(Simple, self).__init__()
        # Default values used by the tests.
        self.a = 1
        self.b = 2
        self.c = 3
        self.d = -1

    def execute(self):
        """Recompute both outputs from the current inputs."""
        total = self.a + self.b
        delta = self.a - self.b
        self.c = total
        self.d = delta

    def list_deriv_vars(self):
        """Differentiable inputs and outputs, as two tuples of names."""
        return ('a', 'b'), ('c', 'd')

    def provideJ(self):
        """Constant 2x2 Jacobian with every entry equal to 1.0."""
        return np.ones((2, 2))
class HasConstraintsTestCase(unittest.TestCase):
    """Tests for the HasConstraints / HasEqConstraints / HasIneqConstraints
    delegates: add/remove/list/eval behavior and pseudo-component wiring."""

    def setUp(self):
        # Fresh top-level assembly with two plain and two unit-bearing comps.
        self.asm = set_as_top(Assembly())
        self.asm.add('comp1', Simple())
        self.asm.add('comp2', Simple())
        self.asm.add('comp3', SimpleUnits())
        self.asm.add('comp4', SimpleUnits())

    def test_list_constraints(self):
        """Equality constraints are listed before inequality constraints."""
        drv = self.asm.add('driver', MyDriver())
        self.asm.run()
        drv.add_constraint('comp1.a < comp1.b')
        drv.add_constraint('comp1.c = comp1.d')
        self.assertEqual(drv.list_constraints(),
                         ['comp1.c=comp1.d', 'comp1.a<comp1.b'])

    def test_list_eq_constraints(self):
        """Equality-only driver lists constraints in insertion order."""
        drv = self.asm.add('driver', MyEqDriver())
        drv.add_constraint('comp1.a = comp1.b')
        drv.add_constraint('comp1.c = comp1.d')
        self.assertEqual(drv.list_constraints(),
                         ['comp1.a=comp1.b','comp1.c=comp1.d'])

    def test_list_ineq_constraints(self):
        """Inequality constraints are listed in insertion order."""
        drv = self.asm.add('driver', MyDriver())
        drv.add_constraint('comp1.a < comp1.b')
        drv.add_constraint('comp1.c >= comp1.d')
        self.assertEqual(drv.list_constraints(),
                         ['comp1.a<comp1.b','comp1.c>=comp1.d'])

    def _check_ineq_add_constraint(self, drv):
        """Shared add/remove/duplicate/name checks for inequality constraints."""
        self.asm.add('driver', drv)
        # '==' is not a valid comparator in constraint expressions.
        try:
            drv.add_constraint('comp1.b==comp1.a')
        except Exception as err:
            self.assertEqual(str(err), "driver: Constraints require an explicit comparator (=, <, >, <=, or >=)")
        else:
            self.fail("Exception expected")
        self.assertEqual(len(drv.get_ineq_constraints()), 0)
        drv.add_constraint(' comp1.a > comp1.b')
        # Duplicate detection ignores surrounding whitespace.
        try:
            drv.add_constraint('comp1.a>comp1.b')
        except Exception as err:
            self.assertEqual(str(err),
                             'driver: A constraint of the form "comp1.a>comp1.b" already exists '
                             'in the driver. Add failed.')
        else:
            self.fail("Exception Expected")
        self.assertEqual(len(drv.get_ineq_constraints()), 1)
        # Removal is also whitespace-insensitive.
        drv.remove_constraint(' comp1.a> comp1.b  ')
        self.assertEqual(len(drv.get_ineq_constraints()), 0)
        try:
            drv.remove_constraint('comp1.bogus < comp1.d')
        except Exception as err:
            self.assertEqual(str(err),
                             "driver: Constraint 'comp1.bogus<comp1.d' was not found. Remove failed.")
        else:
            self.fail("Exception expected")
        drv.add_constraint(' comp1.a > comp1.b')
        self.assertEqual(len(drv.get_ineq_constraints()), 1)
        # Named constraints: the name, not the expression, must be unique.
        drv.add_constraint('comp1.b < comp1.c', name='foobar')
        self.assertEqual(len(drv.get_ineq_constraints()), 2)
        try:
            drv.add_constraint('comp1.b < comp1.a', name='foobar')
        except Exception as err:
            self.assertEqual(str(err), 'driver: A constraint named "foobar" already exists in the driver. Add failed.')
        else:
            self.fail("Exception expected")
        self.assertEqual(len(drv.get_ineq_constraints()), 2)
        drv.remove_constraint('foobar')
        self.assertEqual(len(drv.get_ineq_constraints()), 1)
        drv.clear_constraints()
        self.assertEqual(len(drv.get_ineq_constraints()), 0)
        # Unknown variables in an expression raise ValueError.
        try:
            drv.add_constraint('comp1.b < comp1.qq')
        except ValueError as err:
            self.assertEqual(str(err),
                             "Right hand side of constraint 'comp1.b < comp1.qq' has invalid variables 'comp1.qq'")
        else:
            self.fail('expected ValueError')

    def _check_eq_add_constraint(self, drv):
        """Shared add/remove/duplicate/name checks for equality constraints."""
        self.asm.add('driver', drv)
        self.assertEqual(len(drv.get_eq_constraints()), 0)
        self.assertEqual(len(drv.get_eq_constraints()), 0)
        drv.add_constraint('comp1.c = comp1.d ')
        self.assertEqual(len(drv.get_eq_constraints()), 1)
        # Duplicate detection ignores surrounding whitespace.
        try:
            drv.add_constraint('comp1.c=comp1.d')
        except Exception as err:
            self.assertEqual(str(err),
                             'driver: A constraint of the form "comp1.c=comp1.d" already exists '
                             'in the driver. Add failed.')
        else:
            self.fail("Exception Expected")
        drv.remove_constraint(' comp1.c=comp1.d')
        self.assertEqual(len(drv.get_eq_constraints()), 0)
        try:
            drv.remove_constraint('comp1.bogus = comp1.d')
        except Exception as err:
            self.assertEqual(str(err),
                             "driver: Constraint 'comp1.bogus=comp1.d' was not found. Remove failed.")
        else:
            self.fail("Exception expected")
        self.assertEqual(len(drv.get_eq_constraints()), 0)
        drv.add_constraint('comp1.c =comp1.d ')
        self.assertEqual(len(drv.get_eq_constraints()), 1)
        # Named constraints: the name, not the expression, must be unique.
        drv.add_constraint('comp1.b = comp1.c', name='foobar')
        self.assertEqual(len(drv.get_eq_constraints()), 2)
        try:
            drv.add_constraint('comp1.b = comp1.a', name='foobar')
        except Exception as err:
            self.assertEqual(str(err), 'driver: A constraint named "foobar" already exists in the driver. Add failed.')
        else:
            self.fail("Exception expected")
        drv.remove_constraint('foobar')
        self.assertEqual(len(drv.get_eq_constraints()), 1)
        drv.clear_constraints()
        self.assertEqual(len(drv.get_eq_constraints()), 0)
        # Unknown variables in an expression raise ValueError.
        try:
            drv.add_constraint('comp1.qq = comp1.b')
        except ValueError as err:
            self.assertEqual(str(err),
                             "Left hand side of constraint 'comp1.qq = comp1.b' has invalid variables 'comp1.qq'")
        else:
            self.fail('expected ValueError')

    def _check_eq_eval_constraints(self, drv):
        """Shared evaluation checks for equality constraints."""
        self.asm.add('driver', drv)
        vals = drv.eval_eq_constraints()
        self.assertEqual(len(vals), 0)
        drv.add_constraint('comp1.c = comp1.d ')
        self.asm.comp1.a = 4
        self.asm.comp1.b = 5
        self.asm.comp1.c = 9
        self.asm.comp1.d = -1
        self.asm.run()
        # After run: c = a+b = 9, d = a-b = -1, so the residual c-d is 10.
        vals = drv.eval_eq_constraints()
        self.assertEqual(len(vals), 1)
        self.assertEqual(vals[0], 10.)
        vals = drv.get_eq_constraints()
        self.assertEqual(len(vals), 1)
        self.assertTrue(isinstance(vals['comp1.c=comp1.d'], Constraint))

    def _check_ineq_eval_constraints(self, drv):
        """Shared evaluation checks for inequality constraints."""
        self.asm.add('driver', drv)
        vals = drv.eval_ineq_constraints()
        self.assertEqual(len(vals), 0)
        drv.add_constraint(' comp1.a > comp1.b')
        self.asm.comp1.a = 4
        self.asm.comp1.b = 5
        self.asm.comp1.c = 9
        self.asm.comp1.d = -1
        self.asm.run()
        vals = drv.eval_ineq_constraints()
        self.assertEqual(len(vals), 1)
        self.assertEqual(vals[0], 1)
        vals = drv.get_ineq_constraints()
        self.assertEqual(len(vals), 1)
        self.assertTrue(isinstance(vals['comp1.a>comp1.b'], Constraint))

    def test_constraint_scaler_adder(self):
        """Expressions with scaling/offset arithmetic evaluate correctly."""
        drv = self.asm.add('driver', MyDriver())
        self.asm.comp1.a = 3000
        self.asm.comp1.b = 5000
        drv.add_constraint('(comp1.a-4000.)/1000.0 < comp1.b')
        self.asm.run()
        result = drv.eval_ineq_constraints()
        self.assertEqual(result[0], -5001.0)
        drv.remove_constraint('(comp1.a-4000.)/1000.0 < comp1.b')
        result = drv.eval_ineq_constraints()
        self.assertEqual(result, [])

    def test_add_constraint_eq_eq(self):
        """'==' is rejected with an explicit-comparator error."""
        drv = MyDriver()
        self.asm.add('driver', drv)
        try:
            drv.add_constraint('comp1.b==comp1.a')
        except Exception as err:
            self.assertEqual(str(err), "driver: Constraints require an explicit comparator (=, <, >, <=, or >=)")
        else:
            self.fail("Exception expected")

    def test_add_constraint(self):
        """A HasConstraints driver passes both eq and ineq add checks."""
        drv = MyDriver()
        self._check_eq_add_constraint(drv)
        self._check_ineq_add_constraint(drv)

    def test_add_eq_constraint(self):
        self._check_eq_add_constraint(MyEqDriver())

    def test_add_ineq_constraint(self):
        self._check_ineq_add_constraint(MyInEqDriver())

    def test_implicit_constraint(self):
        """An expression with no comparator at all is rejected."""
        drv = self.asm.add('driver', MyEqDriver())
        try:
            drv.add_constraint('comp1.a + comp1.b')
        except ValueError, err:
            self.assertEqual(str(err),
                             "driver: Constraints require an explicit comparator (=, <, >, <=, or >=)")
        else:
            self.fail('ValueError expected')

    def test_eval_constraint(self):
        self._check_eq_eval_constraints(MyDriver())
        self._check_ineq_eval_constraints(MyDriver())

    def test_eval_eq_constraint(self):
        self._check_eq_eval_constraints(MyEqDriver())

    def test_eval_ineq_constraint(self):
        self._check_ineq_eval_constraints(MyInEqDriver())

    def test_pseudocomps(self):
        """Constraints are lowered to pseudo-comps wired into the dep graph."""
        self.asm.add('driver', MyDriver())
        self.asm.driver.workflow.add(['comp1','comp2'])
        self.asm._setup()
        self.assertEqual(self.asm._depgraph.list_connections(),
                         [])
        self.asm.driver.add_constraint('comp1.c-comp2.a>5.')
        self.asm._setup()
        # 'lhs > rhs' is rewritten as 'rhs - lhs' (positive means violated).
        self.assertEqual(self.asm._pseudo_0._orig_expr, '5.-(comp1.c-comp2.a)')
        self.assertEqual(set(self.asm._depgraph.list_connections(drivers=False)),
                         set([('comp2.a', '_pseudo_0.in1'), ('comp1.c', '_pseudo_0.in0')]))
        self.asm.driver.remove_constraint('comp1.c-comp2.a>5.')
        self.asm._setup()
        self.assertEqual(self.asm._depgraph.list_connections(drivers=False), [])
        self.asm.driver.add_constraint('comp1.c > 0.')
        self.asm._setup()
        self.assertEqual(set(self.asm._depgraph.list_connections(drivers=False)),
                         set([('comp1.c', '_pseudo_1.in0')]))
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_1._orig_expr, '-(comp1.c)')
        self.asm.driver.add_constraint('comp1.c-comp2.a<5.')
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_2._orig_expr, 'comp1.c-comp2.a-(5.)')
        self.asm.driver.add_constraint('comp1.c < 0.')
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_3._orig_expr, 'comp1.c')
        self.asm.driver.add_constraint('comp3.c-comp4.a>5.')
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_4._orig_expr, '5.-(comp3.c-comp4.a)')
        self.asm.driver.clear_constraints()
        self.asm.comp1.a = 2
        self.asm.comp1.b = 1
        self.asm.comp2.a = 4
        self.asm.comp2.b = 2
        self.asm.driver.add_constraint('comp2.c - 2*comp1.d > 5')
        self.asm.driver.add_constraint('comp2.c - 2*comp1.d < 5')
        self.asm.driver.add_constraint('comp2.d < 0')
        self.asm.run()
        # After run: comp1 c=3, d=1; comp2 c=6, d=2, so comp2.c-2*comp1.d = 4.
        self.assertEqual(self.asm._pseudo_5.out0, 1.0)
        self.assertEqual(self.asm._pseudo_6.out0, -1.0)
        self.assertEqual(self.asm._pseudo_7.out0, 2.0)

    def test_custom_pseudocomp_creation(self):
        """Special-cased pseudo-comp classes are used for simple eq forms."""
        self.asm.add('driver', MyDriver())
        arg = {}
        result = {}
        # 'expr = 0' maps to SimpleEQ0PComp (identity derivative).
        self.asm.driver.add_constraint('comp1.c = 0')
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_0.__class__, SimpleEQ0PComp)
        self.asm.run()
        arg['in0'] = np.array([3.3])
        result['out0'] = np.array([0.0])
        self.asm._pseudo_0.apply_deriv(arg, result)
        self.assertEqual(result['out0'][0], 3.3)
        # 'expr = constant' also maps to SimpleEQ0PComp.
        self.asm.driver.add_constraint('comp1.d = 5.4')
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_1.__class__, SimpleEQ0PComp)
        self.asm.run()
        arg['in0'] = np.array([3.3])
        result['out0'] = np.array([0.0])
        self.asm._pseudo_1.apply_deriv(arg, result)
        self.assertEqual(result['out0'][0], 3.3)
        # 'var = var' maps to SimpleEQConPComp (difference derivative).
        self.asm.driver.add_constraint('comp2.c = comp3.a')
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_2.__class__, SimpleEQConPComp)
        self.asm.run()
        arg['in0'] = np.array([7.2])
        arg['in1'] = np.array([3.1])
        result['out0'] = np.array([0.0])
        self.asm._pseudo_2.apply_deriv(arg, result)
        self.assertEqual(result['out0'][0], 4.1)
        self.asm.driver.clear_constraints()
        # 'var - var = 0' is recognized as the same special case.
        self.asm.driver.add_constraint('comp2.c - comp3.a=0.0')
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_3.__class__, SimpleEQConPComp)
        self.asm.run()
        arg['in0'] = np.array([7.2])
        arg['in1'] = np.array([3.1])
        result['out0'] = np.array([0.0])
        self.asm._pseudo_3.apply_deriv(arg, result)
        self.assertEqual(result['out0'][0], 4.1)
        self.asm.driver.clear_constraints()
        # ...and so is the flipped form '0 = var - var'.
        self.asm.driver.add_constraint('0=comp2.c - comp3.a')
        self.asm._setup()
        self.assertEqual(self.asm._pseudo_4.__class__, SimpleEQConPComp)
        self.asm.run()
        arg['in0'] = np.array([7.2])
        arg['in1'] = np.array([3.1])
        result['out0'] = np.array([0.0])
        self.asm._pseudo_4.apply_deriv(arg, result)
        self.assertEqual(result['out0'][0], 4.1)

    def test_custom_jacobian(self):
        """User-supplied `jacs` callbacks override the adjoint gradient only."""

        class AComp(Component):
            # 2x2 array in/out component with a fixed known Jacobian.

            x = Array([[1.0, 3.0], [-2.0, 4.0]], iotype='in')
            y = Array(np.zeros((2, 2)), iotype='out')

            def __init__(self):
                super(AComp, self).__init__()
                self.J = np.array([[3.5, -2.5, 1.5, 4.0],
                                   [4.0, 2.0, -1.1, 3.4],
                                   [7.7, 6.6, 4.4, 1.1],
                                   [0.1, 3.3, 6.8, -5.5]])

            def execute(self):
                """ Run arraycomp"""
                y = self.J.dot(self.x.flatten())
                self.y = y.reshape((2,2))

            def list_deriv_vars(self):
                """ x and y """
                input_keys = ('x',)
                output_keys = ('y',)
                return input_keys, output_keys

            def provideJ(self):
                """Analytical first derivatives"""
                return self.J

        def fake_jac():
            """ Returns a User-defined Jacobian. The values are
            totally wrong to facilitate testing. """
            jacs = {}
            jacs['comp.x'] = np.array([[100.0, 101, 102, 103],
                                       [104, 105, 106, 107],
                                       [108, 109, 110, 111],
                                       [112, 113, 114, 115]])
            return jacs

        top = set_as_top(Assembly())
        top.add('driver', SimpleDriver())
        top.add('comp', AComp())
        top.driver.workflow.add('comp')
        top.driver.add_parameter('comp.x', low=10, high=10)

        # Inequality constraint: forward uses the analytic J, adjoint the fake.
        top.driver.add_constraint('comp.y < 1', jacs=fake_jac)
        top._setup()
        top.run()
        J = top.driver.calc_gradient(mode='forward', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - top.comp.J)
        assert_rel_error(self, diff.max(), 0.0, 1e-4)
        J = top.driver.calc_gradient(mode='adjoint', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - fake_jac()['comp.x'])
        assert_rel_error(self, diff.max(), 0.0, 1e-4)

        # Same behavior for an equality constraint.
        top.driver.clear_constraints()
        top._pseudo_count = 0
        top.driver.add_constraint('comp.y = 1', jacs=fake_jac)
        top._setup()
        top.run()
        J = top.driver.calc_gradient(mode='forward', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - top.comp.J)
        assert_rel_error(self, diff.max(), 0.0, 1e-4)
        J = top.driver.calc_gradient(mode='adjoint', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - fake_jac()['comp.x'])
        assert_rel_error(self, diff.max(), 0.0, 1e-4)

        # Same behavior for a double-sided constraint.
        top.driver.clear_constraints()
        top._pseudo_count = 0
        top.driver.add_constraint('0 < comp.y < 1', jacs=fake_jac)
        top._setup()
        top.run()
        J = top.driver.calc_gradient(mode='forward', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - top.comp.J)
        assert_rel_error(self, diff.max(), 0.0, 1e-4)
        J = top.driver.calc_gradient(mode='adjoint', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - fake_jac()['comp.x'])
        assert_rel_error(self, diff.max(), 0.0, 1e-4)

        # Same behavior with the linear Gauss-Seidel solver.
        top.driver.clear_constraints()
        top._pseudo_count = 0
        top.driver.add_constraint('comp.y = 1', jacs=fake_jac)
        top.driver.gradient_options.lin_solver = 'linear_gs'
        top._setup()
        top.run()
        J = top.driver.calc_gradient(mode='forward', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - top.comp.J)
        assert_rel_error(self, diff.max(), 0.0, 1e-4)
        J = top.driver.calc_gradient(mode='adjoint', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - fake_jac()['comp.x'])
        assert_rel_error(self, diff.max(), 0.0, 1e-4)

        def fake_jac2():
            """ Returns a User-defined Jacobian. The values are
            totally wrong to facilitate testing. """
            jacs = {}
            jacs['Junk'] = np.array([[100.0, 101, 102, 103],
                                     [104, 105, 106, 107],
                                     [108, 109, 110, 111],
                                     [112, 113, 114, 115]])
            return jacs

        # A jacs dict whose key matches nothing yields a zero adjoint J.
        top.driver.clear_constraints()
        top._pseudo_count = 0
        top.driver.add_constraint('comp.y = 1', jacs=fake_jac2)
        top._setup()
        top.run()
        J = top.driver.calc_gradient(mode='forward', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        diff = np.abs(J - top.comp.J)
        assert_rel_error(self, diff.max(), 0.0, 1e-4)
        J = top.driver.calc_gradient(mode='adjoint', return_format='dict')
        J = J['_pseudo_0.out0']['comp.x']
        J_abs = np.abs(J)
        assert_rel_error(self, J_abs.max(), 0.0, 1e-4)
class Has2SidedConstraintsTestCase(unittest.TestCase):
    """Tests for double-sided (low < expr < high) constraint support."""

    def setUp(self):
        # Fresh top-level assembly with two plain and two unit-bearing comps.
        self.asm = set_as_top(Assembly())
        self.asm.add('comp1', Simple())
        self.asm.add('comp2', Simple())
        self.asm.add('comp3', SimpleUnits())
        self.asm.add('comp4', SimpleUnits())

    def test_unsupported(self):
        """Drivers without the 2-sided delegate reject such constraints."""
        drv = self.asm.add('driver', MyDriver())
        self.asm.run()
        try:
            drv.add_constraint('-98 < comp1.a < 101')
        except AttributeError as err:
            self.assertEqual(str(err), "driver: Double-sided constraints are not supported on this driver.")
        else:
            self.fail("Exception expected")

    def test_get_2sided_constraints(self):
        """2-sided constraints are stored separately with low/high bounds."""
        drv = self.asm.add('driver', My2SDriver())
        drv.add_constraint('-44.1 < comp1.a < 13.0')
        drv.add_constraint('77.0 < comp1.c < 79.0')
        self.asm.run()
        cons = drv.get_2sided_constraints()
        self.assertTrue(len(cons) == 2)
        con1 = cons['-44.1<comp1.a<13.0']
        self.assertEqual(self.asm.comp1.a, con1.evaluate(self.asm)[0])
        self.assertEqual(con1.low, -44.1)
        self.assertEqual(con1.high, 13.0)
        con1 = cons['77.0<comp1.c<79.0']
        self.assertEqual(self.asm.comp1.c, con1.evaluate(self.asm)[0])
        self.assertEqual(con1.low, 77.0)
        self.assertEqual(con1.high, 79.0)
        # 2-sided constraints do not appear among the ordinary constraints.
        cons = drv.get_constraints()
        self.assertTrue(len(cons) == 0)

    def test_list_constraints(self):
        """list_constraints includes both 2-sided and 1-sided forms."""
        drv = self.asm.add('driver', My2SDriver())
        drv.add_constraint('-44.1 < comp1.a < 13.0')
        drv.add_constraint('77.0 < comp1.c')
        self.asm.run()
        cons = drv.list_constraints()
        self.assertTrue('-44.1<comp1.a<13.0' in cons)
        self.assertTrue('77.0<comp1.c' in cons)

    def test_gradient(self):
        """Gradient entries for scaled, flipped and 2-sided constraint forms."""
        drv = self.asm.add('driver', My2SDriver())
        drv.add_constraint('-44.1 < comp1.a < 13.0')
        drv.add_constraint('77.0 < -2.5*comp1.a')
        drv.add_constraint('55.0 > comp1.a > 52.0')
        drv.add_constraint('0.1 < 3.0*comp1.a < 1.5')
        self.asm.run()
        J = drv.calc_gradient(inputs=['comp1.a'])
        assert_rel_error(self, J[0][0], 2.5, 1e-5)
        assert_rel_error(self, J[1][0], 1.0, 1e-5)
        assert_rel_error(self, J[2][0], 1.0, 1e-5)
        assert_rel_error(self, J[3][0], 3.0, 1e-5)

    def test_replace(self):
        """Replacing a driver carrying 2-sided constraints must not raise."""
        drv = self.asm.add('driver', My2SDriver())
        drv.add_constraint('-44.1 < comp1.a < 13.0')
        drv.add_constraint('77.0 < comp1.c < 79.0')
        self.asm.run()
        self.asm.replace('driver', My2SDriver())
# Allow running the tests in this module directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
data/adieu/allbuttonspressed/urlrouter/views.py
|
from .api import handlers
from .models import URLRoute
from django.shortcuts import get_object_or_404
def show(request, url):
    """Resolve *url* to a stored ``URLRoute`` and dispatch the request.

    Raises Http404 (via ``get_object_or_404``) when no route matches.
    """
    route = get_object_or_404(URLRoute, url=url)
    handler = handlers[route.handler]
    return handler.dispatch(request, route.target)
|
data/LibraryOfCongress/chronam/core/tests/ocr_extractor_tests.py
|
from os.path import dirname, join
from django.test import TestCase
from chronam.core.ocr_extractor import ocr_extractor
class OcrExtractorTests(TestCase):
    """Tests for ``chronam.core.ocr_extractor.ocr_extractor``."""

    def test_extractor(self):
        # Fixture files live in the package-level 'test-data' directory.
        # Renamed from 'dir' to avoid shadowing the builtin.
        data_dir = join(dirname(dirname(__file__)), 'test-data')
        ocr_file = join(data_dir, 'ocr.xml')
        text, coord_info = ocr_extractor(ocr_file)
        coords = coord_info["coords"]
        # Read bytes and decode explicitly so behaviour is identical on
        # Python 2 and 3; 'with' closes the handle (the old file(...)
        # call leaked it and the builtin is gone in Python 3).
        with open(join(data_dir, 'ocr.txt'), 'rb') as fh:
            expected_text = {"eng": fh.read().decode('utf-8')}
        self.assertEqual(text, expected_text)
        self.assertEqual(len(coords.keys()), 2150)
        self.assertEqual(len(coords['place']), 3)
        # 'in' replaces dict.has_key(), which was removed in Python 3.
        self.assertIn('Craft', coords)
        self.assertNotIn('Craft.', coords)
|
data/ImageEngine/gaffer/python/GafferUI/NameLabel.py
|
import IECore
import Gaffer
import GafferUI
class NameLabel( GafferUI.Label ) :
	"""A Label displaying the name (or dotted ancestor path) of a
	GraphComponent, kept up to date as components are renamed or
	reparented. Also acts as a drag source for the component."""

	def __init__( self, graphComponent, horizontalAlignment=GafferUI.Label.HorizontalAlignment.Left, verticalAlignment=GafferUI.Label.VerticalAlignment.Center, numComponents=1, formatter=None, parenting = None ) :

		GafferUI.Label.__init__( self, "", horizontalAlignment, verticalAlignment, parenting = parenting )

		# Formatter turns a list of GraphComponents into the label text.
		self.__formatter = formatter if formatter is not None else self.defaultFormatter
		# Number of ancestor path components to display.
		self.__numComponents = numComponents
		# Signal connections tracking name/parent changes of the
		# displayed components; rebuilt by __setupConnections().
		self.__connections = []

		# False is a sentinel meaning "never set", distinct from None
		# ("deliberately set to no component") in setGraphComponent().
		self.__graphComponent = False
		self.setGraphComponent( graphComponent )

		self.__buttonPressConnection = self.buttonPressSignal().connect( Gaffer.WeakMethod( self.__buttonPress ) )
		self.__dragBeginConnection = self.dragBeginSignal().connect( Gaffer.WeakMethod( self.__dragBegin ) )
		self.__dragEndConnection = self.dragEndSignal().connect( Gaffer.WeakMethod( self.__dragEnd ) )

	def setText( self, text ) :
		"""Set explicit text, overriding the automatic name display.
		Dropping the connections stops further automatic updates from
		clobbering the caller's text."""

		GafferUI.Label.setText( self, text )

		self.__connections = []

	def setGraphComponent( self, graphComponent ) :
		"""Set the GraphComponent whose name is displayed (may be None)."""

		# Early-out when the component is unchanged; the False sentinel
		# ensures the very first call from __init__ always falls through.
		if graphComponent is not None and self.__graphComponent is not False :
			if graphComponent.isSame( self.__graphComponent ) :
				return
		elif self.__graphComponent is None :
			return

		self.__graphComponent = graphComponent
		self.__setupConnections()
		self.__setText()

	def getGraphComponent( self ) :

		return self.__graphComponent

	def setNumComponents( self, numComponents ) :
		"""Set how many path components (self plus ancestors) to show."""

		assert( numComponents > 0 )

		if numComponents == self.__numComponents :
			return

		self.__numComponents = numComponents
		self.__setupConnections()
		self.__setText()

	def getNumComponents( self ) :

		return self.__numComponents

	def setFormatter( self, formatter ) :
		"""Set the callable mapping [GraphComponent, ...] -> label text."""

		self.__formatter = formatter
		self.__setText()

	def getFormatter( self ) :

		return self.__formatter

	@staticmethod
	def defaultFormatter( graphComponents ) :
		"""Join the spaced CamelCase names of the components with dots."""

		return ".".join( IECore.CamelCase.toSpaced( g.getName() ) for g in graphComponents )

	def __setupConnections( self, reuseUntil=None ) :
		"""(Re)build the nameChanged/parentChanged connections for each
		displayed component. When reuseUntil is given, connections for
		components up to and including it are recycled from the existing
		list (they are still valid); only those above it are remade."""

		if self.__graphComponent is None :
			self.__connections = []
			return

		updatedConnections = []
		n = 0
		g = self.__graphComponent
		reuse = reuseUntil is not None
		while g is not None and n < self.__numComponents :
			if reuse :
				# Each level contributed up to two connections
				# (nameChanged + parentChanged); recycle that pair.
				updatedConnections.extend( self.__connections[n*2:n*2+2] )
			else :
				updatedConnections.append( g.nameChangedSignal().connect( Gaffer.WeakMethod( self.__setText ) ) )
				if n < self.__numComponents - 1 :
					# The topmost displayed component's parent is not
					# shown, so its reparenting is irrelevant.
					updatedConnections.append( g.parentChangedSignal().connect( Gaffer.WeakMethod( self.__parentChanged ) ) )
			if g.isSame( reuseUntil ) :
				reuse = False
			g = g.parent()
			n += 1

		self.__connections = updatedConnections

	def __parentChanged( self, child, oldParent ) :

		self.__setText()
		# Connections below 'child' are unaffected by the reparenting,
		# so only those above it need remaking.
		self.__setupConnections( reuseUntil = child )

	def __setText( self, *unwantedArgs ) :
		# Collect up to numComponents ancestors (leaf first), then
		# reverse so the formatter sees root-to-leaf order.

		graphComponents = []
		n = 0
		g = self.__graphComponent
		while g is not None and n < self.__numComponents :
			graphComponents.append( g )
			g = g.parent()
			n += 1

		graphComponents.reverse()
		GafferUI.Label.setText( self, self.__formatter( graphComponents ) )

	def __buttonPress( self, widget, event ) :

		# Accept left/middle presses only when there is a component to drag.
		return self.getGraphComponent() is not None and event.buttons & ( event.Buttons.Left | event.Buttons.Middle )

	def __dragBegin( self, widget, event ) :

		if event.buttons & ( event.Buttons.Left | event.Buttons.Middle ) :
			GafferUI.Pointer.setCurrent( "nodes" )
			# The dragged data is the GraphComponent itself.
			return self.getGraphComponent()

		return None

	def __dragEnd( self, widget, event ) :

		GafferUI.Pointer.setCurrent( None )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.