| repo_name (string, len 5–100) | path (string, len 4–294) | copies (string, 990 classes) | size (string, len 4–7) | content (string, len 666–1M) | license (string, 15 classes) |
|---|---|---|---|---|---|
nrc/servo | tests/wpt/web-platform-tests/tools/py/py/_builtin.py | 259 | 6521 | import sys
try:
    reversed = reversed
except NameError:
    # Python < 2.4: supply a minimal stand-in for the builtin.
    def reversed(sequence):
        """reversed(sequence) -> reverse iterator over values of the sequence

        Return a reverse iterator
        """
        if hasattr(sequence, '__reversed__'):
            return sequence.__reversed__()
        if not hasattr(sequence, '__getitem__'):
            raise TypeError("argument to reversed() must be a sequence")
        return reversed_iterator(sequence)

    class reversed_iterator(object):
        """Iterator that walks an indexable sequence from last item to first."""

        def __init__(self, seq):
            self.seq = seq
            self.remaining = len(seq)

        def __iter__(self):
            return self

        def next(self):
            # Count down through the sequence; stop once the front is reached.
            idx = self.remaining - 1
            if idx < 0:
                raise StopIteration
            value = self.seq[idx]
            self.remaining = idx
            return value

        def __length_hint__(self):
            return self.remaining
try:
    any = any
except NameError:
    # Python < 2.5: backport of the builtin.
    def any(iterable):
        """Return True if at least one element of *iterable* is true."""
        for element in iterable:
            if element:
                return True
        return False
try:
    all = all
except NameError:
    # Python < 2.5: backport of the builtin.
    def all(iterable):
        """Return True only when every element of *iterable* is true."""
        for element in iterable:
            if not element:
                return False
        return True
try:
    sorted = sorted
except NameError:
    builtin_cmp = cmp  # keep a handle on the builtin; `cmp` is reused as a keyword arg below

    def sorted(iterable, cmp=None, key=None, reverse=0):
        """Backport of the builtin sorted() for Python < 2.4.

        Returns a new sorted list; supports cmp, key and reverse like 2.4+.
        """
        use_cmp = None
        if key is not None:
            # Decorate each element with its key so comparisons only look at item[0].
            if cmp is None:
                def use_cmp(x, y):
                    return builtin_cmp(x[0], y[0])
            else:
                def use_cmp(x, y):
                    return cmp(x[0], y[0])
            decorated = [(key(element), element) for element in iterable]
        else:
            if cmp is not None:
                use_cmp = cmp
            decorated = list(iterable)
        if use_cmp is not None:
            decorated.sort(use_cmp)
        else:
            decorated.sort()
        if reverse:
            decorated.reverse()
        if key is not None:
            # Undecorate: drop the sort keys again.
            return [element for (_, element) in decorated]
        return decorated
try:
set, frozenset = set, frozenset
except NameError:
from sets import set, frozenset
# pass through
enumerate = enumerate
try:
BaseException = BaseException
except NameError:
BaseException = Exception
try:
GeneratorExit = GeneratorExit
except NameError:
class GeneratorExit(Exception):
""" This exception is never raised, it is there to make it possible to
write code compatible with CPython 2.5 even in lower CPython
versions."""
pass
GeneratorExit.__module__ = 'exceptions'
_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
try:
callable = callable
except NameError:
def callable(obj):
return hasattr(obj, "__call__")
if sys.version_info >= (3, 0):
exec ("print_ = print ; exec_=exec")
import builtins
# some backward compatibility helpers
_basestring = str
def _totext(obj, encoding=None, errors=None):
if isinstance(obj, bytes):
if errors is None:
obj = obj.decode(encoding)
else:
obj = obj.decode(encoding, errors)
elif not isinstance(obj, str):
obj = str(obj)
return obj
def _isbytes(x):
return isinstance(x, bytes)
def _istext(x):
return isinstance(x, str)
text = str
bytes = bytes
def _getimself(function):
return getattr(function, '__self__', None)
def _getfuncdict(function):
return getattr(function, "__dict__", None)
def _getcode(function):
return getattr(function, "__code__", None)
def execfile(fn, globs=None, locs=None):
if globs is None:
back = sys._getframe(1)
globs = back.f_globals
locs = back.f_locals
del back
elif locs is None:
locs = globs
fp = open(fn, "r")
try:
source = fp.read()
finally:
fp.close()
co = compile(source, fn, "exec", dont_inherit=True)
exec_(co, globs, locs)
else:
import __builtin__ as builtins
_totext = unicode
_basestring = basestring
text = unicode
bytes = str
execfile = execfile
callable = callable
def _isbytes(x):
return isinstance(x, str)
def _istext(x):
return isinstance(x, unicode)
def _getimself(function):
return getattr(function, 'im_self', None)
def _getfuncdict(function):
return getattr(function, "__dict__", None)
def _getcode(function):
try:
return getattr(function, "__code__")
except AttributeError:
return getattr(function, "func_code", None)
def print_(*args, **kwargs):
""" minimal backport of py3k print statement. """
sep = ' '
if 'sep' in kwargs:
sep = kwargs.pop('sep')
end = '\n'
if 'end' in kwargs:
end = kwargs.pop('end')
file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
if kwargs:
args = ", ".join([str(x) for x in kwargs])
raise TypeError("invalid keyword arguments: %s" % args)
at_start = True
for x in args:
if not at_start:
file.write(sep)
file.write(str(x))
at_start = False
file.write(end)
def exec_(obj, globals=None, locals=None):
""" minimal backport of py3k exec statement. """
__tracebackhide__ = True
if globals is None:
frame = sys._getframe(1)
globals = frame.f_globals
if locals is None:
locals = frame.f_locals
elif locals is None:
locals = globals
exec2(obj, globals, locals)
if sys.version_info >= (3, 0):
def _reraise(cls, val, tb):
__tracebackhide__ = True
assert hasattr(val, '__traceback__')
raise cls.with_traceback(val, tb)
else:
exec ("""
def _reraise(cls, val, tb):
__tracebackhide__ = True
raise cls, val, tb
def exec2(obj, globals, locals):
__tracebackhide__ = True
exec obj in globals, locals
""")
def _tryimport(*names):
""" return the first successfully imported module. """
assert names
for name in names:
try:
__import__(name)
except ImportError:
excinfo = sys.exc_info()
else:
return sys.modules[name]
_reraise(*excinfo)
| mpl-2.0 |
austinban/ai-final-project | submissions/aartiste/myBayes.py | 19 | 3026 | import traceback
from submissions.aartiste import election
from submissions.aartiste import county_demographics
class DataFrame:
    """Simple container mimicking scikit-learn's dataset "bunch" objects.

    Holds a feature matrix plus the metadata a classifier needs.
    """

    def __init__(self):
        # Use per-instance containers: the original class-level mutable
        # defaults ([]) would be shared by every DataFrame instance.
        self.data = []            # one row (list of feature values) per sample
        self.feature_names = []   # column name for each position in a row
        self.target = []          # classification target, one entry per row
        self.target_names = []    # human-readable label for each target value
trumpECHP = DataFrame()

'''
Extract data from the CORGIS elections, and merge it with the
CORGIS demographics. Both data sets are organized by county and state.
'''
joint = {}

elections = election.get_results()
for county in elections:
    try:
        st = county['Location']['State Abbreviation']
        countyST = county['Location']['County'] + st
        trump = county['Vote Data']['Donald Trump']['Percent of Votes']
        joint[countyST] = {}
        joint[countyST]['ST'] = st
        joint[countyST]['Trump'] = trump
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # still propagate. Malformed rows are logged and skipped.
        traceback.print_exc()

demographics = county_demographics.get_all_counties()
for county in demographics:
    try:
        countyNames = county['County'].split()
        cName = ' '.join(countyNames[:-1])  # drop the trailing "County" word
        st = county['State']
        countyST = cName + st
        elderly = county['Age']["Percent 65 and Older"]
        college = county['Education']["Bachelor's Degree or Higher"]
        home = county['Housing']["Homeownership Rate"]
        poverty = county['Income']["Persons Below Poverty Level"]
        if countyST in joint:
            joint[countyST]['Elderly'] = elderly
            joint[countyST]['College'] = college
            joint[countyST]['Home'] = home
            joint[countyST]['Poverty'] = poverty
    except Exception:
        traceback.print_exc()

'''
Remove the counties that did not appear in both samples.
'''
intersection = {}
for countyST in joint:
    # 'College' is only present when the demographics loop matched this county.
    if 'College' in joint[countyST]:
        intersection[countyST] = joint[countyST]

trumpECHP.data = []
'''
Build the input frame, row by row.
'''
for countyST in intersection:
    # choose the input values
    trumpECHP.data.append([
        # countyST,
        # intersection[countyST]['ST'],
        # intersection[countyST]['Trump'],
        intersection[countyST]['Elderly'],
        intersection[countyST]['College'],
        intersection[countyST]['Home'],
        intersection[countyST]['Poverty'],
    ])

trumpECHP.feature_names = [
    # 'countyST',
    # 'ST',
    # 'Trump',
    'Elderly',
    'College',
    'Home',
    'Poverty',
]

'''
Build the target list,
one entry for each row in the input frame.
The Naive Bayesian network is a classifier,
i.e. it sorts data points into bins.
The best it can do to estimate a continuous variable
is to break the domain into segments, and predict
the segment into which the variable's value will fall.
In this example, I'm breaking Trump's % into two
arbitrary segments.
'''
trumpECHP.target = []


def trumpTarget(percentage):
    """Bin Trump's vote share: 1 if strictly above 45%, else 0."""
    if percentage > 45:
        return 1
    return 0


for countyST in intersection:
    # choose the target
    tt = trumpTarget(intersection[countyST]['Trump'])
    trumpECHP.target.append(tt)

trumpECHP.target_names = [
    'Trump <= 45%',
    'Trump > 45%',
]

Examples = {
    'Trump': trumpECHP,
}
Azure/azure-sdk-for-python | sdk/webpubsub/azure-mgmt-webpubsub/azure/mgmt/webpubsub/aio/operations/_web_pub_sub_shared_private_link_resources_operations.py | 1 | 23250 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
# Signature of the optional `cls` response hook callers may pass to every
# operation: (raw pipeline response, deserialized model, response headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class WebPubSubSharedPrivateLinkResourcesOperations:
    """WebPubSubSharedPrivateLinkResourcesOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.webpubsub.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name: str,
        resource_name: str,
        **kwargs
    ) -> AsyncIterable["_models.SharedPrivateLinkResourceList"]:
        """List shared private link resources.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param resource_name: The name of the resource.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SharedPrivateLinkResourceList or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.webpubsub.models.SharedPrivateLinkResourceList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SharedPrivateLinkResourceList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-04-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is a complete URL returned by the previous page.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Pull the items (and the link to the next page) out of one response.
            deserialized = self._deserialize('SharedPrivateLinkResourceList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # ARM error payloads deserialize into ErrorResponse before raising.
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/webPubSub/{resourceName}/sharedPrivateLinkResources'}  # type: ignore

    async def get(
        self,
        shared_private_link_resource_name: str,
        resource_group_name: str,
        resource_name: str,
        **kwargs
    ) -> "_models.SharedPrivateLinkResource":
        """Get the specified shared private link resource.

        :param shared_private_link_resource_name: The name of the shared private link resource.
        :type shared_private_link_resource_name: str
        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param resource_name: The name of the resource.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SharedPrivateLinkResource, or the result of cls(response)
        :rtype: ~azure.mgmt.webpubsub.models.SharedPrivateLinkResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SharedPrivateLinkResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-04-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'sharedPrivateLinkResourceName': self._serialize.url("shared_private_link_resource_name", shared_private_link_resource_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('SharedPrivateLinkResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/webPubSub/{resourceName}/sharedPrivateLinkResources/{sharedPrivateLinkResourceName}'}  # type: ignore

    async def _create_or_update_initial(
        self,
        shared_private_link_resource_name: str,
        resource_group_name: str,
        resource_name: str,
        parameters: "_models.SharedPrivateLinkResource",
        **kwargs
    ) -> "_models.SharedPrivateLinkResource":
        # Initial PUT of the long-running create/update; polling is handled by
        # begin_create_or_update, which wraps this coroutine.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SharedPrivateLinkResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-04-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'sharedPrivateLinkResourceName': self._serialize.url("shared_private_link_resource_name", shared_private_link_resource_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'SharedPrivateLinkResource')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 = updated existing resource, 201 = created; both carry the resource body.
        if response.status_code == 200:
            deserialized = self._deserialize('SharedPrivateLinkResource', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('SharedPrivateLinkResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/webPubSub/{resourceName}/sharedPrivateLinkResources/{sharedPrivateLinkResourceName}'}  # type: ignore

    async def begin_create_or_update(
        self,
        shared_private_link_resource_name: str,
        resource_group_name: str,
        resource_name: str,
        parameters: "_models.SharedPrivateLinkResource",
        **kwargs
    ) -> AsyncLROPoller["_models.SharedPrivateLinkResource"]:
        """Create or update a shared private link resource.

        :param shared_private_link_resource_name: The name of the shared private link resource.
        :type shared_private_link_resource_name: str
        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param resource_name: The name of the resource.
        :type resource_name: str
        :param parameters: The shared private link resource.
        :type parameters: ~azure.mgmt.webpubsub.models.SharedPrivateLinkResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either SharedPrivateLinkResource or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.webpubsub.models.SharedPrivateLinkResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SharedPrivateLinkResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial PUT. cls=lambda keeps the raw
            # pipeline response so the poller can inspect LRO headers.
            raw_result = await self._create_or_update_initial(
                shared_private_link_resource_name=shared_private_link_resource_name,
                resource_group_name=resource_group_name,
                resource_name=resource_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('SharedPrivateLinkResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'sharedPrivateLinkResourceName': self._serialize.url("shared_private_link_resource_name", shared_private_link_resource_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/webPubSub/{resourceName}/sharedPrivateLinkResources/{sharedPrivateLinkResourceName}'}  # type: ignore

    async def _delete_initial(
        self,
        shared_private_link_resource_name: str,
        resource_group_name: str,
        resource_name: str,
        **kwargs
    ) -> None:
        # Initial DELETE of the long-running delete; polling is handled by
        # begin_delete, which wraps this coroutine.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-04-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'sharedPrivateLinkResourceName': self._serialize.url("shared_private_link_resource_name", shared_private_link_resource_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/webPubSub/{resourceName}/sharedPrivateLinkResources/{sharedPrivateLinkResourceName}'}  # type: ignore

    async def begin_delete(
        self,
        shared_private_link_resource_name: str,
        resource_group_name: str,
        resource_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Delete the specified shared private link resource.

        :param shared_private_link_resource_name: The name of the shared private link resource.
        :type shared_private_link_resource_name: str
        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param resource_name: The name of the resource.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial DELETE request.
            raw_result = await self._delete_initial(
                shared_private_link_resource_name=shared_private_link_resource_name,
                resource_group_name=resource_group_name,
                resource_name=resource_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'sharedPrivateLinkResourceName': self._serialize.url("shared_private_link_resource_name", shared_private_link_resource_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/webPubSub/{resourceName}/sharedPrivateLinkResources/{sharedPrivateLinkResourceName}'}  # type: ignore
| mit |
googleads/google-ads-python | google/ads/googleads/v6/services/services/keyword_view_service/transports/grpc.py | 1 | 10080 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v6.resources.types import keyword_view
from google.ads.googleads.v6.services.types import keyword_view_service
from .base import KeywordViewServiceTransport, DEFAULT_CLIENT_INFO
class KeywordViewServiceGrpcTransport(KeywordViewServiceTransport):
    """gRPC backend transport for KeywordViewService.
    Service to manage keyword views.
    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.
    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """
    def __init__(
        self,
        *,
        host: str = "googleads.googleapis.com",
        credentials: credentials.Credentials = None,
        credentials_file: str = None,
        scopes: Sequence[str] = None,
        channel: grpc.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the transport.
        Args:
            host (Optional[str]): The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for grpc channel. It is ignored if ``channel`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        self._ssl_channel_credentials = ssl_channel_credentials
        if channel:
            # Sanity check: Ensure that channel and credentials are not both
            # provided.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        elif api_mtls_endpoint:
            # Deprecated mTLS path: build a mutual-TLS channel from the
            # endpoint plus either caller-supplied or application-default
            # client SSL credentials.
            warnings.warn(
                "api_mtls_endpoint and client_cert_source are deprecated",
                DeprecationWarning,
            )
            # Normalize the endpoint to host:port (default port 443).
            host = (
                api_mtls_endpoint
                if ":" in api_mtls_endpoint
                else api_mtls_endpoint + ":443"
            )
            if credentials is None:
                credentials, _ = auth.default(
                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
                )
            # Create SSL credentials with client_cert_source or application
            # default SSL credentials.
            if client_cert_source:
                cert, key = client_cert_source()
                ssl_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
            else:
                ssl_credentials = SslCredentials().ssl_credentials
            # create a new channel. The provided one is ignored.
            self._grpc_channel = type(self).create_channel(
                host,
                credentials=credentials,
                credentials_file=credentials_file,
                ssl_credentials=ssl_credentials,
                scopes=scopes or self.AUTH_SCOPES,
                quota_project_id=quota_project_id,
                options=[
                    # Unlimited message sizes: Google Ads responses can be large.
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            self._ssl_channel_credentials = ssl_credentials
        else:
            # Plain TLS path (no explicit channel, no mTLS endpoint).
            host = host if ":" in host else host + ":443"
            if credentials is None:
                credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
            # NOTE(review): unlike the mTLS branch above, this branch does not
            # forward credentials_file or quota_project_id, and always uses
            # self.AUTH_SCOPES rather than the caller's scopes -- confirm this
            # matches the upstream generated template.
            # create a new channel. The provided one is ignored.
            self._grpc_channel = type(self).create_channel(
                host,
                credentials=credentials,
                ssl_credentials=ssl_channel_credentials,
                scopes=self.AUTH_SCOPES,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
        # Cache of RPC stub callables, populated lazily by the properties below.
        self._stubs = {}  # type: Dict[str, Callable]
        # Run the base constructor.
        super().__init__(
            host=host, credentials=credentials, client_info=client_info,
        )
    @classmethod
    def create_channel(
        cls,
        host: str = "googleads.googleapis.com",
        credentials: credentials.Credentials = None,
        scopes: Optional[Sequence[str]] = None,
        **kwargs,
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            scopes=scopes or cls.AUTH_SCOPES,
            **kwargs,
        )
    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        return self._grpc_channel
    @property
    def get_keyword_view(
        self,
    ) -> Callable[
        [keyword_view_service.GetKeywordViewRequest], keyword_view.KeywordView
    ]:
        r"""Return a callable for the get keyword view method over gRPC.
        Returns the requested keyword view in full detail.
        Returns:
            Callable[[~.GetKeywordViewRequest],
                ~.KeywordView]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_keyword_view" not in self._stubs:
            self._stubs["get_keyword_view"] = self.grpc_channel.unary_unary(
                "/google.ads.googleads.v6.services.KeywordViewService/GetKeywordView",
                request_serializer=keyword_view_service.GetKeywordViewRequest.serialize,
                response_deserializer=keyword_view.KeywordView.deserialize,
            )
        return self._stubs["get_keyword_view"]
__all__ = ("KeywordViewServiceGrpcTransport",)
| apache-2.0 |
GodBlessPP/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/contextlib.py | 737 | 8788 | """Utilities for with-statement contexts. See PEP 343."""
import sys
from collections import deque
from functools import wraps
__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack"]
class ContextDecorator(object):
    """A base class or mixin that lets a context manager double as a decorator."""
    def _recreate_cm(self):
        """Return the context manager to enter for each decorated call.

        The default returns ``self``.  One-shot managers such as
        _GeneratorContextManager override this to build a fresh instance
        per call (private interface; see issue #11647).
        """
        return self
    def __call__(self, func):
        """Wrap *func* so every invocation runs inside this context manager."""
        @wraps(func)
        def wrapper(*args, **kwargs):
            cm = self._recreate_cm()
            with cm:
                return func(*args, **kwargs)
        return wrapper
class _GeneratorContextManager(ContextDecorator):
    """Helper for @contextmanager decorator."""
    def __init__(self, func, *args, **kwds):
        # Create the generator now; it only runs up to its first yield
        # when __enter__ calls next() on it.
        self.gen = func(*args, **kwds)
        # Keep the factory and its arguments so _recreate_cm can build a twin.
        self.func, self.args, self.kwds = func, args, kwds
    def _recreate_cm(self):
        # _GCM instances are one-shot context managers, so the
        # CM must be recreated each time a decorated function is
        # called
        return self.__class__(self.func, *self.args, **self.kwds)
    def __enter__(self):
        # Advance to the first yield; the yielded value becomes the
        # `as` target of the with statement.
        try:
            return next(self.gen)
        except StopIteration:
            raise RuntimeError("generator didn't yield")
    def __exit__(self, type, value, traceback):
        # Success path: resume the generator and require it to finish
        # (a second yield would mean a malformed @contextmanager body).
        if type is None:
            try:
                next(self.gen)
            except StopIteration:
                return
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = type()
            try:
                # Re-raise the exception at the yield point inside the
                # generator so its try/finally or except clauses run.
                self.gen.throw(type, value, traceback)
                raise RuntimeError("generator didn't stop after throw()")
            except StopIteration as exc:
                # Suppress the exception *unless* it's the same exception that
                # was passed to throw(). This prevents a StopIteration
                # raised inside the "with" statement from being suppressed
                return exc is not value
            except:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed. But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                #
                if sys.exc_info()[1] is not value:
                    raise
def contextmanager(func):
    """Decorator turning a generator function into a context-manager factory.

    The decorated generator must yield exactly once; code before the
    ``yield`` runs on ``__enter__`` and code after it (typically inside a
    ``finally`` clause) runs on ``__exit__``::

        @contextmanager
        def some_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>

    which makes::

        with some_generator(<arguments>) as <variable>:
            <body>

    behave like the hand-written setup/try/finally expansion.
    """
    @wraps(func)
    def factory(*args, **kwargs):
        return _GeneratorContextManager(func, *args, **kwargs)
    return factory
class closing(object):
    """Context manager that calls ``close()`` on its argument at block exit.

    ``with closing(<module>.open(<arguments>)) as f: <block>`` is
    equivalent to the try/finally form that assigns ``f`` and closes it
    by hand; the close happens whether or not the block raises.
    """
    def __init__(self, thing):
        # The wrapped object; exposed unchanged by __enter__.
        self.thing = thing
    def __enter__(self):
        return self.thing
    def __exit__(self, exc_type, exc_value, exc_tb):
        # Always close; exceptions from the block propagate (no return True).
        self.thing.close()
# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
    """Context manager for dynamic management of a stack of exit callbacks
    For example:
        with ExitStack() as stack:
            files = [stack.enter_context(open(fname)) for fname in filenames]
            # All opened files will automatically be closed at the end of
            # the with statement, even if attempts to open files later
            # in the list raise an exception
    """
    def __init__(self):
        # LIFO stack of callbacks, each with the __exit__ signature.
        self._exit_callbacks = deque()
    def pop_all(self):
        """Preserve the context stack by transferring it to a new instance"""
        new_stack = type(self)()
        new_stack._exit_callbacks = self._exit_callbacks
        self._exit_callbacks = deque()
        return new_stack
    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods"""
        def _exit_wrapper(*exc_details):
            return cm_exit(cm, *exc_details)
        # Record the owning CM for introspection/debugging.
        _exit_wrapper.__self__ = cm
        self.push(_exit_wrapper)
    def push(self, exit):
        """Registers a callback with the standard __exit__ method signature
        Can suppress exceptions the same way __exit__ methods can.
        Also accepts any object with an __exit__ method (registering a call
        to the method instead of the object itself)
        """
        # We use an unbound method rather than a bound method to follow
        # the standard lookup behaviour for special methods
        _cb_type = type(exit)
        try:
            exit_method = _cb_type.__exit__
        except AttributeError:
            # Not a context manager, so assume its a callable
            self._exit_callbacks.append(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit  # Allow use as a decorator
    def callback(self, callback, *args, **kwds):
        """Registers an arbitrary callback and arguments.
        Cannot suppress exceptions.
        """
        def _exit_wrapper(exc_type, exc, tb):
            # Exception details are deliberately ignored: plain callbacks
            # cannot suppress exceptions (wrapper returns None).
            callback(*args, **kwds)
        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection
        _exit_wrapper.__wrapped__ = callback
        self.push(_exit_wrapper)
        return callback  # Allow use as a decorator
    def enter_context(self, cm):
        """Enters the supplied context manager
        If successful, also pushes its __exit__ method as a callback and
        returns the result of the __enter__ method.
        """
        # We look up the special methods on the type to match the with statement
        _cm_type = type(cm)
        _exit = _cm_type.__exit__
        result = _cm_type.__enter__(cm)
        self._push_cm_exit(cm, _exit)
        return result
    def close(self):
        """Immediately unwind the context stack"""
        self.__exit__(None, None, None)
    def __enter__(self):
        return self
    def __exit__(self, *exc_details):
        received_exc = exc_details[0] is not None
        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]
        def _fix_exception_context(new_exc, old_exc):
            # Walk new_exc's __context__ chain to its end (or to the
            # exception already active when __exit__ was entered) and
            # splice old_exc in, emulating nested with-statement chaining.
            while 1:
                exc_context = new_exc.__context__
                if exc_context in (None, frame_exc):
                    break
                new_exc = exc_context
            new_exc.__context__ = old_exc
        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        pending_raise = False
        while self._exit_callbacks:
            cb = self._exit_callbacks.pop()
            try:
                if cb(*exc_details):
                    # A callback returned true: the current exception is
                    # suppressed for the remaining (outer) callbacks.
                    suppressed_exc = True
                    pending_raise = False
                    exc_details = (None, None, None)
            except:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                pending_raise = True
                exc_details = new_exc_details
        if pending_raise:
            try:
                # bare "raise exc_details[1]" replaces our carefully
                # set-up context
                fixed_ctx = exc_details[1].__context__
                raise exc_details[1]
            except BaseException:
                exc_details[1].__context__ = fixed_ctx
                raise
        return received_exc and suppressed_exc
| agpl-3.0 |
Endika/odoo | addons/hw_escpos/escpos/constants.py | 278 | 7471 | # -*- coding: utf-8 -*-
""" ESC/POS Commands (Constants) """
# Feed control sequences
CTL_LF = '\x0a' # Print and line feed
CTL_FF = '\x0c' # Form feed
CTL_CR = '\x0d' # Carriage return
CTL_HT = '\x09' # Horizontal tab
CTL_VT = '\x0b' # Vertical tab
# RT Status commands
DLE_EOT_PRINTER = '\x10\x04\x01' # Transmit printer status
DLE_EOT_OFFLINE = '\x10\x04\x02'
DLE_EOT_ERROR = '\x10\x04\x03'
DLE_EOT_PAPER = '\x10\x04\x04'
# Printer hardware
HW_INIT = '\x1b\x40' # Clear data in buffer and reset modes
HW_SELECT = '\x1b\x3d\x01' # Printer select
HW_RESET = '\x1b\x3f\x0a\x00' # Reset printer hardware
# Cash Drawer
CD_KICK_2 = '\x1b\x70\x00' # Sends a pulse to pin 2 []
CD_KICK_5 = '\x1b\x70\x01' # Sends a pulse to pin 5 []
# Paper
PAPER_FULL_CUT = '\x1d\x56\x00' # Full cut paper
PAPER_PART_CUT = '\x1d\x56\x01' # Partial cut paper
# Text format
TXT_NORMAL = '\x1b\x21\x00' # Normal text
TXT_2HEIGHT = '\x1b\x21\x10' # Double height text
TXT_2WIDTH = '\x1b\x21\x20' # Double width text
TXT_DOUBLE = '\x1b\x21\x30' # Double height & Width
TXT_UNDERL_OFF = '\x1b\x2d\x00' # Underline font OFF
TXT_UNDERL_ON = '\x1b\x2d\x01' # Underline font 1-dot ON
TXT_UNDERL2_ON = '\x1b\x2d\x02' # Underline font 2-dot ON
TXT_BOLD_OFF = '\x1b\x45\x00' # Bold font OFF
TXT_BOLD_ON = '\x1b\x45\x01' # Bold font ON
TXT_FONT_A = '\x1b\x4d\x00' # Font type A
TXT_FONT_B = '\x1b\x4d\x01' # Font type B
TXT_ALIGN_LT = '\x1b\x61\x00' # Left justification
TXT_ALIGN_CT = '\x1b\x61\x01' # Centering
TXT_ALIGN_RT = '\x1b\x61\x02' # Right justification
TXT_COLOR_BLACK = '\x1b\x72\x00' # Default Color
TXT_COLOR_RED = '\x1b\x72\x01' # Alternative Color ( Usually Red )
# Text Encoding
TXT_ENC_PC437 = '\x1b\x74\x00' # PC437 USA
TXT_ENC_KATAKANA= '\x1b\x74\x01' # KATAKANA (JAPAN)
TXT_ENC_PC850 = '\x1b\x74\x02' # PC850 Multilingual
TXT_ENC_PC860 = '\x1b\x74\x03' # PC860 Portuguese
TXT_ENC_PC863 = '\x1b\x74\x04' # PC863 Canadian-French
TXT_ENC_PC865 = '\x1b\x74\x05' # PC865 Nordic
TXT_ENC_KANJI6 = '\x1b\x74\x06' # One-pass Kanji, Hiragana
TXT_ENC_KANJI7 = '\x1b\x74\x07' # One-pass Kanji
TXT_ENC_KANJI8 = '\x1b\x74\x08' # One-pass Kanji
TXT_ENC_PC851 = '\x1b\x74\x0b' # PC851 Greek
TXT_ENC_PC853 = '\x1b\x74\x0c' # PC853 Turkish
TXT_ENC_PC857 = '\x1b\x74\x0d' # PC857 Turkish
TXT_ENC_PC737 = '\x1b\x74\x0e' # PC737 Greek
TXT_ENC_8859_7 = '\x1b\x74\x0f' # ISO8859-7 Greek
TXT_ENC_WPC1252 = '\x1b\x74\x10' # WPC1252
TXT_ENC_PC866 = '\x1b\x74\x11' # PC866 Cyrillic #2
TXT_ENC_PC852 = '\x1b\x74\x12' # PC852 Latin2
TXT_ENC_PC858 = '\x1b\x74\x13' # PC858 Euro
TXT_ENC_KU42 = '\x1b\x74\x14' # KU42 Thai
TXT_ENC_TIS11 = '\x1b\x74\x15' # TIS11 Thai
TXT_ENC_TIS18 = '\x1b\x74\x1a' # TIS18 Thai
TXT_ENC_TCVN3 = '\x1b\x74\x1e' # TCVN3 Vietnamese
TXT_ENC_TCVN3B = '\x1b\x74\x1f' # TCVN3 Vietnamese
TXT_ENC_PC720 = '\x1b\x74\x20' # PC720 Arabic
TXT_ENC_WPC775 = '\x1b\x74\x21' # WPC775 Baltic Rim
TXT_ENC_PC855 = '\x1b\x74\x22' # PC855 Cyrillic
TXT_ENC_PC861 = '\x1b\x74\x23' # PC861 Icelandic
TXT_ENC_PC862 = '\x1b\x74\x24' # PC862 Hebrew
TXT_ENC_PC864 = '\x1b\x74\x25' # PC864 Arabic
TXT_ENC_PC869 = '\x1b\x74\x26' # PC869 Greek
TXT_ENC_PC936 = '\x1C\x21\x00' # PC936 GBK(Guobiao Kuozhan)
TXT_ENC_8859_2 = '\x1b\x74\x27' # ISO8859-2 Latin2
TXT_ENC_8859_9 = '\x1b\x74\x28' # ISO8859-2 Latin9
TXT_ENC_PC1098 = '\x1b\x74\x29' # PC1098 Farsi
TXT_ENC_PC1118 = '\x1b\x74\x2a' # PC1118 Lithuanian
TXT_ENC_PC1119 = '\x1b\x74\x2b' # PC1119 Lithuanian
TXT_ENC_PC1125 = '\x1b\x74\x2c' # PC1125 Ukrainian
TXT_ENC_WPC1250 = '\x1b\x74\x2d' # WPC1250 Latin2
TXT_ENC_WPC1251 = '\x1b\x74\x2e' # WPC1251 Cyrillic
TXT_ENC_WPC1253 = '\x1b\x74\x2f' # WPC1253 Greek
TXT_ENC_WPC1254 = '\x1b\x74\x30' # WPC1254 Turkish
TXT_ENC_WPC1255 = '\x1b\x74\x31' # WPC1255 Hebrew
TXT_ENC_WPC1256 = '\x1b\x74\x32' # WPC1256 Arabic
TXT_ENC_WPC1257 = '\x1b\x74\x33' # WPC1257 Baltic Rim
TXT_ENC_WPC1258 = '\x1b\x74\x34' # WPC1258 Vietnamese
TXT_ENC_KZ1048 = '\x1b\x74\x35' # KZ-1048 Kazakhstan
TXT_ENC_KATAKANA_MAP = {
# Maps UTF-8 Katakana symbols to KATAKANA Page Codes
# Half-Width Katakanas
'\xef\xbd\xa1':'\xa1', # 。
'\xef\xbd\xa2':'\xa2', # 「
'\xef\xbd\xa3':'\xa3', # 」
'\xef\xbd\xa4':'\xa4', # 、
'\xef\xbd\xa5':'\xa5', # ・
'\xef\xbd\xa6':'\xa6', # ヲ
'\xef\xbd\xa7':'\xa7', # ァ
'\xef\xbd\xa8':'\xa8', # ィ
'\xef\xbd\xa9':'\xa9', # ゥ
'\xef\xbd\xaa':'\xaa', # ェ
'\xef\xbd\xab':'\xab', # ォ
'\xef\xbd\xac':'\xac', # ャ
'\xef\xbd\xad':'\xad', # ュ
'\xef\xbd\xae':'\xae', # ョ
'\xef\xbd\xaf':'\xaf', # ッ
'\xef\xbd\xb0':'\xb0', # ー
'\xef\xbd\xb1':'\xb1', # ア
'\xef\xbd\xb2':'\xb2', # イ
'\xef\xbd\xb3':'\xb3', # ウ
'\xef\xbd\xb4':'\xb4', # エ
'\xef\xbd\xb5':'\xb5', # オ
'\xef\xbd\xb6':'\xb6', # カ
'\xef\xbd\xb7':'\xb7', # キ
'\xef\xbd\xb8':'\xb8', # ク
'\xef\xbd\xb9':'\xb9', # ケ
'\xef\xbd\xba':'\xba', # コ
'\xef\xbd\xbb':'\xbb', # サ
'\xef\xbd\xbc':'\xbc', # シ
'\xef\xbd\xbd':'\xbd', # ス
'\xef\xbd\xbe':'\xbe', # セ
'\xef\xbd\xbf':'\xbf', # ソ
'\xef\xbe\x80':'\xc0', # タ
'\xef\xbe\x81':'\xc1', # チ
'\xef\xbe\x82':'\xc2', # ツ
'\xef\xbe\x83':'\xc3', # テ
'\xef\xbe\x84':'\xc4', # ト
'\xef\xbe\x85':'\xc5', # ナ
'\xef\xbe\x86':'\xc6', # ニ
'\xef\xbe\x87':'\xc7', # ヌ
'\xef\xbe\x88':'\xc8', # ネ
'\xef\xbe\x89':'\xc9', # ノ
'\xef\xbe\x8a':'\xca', # ハ
'\xef\xbe\x8b':'\xcb', # ヒ
'\xef\xbe\x8c':'\xcc', # フ
'\xef\xbe\x8d':'\xcd', # ヘ
'\xef\xbe\x8e':'\xce', # ホ
'\xef\xbe\x8f':'\xcf', # マ
'\xef\xbe\x90':'\xd0', # ミ
'\xef\xbe\x91':'\xd1', # ム
'\xef\xbe\x92':'\xd2', # メ
'\xef\xbe\x93':'\xd3', # モ
'\xef\xbe\x94':'\xd4', # ヤ
'\xef\xbe\x95':'\xd5', # ユ
'\xef\xbe\x96':'\xd6', # ヨ
'\xef\xbe\x97':'\xd7', # ラ
'\xef\xbe\x98':'\xd8', # リ
'\xef\xbe\x99':'\xd9', # ル
'\xef\xbe\x9a':'\xda', # レ
'\xef\xbe\x9b':'\xdb', # ロ
'\xef\xbe\x9c':'\xdc', # ワ
'\xef\xbe\x9d':'\xdd', # ン
'\xef\xbe\x9e':'\xde', # ゙
'\xef\xbe\x9f':'\xdf', # ゚
}
# Barcod format
BARCODE_TXT_OFF = '\x1d\x48\x00' # HRI barcode chars OFF
BARCODE_TXT_ABV = '\x1d\x48\x01' # HRI barcode chars above
BARCODE_TXT_BLW = '\x1d\x48\x02' # HRI barcode chars below
BARCODE_TXT_BTH = '\x1d\x48\x03' # HRI barcode chars both above and below
BARCODE_FONT_A = '\x1d\x66\x00' # Font type A for HRI barcode chars
BARCODE_FONT_B = '\x1d\x66\x01' # Font type B for HRI barcode chars
BARCODE_HEIGHT = '\x1d\x68\x64' # Barcode Height [1-255]
BARCODE_WIDTH = '\x1d\x77\x03' # Barcode Width [2-6]
BARCODE_UPC_A = '\x1d\x6b\x00' # Barcode type UPC-A
BARCODE_UPC_E = '\x1d\x6b\x01' # Barcode type UPC-E
BARCODE_EAN13 = '\x1d\x6b\x02' # Barcode type EAN13
BARCODE_EAN8 = '\x1d\x6b\x03' # Barcode type EAN8
BARCODE_CODE39 = '\x1d\x6b\x04' # Barcode type CODE39
BARCODE_ITF = '\x1d\x6b\x05' # Barcode type ITF
BARCODE_NW7 = '\x1d\x6b\x06' # Barcode type NW7
# Image format
S_RASTER_N = '\x1d\x76\x30\x00' # Set raster image normal size
S_RASTER_2W = '\x1d\x76\x30\x01' # Set raster image double width
S_RASTER_2H = '\x1d\x76\x30\x02' # Set raster image double height
S_RASTER_Q = '\x1d\x76\x30\x03' # Set raster image quadruple
| agpl-3.0 |
googleinterns/where-is-my-watch | GpsDataAnalyzer/calculator/deviation_calculator.py | 1 | 8378 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles Calculations on pairs of GPS data sets.
Usage:
gps_fileparser = FileParser()
phone_data_set = gps_fileparser.parse_file("<file_path>/2020-07-21T19:48:44.697Z.xml")
simulator_data_set = gps_fileparser.parse_file("<file_path>/GPSSIM-2020-07-21_19:49:31.csv")
downsampled_list = simulator_data_set[0].gps_data_list[::10]
simulator_data_set[0].gps_data_list = downsampled_list
calculator = DataSetDeviationCalculator(phone_data_set, simulator_data_set[0])
devation_dataframe = calculator.get_deviation_dataframe()
"""
from datetime import datetime, timedelta
from datetime import timezone
import time
import numpy as np
import pandas as pd
from GpsDataAnalyzer import utils
from GpsDataAnalyzer.fileparser.fileparser import FileParser
from GpsDataAnalyzer.calculator import alignment_algorithms
class DataSetDeviationCalculator:
    """An object for Calculating Deviations on two data sets.
    Attributes:
        data_set_1: GpsDataSet
        data_set_2: GpsDataSet
        starting_time_1: Datetime, offset included start time for 1st set
        starting_time_2: Datetime, offset included start time for 2nd set
        ending_time_1: Datetime, offset included end time for 1st set
        ending_time_2: Datetime, offset included end time for 2nd set
        offset_mapping_1: Dictionary, {DateTime: [GpsData, ], ...}
        offset_mapping_2: Dictionary, {DateTime: [GpsData, ], ...}
        deviations_dataframe: Pandas Dataframe that holds values after calculation
        availability: Cached availability percentage (float) or None
    """
    def __init__(self, data_set_1, data_set_2):
        """Line up the two data sets and precompute offset-adjusted mappings.
        Args:
            data_set_1: GpsDataSet of the first source.
            data_set_2: GpsDataSet of the second source.
        """
        self.data_set_1 = data_set_1
        self.data_set_2 = data_set_2
        self.starting_time_1 = None
        self.starting_time_2 = None
        self.ending_time_1 = None
        self.ending_time_2 = None
        self.offset_mapping_1 = {}
        self.offset_mapping_2 = {}
        self.deviations_dataframe = None
        self.availability = None
        start = time.perf_counter()
        print("Optimized lineup implementation:")
        self.starting_time_1, self.starting_time_2 = alignment_algorithms.find_lineup(
            self.data_set_1, self.data_set_2)
        end = time.perf_counter()
        print(f"Lined up data in {end - start:0.4f} seconds")
        print("start time 1: " + str(self.starting_time_1))
        print("start time 2: " + str(self.starting_time_2))
        print("\n")
        self.ending_time_1 = self.data_set_1.gps_meta_data.end_time
        self.ending_time_2 = self.data_set_2.gps_meta_data.end_time
        if not self.starting_time_1 and not self.starting_time_2:
            # Lineup failed: keep both sets unshifted.
            self.offset_mapping_1 = alignment_algorithms.create_time_to_points_mapping(self.data_set_1, 0)
            self.offset_mapping_2 = alignment_algorithms.create_time_to_points_mapping(self.data_set_2, 0)
        elif self.data_set_1.gps_data_list[0].time > self.data_set_2.gps_data_list[0].time:
            # Set 2 started earlier: shift set 2 forward by the lineup offset.
            offset = (self.starting_time_1 - self.starting_time_2).total_seconds()
            self.offset_mapping_1 = alignment_algorithms.create_time_to_points_mapping(self.data_set_1, 0)
            self.offset_mapping_2 = alignment_algorithms.create_time_to_points_mapping(self.data_set_2, offset)
            self.ending_time_2 = self.ending_time_2 + timedelta(seconds=offset)
        else:
            # Set 1 started earlier (or simultaneously): shift set 1 instead.
            offset = (self.starting_time_2 - self.starting_time_1).total_seconds()
            self.offset_mapping_1 = alignment_algorithms.create_time_to_points_mapping(self.data_set_1, offset)
            self.offset_mapping_2 = alignment_algorithms.create_time_to_points_mapping(self.data_set_2, 0)
            self.ending_time_1 = self.ending_time_1 + timedelta(seconds=offset)
    def get_deviation_dataframe(self):
        """
        Extracts and returns deviation for each valid timestamp & other information.
        Returns:
            A pandas dataframe including the shared timestamp with the offset included,
            the deviations of lat/lon, the difference in speed, the difference in
            altitude, and the original timestamps for each set
        """
        if self.deviations_dataframe is not None:
            # Computed on a previous call; reuse the cached frame.
            return self.deviations_dataframe
        time_list, distance_deviation_list, speed_deviation_list, altitude_deviation_list = [], [], [], []
        set1_time_list, set2_time_list = [], []
        set1_average_signal_list, set2_average_signal_list, signal_deviation_list = [], [], []
        for timestamp in self.offset_mapping_1:
            if timestamp in self.offset_mapping_2:
                time_list.append(timestamp)
                # Get the mapping pair of data points in each dataset
                point1 = self.offset_mapping_1[timestamp][0]
                point2 = self.offset_mapping_2[timestamp][0]
                # Calculate the distance deviation
                location1 = (point1.latitude, point1.longitude)
                location2 = (point2.latitude, point2.longitude)
                distance_deviation_list.append(utils.calculate_distance(location1, location2))
                # Calculate the speed differentials
                speed_deviation_list.append(point2.speed - point1.speed)
                # Calculate the altitude differentials
                if point1.altitude is None or point2.altitude is None:
                    altitude_deviation_list.append(None)
                else:
                    altitude_deviation_list.append(point2.altitude - point1.altitude)
                # Append the original timestamp in each dataset
                set1_time_list.append(point1.time)
                set2_time_list.append(point2.time)
                # Append the average signal if we have it; guard against None
                # the same way the altitude differential does (previously a
                # missing signal raised a TypeError).
                set1_average_signal_list.append(point1.average_signal)
                set2_average_signal_list.append(point2.average_signal)
                if point1.average_signal is None or point2.average_signal is None:
                    signal_deviation_list.append(None)
                else:
                    signal_deviation_list.append(point2.average_signal - point1.average_signal)
        self.deviations_dataframe = pd.DataFrame({"Common Timestamp": time_list,
                                                  "Distance Deviations": distance_deviation_list,
                                                  "Speed Deviations": speed_deviation_list,
                                                  "Altitude Deviations": altitude_deviation_list,
                                                  "Set 1 Timestamp": set1_time_list,
                                                  "Set 2 Timestamp": set2_time_list,
                                                  "Set 1 Average Signal": set1_average_signal_list,
                                                  "Set 2 Average Signal": set2_average_signal_list,
                                                  "Signal Deviations": signal_deviation_list})
        return self.deviations_dataframe
    def get_availability(self):
        """
        Calculate the availability of wear captured gps data
        Returns:
            Percentage (0-100) of whole-second timestamps in the overlapping
            window for which both data sets captured a point. Returns 0 when
            the sets could not be lined up or their windows do not overlap.
        """
        if self.availability is not None:
            return self.availability
        if not self.starting_time_1 and not self.starting_time_2:
            return 0
        start_time = utils.round_time(max(self.starting_time_1, self.starting_time_2))
        end_time = utils.round_time(min(self.ending_time_1, self.ending_time_2))
        total_seconds = int((end_time - start_time).total_seconds())
        if total_seconds <= 0:
            # No overlap between the two recordings; avoid dividing by zero.
            return 0
        available_timestamps = 0
        for offset_seconds in range(total_seconds):
            timestamp = start_time + timedelta(seconds=offset_seconds)
            if timestamp in self.offset_mapping_1 and timestamp in self.offset_mapping_2:
                available_timestamps += 1
        # Round the percentage (not the ratio) to avoid float noise such as
        # 33.33000000000001, and cache it -- previously the cache attribute
        # was never written, so the early-return check above was dead code.
        self.availability = round(100.0 * available_timestamps / total_seconds, 2)
        return self.availability
| apache-2.0 |
xen0l/ansible | lib/ansible/modules/database/postgresql/postgresql_ext.py | 47 | 5528 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: postgresql_ext
short_description: Add or remove PostgreSQL extensions from a database.
description:
- Add or remove PostgreSQL extensions from a database.
version_added: "1.9"
options:
name:
description:
- name of the extension to add or remove
required: true
db:
description:
- name of the database to add or remove the extension to/from
required: true
login_user:
description:
- The username used to authenticate with
login_password:
description:
- The password used to authenticate with
login_host:
description:
- Host running the database
default: localhost
port:
description:
- Database port to connect to.
default: 5432
state:
description:
- The database extension state
default: present
choices: [ "present", "absent" ]
notes:
- The default authentication assumes that you are either logging in as or sudo'ing to the C(postgres) account on the host.
- This module uses I(psycopg2), a Python PostgreSQL database adapter. You must ensure that psycopg2 is installed on
the host before using this module. If the remote host is the PostgreSQL server (which is the default case), then PostgreSQL must also be installed
on the remote host. For Ubuntu-based systems, install the C(postgresql), C(libpq-dev), and C(python-psycopg2) packages on the remote host before using
this module.
requirements: [ psycopg2 ]
author: "Daniel Schep (@dschep)"
'''
EXAMPLES = '''
# Adds postgis to the database "acme"
- postgresql_ext:
name: postgis
db: acme
'''
import traceback
try:
import psycopg2
import psycopg2.extras
except ImportError:
postgresqldb_found = False
else:
postgresqldb_found = True
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
class NotSupportedError(Exception):
    """Raised when the target PostgreSQL server lacks a required capability."""
    pass
# ===========================================
# PostgreSQL module specific support methods.
#
def ext_exists(cursor, ext):
    """Return True if extension *ext* is installed in the connected database.

    Queries ``pg_extension`` with a parameterized name, so the value is
    safely escaped by the driver.
    """
    cursor.execute("SELECT * FROM pg_extension WHERE extname=%(ext)s", {'ext': ext})
    return cursor.rowcount == 1
def ext_delete(cursor, ext):
    """Drop extension *ext* if present; return True when a DROP was issued."""
    if not ext_exists(cursor, ext):
        return False
    # NOTE(review): the identifier is string-interpolated (psycopg2 cannot
    # parameterize identifiers); callers must supply trusted names.
    cursor.execute("DROP EXTENSION \"%s\"" % ext)
    return True
def ext_create(cursor, ext):
    """Create extension *ext* if absent; return True when a CREATE was issued."""
    if ext_exists(cursor, ext):
        return False
    # NOTE(review): the identifier is string-interpolated (psycopg2 cannot
    # parameterize identifiers); callers must supply trusted names.
    cursor.execute('CREATE EXTENSION "%s"' % ext)
    return True
# ===========================================
# Module execution.
#
def main():
    """Module entry point: add or remove a PostgreSQL extension per the params."""
    module = AnsibleModule(
        argument_spec=dict(
            login_user=dict(default="postgres"),
            login_password=dict(default="", no_log=True),
            login_host=dict(default=""),
            port=dict(default="5432"),
            db=dict(required=True),
            ext=dict(required=True, aliases=['name']),
            state=dict(default="present", choices=["absent", "present"]),
        ),
        supports_check_mode=True
    )
    if not postgresqldb_found:
        module.fail_json(msg="the python psycopg2 module is required")
    db = module.params["db"]
    ext = module.params["ext"]
    state = module.params["state"]
    changed = False
    # To use defaults values, keyword arguments must be absent, so
    # check which values are empty and don't include in the **kw
    # dictionary
    params_map = {
        "login_host": "host",
        "login_user": "user",
        "login_password": "password",
        "port": "port"
    }
    kw = dict((params_map[k], v) for (k, v) in module.params.items()
              if k in params_map and v != '')
    try:
        db_connection = psycopg2.connect(database=db, **kw)
        # Enable autocommit so we can create databases.  Compare versions
        # numerically: the previous string comparison ranked "2.10.0"
        # below "2.4.2" and silently took the legacy isolation-level path.
        # psycopg2.__version__ looks like "2.8.6 (dt dec pq3 ext lib)".
        try:
            psycopg2_version = tuple(
                int(part) for part in psycopg2.__version__.split(' ')[0].split('.'))
        except ValueError:
            # Unparseable version string: fall back to the legacy path,
            # which works on every psycopg2 release.
            psycopg2_version = (0,)
        if psycopg2_version >= (2, 4, 2):
            db_connection.autocommit = True
        else:
            db_connection.set_isolation_level(psycopg2
                                              .extensions
                                              .ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = db_connection.cursor(
            cursor_factory=psycopg2.extras.DictCursor)
    except Exception as e:
        module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
    try:
        if module.check_mode:
            # Check mode: report whether a change would occur, run no DDL.
            if state == "present":
                changed = not ext_exists(cursor, ext)
            elif state == "absent":
                changed = ext_exists(cursor, ext)
        else:
            if state == "absent":
                changed = ext_delete(cursor, ext)
            elif state == "present":
                changed = ext_create(cursor, ext)
    except NotSupportedError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
    except Exception as e:
        module.fail_json(msg="Database query failed: %s" % to_native(e), exception=traceback.format_exc())
    module.exit_json(changed=changed, db=db, ext=ext)
if __name__ == '__main__':
main()
| gpl-3.0 |
LockScreen/Backend | venv/lib/python2.7/site-packages/flask/json.py | 428 | 8113 | # -*- coding: utf-8 -*-
"""
flask.jsonimpl
~~~~~~~~~~~~~~
Implementation helpers for the JSON support in Flask.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import io
import uuid
from datetime import datetime
from .globals import current_app, request
from ._compat import text_type, PY2
from werkzeug.http import http_date
from jinja2 import Markup
# Use the same json implementation as itsdangerous on which we
# depend anyways.
try:
from itsdangerous import simplejson as _json
except ImportError:
from itsdangerous import json as _json
# figure out if simplejson escapes slashes. This behavior was changed
# from one version to another without reason.
_slash_escape = '\\/' not in _json.dumps('/')
__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump',
'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder',
'jsonify']
def _wrap_reader_for_text(fp, encoding):
if isinstance(fp.read(0), bytes):
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding)
return fp
def _wrap_writer_for_text(fp, encoding):
try:
fp.write('')
except TypeError:
fp = io.TextIOWrapper(fp, encoding)
return fp
class JSONEncoder(_json.JSONEncoder):
    """Flask's default JSON encoder.

    Extends the simplejson encoder with support for ``datetime`` objects
    (serialized as RFC 822 / HTTP date strings), ``UUID`` instances
    (string form) and objects exposing ``__html__`` such as ``Markup``.
    Override :meth:`default` to support further types.
    """
    def default(self, o):
        """Return a serializable stand-in for *o*, or defer to the base
        implementation (which raises ``TypeError``).

        To support arbitrary iterators, for example, a subclass could
        try ``iter(o)`` and return ``list(iterable)`` before falling
        back to ``JSONEncoder.default(self, o)``.
        """
        if isinstance(o, datetime):
            return http_date(o)
        elif isinstance(o, uuid.UUID):
            return str(o)
        elif hasattr(o, '__html__'):
            return text_type(o.__html__())
        # Explicit base-class call (not zero-arg super()) keeps this
        # working under Python 2 as well.
        return _json.JSONEncoder.default(self, o)
class JSONDecoder(_json.JSONDecoder):
    """The default JSON decoder. This one does not change the behavior from
    the default simplejson decoder. Consult the :mod:`json` documentation
    for more information. This decoder is not only used for the load
    functions of this module but also :attr:`~flask.Request`.
    """
def _dump_arg_defaults(kwargs):
    """Fill *kwargs* in place with application-aware defaults for dumping."""
    if not current_app:
        # Outside of an application context: sensible static defaults.
        kwargs.setdefault('cls', JSONEncoder)
        kwargs.setdefault('sort_keys', True)
        return
    # Inside an app context: honor the application's encoder and the
    # JSON-related configuration values.
    kwargs.setdefault('cls', current_app.json_encoder)
    if not current_app.config['JSON_AS_ASCII']:
        kwargs.setdefault('ensure_ascii', False)
    kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS'])
def _load_arg_defaults(kwargs):
    """Fill *kwargs* in place with application-aware defaults for loading."""
    default_cls = current_app.json_decoder if current_app else JSONDecoder
    kwargs.setdefault('cls', default_cls)
def dumps(obj, **kwargs):
    """Serialize ``obj`` to a JSON formatted ``str`` using the encoder
    configured on the current application
    (:attr:`~flask.Flask.json_encoder`) if there is an application on the
    stack.

    The result is a unicode string (or an ascii-only bytestring that
    coerces into one) unless an ``encoding`` keyword is given, in which
    case the string is encoded to bytes.  Ascii-only behavior is
    controlled by the ``JSON_AS_ASCII`` configuration variable and can be
    overridden with the simplejson ``ensure_ascii`` parameter.
    """
    _dump_arg_defaults(kwargs)
    encoding = kwargs.pop('encoding', None)
    serialized = _json.dumps(obj, **kwargs)
    if encoding is not None and isinstance(serialized, text_type):
        serialized = serialized.encode(encoding)
    return serialized
def dump(obj, fp, **kwargs):
    """Like :func:`dumps` but writes into a file object."""
    _dump_arg_defaults(kwargs)
    encoding = kwargs.pop('encoding', None)
    # Binary sinks need a text wrapper before json can write str to them.
    target = fp if encoding is None else _wrap_writer_for_text(fp, encoding)
    _json.dump(obj, target, **kwargs)
def loads(s, **kwargs):
    """Unserialize a JSON object from a string ``s`` using the decoder
    configured on the current application
    (:attr:`~flask.Flask.json_decoder`) if there is an application on the
    stack.
    """
    _load_arg_defaults(kwargs)
    if isinstance(s, bytes):
        # Only consume the ``encoding`` keyword when decoding is needed,
        # mirroring the text-input path which forwards it untouched.
        codec = kwargs.pop('encoding', None) or 'utf-8'
        s = s.decode(codec)
    return _json.loads(s, **kwargs)
def load(fp, **kwargs):
    """Like :func:`loads` but reads from a file object."""
    _load_arg_defaults(kwargs)
    if not PY2:
        # Python 3's json requires text input; wrap binary readers.
        codec = kwargs.pop('encoding', None) or 'utf-8'
        fp = _wrap_reader_for_text(fp, codec)
    return _json.load(fp, **kwargs)
def htmlsafe_dumps(obj, **kwargs):
    """Serialize ``obj`` to JSON that is safe for use in ``<script>`` tags.

    Accepts the same arguments as :func:`dumps` and returns a JSON string.
    Available in templates through the ``|tojson`` filter, which will also
    mark the result as safe.  Due to how certain characters are escaped,
    the result is safe even outside of ``<script>`` tags.

    The following characters are escaped in strings: ``<``, ``>``, ``&``
    and ``'``.  That makes it safe to embed such strings anywhere in HTML
    with the notable exception of double quoted attributes -- single
    quote your attributes there, or HTML escape the result in addition.

    .. versionchanged:: 0.10
       This function's return value is now always safe for HTML usage,
       even if outside of script tags or if used in XHTML.  This rule
       does not hold true when using this function in HTML attributes
       that are double quoted.  Always single quote attributes if you use
       the ``|tojson`` filter.  Alternatively use
       ``|tojson|forceescape``.
    """
    rv = dumps(obj, **kwargs)
    # Escape each character that could break out of the surrounding
    # script block or single quoted attribute.
    for sChar, sEscape in ((u'<', u'\\u003c'),
                           (u'>', u'\\u003e'),
                           (u'&', u'\\u0026'),
                           (u"'", u'\\u0027')):
        rv = rv.replace(sChar, sEscape)
    if not _slash_escape:
        # Normalize slash escaping across simplejson versions.
        rv = rv.replace('\\/', '/')
    return rv
def htmlsafe_dump(obj, fp, **kwargs):
    """Like :func:`htmlsafe_dumps` but writes into a file object.

    The payload is coerced with ``text_type`` (from ``._compat``, already
    imported by this module) instead of the Python 2-only ``unicode``
    builtin, which raised a ``NameError`` on Python 3.
    """
    fp.write(text_type(htmlsafe_dumps(obj, **kwargs)))
def jsonify(*args, **kwargs):
    """Create a :class:`~flask.Response` carrying the JSON representation
    of the given arguments with an `application/json` mimetype.  The
    arguments to this function are the same as to the :class:`dict`
    constructor.

    Example usage::

        from flask import jsonify

        @app.route('/_get_current_user')
        def get_current_user():
            return jsonify(username=g.user.username,
                           email=g.user.email,
                           id=g.user.id)

    This will send a JSON response like this to the browser::

        {
            "username": "admin",
            "email": "admin@localhost",
            "id": 42
        }

    For security reasons only objects are supported toplevel.  For more
    information about this, have a look at :ref:`json-security`.

    The response is pretty printed when it was not requested with
    ``X-Requested-With: XMLHttpRequest`` (to simplify debugging), unless
    the ``JSONIFY_PRETTYPRINT_REGULAR`` config parameter is set to false.

    .. versionadded:: 0.2
    """
    wants_pretty = (current_app.config['JSONIFY_PRETTYPRINT_REGULAR']
                    and not request.is_xhr)
    payload = dumps(dict(*args, **kwargs),
                    indent=2 if wants_pretty else None)
    return current_app.response_class(payload, mimetype='application/json')
def tojson_filter(obj, **kwargs):
    """Jinja ``|tojson`` filter: dump ``obj`` as HTML-safe JSON markup."""
    safe_json = htmlsafe_dumps(obj, **kwargs)
    return Markup(safe_json)
| mit |
fowode/pychess | lib/pychess/Variants/blindfold.py | 21 | 1640 | from pychess.Utils.const import *
from pychess.Utils.Board import Board
class BlindfoldBoard(Board):
    """Board subclass tagging games as the blindfold variant."""
    variant = BLINDFOLDCHESS
class BlindfoldChess:
    """Variant descriptor: standard chess rules, all figurines hidden.

    Only the rendering differs from normal chess, so the CECP engine
    protocol name stays "normal".
    """
    __desc__ = _("Classic chess rules with hidden figurines\n" +
                 "http://en.wikipedia.org/wiki/Blindfold_chess")
    name = _("Blindfold")
    cecp_name = "normal"
    board = BlindfoldBoard
    # Standard start position and standard move rules apply.
    need_initial_board = False
    standard_rules = True
    variant_group = VARIANTS_BLINDFOLD
class HiddenPawnsBoard(Board):
    """Board subclass tagging games as the hidden-pawns variant."""
    variant = HIDDENPAWNSCHESS
class HiddenPawnsChess:
    """Variant descriptor: standard chess rules, pawns not rendered."""
    __desc__ = _("Classic chess rules with hidden pawns\n" +
                 "http://en.wikipedia.org/wiki/Blindfold_chess")
    name = _("Hidden pawns")
    cecp_name = "normal"
    board = HiddenPawnsBoard
    # Standard start position and standard move rules apply.
    need_initial_board = False
    standard_rules = True
    variant_group = VARIANTS_BLINDFOLD
class HiddenPiecesBoard(Board):
    """Board subclass tagging games as the hidden-pieces variant."""
    variant = HIDDENPIECESCHESS
class HiddenPiecesChess:
    """Variant descriptor: standard chess rules, pieces (not pawns) hidden."""
    __desc__ = _("Classic chess rules with hidden pieces\n" +
                 "http://en.wikipedia.org/wiki/Blindfold_chess")
    name = _("Hidden pieces")
    cecp_name = "normal"
    board = HiddenPiecesBoard
    # Standard start position and standard move rules apply.
    need_initial_board = False
    standard_rules = True
    variant_group = VARIANTS_BLINDFOLD
class AllWhiteBoard(Board):
    """Board subclass tagging games as the all-white variant."""
    variant = ALLWHITECHESS
class AllWhiteChess:
    """Variant descriptor: standard chess rules, both sides drawn white."""
    __desc__ = _("Classic chess rules with all pieces white\n" +
                 "http://en.wikipedia.org/wiki/Blindfold_chess")
    name = _("All white")
    cecp_name = "normal"
    board = AllWhiteBoard
    # Standard start position and standard move rules apply.
    need_initial_board = False
    standard_rules = True
    variant_group = VARIANTS_BLINDFOLD
| gpl-3.0 |
maurofaccenda/ansible | lib/ansible/modules/network/nxos/nxos_ospf_vrf.py | 8 | 16203 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_ospf_vrf
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages a VRF for an OSPF router.
description:
- Manages a VRF for an OSPF router.
author: Gabriele Gerbino (@GGabriele)
notes:
- Value I(default) restores params default value, if any.
Otherwise it removes the existing param configuration.
options:
vrf:
description:
- Name of the resource instance. Valid value is a string.
The name 'default' is a valid VRF representing the global OSPF.
required: false
default: default
ospf:
description:
- Name of the OSPF instance.
required: true
default: null
router_id:
description:
- Router Identifier (ID) of the OSPF router VRF instance.
required: false
default: null
default_metric:
description:
- Specify the default Metric value. Valid values are an integer
or the keyword 'default'.
required: false
default: null
log_adjacency:
description:
- Controls the level of log messages generated whenever a
neighbor changes state. Valid values are 'log', 'detail',
and 'default'.
required: false
choices: ['log','detail','default']
default: null
timer_throttle_lsa_start:
description:
- Specify the start interval for rate-limiting Link-State
Advertisement (LSA) generation. Valid values are an integer,
in milliseconds, or the keyword 'default'.
required: false
default: null
timer_throttle_lsa_hold:
description:
- Specify the hold interval for rate-limiting Link-State
Advertisement (LSA) generation. Valid values are an integer,
in milliseconds, or the keyword 'default'.
required: false
default: null
timer_throttle_lsa_max:
description:
- Specify the max interval for rate-limiting Link-State
Advertisement (LSA) generation. Valid values are an integer,
in milliseconds, or the keyword 'default'.
required: false
default: null
timer_throttle_spf_start:
description:
- Specify initial Shortest Path First (SPF) schedule delay.
Valid values are an integer, in milliseconds, or
the keyword 'default'.
required: false
default: null
timer_throttle_spf_hold:
description:
- Specify minimum hold time between Shortest Path First (SPF)
calculations. Valid values are an integer, in milliseconds,
or the keyword 'default'.
required: false
default: null
timer_throttle_spf_max:
description:
- Specify the maximum wait time between Shortest Path First (SPF)
calculations. Valid values are an integer, in milliseconds,
or the keyword 'default'.
required: false
default: null
auto_cost:
description:
- Specifies the reference bandwidth used to assign OSPF cost.
Valid values are an integer, in Mbps, or the keyword 'default'.
required: false
default: null
'''
EXAMPLES = '''
- nxos_ospf_vrf:
ospf: 1
timer_throttle_spf_start: 50
timer_throttle_spf_hold: 1000
timer_throttle_spf_max: 2000
timer_throttle_lsa_start: 60
timer_throttle_lsa_hold: 1100
timer_throttle_lsa_max: 3000
vrf: test
state: present
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"ospf": "1", "timer_throttle_lsa_hold": "1100",
"timer_throttle_lsa_max": "3000", "timer_throttle_lsa_start": "60",
"timer_throttle_spf_hold": "1000",
"timer_throttle_spf_max": "2000", "timer_throttle_spf_start": "50",
"vrf": "test"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"auto_cost": "40000", "default_metric": "", "log_adjacency": "",
"ospf": "1", "router_id": "", "timer_throttle_lsa_hold": "5000",
"timer_throttle_lsa_max": "5000", "timer_throttle_lsa_start": "0",
"timer_throttle_spf_hold": "1000",
"timer_throttle_spf_max": "5000",
"timer_throttle_spf_start": "200", "vrf": "test"}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"auto_cost": "40000", "default_metric": "", "log_adjacency": "",
"ospf": "1", "router_id": "", "timer_throttle_lsa_hold": "1100",
"timer_throttle_lsa_max": "3000", "timer_throttle_lsa_start": "60",
"timer_throttle_spf_hold": "1000",
"timer_throttle_spf_max": "2000", "timer_throttle_spf_start": "50",
"vrf": "test"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["router ospf 1", "vrf test", "timers throttle lsa 60 1100 3000",
"timers throttle spf 50 1000 2000"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.nxos import get_config, load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig
import re
# Maps module argument names to the NX-OS CLI command that configures them.
# The three lsa / spf timer arguments intentionally share one command;
# get_value() tells them apart by their position in the command payload.
PARAM_TO_COMMAND_KEYMAP = {
    'router_id': 'router-id',
    'default_metric': 'default-metric',
    'log_adjacency': 'log-adjacency-changes',
    'timer_throttle_lsa_start': 'timers throttle lsa',
    'timer_throttle_lsa_max': 'timers throttle lsa',
    'timer_throttle_lsa_hold': 'timers throttle lsa',
    'timer_throttle_spf_max': 'timers throttle spf',
    'timer_throttle_spf_start': 'timers throttle spf',
    'timer_throttle_spf_hold': 'timers throttle spf',
    'auto_cost': 'auto-cost reference-bandwidth'
}
# Device default values, substituted when the user passes the literal
# string 'default' for an argument (timers in ms, auto_cost in Mbps).
PARAM_TO_DEFAULT_KEYMAP = {
    'timer_throttle_lsa_start': '0',
    'timer_throttle_lsa_max': '5000',
    'timer_throttle_lsa_hold': '5000',
    'timer_throttle_spf_start': '200',
    'timer_throttle_spf_max': '5000',
    'timer_throttle_spf_hold': '1000',
    'auto_cost': '40000'
}
def invoke(name, *args, **kwargs):
    """Call the module-level function named *name*, if it exists.

    Returns the function's result, or ``None`` when no such global is
    defined (mirroring ``dict.get`` semantics).
    """
    func = globals().get(name)
    return func(*args, **kwargs) if func else None
def get_value(arg, config, module):
    """Extract the current value of *arg* from the device config text.

    ``config`` is the body of the ``router ospf`` (vrf) section.  The
    value is located with a regex keyed on the CLI command mapped to
    *arg* in PARAM_TO_COMMAND_KEYMAP; returns '' when not configured.
    """
    REGEX = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
    value = ''
    if PARAM_TO_COMMAND_KEYMAP[arg] in config:
        if arg == 'log_adjacency':
            # log-adjacency-changes carries no numeric payload; only the
            # optional 'detail' flag distinguishes the two settings.
            if 'log-adjacency-changes detail' in config:
                value = 'detail'
            else:
                value = 'log'
        else:
            value_list = REGEX.search(config).group('value').split()
            # 'timers throttle ... <start> <hold> <max>': pick the field
            # matching the argument's suffix.
            if 'hold' in arg:
                value = value_list[1]
            elif 'max' in arg:
                value = value_list[2]
            elif 'auto' in arg:
                # auto-cost may be reported in Gbps; normalize to Mbps.
                if 'Gbps' in value_list:
                    value = str(int(value_list[0]) * 1000)
                else:
                    value = value_list[0]
            else:
                value = value_list[0]
    return value
def get_existing(module, args):
    """Parse the device running-config into a dict of current values.

    Reads the ``router ospf <id>`` section (optionally scoped to a vrf
    sub-section) and extracts each parameter in *args* via get_value().
    Returns an empty dict when the section does not exist.
    """
    existing = {}
    netcfg = CustomNetworkConfig(indent=2, contents=get_config(module))
    parents = ['router ospf {0}'.format(module.params['ospf'])]
    if module.params['vrf'] != 'default':
        parents.append('vrf {0}'.format(module.params['vrf']))
    config = netcfg.get_section(parents)
    if config:
        if module.params['vrf'] == 'default':
            # The global section also contains every vrf sub-section;
            # truncate at the first 'vrf' line so those values are ignored.
            splitted_config = config.splitlines()
            vrf_index = False
            for index in range(0, len(splitted_config) - 1):
                if 'vrf' in splitted_config[index].strip():
                    vrf_index = index
                    break
            if vrf_index:
                config = '\n'.join(splitted_config[0:vrf_index])
        for arg in args:
            if arg not in ['ospf', 'vrf']:
                existing[arg] = get_value(arg, config, module)
        existing['vrf'] = module.params['vrf']
        existing['ospf'] = module.params['ospf']
    return existing
def apply_key_map(key_map, table):
    """Translate the keys of *table* through *key_map*.

    Entries whose key has no (truthy) mapping are dropped; values are
    carried over unchanged.  The original implementation branched on the
    truthiness of the value only to perform the identical assignment in
    both branches -- the redundant branch is removed.
    """
    new_dict = {}
    for key, value in table.items():
        new_key = key_map.get(key)
        if new_key:
            new_dict[new_key] = value
    return new_dict
def state_present(module, existing, proposed, candidate):
    """Build the CLI commands that move the device toward *proposed*.

    Commands are accumulated and attached to *candidate* under the
    appropriate ``router ospf`` / ``vrf`` parents.  Boolean values map to
    the bare / ``no``-prefixed command; the literal 'default' negates the
    currently configured value; everything else emits ``<command> <value>``
    with special handling for the composite timer and auto-cost commands.
    """
    commands = list()
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
    for key, value in proposed_commands.items():
        if value is True:
            commands.append(key)
        elif value is False:
            commands.append('no {0}'.format(key))
        elif value == 'default':
            # Resetting to default: negate whatever is currently set.
            if existing_commands.get(key):
                existing_value = existing_commands.get(key)
                commands.append('no {0} {1}'.format(key, existing_value))
        else:
            if key == 'timers throttle lsa':
                # Composite command: emit the full start/hold/max triple.
                command = '{0} {1} {2} {3}'.format(
                    key,
                    proposed['timer_throttle_lsa_start'],
                    proposed['timer_throttle_lsa_hold'],
                    proposed['timer_throttle_lsa_max'])
            elif key == 'timers throttle spf':
                command = '{0} {1} {2} {3}'.format(
                    key,
                    proposed['timer_throttle_spf_start'],
                    proposed['timer_throttle_spf_hold'],
                    proposed['timer_throttle_spf_max'])
            elif key == 'log-adjacency-changes':
                if value == 'log':
                    command = key
                elif value == 'detail':
                    command = '{0} {1}'.format(key, value)
            elif key == 'auto-cost reference-bandwidth':
                if len(value) < 5:
                    command = '{0} {1} Mbps'.format(key, value)
                else:
                    # Floor division keeps the Gbps conversion an integer
                    # on Python 3 as well ('/' would render e.g. '40.0').
                    value = str(int(value) // 1000)
                    command = '{0} {1} Gbps'.format(key, value)
            else:
                command = '{0} {1}'.format(key, value.lower())
            if command not in commands:
                commands.append(command)
    if commands:
        parents = ['router ospf {0}'.format(module.params['ospf'])]
        if module.params['vrf'] != 'default':
            parents.append('vrf {0}'.format(module.params['vrf']))
        candidate.add(commands, parents=parents)
def state_absent(module, existing, proposed, candidate):
    """Build the CLI commands that remove the OSPF VRF configuration.

    For the default VRF every configured parameter is negated
    individually; a named VRF is removed wholesale with ``no vrf``.
    """
    commands = []
    parents = ['router ospf {0}'.format(module.params['ospf'])]
    if module.params['vrf'] == 'default':
        existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
        for key, value in existing_commands.items():
            if value:
                if key == 'timers throttle lsa':
                    # The 'no' form must repeat the full start/hold/max triple.
                    command = 'no {0} {1} {2} {3}'.format(
                        key,
                        existing['timer_throttle_lsa_start'],
                        existing['timer_throttle_lsa_hold'],
                        existing['timer_throttle_lsa_max'])
                elif key == 'timers throttle spf':
                    command = 'no {0} {1} {2} {3}'.format(
                        key,
                        existing['timer_throttle_spf_start'],
                        existing['timer_throttle_spf_hold'],
                        existing['timer_throttle_spf_max'])
                else:
                    existing_value = existing_commands.get(key)
                    command = 'no {0} {1}'.format(key, existing_value)
                if command not in commands:
                    commands.append(command)
    else:
        commands = ['no vrf {0}'.format(module.params['vrf'])]
    candidate.add(commands, parents=parents)
def main():
    """Ansible module entry point.

    Gathers the current device state, normalizes the requested
    parameters, diffs the two, and loads the resulting commands onto the
    device (returning the change summary via exit_json).
    """
    argument_spec = dict(
        vrf=dict(required=False, type='str', default='default'),
        ospf=dict(required=True, type='str'),
        router_id=dict(required=False, type='str'),
        default_metric=dict(required=False, type='str'),
        log_adjacency=dict(required=False, type='str',
                           choices=['log', 'detail', 'default']),
        timer_throttle_lsa_start=dict(required=False, type='str'),
        timer_throttle_lsa_hold=dict(required=False, type='str'),
        timer_throttle_lsa_max=dict(required=False, type='str'),
        timer_throttle_spf_start=dict(required=False, type='str'),
        timer_throttle_spf_hold=dict(required=False, type='str'),
        timer_throttle_spf_max=dict(required=False, type='str'),
        auto_cost=dict(required=False, type='str'),
        state=dict(choices=['present', 'absent'], default='present',
                   required=False),
        include_defaults=dict(default=True),
        config=dict(),
        save=dict(type='bool', default=False)
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
    state = module.params['state']
    # All parameters that participate in the existing/proposed diff.
    args = [
        'vrf',
        'ospf',
        'router_id',
        'default_metric',
        'log_adjacency',
        'timer_throttle_lsa_start',
        'timer_throttle_lsa_hold',
        'timer_throttle_lsa_max',
        'timer_throttle_spf_start',
        'timer_throttle_spf_hold',
        'timer_throttle_spf_max',
        'auto_cost'
    ]
    existing = invoke('get_existing', module, args)
    end_state = existing
    proposed_args = dict((k, v) for k, v in module.params.items()
                         if v is not None and k in args)
    proposed = {}
    for key, value in proposed_args.items():
        if key != 'interface':
            # Normalize string booleans and expand the literal 'default'
            # into the known device default value (when one exists).
            if str(value).lower() == 'true':
                value = True
            elif str(value).lower() == 'false':
                value = False
            elif str(value).lower() == 'default':
                value = PARAM_TO_DEFAULT_KEYMAP.get(key)
                if value is None:
                    value = 'default'
            if existing.get(key) or (not existing.get(key) and value):
                proposed[key] = value
    result = {}
    if state == 'present' or (state == 'absent' and existing):
        candidate = CustomNetworkConfig(indent=3)
        invoke('state_%s' % state, module, existing, proposed, candidate)
        response = load_config(module, candidate)
        result.update(response)
    else:
        result['updates'] = []
    if module._verbosity > 0:
        # Verbose runs also report before/after/proposed state snapshots.
        end_state = invoke('get_existing', module, args)
        result['end_state'] = end_state
        result['existing'] = existing
        result['proposed'] = proposed_args
    module.exit_json(**result)
| gpl-3.0 |
ajayaa/keystone | keystone/version/service.py | 2 | 4231 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import sys
from oslo_config import cfg
from oslo_log import log
from paste import deploy
import routes
from keystone import assignment
from keystone import auth
from keystone import catalog
from keystone.common import wsgi
from keystone import credential
from keystone import endpoint_policy
from keystone import identity
from keystone import policy
from keystone import resource
from keystone import token
from keystone import trust
from keystone.version import controllers
from keystone.version import routers
CONF = cfg.CONF
LOG = log.getLogger(__name__)
def loadapp(conf, name):
    """Load a paste application and remember it for version reporting.

    NOTE(blk-u): the loaded app is cached on the controllers module, the
    same way public_app_factory() and v3_app_factory() register their
    versions there.
    """
    app = deploy.loadapp(conf, name=name)
    controllers.latest_app = app
    return app
def fail_gracefully(f):
    """Decorator: log any exception raised by *f* and abort the process."""
    @functools.wraps(f)
    def _guarded(*args, **kw):
        try:
            return f(*args, **kw)
        except Exception as exc:
            # Full traceback at debug level; the message itself is
            # printed to all logs before terminating with exit code 1.
            LOG.debug(exc, exc_info=True)
            LOG.critical(exc)
            sys.exit(1)
    return _guarded
@fail_gracefully
def public_app_factory(global_conf, **local_conf):
    """Paste factory for the v2 public API pipeline."""
    controllers.register_version('v2.0')
    return wsgi.ComposingRouter(routes.Mapper(),
                                [assignment.routers.Public(),
                                 token.routers.Router(),
                                 routers.VersionV2('public'),
                                 routers.Extension(False)])
@fail_gracefully
def admin_app_factory(global_conf, **local_conf):
    """Paste factory for the v2 admin API pipeline.

    Adds the identity, assignment and resource admin routers on top of
    the token routes shared with the public pipeline.
    """
    controllers.register_version('v2.0')
    return wsgi.ComposingRouter(routes.Mapper(),
                                [identity.routers.Admin(),
                                 assignment.routers.Admin(),
                                 token.routers.Router(),
                                 resource.routers.Admin(),
                                 routers.VersionV2('admin'),
                                 routers.Extension()])
@fail_gracefully
def public_version_app_factory(global_conf, **local_conf):
    """Paste factory serving only the public version discovery routes."""
    return wsgi.ComposingRouter(routes.Mapper(),
                                [routers.Versions('public')])
@fail_gracefully
def admin_version_app_factory(global_conf, **local_conf):
    """Paste factory serving only the admin version discovery routes."""
    return wsgi.ComposingRouter(routes.Mapper(),
                                [routers.Versions('admin')])
@fail_gracefully
def v3_app_factory(global_conf, **local_conf):
    """Paste factory assembling the v3 API from each subsystem's routers."""
    controllers.register_version('v3')
    mapper = routes.Mapper()
    sub_routers = []
    _routers = []
    # NOTE(dstanek): Routers should be ordered by their frequency of use in
    # a live system. This is due to the routes implementation. The most
    # frequently used routers should appear first.
    router_modules = [auth,
                      assignment,
                      catalog,
                      credential,
                      identity,
                      policy,
                      resource]
    # Optional subsystems are appended only when enabled in configuration.
    if CONF.trust.enabled:
        router_modules.append(trust)
    if CONF.endpoint_policy.enabled:
        router_modules.append(endpoint_policy)
    for module in router_modules:
        routers_instance = module.routers.Routers()
        _routers.append(routers_instance)
        routers_instance.append_v3_routers(mapper, sub_routers)
    # Add in the v3 version api
    sub_routers.append(routers.VersionV3('public', _routers))
    return wsgi.ComposingRouter(mapper, sub_routers)
| apache-2.0 |
JT5D/Alfred-Popclip-Sublime | Sublime Text 2/Python PEP8 Autoformat/libs/lib2to3/fixes/fix_print.py | 7 | 2839 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for print.
Change:
'print' into 'print()'
'print ...' into 'print(...)'
'print ... ,' into 'print(..., end=" ")'
'print >>x, ...' into 'print(..., file=x)'
No changes are applied if print_function is imported from __future__
"""
# Local imports
from .. import patcomp
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, Comma, String, is_tuple
# Pre-compiled pattern matching a single parenthesised atom (e.g. the
# argument of ``print(x)``); used to avoid wrapping parens around an
# expression that is already parenthesised.
parend_expr = patcomp.compile_pattern(
              """atom< '(' [atom|STRING|NAME] ')' >"""
              )
class FixPrint(fixer_base.BaseFix):
    """Fixer turning Python 2 ``print`` statements into ``print()`` calls."""

    PATTERN = """
              simple_stmt< any* bare='print' any* > | print_stmt
              """

    def transform(self, node, results):
        """Return the replacement ``print(...)`` node, or None to leave
        *node* untouched (bare ``print`` is replaced in place)."""
        assert results
        bare_print = results.get("bare")
        if bare_print:
            # Special-case print all by itself
            bare_print.replace(Call(Name(u"print"), [],
                               prefix=bare_print.prefix))
            return
        assert node.children[0] == Name(u"print")
        args = node.children[1:]
        if len(args) == 1 and parend_expr.match(args[0]):
            # We don't want to keep sticking parens around an
            # already-parenthesised expression.
            return
        # NOTE(review): ``sep`` is never assigned below; it seems to be
        # kept only so the kwarg emission treats all three uniformly.
        sep = end = file = None
        if args and args[-1] == Comma():
            # A trailing comma suppressed the newline: emit end=" ".
            args = args[:-1]
            end = " "
        if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"):
            # ``print >>stream, ...`` redirects output: emit file=stream.
            assert len(args) >= 2
            file = args[1].clone()
            args = args[3:] # Strip a possible comma after the file expression
        # Now synthesize a print(args, sep=..., end=..., file=...) node.
        l_args = [arg.clone() for arg in args]
        if l_args:
            l_args[0].prefix = u""
        if sep is not None or end is not None or file is not None:
            if sep is not None:
                self.add_kwarg(l_args, u"sep", String(repr(sep)))
            if end is not None:
                self.add_kwarg(l_args, u"end", String(repr(end)))
            if file is not None:
                self.add_kwarg(l_args, u"file", file)
        n_stmt = Call(Name(u"print"), l_args)
        n_stmt.prefix = node.prefix
        return n_stmt

    def add_kwarg(self, l_nodes, s_kwd, n_expr):
        """Append ``s_kwd=n_expr`` (with a separating comma when needed)
        to the argument node list *l_nodes*."""
        # XXX All this prefix-setting may lose comments (though rarely)
        n_expr.prefix = u""
        n_argument = pytree.Node(self.syms.argument,
                                 (Name(s_kwd),
                                  pytree.Leaf(token.EQUAL, u"="),
                                  n_expr))
        if l_nodes:
            l_nodes.append(Comma())
            n_argument.prefix = u" "
        l_nodes.append(n_argument)
| gpl-2.0 |
nilouco/dpAutoRigSystem | dpAutoRigSystem/Extras/sqSpaceSwitcher.py | 1 | 61786 | try:
import math
import maya.cmds as cmds
import pymel.core as pymel
import maya.OpenMaya as om
from Ui import uiSpaceSwitcher as uiSpaceSwitcher
reload(uiSpaceSwitcher)
from maya import OpenMayaUI
try:
from sstk.libs.libQt import QtCore, QtWidgets
from sstk.libs import libSerialization
except ImportError:
from ..Vendor.Qt import QtCore, QtGui, QtWidgets, QtCompat
from ..Modules.Library import libSerialization
from functools import partial
except Exception as e:
print "Error: importing python modules!!!\n",
print e
# TODO - Fix warning message on scene open when the tool is already (Don't seem to cause any problem)
# TODO - Need more testing
# global variables to this module:
CLASS_NAME = "SpaceSwitcher"
TITLE = "m071_SpaceSwitcher"
DESCRIPTION = "m072_SpaceSwitcherDesc"
ICON = "/Icons/dp_spaceSwitcher.png"
class QDoubleEmptyStringValidator(QtGui.QIntValidator):
    """Validator that additionally accepts the empty string.

    Qt's stock validators report an empty field as Intermediate, which
    suppresses the acceptable "finished edit" signal; this subclass
    promotes "" to Acceptable so clearing the field still commits.

    NOTE(review): despite the "Double" in the name, the base class is
    QIntValidator, so only integers are validated -- confirm intent.
    """

    def __init__(self, *args, **kwargs):
        super(QDoubleEmptyStringValidator, self).__init__(*args, **kwargs)

    def validate(self, _textInput, _pos):
        state, text, pos = super(QDoubleEmptyStringValidator, self).validate(_textInput, _pos)
        rejected = (QtGui.QValidator.Invalid, QtGui.QValidator.Intermediate)
        if state in rejected and _textInput == "":
            return QtGui.QValidator.Acceptable, text, pos
        return state, text, pos
class SpaceSwitcherLogic(object):
"""
This class is used to setup a SpaceSwitch system on a node
"""
WORLD_NODE_NAME = "World_SpaceSwitcher"
    def __init__(self):
        """Initialize empty space-switch state; reuse the scene's existing
        world reference node if one is already present."""
        self.aDrivers = []  # Possible parent transforms in the system
        self.nDriven = None  # Base driven object (constraint is not set directly on it)
        self.nSwConst = None  # parentConstraint implementing the space switch
        self.nSwConstRecept = None  # Intermediate transform receiving the constraint
        self.aFreeIndex = []  # Constraint target indexes freed by removed parents
        self.sSysName = "SpaceSwitcher_"  # Name (prefix) of the system
        tempWorld = pymel.ls(self.WORLD_NODE_NAME)
        if tempWorld:
            self.worldNode = tempWorld[0]
        else:
            self.worldNode = None
    def setup_space_switch(self, nDriven=None, aDrivers=None, bCreateWolrdNode=False, bCreateParent=True):
        """
        Setup a new space switch system on the node.
        :param nDriven: Transform to drive; defaults to the last selected node.
        :param aDrivers: Explicit parent list (see NOTE below on how it is used).
        :param bCreateWolrdNode: Create (and use) the hidden world reference node.
        :param bCreateParent: Create an intermediate parent group to receive the constraint;
                              otherwise the driven node's current parent is used.
        """
        aCurSel = pymel.selected()
        # NOTE(review): this condition looks inverted -- when aDrivers IS
        # provided the argument is ignored in favor of the selection, and
        # when it is None, aParent becomes None.  Confirm intent before
        # relying on the aDrivers parameter.
        if aDrivers is not None:
            aParent = aCurSel[0:-1]
        else:
            aParent = aDrivers
        bContinue = False
        # Create the worldNode
        if not self.worldNode and bCreateWolrdNode:
            self.worldNode = pymel.createNode("transform", n=self.WORLD_NODE_NAME)
            self.worldNode.visibility.set(False)
            for pAttr in self.worldNode.listAttr(keyable=True):
                pymel.setAttr(pAttr, keyable=False, lock=True)
            self.worldNode.hiddenInOutliner = True
        if self.worldNode:
            self.aDrivers.append(self.worldNode)
        if not nDriven:
            if len(aCurSel) == 0:
                pymel.informBox("Space Switcher", "You need to choose at least the node to constraint")
            # The user only selected the driven node, so create a space switch between it's parent and the world
            elif len(aCurSel) == 1:
                self.nDriven = aCurSel[0]
                bContinue = True
            else:
                self.nDriven = aCurSel[-1]
                bContinue = True
        else:
            self.nDriven = nDriven
            bContinue = True
        if bContinue:
            # NOTE(review): uses the nDriven parameter directly here; if it
            # was None and the driven node came from the selection above,
            # this raises an AttributeError.  Confirm callers always pass
            # nDriven explicitly.
            self.sSysName += nDriven.name()
            sStripName = str(self.nDriven.stripNamespace()).replace(pymel.other.NameParser.PARENT_SEP, "")
            # Setup the intermediate node to manage the spaceSwitch
            if bCreateParent:
                self.nSwConstRecept = pymel.createNode("transform", ss=True)
                mDriven = self.nDriven.getMatrix(worldSpace=True)
                self.nSwConstRecept.setMatrix(mDriven, worldSpace=True)
                self.nSwConstRecept.rename(sStripName + "_Const_Grp")
                self.nDriven.setParent(self.nSwConstRecept)
            else:
                self.nSwConstRecept = self.nDriven.getParent()
            # Create the parent constraint for the first node, but add the other target manually
            if bCreateWolrdNode:
                self.nSwConst = pymel.parentConstraint(self.worldNode, self.nSwConstRecept,
                                                       n=sStripName + "_SpaceSwitch_Const", mo=True)
            else:
                self.nSwConst = pymel.parentConstraint(aParent[0], self.nSwConstRecept,
                                                       n=sStripName + "_SpaceSwitch_Const", mo=True)
                self.aDrivers.append(aParent[0])
                # Remove the first parent setuped before
                aParent = aParent[1:]
            self.nSwConst.getWeightAliasList()[0].set(0.0)
            # Setup the first key for the current activate constraint, targets offset and rest position
            if pymel.referenceQuery(self.nDriven, isNodeReferenced=True):
                pymel.setKeyframe(self.nSwConst.getWeightAliasList()[0], t=0, ott="step")
                pymel.setKeyframe(self.nSwConst.target[0].targetOffsetTranslate, t=0, ott="step")
                pymel.setKeyframe(self.nSwConst.target[0].targetOffsetRotate, t=0, ott="step")
                pymel.setKeyframe(self.nSwConst.restTranslate, t=0, ott="step")
                pymel.setKeyframe(self.nSwConst.restRotate, t=0, ott="step")
            if aParent:
                self.add_target(aParent, firstSetup=True)
            # else: #If this is the only parent setuped, automaticly switch to it
            # Do not switch in a non-reference scene to prevent problem with referenced object
            # if pymel.referenceQuery(self.nDriven, isNodeReferenced=True):
            # self.do_switch(0)
            pymel.select(nDriven)
def is_parent_exist(self, aNewParentList):
"""
Look if a node is already a possible parent in the system
:param aNewParentList:
"""
aExistTgt = self.nSwConst.getTargetList()
for nParent in aNewParentList:
if nParent in aExistTgt:
return True
return False
def _get_adjusted_index(self, _iCurIndex):
"""
Return the good index in the parent constraint to prevent any problem if
one parent have been removed from it
"""
if _iCurIndex in self.aFreeIndex:
return self._get_adjusted_index(_iCurIndex + 1)
else:
return _iCurIndex
pass
    def add_target(self, aNewParent, firstSetup=False):
        """
        Add new parent targets to the space switch constraint.

        Each target is wired into the parent constraint by hand (weight
        attribute plus matrix/scale/rotate/pivot/translate plugs) instead of
        through pymel.parentConstraint, so that target indexes freed by a
        removal (self.aFreeIndex) can be reused.

        :param aNewParent: list of nodes to add as possible parents
        :param firstSetup: True when called from the initial system setup
        """
        aExistTgt = self.nSwConst.getTargetList()
        for nParent in aNewParent:
            # Build a flat attribute-safe name (no namespace, no hierarchy separators)
            sStripParentName = str(nParent.stripNamespace()).replace(pymel.other.NameParser.PARENT_SEP, "")
            # Check if we need to use an free index that could exist after some target removing
            if len(self.aFreeIndex) != 0:
                iNewIdx = self.aFreeIndex[0]
                self.aFreeIndex.pop(0)
            else:
                iNewIdx = len(self.nSwConst.getWeightAliasList())
            # Ensure that the parent doesn't already exist in the drivers list
            if not nParent in aExistTgt:
                # First, calculate the offset between the parent and the driven node
                vTrans = self._get_tm_offset(nParent, _type="t")
                vRot = self._get_tm_offset(nParent, _type="r")
                # Connect the new target manually in the parent constraint.
                # The very first target is created active (dv=1); later ones start inactive (dv=0).
                if iNewIdx == 0:
                    self.nSwConst.addAttr(sStripParentName + "W" + str(iNewIdx), at="double",
                                          min=0, max=1, dv=1, k=True, h=False)
                else:
                    self.nSwConst.addAttr(sStripParentName + "W" + str(iNewIdx), at="double",
                                          min=0, max=1, dv=0, k=True, h=False)
                pymel.connectAttr(nParent.parentMatrix, self.nSwConst.target[iNewIdx].targetParentMatrix)
                pymel.connectAttr(nParent.scale, self.nSwConst.target[iNewIdx].targetScale)
                pymel.connectAttr(nParent.rotateOrder, self.nSwConst.target[iNewIdx].targetRotateOrder)
                pymel.connectAttr(nParent.rotate, self.nSwConst.target[iNewIdx].targetRotate)
                pymel.connectAttr(nParent.rotatePivotTranslate, self.nSwConst.target[iNewIdx].targetRotateTranslate)
                pymel.connectAttr(nParent.rotatePivot, self.nSwConst.target[iNewIdx].targetRotatePivot)
                pymel.connectAttr(nParent.translate, self.nSwConst.target[iNewIdx].targetTranslate)
                # Link the created attributes to the weight value of the target
                nConstTgtWeight = pymel.Attribute(self.nSwConst.name() + "." + sStripParentName + "W" + str(iNewIdx))
                pymel.connectAttr(nConstTgtWeight, self.nSwConst.target[iNewIdx].targetWeight)
                # Set the offset information
                self.nSwConst.target[iNewIdx].targetOffsetTranslate.targetOffsetTranslateX.set(vTrans[0])
                self.nSwConst.target[iNewIdx].targetOffsetTranslate.targetOffsetTranslateY.set(vTrans[1])
                self.nSwConst.target[iNewIdx].targetOffsetTranslate.targetOffsetTranslateZ.set(vTrans[2])
                self.nSwConst.target[iNewIdx].targetOffsetRotate.targetOffsetRotateX.set(vRot[0])
                self.nSwConst.target[iNewIdx].targetOffsetRotate.targetOffsetRotateY.set(vRot[1])
                self.nSwConst.target[iNewIdx].targetOffsetRotate.targetOffsetRotateZ.set(vRot[2])
                # Do not key an non-referenced object to prevent problem when referencing the scene
                if pymel.referenceQuery(self.nSwConst, isNodeReferenced=True):
                    pymel.setKeyframe(nConstTgtWeight, t=0, ott="step")
                    pymel.setKeyframe(self.nSwConst.target[iNewIdx].targetOffsetTranslate, t=0, ott="step")
                    pymel.setKeyframe(self.nSwConst.target[iNewIdx].targetOffsetRotate, t=0, ott="step")
                self.aDrivers.insert(iNewIdx, nParent)
            else:
                # NOTE(review): self.nDriven is a PyNode, not a str — this
                # concatenation likely raises TypeError; confirm and use
                # self.nDriven.name() if so.
                print("Warning: " + nParent.name() + " is already a driver for " + self.nDriven)
        # If this is the only parent and it is not referenced, do the switch right now on the frame the user is
        # if (len(aNewParent) == 1 and not firstSetup and pymel.referenceQuery(self.nDriven, isNodeReferenced=True)):
        # self.do_switch(iNbTgt - 1) #Since a new target have been added, iNbTgt equal the index to switch too
    def remove_target(self, iIdx, _all=False):
        """
        Remove one target (or the whole constraint) from the system.

        :param iIdx: index of the target to remove; -1 refers to the rest
                     (original parent) position
        :param _all: when True, delete the constraint and reset the system's
                     bookkeeping instead of removing a single target
        """
        if _all:
            # Remove the constraint and reset some variable
            pymel.delete(self.nSwConst)
            self.aDrivers = []
            self.nDriven = None
            self.nSwConst = None  # SpaceSwitch constraint for the system
            self.nSwConstRecept = None  # Space Switch receiver
            self.aFreeIndex = []  # List of free index (Can only happen when a item is removed) in the parent constraint
        else:
            aExistTgt = self.nSwConst.getTargetList()
            iNbTgt = len(aExistTgt)
            # Before removing the target, we need to readjust the weight value and offset if needed
            aWeight = self.nSwConst.getWeightAliasList()
            if iNbTgt > iIdx:
                # Get all the frames where the removed index is active
                if iIdx == -1:
                    # -1 means the rest position: use the rest-translate keys as reference
                    aKeyTime = pymel.keyframe(self.nSwConst.restTranslate.restTranslateX, q=True)
                else:
                    aKeyTime = pymel.keyframe(aWeight[iIdx], q=True)
                # Cut the keys of all weight at time where the removed target was active.
                for t in aKeyTime:
                    if aWeight[iIdx].get(time=t) == 1.0:
                        for w in aWeight:
                            try:
                                pymel.cutKey(w, time=t)
                            except:
                                # Best-effort: presumably some weights have no key at t — ignore.
                                pass
                # Remove the target
                pTgt = aExistTgt[iIdx]
                pymel.parentConstraint(pTgt, self.nSwConstRecept, e=True, rm=True)
                # Remember the hole so add_target can reuse the index later
                self.aFreeIndex.append(iIdx)
                self.aFreeIndex.sort()
                self.aDrivers.pop(iIdx)
        # Update all constraint when removing one
        self.update_constraint_keys()
    def _get_tm_offset(self, _nParent, _nDriven=None, _type="t"):
        """
        Get the offset between the driven and a driver node, computed as
        drivenWorldMatrix * inverse(parentWorldMatrix).

        :param _nParent: driver node used as the reference frame
        :param _nDriven: node to compute the offset for; defaults to the
                         constraint receiver (self.nSwConstRecept)
        :param _type: "t" for translation, "r" for rotation (degrees)
        :return: [x, y, z] list; returns None for any other _type value
        """
        if _nDriven is None:
            _nDriven = self.nSwConstRecept
        mStart = om.MMatrix()
        mEnd = om.MMatrix()
        # Flatten the pymel matrices to lists so MScriptUtil can fill the MMatrix
        wmStart = _nParent.worldMatrix.get().__melobject__()
        wmEnd = _nDriven.worldMatrix.get().__melobject__()
        om.MScriptUtil().createMatrixFromList(wmStart, mStart)
        om.MScriptUtil().createMatrixFromList(wmEnd, mEnd)
        mOut = om.MTransformationMatrix(mEnd * mStart.inverse())
        if _type == "t":
            # Extract Translation
            vTran = om.MVector(mOut.getTranslation(om.MSpace.kTransform))
            vTranPymel = [vTran.x, vTran.y, vTran.z]
            return vTranPymel
        if _type == "r":
            # Extract Rotation, reordered to match the driven node's rotate order
            ro = _nDriven.rotateOrder.get()
            vRot = om.MEulerRotation(mOut.eulerRotation().reorder(ro))
            vRotDeg = [math.degrees(vRot.x), math.degrees(vRot.y), math.degrees(vRot.z)]
            return vRotDeg
    def update_constraint_keys(self, _updateAll=False):
        """
        Update all key in the constraint to refresh the offset when needed and prevent any snap.

        Walks every constraint key after the current time (or all keys when
        _updateAll is True) in frame order, moves the time slider to the frame
        before each key, recomputes the offset there, then re-keys it with
        stepped tangents.

        :param _updateAll: when True, update every key regardless of current time
        """
        aWeight = self.nSwConst.getWeightAliasList()
        fCurTime = pymel.currentTime()
        # List to stock information we need to update in the good frame order
        aKeyIndex = []
        # Check to collect the rest pos/rot key already created; index -1 stands for the rest (original parent) position
        aKeyTime = pymel.keyframe(self.nSwConst.restTranslate.restTranslateX, q=True)
        for t in aKeyTime:
            if t > fCurTime or _updateAll:
                aKeyIndex.append((t, -1))
        # Check to collect all constraint keys we would need to update
        for i, w in enumerate(aWeight):
            aKeyTime = pymel.keyframe(w, q=True)
            for t in aKeyTime:
                if t > fCurTime or _updateAll:
                    if w.get(time=t) == 1.0:  # Only update the key if the constraint is active
                        aKeyIndex.append((t, i))
        # Sort the key index list of tuple to ensure we update the data in the good frame order
        aKeyIndex.sort()
        # Suspend viewport refresh while scrubbing through the keys
        pymel.refresh(su=True)
        for t, i in aKeyIndex:
            # Evaluate the scene one frame before the key to read the pre-switch pose
            pymel.setCurrentTime(t - 1)
            if i >= 0:
                iAjustedIdx = self._get_adjusted_index(i)
                # Compute the offset between the parent and the driver
                vTrans = self._get_tm_offset(self.aDrivers[i], _type="t")
                vRot = self._get_tm_offset(self.aDrivers[i], _type="r")
                pymel.setCurrentTime(t)
                # Set the offset information
                self.nSwConst.target[iAjustedIdx].targetOffsetTranslate.targetOffsetTranslateX.set(vTrans[0])
                self.nSwConst.target[iAjustedIdx].targetOffsetTranslate.targetOffsetTranslateY.set(vTrans[1])
                self.nSwConst.target[iAjustedIdx].targetOffsetTranslate.targetOffsetTranslateZ.set(vTrans[2])
                self.nSwConst.target[iAjustedIdx].targetOffsetRotate.targetOffsetRotateX.set(vRot[0])
                self.nSwConst.target[iAjustedIdx].targetOffsetRotate.targetOffsetRotateY.set(vRot[1])
                self.nSwConst.target[iAjustedIdx].targetOffsetRotate.targetOffsetRotateZ.set(vRot[2])
                # Update keys
                pymel.setKeyframe(self.nSwConst.target[iAjustedIdx].targetOffsetTranslate, t=t, ott="step")
                pymel.setKeyframe(self.nSwConst.target[iAjustedIdx].targetOffsetRotate, t=t, ott="step")
                pymel.keyTangent(self.nSwConst.target[iAjustedIdx].targetOffsetTranslate, t=t, ott="step")  # Force step
                pymel.keyTangent(self.nSwConst.target[iAjustedIdx].targetOffsetRotate, t=t, ott="step")  # Force step
            else:
                # Get the offset information from the constraint trans and rot at the time before the key
                vTrans = self.nSwConst.constraintTranslate.get()
                vRot = self.nSwConst.constraintRotate.get()
                pymel.setCurrentTime(t)
                # Set the offset information
                self.nSwConst.restTranslate.set(vTrans)
                self.nSwConst.restRotate.set(vRot)
                # Update keys
                pymel.setKeyframe(self.nSwConst.restTranslate, t=t, ott="step")
                pymel.setKeyframe(self.nSwConst.restRotate, t=t, ott="step")
                pymel.keyTangent(self.nSwConst.restTranslate, t=t, ott="step")  # Force step
                pymel.keyTangent(self.nSwConst.restRotate, t=t, ott="step")  # Force step
        # Restore the user's original frame and re-enable refresh
        pymel.setCurrentTime(fCurTime)
        pymel.refresh(su=False)
    def do_switch(self, iIdx):
        """
        Switch the parent in which the driven node is constrained. Ensure that the switch is done without any snap
        of the driven object.

        :param iIdx: driver index to switch to; -1 means the original parent
                     (rest position)
        """
        fCurTime = pymel.currentTime()
        iActiveWeight = None
        aWeight = self.nSwConst.getWeightAliasList()
        # If none is set to 1.0, the value will be -1 which represent the current parent
        for i, fValue in enumerate(aWeight):
            # Get the value at the frame before and do not update the offset if we return to same one
            if fValue.get(time=fCurTime - 1) == 1.0:
                iActiveWeight = i
                break
        # Safety check to ensure that the rest data will be keyed
        if iActiveWeight is None:
            aRestKey = pymel.keyframe(self.nSwConst.restTranslate, q=True)
            if len(aRestKey) > 0:
                iActiveWeight = -1
        with pymel.UndoChunk():
            if iActiveWeight != iIdx:  # Check is good, but we need to adjust the index after
                # Update the constraint information for the offset of the parent on which we will switch
                if iIdx == -1:
                    # Switching back to the original parent: refresh the rest pose keys
                    pymel.parentConstraint(self.nSwConst, mo=True, e=True)
                    pymel.setKeyframe(self.nSwConst.restTranslate, t=fCurTime, ott="step")
                    pymel.setKeyframe(self.nSwConst.restRotate, t=fCurTime, ott="step")
                    pymel.keyTangent(self.nSwConst.restTranslate, t=fCurTime, ott="step")  # Force step
                    pymel.keyTangent(self.nSwConst.restRotate, t=fCurTime, ott="step")  # Force step
                else:
                    # Map the logical driver index to the physical constraint target index
                    iAdjustedIdx = self._get_adjusted_index(iIdx)
                    pymel.parentConstraint(self.aDrivers[iIdx], self.nSwConst, mo=True, e=True)
                    pymel.setKeyframe(self.nSwConst.target[iAdjustedIdx].targetOffsetTranslate, t=fCurTime, ott="step")
                    pymel.setKeyframe(self.nSwConst.target[iAdjustedIdx].targetOffsetRotate, t=fCurTime, ott="step")
                    pymel.keyTangent(self.nSwConst.target[iAdjustedIdx].targetOffsetTranslate, t=fCurTime,
                                     ott="step")  # Force step
                    pymel.keyTangent(self.nSwConst.target[iAdjustedIdx].targetOffsetRotate, t=fCurTime,
                                     ott="step")  # Force step
            # Activate only the requested weight (all zero when returning to the original parent)
            if iIdx == -1:
                for wAlias in aWeight:
                    wAlias.set(0.0)
            else:
                for i, wAlias in enumerate(aWeight):
                    if i == iIdx:
                        wAlias.set(1.0)
                    else:
                        wAlias.set(0.0)
            # Set a keyframe on the weight to keep the animation
            pymel.setKeyframe(aWeight, t=fCurTime, ott="step")
            pymel.keyTangent(aWeight, ott="step")  # Force step
            self.update_constraint_keys()
    def _adjust_firstKey(self, iTime, vRestT, vRestRot):
        """
        Adjust the offset of the first constraint key in the system to prevent a snap when we move keys.

        :param iTime: frame of the (new) first constraint key
        :param vRestT: world translation the driven node should keep at iTime
        :param vRestRot: world rotation the driven node should keep at iTime
        """
        pymel.setCurrentTime(iTime)
        aWeight = self.nSwConst.getWeightAliasList()
        for i, w in enumerate(aWeight):
            if w.get() == 1:
                iParentIdx = self._get_adjusted_index(i)
                # Create a node as a fake parent to have an easiest way to extract the matrix
                nTempDriven = pymel.createNode("transform")
                nTempDriven.setTranslation(vRestT, space="world")
                nTempDriven.setRotation(vRestRot, space="world")
                # Recompute the offset of the active target against the desired world pose
                vTrans = self._get_tm_offset(self.aDrivers[iParentIdx], _nDriven=nTempDriven, _type="t")
                vRot = self._get_tm_offset(self.aDrivers[iParentIdx], _nDriven=nTempDriven, _type="r")
                self.nSwConst.target[iParentIdx].targetOffsetTranslate.targetOffsetTranslateX.set(vTrans[0])
                self.nSwConst.target[iParentIdx].targetOffsetTranslate.targetOffsetTranslateY.set(vTrans[1])
                self.nSwConst.target[iParentIdx].targetOffsetTranslate.targetOffsetTranslateZ.set(vTrans[2])
                self.nSwConst.target[iParentIdx].targetOffsetRotate.targetOffsetRotateX.set(vRot[0])
                self.nSwConst.target[iParentIdx].targetOffsetRotate.targetOffsetRotateY.set(vRot[1])
                self.nSwConst.target[iParentIdx].targetOffsetRotate.targetOffsetRotateZ.set(vRot[2])
                pymel.setKeyframe(self.nSwConst.target[iParentIdx].targetOffsetTranslate, t=iTime, ott="step")
                pymel.setKeyframe(self.nSwConst.target[iParentIdx].targetOffsetRotate, t=iTime, ott="step")
                pymel.keyTangent(self.nSwConst.target[iParentIdx].targetOffsetTranslate, t=iTime,
                                 ott="step")  # Force step
                pymel.keyTangent(self.nSwConst.target[iParentIdx].targetOffsetRotate, t=iTime, ott="step")  # Force step
                # Clean up the temporary helper transform
                pymel.delete(nTempDriven)
    def moveKey(self, _iNewFrame, _iOldFrame):
        """
        Move a constraint key to another frame and ensure to update all constraint offset at the same time.

        :param _iNewFrame: destination frame of the key
        :param _iOldFrame: current frame of the key being moved
        """
        if _iNewFrame != _iOldFrame:
            # Suspend viewport refresh while scrubbing
            pymel.refresh(su=True)
            with pymel.UndoChunk():
                fCurTime = pymel.currentTime()
                # Check to collect all constraint keys we would need to update
                aAllKeysConst = pymel.keyframe(self.nSwConst, q=True)
                aAllKeysConst.sort()
                # Ensure to update all the keys that are after the move starting at the lowest frame change
                if _iNewFrame < _iOldFrame:
                    iAdjustFrame = _iNewFrame
                else:
                    iAdjustFrame = _iOldFrame
                # Get the rest data before moving the key in case we need to adjust the first frame
                pymel.setCurrentTime(iAdjustFrame)
                vRestT = self.nSwConst.constraintTranslate.get()
                vRestR = self.nSwConst.constraintRotate.get()
                # Actually move the key on every channel of the constraint
                pymel.keyframe(self.nSwConst, time=(_iOldFrame, _iOldFrame), o="over", timeChange=_iNewFrame)
                # Handle case where the move key become the first one in the animation
                if iAdjustFrame <= aAllKeysConst[0]:
                    self._adjust_firstKey(iAdjustFrame, vRestT, vRestR)
                # Evaluate from before the first frame so every later key is refreshed
                pymel.setCurrentTime(-1)
                self.update_constraint_keys()
                pymel.setCurrentTime(fCurTime)
            pymel.refresh(su=False)
    def deleteKey(self, _iFrame):
        """
        Delete a constraint key and ensure everything is correctly ajusted in the animation.

        :param _iFrame: frame of the constraint key to delete
        """
        # Suspend viewport refresh while scrubbing
        pymel.refresh(su=True)
        with pymel.UndoChunk():
            fCurTime = pymel.currentTime()
            # Check to collect all constraint keys we would need to update
            aAllKeysConst = pymel.keyframe(self.nSwConst, q=True)
            aAllKeysConst.sort()
            # Get the rest data before deleting the key in case we need to adjust the first frame
            pymel.setCurrentTime(_iFrame)
            vRestT = self.nSwConst.constraintTranslate.get()
            vRestR = self.nSwConst.constraintRotate.get()
            # Remove the key on every channel of the constraint
            pymel.cutKey(self.nSwConst, time=(_iFrame, _iFrame))
            # Handle case where the cut key become the first one in the animation
            if _iFrame == aAllKeysConst[0]:
                self._adjust_firstKey(_iFrame, vRestT, vRestR)
            pymel.setCurrentTime(_iFrame)
            self.update_constraint_keys()
            pymel.setCurrentTime(fCurTime)
        pymel.refresh(su=False)
# src: https://knowledge.autodesk.com/search-result/caas/CloudHelp/cloudhelp/2015/ENU/Maya-SDK/files/
# GUID-3F96AF53-A47E-4351-A86A-396E7BFD6665-htm.html
def getMayaWindow():
    """
    Return the main Maya window wrapped as a QWidget.

    :return: QWidget wrapping the Maya main window pointer
    """
    # The original called MQtUtil.mainWindow() twice and discarded the first
    # result; a single call is sufficient.
    ptr = OpenMayaUI.MQtUtil.mainWindow()
    return QtCompat.wrapInstance(long(ptr), QtWidgets.QWidget)
class Mode:
    """
    Enumeration of the tool's interaction modes.

    The original declared most constants with a trailing comma, which made
    them 1-tuples (e.g. ``(0,)``) while ``Remove`` was a plain int — the
    values were inconsistent in type. All constants are now plain ints;
    every comparison in the tool uses ``Mode.X`` constants on both sides,
    so this stays behavior-compatible.
    """

    Inactive = 0
    Create = 1
    Add = 2
    Switch = 3
    SwitchSelect = 4
    Remove = 5

    def __init__(self):
        pass
class SpaceSwitcherDialog(QtWidgets.QMainWindow):
    # NOTE(review): the default arg getMayaWindow() is evaluated once at class
    # definition time, not per instantiation — confirm this is intentional.
    def __init__(self, parent=getMayaWindow(), *args, **kwargs):
        """
        Build the SpaceSwitcher dialog: set up the generated UI, the parent
        list model, the combo boxes, the internal state and all callbacks,
        then do an initial refresh against the current scene/selection.

        :param parent: Qt parent widget; defaults to the Maya main window
        """
        super(SpaceSwitcherDialog, self).__init__(parent)
        # Column indexes of the frame-info table
        self.ID_COL_FRAME = 0
        self.ID_COL_PARENT = 1
        self.ID_COL_ACTION = 2
        self.sOriginalParent = "Original Parent"
        # Axis-combination choices for the position/rotation combo boxes
        self.aConstList = ["XYZ", "XY", "XZ", "YZ", "X", "Y", "Z", "Not Constrained"]
        self.ui = uiSpaceSwitcher.Ui_win_main()
        self.ui.setupUi(self)
        # Setup the base list of parent
        self.createModel = QtGui.QStandardItemModel(self.ui.lstParent)
        self.parentItem = QtGui.QStandardItem(self.sOriginalParent)
        self.parentItem.setCheckable(False)
        self.parentItem.setEditable(False)
        self.createModel.appendRow(self.parentItem)
        self.ui.lstParent.setModel(self.createModel)
        self.ui.btnAction.setEnabled(False)
        self.ui.btnAction.setText("Select a Node")
        self.ui.cbSysList.addItem("--- Select a system ---")
        self.ui.lstParent.setEnabled(False)
        self.ui.cbPosition.addItems(self.aConstList)
        self.ui.cbPosition.setEnabled(False)
        self.ui.cbRotation.addItems(self.aConstList)
        self.ui.cbRotation.setEnabled(False)
        # Intern variable
        self.aEventCallbacksID = []       # Maya event callback ids (removed on close)
        self.pTimeJobCallback = None      # time-change callback id
        self.pSceneUpdateID = None        # scene-update callback id
        self.mode = Mode.Inactive         # current interaction mode
        self.nSelDriven = None            # currently selected driven node
        self.aSelDrivers = []             # currently selected driver nodes
        self.pSelSpSys = None             # SpaceSwitcherLogic of the selection
        self.toRemove = []                # parent indexes checked for removal
        self.aSceneSpaceSwitch = []       # all systems found in the scene
        self.aConstrainedFrame = []       # frames shown in the frame-info table
        self.bInSelChanged = False        # guard: inside selection-change handler
        self.bBlockSelJob = False         # guard: suppress selection callback
        self.colorTemplate = "<font color={0}>{1}</font>"
        self._setup_callbacks()
        # Force the tool to check the selection on it's opening
        self.refresh()
    def refresh(self):
        """
        Refresh the tool information: drop the cached driven node, rebuild
        the system list from the scene, then re-evaluate the selection.
        """
        self.nSelDriven = None
        self._fetch_system_from_scene()
        self._callback_selection_change()
    def _setup_callbacks(self):
        """
        Setup the button callback and also a callback in maya to know when a selection is changed.

        Qt signals are connected to the UI widgets; Maya-side notifications
        (undo/redo, selection change, scene update, time change) are hooked
        through the OpenMaya message API so they can be removed in closeEvent.
        """
        self.ui.btnAction.pressed.connect(self._event_btnAction_pressed)
        self.ui.btnUpdateAll.pressed.connect(self._event_btnUpdateAll_pressed)
        self.ui.lstParent.clicked.connect(self._event_lstParent_selChanged)
        self.ui.cbSysList.currentIndexChanged.connect(self._event_cbSys_selChanged)
        self.ui.cbPosition.currentIndexChanged.connect(self._event_cbPosition_selChanged)
        self.ui.cbRotation.currentIndexChanged.connect(self._event_cbRotation_selChanged)
        self.ui.btnRefresh.pressed.connect(self._event_btnRefresh_pressed)
        '''
        self.iJobSelChange = pymel.scriptJob(event=('SelectionChanged', self._scriptJob_selection_change),
                                             compressUndo=True)
        self.iJobSceneOpen = pymel.scriptJob(event=('SceneOpened', self._scriptJob_scene_opened), compressUndo=False)
        # Do not put a job on the undo, since it cause problem with undo's themselves
        self.iJobUndo = pymel.scriptJob(event=('Undo', self._scriptJob_scene_undo), compressUndo=False)
        '''
        pUndoID = om.MEventMessage.addEventCallback("Undo", self._callback_scene_undoRedo)
        pRedoID = om.MEventMessage.addEventCallback("Redo", self._callback_scene_undoRedo)
        pSelectionChangeID = om.MEventMessage.addEventCallback("SelectionChanged", self._callback_selection_change)
        self.pSceneUpdateID = om.MSceneMessage.addCallback(om.MSceneMessage.kSceneUpdate, self._callback_scene_updated)
        self.pTimeJobCallback = om.MDGMessage.addTimeChangeCallback(self._scriptJob_timeChanged, "onTimeChange")
        self.aEventCallbacksID = [pUndoID, pRedoID, pSelectionChangeID]
        # Monkey-patch the table's paint event so the active-frame row can be highlighted
        self.ui.tblFrameInfo.paintEvent = self._tblFrame_paintEvent
    def _fetch_system_from_scene(self):
        """
        Get all SpaceSwitch system in the scene.

        Rebuilds self.aSceneSpaceSwitch and the system combo box from the
        serialized network nodes; invalid networks are deleted.
        """
        self.aSceneSpaceSwitch = []
        self.ui.cbSysList.clear()
        self.ui.cbSysList.addItem("--- Select a system ---")
        lstNetworkNode = libSerialization.getNetworksByClass(SpaceSwitcherLogic.__name__)
        for pNet in lstNetworkNode:
            pData = libSerialization.import_network(pNet)
            # Check to ensure the data is valid, delete it if not
            if pData.nDriven is not None and pData.nSwConst is not None and pData.nSwConstRecept is not None:
                self.ui.cbSysList.addItem(pData.nDriven.name())
                self.aSceneSpaceSwitch.append(pData)
            else:
                print("System {0} will be deleted because some data is invalid. Driven = {1}, Const = {2}, "
                      "Const Recept = {3}".format(pNet, pData.nDriven, pData.nSwConst, pData.nSwConstRecept))
                pymel.delete(pNet)
    def _set_mode_info(self, _mode, _bButtonEnabled):
        """
        Set the tool mode information and ensure all button are correctly activated if needed.

        Updates self.mode, the status label and the action button; Add/Remove
        are blocked (button disabled) when the constraint is referenced.

        :param _mode: one of the Mode constants
        :param _bButtonEnabled: whether the action button should be enabled
        """
        bIsRef = False
        if self.pSelSpSys:
            bIsRef = pymel.referenceQuery(self.pSelSpSys.nSwConst, isNodeReferenced=True)
        self.ui.lblStatus.setText("Current Mode --> ")
        self.mode = _mode
        self.ui.btnAction.setEnabled(_bButtonEnabled)
        # Set the status label info depending of the current mode
        if _mode == Mode.Create:
            self.ui.lblStatus.setText(self.ui.lblStatus.text() + self.colorTemplate.format("yellow", "First Setup"))
            self.ui.btnAction.setText("Setup")
        elif _mode == Mode.Add:
            if not bIsRef:
                self.ui.lblStatus.setText(self.ui.lblStatus.text() + self.colorTemplate.format("green", "Add Parent"))
                self.ui.btnAction.setText("Add")
            else:
                self.ui.lblStatus.setText(self.ui.lblStatus.text() +
                                          self.colorTemplate.format("Gray", "Add Parent (Blocked Reference)"))
                self.ui.btnAction.setText("Add (Blocked)")
                self.ui.btnAction.setEnabled(False)
        elif _mode == Mode.Switch or _mode == Mode.SwitchSelect:
            self.ui.lblStatus.setText(self.ui.lblStatus.text() + self.colorTemplate.format("green", "Switch Parent"))
            self.ui.btnAction.setText("Switch")
        elif _mode == Mode.Remove:
            if not bIsRef:
                self.ui.lblStatus.setText(self.ui.lblStatus.text() + self.colorTemplate.format("red", "Remove Parent"))
                self.ui.btnAction.setText("Remove")
            else:
                self.ui.lblStatus.setText(self.ui.lblStatus.text() +
                                          self.colorTemplate.format("Gray", "Remove Parent (Blocked Reference)"))
                self.ui.btnAction.setText("Remove (Blocked)")
                self.ui.btnAction.setEnabled(False)
        else:
            self.ui.lblStatus.setText(self.ui.lblStatus.text() + "Inactive")
            self.ui.btnAction.setText("Select a Node")
    def _callback_selection_change(self, *args):
        """
        Manage the selection change to know which action the user want to do. The remove action
        need to be implemented another way.

        Convention: the LAST selected node is the driven one, every node
        before it is a candidate driver. The handler then picks the mode
        (Create / Add / Switch / SwitchSelect) and refreshes the UI.
        """
        if not self.bBlockSelJob:
            self.bInSelChanged = True
            aCurSel = pymel.selected()
            if len(aCurSel) == 0:
                self.nSelDriven = None
                self.aSelDrivers = []
            elif len(aCurSel) == 1:
                self.nSelDriven = aCurSel[0]
                self.aSelDrivers = []
            else:
                self.nSelDriven = aCurSel[-1]
                self.aSelDrivers = aCurSel[0:-1]
            self._set_mode_info(Mode.Inactive, False)
            self.pSelSpSys = None
            if self.nSelDriven is not None:
                # Look for existing space switcher system
                for i, pSp in enumerate(self.aSceneSpaceSwitch):
                    if pSp.nDriven == self.nSelDriven:
                        self.pSelSpSys = pSp
                        break
                self._update_info(self.pSelSpSys)
                if self.pSelSpSys is None:
                    # Check to ensure that the callback will not catch a network node when we create a new system
                    if pymel.nodeType(self.nSelDriven) != "network":
                        nDrivenParent = self.nSelDriven.getParent()
                        # A referenced root node cannot receive the setup group, so block the button
                        if nDrivenParent is None and pymel.referenceQuery(self.nSelDriven, isNodeReferenced=True):
                            self._set_mode_info(Mode.Create, False)
                        else:
                            # TODO - Check if the parent can possibly receive a constraint on it
                            self._set_mode_info(Mode.Create, True)
                else:
                    if self.aSelDrivers:
                        if not self.pSelSpSys.is_parent_exist(self.aSelDrivers):  # If no selected parent already exist
                            self._set_mode_info(Mode.Add, True)
                        else:
                            if len(self.aSelDrivers) == 1:
                                # Single already-known driver selected: offer to switch to it
                                self._set_mode_info(Mode.SwitchSelect, True)
                                iParentIdx = self.pSelSpSys.aDrivers.index(self.aSelDrivers[0])
                                pIdx = self.ui.lstParent.model().createIndex(iParentIdx + 1, 0)
                                self.ui.lstParent.selectionModel().select(pIdx, QtCore.QItemSelectionModel.Select)
                            else:
                                self._set_mode_info(Mode.Add, True)
                    else:
                        # If a parent is selected in the list, active the button to do the switch
                        pSel = self.ui.lstParent.selectedIndexes()
                        if pSel:
                            self._set_mode_info(Mode.Switch, True)
                        else:
                            self._set_mode_info(Mode.SwitchSelect, True)
                            pIdx = self.ui.lstParent.model().createIndex(0, 0)
                            self.ui.lstParent.selectionModel().select(pIdx, QtCore.QItemSelectionModel.Select)
            else:
                self._update_info(None)
            self.bInSelChanged = False
    def _callback_scene_updated(self, *args):
        """
        Find all SpaceSwitcher system in the scene.

        Maya kSceneUpdate callback — simply re-scans the scene for systems.
        """
        self._fetch_system_from_scene()
    def _callback_scene_undoRedo(self, *args):
        """
        Ensure to refresh the UI on a undo in the scene.

        Registered for both the Undo and Redo Maya events; refreshes the UI
        for the current system when something is still selected.
        """
        if self.pSelSpSys and pymel.selected():
            self._update_info(self.pSelSpSys)
        else:
            self._update_info(None)
    def _scriptJob_timeChanged(self, *args):
        """
        Callbacks that trigger when the time change.

        Repaints the frame-info table so the active-frame highlight follows
        the time slider (see _tblFrame_paintEvent).
        """
        self.ui.tblFrameInfo.viewport().update()
    def _tblFrame_paintEvent(self, event):
        """
        Override the table paint event to redraw it when we need too.

        After the default paint, colors the frame cell dark red for the row
        whose frame range contains the current time, black otherwise.

        :param event: Qt paint event forwarded to the base implementation
        """
        super(QtWidgets.QTableWidget, self.ui.tblFrameInfo).paintEvent(event)
        iRowCount = self.ui.tblFrameInfo.rowCount()
        iCurTime = int(pymel.currentTime())
        for i in range(0, iRowCount):
            # Sentinel: the last row has no "next frame", so use a huge bound
            iFrameAfter = 9999999999
            pRow = self.ui.tblFrameInfo.item(i, self.ID_COL_FRAME)
            if i < iRowCount - 1:
                pRowAfter = self.ui.tblFrameInfo.item(i + 1, self.ID_COL_FRAME)
                iFrameAfter = pRowAfter.data(QtCore.Qt.UserRole)
            iFrame = pRow.data(QtCore.Qt.UserRole)
            pWidget = self.ui.tblFrameInfo.cellWidget(i, self.ID_COL_FRAME)
            pPal = pWidget.palette()
            if iFrame <= iCurTime < iFrameAfter:
                pPal.setColor(pWidget.backgroundRole(), QtCore.Qt.darkRed)
            elif iCurTime < iFrame and i == 0:
                # Before the first key, the first row is considered active
                pPal.setColor(pWidget.backgroundRole(), QtCore.Qt.darkRed)
            else:
                pPal.setColor(pWidget.backgroundRole(), QtCore.Qt.black)
            pWidget.setPalette(pPal)
    def closeEvent(self, *args, **kwargs):
        """
        Try to kill the script job when the window is closed.

        Removes the time-change, event (undo/redo/selection) and scene-update
        callbacks registered in _setup_callbacks.

        :param args:
        :param kwargs:
        """
        try:
            om.MDGMessage.removeCallback(self.pTimeJobCallback)
            for pId in self.aEventCallbacksID:
                om.MEventMessage.removeCallback(pId)
            om.MSceneMessage.removeCallback(self.pSceneUpdateID)
        except:
            # Best-effort cleanup: callbacks may already be gone — ignore.
            pass
def _update_info(self, pSpData):
"""
Small wrapper to update all needed info in the UI
"""
if pSpData:
iCurSys = self.aSceneSpaceSwitch.index(pSpData)
if iCurSys != None:
self.ui.cbSysList.setCurrentIndex(iCurSys + 1) # First item is empty
self.ui.btnUpdateAll.setEnabled(True)
else:
self.ui.cbSysList.setCurrentIndex(0)
self.ui.btnUpdateAll.setEnabled(False)
else:
self.ui.cbSysList.setCurrentIndex(0) # First item is empty
self.ui.btnUpdateAll.setEnabled(False)
self._update_lstParent(pSpData)
self._update_tblFrameInfo(pSpData)
self._update_cbAxis(pSpData)
def _update_cbAxis(self, pData):
"""
Update constrained axis info for selected system
"""
if pData is not None:
self.ui.cbPosition.setEnabled(True)
self.ui.cbRotation.setEnabled(True)
sXPos = ""
sYPos = ""
sZPos = ""
sXRot = ""
sYRot = ""
sZRot = ""
if self.pSelSpSys.nSwConstRecept.translateX.listConnections():
sXPos = "X"
if self.pSelSpSys.nSwConstRecept.translateY.listConnections():
sYPos = "Y"
if self.pSelSpSys.nSwConstRecept.translateZ.listConnections():
sZPos = "Z"
if self.pSelSpSys.nSwConstRecept.rotateX.listConnections():
sXRot = "X"
if self.pSelSpSys.nSwConstRecept.rotateY.listConnections():
sYRot = "Y"
if self.pSelSpSys.nSwConstRecept.rotateZ.listConnections():
sZRot = "Z"
sFinalPos = sXPos + sYPos + sZPos
sFinalRot = sXRot + sYRot + sZRot
if sFinalPos != "":
self.ui.cbPosition.setCurrentIndex(self.aConstList.index(sFinalPos))
else:
self.ui.cbPosition.setCurrentIndex(len(self.aConstList) - 1)
if sFinalPos != "":
self.ui.cbRotation.setCurrentIndex(self.aConstList.index(sFinalRot))
else:
self.ui.cbRotation.setCurrentIndex(len(self.aConstList) - 1)
else:
self.ui.cbPosition.setCurrentIndex(0)
self.ui.cbPosition.setCurrentIndex(0)
self.ui.cbPosition.setEnabled(False)
self.ui.cbRotation.setEnabled(False)
    def _update_lstParent(self, pSpData):
        """
        Update the parent list for the selected system.

        Rebuilds the model with the "Original Parent" row followed by one row
        per driver; rows are checkable (for removal) only when the system is
        not referenced.

        :param pSpData: the selected system, or None to show an empty list
        """
        self.createModel.clear()
        if pSpData:
            self.ui.lstParent.setEnabled(True)
            self.createModel.appendRow(self.parentItem)
            for iIdx, nParentInfo in enumerate(pSpData.aDrivers):
                newParentItem = QtGui.QStandardItem(nParentInfo.name())
                newParentItem.setEditable(False)
                # Prevent any delete action when the sysem is referenced
                if pymel.referenceQuery(self.pSelSpSys.nSwConst, isNodeReferenced=True):
                    newParentItem.setCheckable(False)
                else:
                    newParentItem.setCheckable(True)
                self.createModel.appendRow(newParentItem)
        else:
            self.ui.lstParent.setEnabled(False)
            self.createModel.appendRow(self.parentItem)
    def _update_tblFrameInfo(self, pSpData):
        """
        Update the frame/parent info with the selected system.

        Scans the constraint weight keys to build (frame, parent index) pairs
        (-1 meaning the original parent), then rebuilds the table with one
        row per pair: an editable frame field, a parent combo box and a
        Remove button.

        :param pSpData: the selected system, or None to just clear the table
        """
        # Clear the table info and refresh it
        self.ui.tblFrameInfo.setRowCount(0)
        self.aConstrainedFrame = []
        if pSpData:
            aWeight = pSpData.nSwConst.getWeightAliasList()
            aZeroKey = []  # List of frame which have key at 0 on all parent
            aPreventZeroKey = []  # List of frame we know it's not all parent to 0
            aKeyParent = []  # List of tuple representing the frame with the parent index
            # Check to collect all constraint keys we would need to update
            for i, w in enumerate(aWeight):
                aKeyTime = pymel.keyframe(w, q=True)
                for iTime in aKeyTime:
                    if w.get(time=iTime) == 1.0:  # Keep info about frame/parent
                        aKeyParent.append((iTime, i))
                        if iTime not in aPreventZeroKey:
                            aPreventZeroKey.append(iTime)
                    else:
                        if iTime not in aZeroKey:
                            aZeroKey.append(iTime)  # Keep possible frame to be one without any parent active
            # Frames where every weight is 0 mean the original parent is active (-1)
            for iTime in aZeroKey:
                if iTime not in aPreventZeroKey:
                    aKeyParent.append((iTime, -1))
            # Sort by Frame order (Need to be reversed to be in the good frame order)
            aKeyParent.sort()
            # Create a list of the parent name to use in the combo box that will be created
            aParentName = [nParent.name() for nParent in pSpData.aDrivers]
            aParentName.insert(0, self.sOriginalParent)
            for pTblInfo in aKeyParent:
                iNbRow = self.ui.tblFrameInfo.rowCount()
                self.ui.tblFrameInfo.insertRow(iNbRow)
                # Frame Field
                pFrameCell = QtWidgets.QTableWidgetItem()
                self.ui.tblFrameInfo.setItem(iNbRow, self.ID_COL_FRAME, pFrameCell)
                edtFrame = QtWidgets.QLineEdit()
                edtFrame.setAutoFillBackground(True)
                edtFrame.setValidator(QDoubleEmptyStringValidator())
                edtFrame.setText(str(int(pTblInfo[0])))
                edtFrame.returnPressed.connect(partial(self._event_edtFrame_changed, iNbRow))
                edtFrame.editingFinished.connect(partial(self._event_edtFrame_endEdit, iNbRow))
                '''
                Monkey patch the mouse event function to create a right click menu on it.
                I could have redefined a class that inherit the QLineEdit class, but....
                '''
                edtFrame.mousePressEvent = partial(self._event_edtFrame_mousePress, iRow=iNbRow)
                self.ui.tblFrameInfo.setCellWidget(iNbRow, self.ID_COL_FRAME, edtFrame)
                pFrameCell.setData(QtCore.Qt.UserRole, int(pTblInfo[0]))
                # Parent Field
                pCellParent = QtWidgets.QTableWidgetItem()
                self.ui.tblFrameInfo.setItem(iNbRow, self.ID_COL_PARENT, pCellParent)
                cbParent = QtWidgets.QComboBox()
                cbParent.setMaximumWidth(200)
                cbParent.addItems(aParentName)
                cbParent.setCurrentIndex(
                    pTblInfo[1] + 1)  # Index is always +1 since original parent it -1 in the system
                cbParent.currentIndexChanged.connect(partial(self._event_cbParent_indexChanged, iNbRow))
                cbParent.wheelEvent = self._event_cbParent_wheel  # Override the wheel event to prevent change with it
                cbParent.setFocusPolicy(QtCore.Qt.ClickFocus)
                self.ui.tblFrameInfo.setCellWidget(iNbRow, self.ID_COL_PARENT, cbParent)
                pCellParent.setData(QtCore.Qt.UserRole, pTblInfo[1])
                # Action Field (Remove button)
                pCellAction = QtWidgets.QTableWidgetItem()
                self.ui.tblFrameInfo.setItem(iNbRow, self.ID_COL_ACTION, pCellAction)
                btnRemove = QtWidgets.QPushButton()
                btnRemove.setText("Remove")
                btnRemove.pressed.connect(partial(self._event_btnRemove_pressed, iNbRow))
                self.ui.tblFrameInfo.setCellWidget(iNbRow, self.ID_COL_ACTION, btnRemove)
                self.aConstrainedFrame.append(int(pTblInfo[0]))
            self.ui.tblFrameInfo.resizeColumnToContents(self.ID_COL_PARENT)
    def _event_edtFrame_mousePress(self, event, iRow=0):
        """
        Generate a right click on the QLineEdit with the frame number to allow the user to delete a key.

        :param event: Qt mouse event (monkey-patched onto the line edit)
        :param iRow: table row of the frame field that was clicked
        """
        if event.button() == QtCore.Qt.RightButton:
            # Get data
            pFrameCell = self.ui.tblFrameInfo.item(iRow, self.ID_COL_FRAME)
            iFrame = pFrameCell.data(QtCore.Qt.UserRole)
            menu = QtWidgets.QMenu()
            action_sel_parent = menu.addAction('Remove')
            action_sel_parent.triggered.connect(partial(self._event_rcMenu_deleteKey, iFrame))
            # Show the context menu at the cursor, blocking until dismissed
            menu.exec_(QtGui.QCursor.pos())
    def _event_cbParent_wheel(self, event):
        """
        Empty override to prevent the user to change the parent with the mouse wheel.

        :param event: Qt wheel event (intentionally ignored)
        """
        pass
    def _event_rcMenu_deleteKey(self, iFrame):
        """
        Right-Click menu action to delete a constrained key.

        :param iFrame: frame of the constraint key to delete
        """
        self.pSelSpSys.deleteKey(iFrame)
        # Rebuild the table so the removed row disappears
        self._update_tblFrameInfo(self.pSelSpSys)
    def _event_btnUpdateAll_pressed(self):
        """
        Update all the constraint offset, can be usefull if a parent have been moved.
        """
        self.pSelSpSys.update_constraint_keys(_updateAll=True)
def _event_btnAction_pressed(self):
"""
Manage the different action that can happen on the tool. Will change depending on the selection
"""
if self.mode == Mode.Create:
if pymel.referenceQuery(self.nSelDriven, isNodeReferenced=True):
bCreateParent = False
else:
if self.nSelDriven.getParent() is not None:
bCreateParent = False
else:
bCreateParent = True
#Block undo and selection changed callback for the moment we need export the network
pymel.undoInfo(stateWithoutFlush=False)
self.bBlockSelJob = True
pNewSp = SpaceSwitcherLogic()
if self.aSelDrivers:
pNewSp.setup_space_switch(self.nSelDriven, self.aSelDrivers, bCreateWolrdNode=False,
bCreateParent=bCreateParent)
else: # There is no drivers, so the user want the world to be one of them
pNewSp.setup_space_switch(self.nSelDriven, self.aSelDrivers, bCreateWolrdNode=True,
bCreateParent=bCreateParent)
libSerialization.export_network(pNewSp)
self.bBlockSelJob = False
pymel.undoInfo(stateWithoutFlush=True)
self.ui.cbSysList.addItem(pNewSp.nDriven.name())
self.aSceneSpaceSwitch.append(pNewSp)
elif self.mode == Mode.Add:
#Block undo and selection changed callback for the moment we need export the network
pymel.undoInfo(stateWithoutFlush=False)
self.bBlockSelJob = True
self.pSelSpSys.add_target(self.aSelDrivers)
# Delete the old network before updating a new one
aNetwork = libSerialization.getConnectedNetworks(self.pSelSpSys.nDriven, recursive=False)
pymel.delete(aNetwork)
libSerialization.export_network(self.pSelSpSys)
self.bBlockSelJob = False
pymel.undoInfo(stateWithoutFlush=True)
elif self.mode == Mode.Switch:
pCurParent = self.ui.lstParent.selectedIndexes()[0]
# Remove one to the index since the original parent doesn't really exist in the list of parent in the system
self.pSelSpSys.do_switch(pCurParent.row() - 1)
self._update_tblFrameInfo(self.pSelSpSys)
elif self.mode == Mode.SwitchSelect:
# Find the selected parent index
if len(self.aSelDrivers) == 0:
iSwitchIdx = -1
else:
iSwitchIdx = 0
for idx, nDriver in enumerate(self.pSelSpSys.aDrivers):
if nDriver == self.aSelDrivers[0]:
iSwitchIdx = idx
self.pSelSpSys.do_switch(iSwitchIdx)
self._update_tblFrameInfo(self.pSelSpSys)
elif self.mode == Mode.Remove:
iNbTarget = len(self.pSelSpSys.aDrivers)
self.toRemove.sort(reverse=True) # Ensure to remove from the bigger to the smaller index
# Delete the network
aNetwork = libSerialization.getConnectedNetworks(self.pSelSpSys.nDriven, recursive=False)
pymel.delete(aNetwork)
if iNbTarget == len(self.toRemove):
# Totally remove the constraint
self.pSelSpSys.remove_target(-1, _all=True)
self.aSceneSpaceSwitch.remove(self.pSelSpSys)
self.pSelSpSys = None
else:
#Block undo and selection changed callback for the moment we need export the network
pymel.undoInfo(stateWithoutFlush=False)
self.bBlockSelJob = True
for iIdx in self.toRemove:
self.pSelSpSys.remove_target(iIdx - 1)
# Recreate the network with refreshed data
libSerialization.export_network(self.pSelSpSys)
self.bBlockSelJob = True
pymel.undoInfo(stateWithoutFlush=True)
pymel.select(self.nSelDriven)
    def _event_lstParent_selChanged(self):
        """
        Manage the parent list selection change.

        Collects the checked rows into self.toRemove and toggles the tool
        between Remove mode (something is checked) and Switch mode (a row is
        merely selected).
        """
        # First look if there is any checked out item
        if not self.bInSelChanged:
            self.toRemove = []
            for iIdx in xrange(self.createModel.rowCount()):
                pItem = self.createModel.item(iIdx)
                if pItem.isCheckable():
                    if pItem.checkState() == QtCore.Qt.Checked:
                        self.toRemove.append(iIdx)
            if self.toRemove:
                self._set_mode_info(Mode.Remove, True)
            else:
                # Prevent a stuck status when unchecking all items
                self._set_mode_info(Mode.Switch, False)
            # Only enable the Switch action when a row is actually selected.
            if self.mode == Mode.Switch:
                pSel = self.ui.lstParent.selectedIndexes()
                if pSel:
                    self._set_mode_info(Mode.Switch, True)
                else:
                    self._set_mode_info(Mode.Switch, False)
    def _event_edtFrame_changed(self, iRow):
        """
        Manage a frame change in the frame info table.

        Moves the key edited on row `iRow` from its old frame (stored in the
        item's UserRole data) to the frame typed into the cell's QLineEdit.
        """
        pCellQLine = self.ui.tblFrameInfo.cellWidget(iRow, self.ID_COL_FRAME)
        if pCellQLine.text() != "":
            pCellFrame = self.ui.tblFrameInfo.item(iRow, self.ID_COL_FRAME)
            iOldFrame = pCellFrame.data(QtCore.Qt.UserRole)
            # pCellParent = self.ui.tblFrameInfo.item(iRow, self.ID_COL_PARENT)
            # iParentIdx = pCellParent.data(QtCore.Qt.UserRole)
            iNewFrame = int(pCellQLine.text())
            # Prevent the user to move a key on a frame already constrained
            if not (iNewFrame in self.aConstrainedFrame):
                self.pSelSpSys.moveKey(iNewFrame, iOldFrame)
                self._update_tblFrameInfo(self.pSelSpSys)
            else:
                # Target frame already has a key: revert the cell to the old frame.
                pCellQLine.setText(str(iOldFrame))
            pymel.select(self.nSelDriven)
    def _event_edtFrame_endEdit(self, iRow):
        """
        Ensure that the frame data is still shown if the user left the cell empty.

        Restores the frame number stored in the cell item's UserRole data into
        the QLineEdit when editing ends with an empty field.
        """
        pCell = self.ui.tblFrameInfo.item(iRow, self.ID_COL_FRAME)
        iCurFrame = pCell.data(QtCore.Qt.UserRole)
        pCellQLine = self.ui.tblFrameInfo.cellWidget(iRow, self.ID_COL_FRAME)
        if pCellQLine and pCellQLine.text() == "":
            pCellQLine.setText(str(iCurFrame))
    def _event_cbParent_indexChanged(self, _iRow, _iIndex):
        """
        Manage a parent change in the frame info table.

        Temporarily jumps the timeline to the key's frame, performs the space
        switch there, then restores the original current time. Viewport
        refresh is suspended around the jump to avoid visible flicker.
        """
        iCurTime = pymel.currentTime()
        pFrameQLine = self.ui.tblFrameInfo.cellWidget(_iRow, self.ID_COL_FRAME)
        iFrame = int(pFrameQLine.text())
        pymel.refresh(su=True)
        pymel.setCurrentTime(iFrame)
        self.pSelSpSys.do_switch(_iIndex - 1)  # Combo Box index are bigger than the real index system
        pymel.setCurrentTime(iCurTime)
        pymel.refresh(su=False)
    def _event_cbSys_selChanged(self, _iIdx):
        """
        Manage the system change with the combo box: select the node related to
        the system selected in the combo box.
        """
        # Prevent a node selection when the comboBox index changed during the
        # selection-changed callback.
        if not self.bInSelChanged:
            if _iIdx > 0:
                # Index 0 is the "no system" entry, so system i lives at i + 1.
                pCurSys = self.aSceneSpaceSwitch[_iIdx - 1]
                pymel.select(pCurSys.nDriven)
            else:  # If the no-system index is selected, but a node is selected in the scene, put back the selected system
                for i, pSp in enumerate(self.aSceneSpaceSwitch):
                    if pSp.nDriven == self.nSelDriven:
                        # Change the index, but prevent the event from selecting back the node
                        self.bInSelChanged = True
                        self.ui.cbSysList.setCurrentIndex(i + 1)
                        self.bInSelChanged = False
                        break
def _event_cbPosition_selChanged(self, _iIdx):
"""
Manage a system constrained axis change. Change the connected position attributes between the constraint
and the constraint recept
"""
if self.pSelSpSys is not None and not self.bInSelChanged:
with pymel.UndoChunk():
sPos = self.ui.cbPosition.itemText(_iIdx)
if sPos.find("X") != -1:
if not self.pSelSpSys.nSwConstRecept.translateX.listConnections():
self.pSelSpSys.nSwConst.constraintTranslateX.connect(self.pSelSpSys.nSwConstRecept.translateX)
else:
if self.pSelSpSys.nSwConstRecept.translateX.listConnections():
self.pSelSpSys.nSwConstRecept.translateX.disconnect()
# Set to 0, we don't want the constraint to affect pos
self.pSelSpSys.nSwConstRecept.translateX.set(0.0)
if sPos.find("Y") != -1:
if not self.pSelSpSys.nSwConstRecept.translateY.listConnections():
self.pSelSpSys.nSwConst.constraintTranslateY.connect(self.pSelSpSys.nSwConstRecept.translateY)
else:
if self.pSelSpSys.nSwConstRecept.translateY.listConnections():
self.pSelSpSys.nSwConstRecept.translateY.disconnect()
# Set to 0, we don't want the constraint to affect pos
self.pSelSpSys.nSwConstRecept.translateY.set(0.0)
if sPos.find("Z") != -1:
if not self.pSelSpSys.nSwConstRecept.translateZ.listConnections():
self.pSelSpSys.nSwConst.constraintTranslateZ.connect(self.pSelSpSys.nSwConstRecept.translateZ)
else:
if self.pSelSpSys.nSwConstRecept.translateZ.listConnections():
self.pSelSpSys.nSwConstRecept.translateZ.disconnect()
# Set to 0, we don't want the constraint to affect pos
self.pSelSpSys.nSwConstRecept.translateZ.set(0.0)
self.pSelSpSys.update_constraint_keys()
def _event_cbRotation_selChanged(self, _iIdx):
"""
Manage a system constrained axis change. Change the connected rotation attributes between the constraint
and the constraint recept
"""
if self.pSelSpSys is not None and not self.bInSelChanged:
with pymel.UndoChunk():
sRot = self.ui.cbRotation.itemText(_iIdx)
if sRot.find("X") != -1:
if not self.pSelSpSys.nSwConstRecept.rotateX.listConnections():
self.pSelSpSys.nSwConst.constraintRotateX.connect(self.pSelSpSys.nSwConstRecept.rotateX)
else:
if self.pSelSpSys.nSwConstRecept.rotateX.listConnections():
self.pSelSpSys.nSwConstRecept.rotateX.disconnect()
# Set to 0, we don't want the constraint to affect rot
self.pSelSpSys.nSwConstRecept.rotateX.set(0.0)
if sRot.find("Y") != -1:
if not self.pSelSpSys.nSwConstRecept.rotateY.listConnections():
self.pSelSpSys.nSwConst.constraintRotateY.connect(self.pSelSpSys.nSwConstRecept.rotateY)
else:
if self.pSelSpSys.nSwConstRecept.rotateY.listConnections():
self.pSelSpSys.nSwConstRecept.rotateY.disconnect()
# Set to 0, we don't want the constraint to affect rot
self.pSelSpSys.nSwConstRecept.rotateY.set(0.0)
if sRot.find("Z") != -1:
if not self.pSelSpSys.nSwConstRecept.rotateZ.listConnections():
self.pSelSpSys.nSwConst.constraintRotateZ.connect(self.pSelSpSys.nSwConstRecept.rotateZ)
else:
if self.pSelSpSys.nSwConstRecept.rotateZ.listConnections():
self.pSelSpSys.nSwConstRecept.rotateZ.disconnect()
# Set to 0, we don't want the constraint to affect rot
self.pSelSpSys.nSwConstRecept.rotateZ.set(0.0)
self.pSelSpSys.update_constraint_keys()
    def _event_btnRefresh_pressed(self):
        """
        Refresh button pressed event: re-scan the scene and rebuild the UI state.
        """
        self.refresh()
    def _event_btnRemove_pressed(self, iRow):
        """
        Delete the key in the same row as the delete button.

        Args:
            iRow: Table row of the button; the frame number to delete is read
                  from that row's frame cell (UserRole data).
        """
        pFrameCell = self.ui.tblFrameInfo.item(iRow, self.ID_COL_FRAME)
        iFrame = pFrameCell.data(QtCore.Qt.UserRole)
        self.pSelSpSys.deleteKey(iFrame)
        self._update_tblFrameInfo(self.pSelSpSys)
class SpaceSwitcher(object):
    """
    This class is used to create the main dialog and holds the name of the
    window, so it needs to exist to be correctly called.
    """
    def __init__(self, *args, **kwargs):
        # Try to kill the existing window (best-effort: ignore any failure so
        # re-launching the tool never raises).
        try:
            if cmds.window("SpaceSwitcher", ex=True):
                cmds.deleteUI("SpaceSwitcher")
        except:
            pass
        self.pDialog = SpaceSwitcherDialog(getMayaWindow())
        self.centerDialog()
        self.pDialog.setWindowTitle("Space Switcher")
        self.pDialog.setObjectName("SpaceSwitcher")
        self.pDialog.show()

    def centerDialog(self):
        """Center the dialog on the screen the mouse cursor is currently on."""
        # Create a frame geo to easily move it from the center
        pFrame = self.pDialog.frameGeometry()
        pScreen = QtWidgets.QApplication.desktop().screenNumber(QtWidgets.QApplication.desktop().cursor().pos())
        ptCenter = QtWidgets.QApplication.desktop().screenGeometry(pScreen).center()
        pFrame.moveCenter(ptCenter)
        self.pDialog.move(pFrame.topLeft())
| gpl-2.0 |
miraculixx/heroku-buildpack-python | vendor/pip-pop/pip/_vendor/colorama/ansi.py | 442 | 2304 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''

# Escape-sequence building blocks: Control Sequence Introducer, Operating
# System Command introducer, and the BEL character that terminates OSC strings.
CSI = '\033['
OSC = '\033]'
BEL = '\007'
def code_to_chars(code):
    """Return the SGR escape sequence for the given numeric code."""
    return '%s%sm' % (CSI, code)
class AnsiCodes(object):
    """Wraps a class of numeric ANSI codes, exposing each public attribute as
    its ready-to-print escape sequence."""
    def __init__(self, codes):
        # Copy every public attribute of `codes`, converting the numeric code
        # into its escape-sequence string via code_to_chars().
        for name in dir(codes):
            if not name.startswith('_'):
                value = getattr(codes, name)
                setattr(self, name, code_to_chars(value))
class AnsiCursor(object):
    """Builders for ANSI CSI cursor-movement sequences."""

    def UP(self, n=1):
        """Move the cursor up by *n* rows."""
        return '%s%sA' % (CSI, n)

    def DOWN(self, n=1):
        """Move the cursor down by *n* rows."""
        return '%s%sB' % (CSI, n)

    def FORWARD(self, n=1):
        """Move the cursor right by *n* columns."""
        return '%s%sC' % (CSI, n)

    def BACK(self, n=1):
        """Move the cursor left by *n* columns."""
        return '%s%sD' % (CSI, n)

    def POS(self, x=1, y=1):
        """Move the cursor to 1-based column *x*, row *y*."""
        return '%s%s;%sH' % (CSI, y, x)
def set_title(title):
    """Return the OSC sequence that sets the terminal window title."""
    return '%s2;%s%s' % (OSC, title, BEL)
def clear_screen(mode=2):
    """Return the 'erase in display' sequence (mode 2 clears the whole screen)."""
    return '%s%sJ' % (CSI, mode)
def clear_line(mode=2):
    """Return the 'erase in line' sequence (mode 2 clears the whole line)."""
    return '%s%sK' % (CSI, mode)
class AnsiFore:
    # SGR foreground color codes (30-37 standard, 39 default).
    BLACK           = 30
    RED             = 31
    GREEN           = 32
    YELLOW          = 33
    BLUE            = 34
    MAGENTA         = 35
    CYAN            = 36
    WHITE           = 37
    RESET           = 39

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX   = 90
    LIGHTRED_EX     = 91
    LIGHTGREEN_EX   = 92
    LIGHTYELLOW_EX  = 93
    LIGHTBLUE_EX    = 94
    LIGHTMAGENTA_EX = 95
    LIGHTCYAN_EX    = 96
    LIGHTWHITE_EX   = 97
class AnsiBack:
    # SGR background color codes (40-47 standard, 49 default).
    BLACK           = 40
    RED             = 41
    GREEN           = 42
    YELLOW          = 43
    BLUE            = 44
    MAGENTA         = 45
    CYAN            = 46
    WHITE           = 47
    RESET           = 49

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX   = 100
    LIGHTRED_EX     = 101
    LIGHTGREEN_EX   = 102
    LIGHTYELLOW_EX  = 103
    LIGHTBLUE_EX    = 104
    LIGHTMAGENTA_EX = 105
    LIGHTCYAN_EX    = 106
    LIGHTWHITE_EX   = 107
class AnsiStyle:
    # SGR text style codes (intensity and full reset).
    BRIGHT    = 1
    DIM       = 2
    NORMAL    = 22
    RESET_ALL = 0
# Public module-level singletons: the numeric code tables converted into
# ready-to-print escape-sequence attributes.
Fore   = AnsiCodes( AnsiFore )
Back   = AnsiCodes( AnsiBack )
Style  = AnsiCodes( AnsiStyle )
Cursor = AnsiCursor()
| mit |
matrix65537/lab | algorithms/common/sortbase.py | 1 | 1166 | #!/usr/bin/python
#coding:utf8
class Key(object):
    """Abstract comparable key; subclasses must implement __lt__. (Python 2 code.)"""
    def __init__(self):
        pass

    def __lt__(self, key):
        # Subclasses define the ordering used by the sort algorithms.
        raise NotImplementedError, "< method must be implemented"
class IntNode(Key):
    """A Key wrapping a single integer, ordered by its value. (Python 2 code.)"""
    def __init__(self, v):
        super(Key, self).__init__()
        self.__v = v

    def __lt__(self, node):
        # Name mangling makes node.__v resolve to node._IntNode__v, so this
        # only compares against other IntNode instances.
        return self.__v < node.__v

    def __str__(self):
        return str(self.__v)
class SortBase(object):
    """Base class for sorting algorithms: shared swap/compare/print helpers.
    (Python 2 code: uses print statements.)"""
    def __init__(self):
        pass

    @staticmethod
    def exch(keys, i, j):
        # Swap the elements at indices i and j in place.
        keys[i], keys[j] = keys[j], keys[i]

    @staticmethod
    def less(keya, keyb):
        # Only Key instances are comparable; reject anything else early.
        if not (isinstance(keya, Key) and isinstance(keyb, Key)):
            raise TypeError, "must be Key type for cmp"
        return keya < keyb

    @staticmethod
    def show(keys):
        # Print all keys on one line, space-separated.
        for x in keys:
            print x,

    @staticmethod
    def is_sorted(keys):
        # True iff keys is in non-decreasing order.
        for i in range(1, len(keys)):
            if SortBase.less(keys[i], keys[i-1]):
                return False
        return True

    def sort(self, a):
        # Concrete algorithms (e.g. insertion, merge) override this.
        raise NotImplementedError, "sort method not implemented"
def main():
    # Placeholder entry point; this module is meant to be imported.
    pass

if __name__ == '__main__':
    main()
| mit |
BorisJeremic/Real-ESSI-Examples | analytic_solution/test_cases/27NodeBrick/cantilever_different_Poisson/NumberOfDivision4/PoissonRatio0.49/compare_essi_version.py | 409 | 1085 | #!/usr/bin/python
import h5py
import sys
import numpy as np
import os
# automatically find the script directory.
# script_dir=sys.argv[1]
cur_dir=os.getcwd()
sep='test_cases'
test_DIR=cur_dir.split(sep,1)[0]
scriptDIR=test_DIR+'compare_function'
sys.path.append(scriptDIR)
# import my own command line color function
# from essi_max_disp_fun import find_max_disp
from mycolor_fun import *
print headblankline()
print headblankline()
print headstart(), "Original ESSI version information:"
fin=open("original.log")
for line in fin:
if 'Version' in line:
print headstep(), line,
if 'Compiled' in line:
print headstep(), line,
if 'Time Now' in line:
print headstep(), line,
if not line: break
print headblankline()
print headstart(), "New ESSI version information:"
fin=open("new.log")
for line in fin:
if 'Version' in line:
print headstep(), line,
if 'Compiled' in line:
print headstep(), line,
if 'Time Now' in line:
print headstep(), line,
if not line: break
print headstart()
print headblankline() | cc0-1.0 |
GoogleCloudPlatform/PerfKitBenchmarker | tests/linux_benchmarks/roberta_mmlm_benchmark_test.py | 1 | 2327 | # Copyright 2020 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for roberta_mmlm_benchmark."""
import os
import unittest
import mock
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_benchmarks import roberta_mmlm_benchmark
from perfkitbenchmarker.sample import Sample
class RobertaMmlmBenchmarkTestCase(unittest.TestCase,
                                   test_util.SamplesTestMixin):
  """Tests parsing of RoBERTa MMLM training output into PKB samples."""

  def setUp(self):
    super(RobertaMmlmBenchmarkTestCase, self).setUp()
    # Load a canned training log captured from a real benchmark run.
    path = os.path.join(os.path.dirname(__file__), '..', 'data',
                        'roberta_mmlm_output.txt')
    with open(path) as fp:
      self.contents = fp.read()

  # Freeze time.time() so the parsed samples carry a deterministic timestamp.
  @mock.patch('time.time', mock.MagicMock(return_value=1550279509.59))
  def testTrainResults(self):
    samples = roberta_mmlm_benchmark.MakeSamplesFromOutput(
        {'num_accelerators': 16}, self.contents)
    # One sample per logged training step in the canned output.
    self.assertEqual(436, len(samples))
    # Expected first sample: all metadata fields are copied verbatim from the
    # first log line of the canned output.
    golden = Sample(
        metric='wps',
        value=26259.0,
        unit='wps',
        metadata={
            'num_accelerators': 16,
            'epoch': '001',
            'step': '10',
            'steps per epoch': '2183',
            'loss': '18.137',
            'nll_loss': '18.137',
            'ppl': '288277',
            'wps': '26259',
            'ups': '0',
            'wpb': '60162.909',
            'bsz': '128.000',
            'num_updates': '11',
            'lr': '2.93333e-07',
            'gnorm': '8.833',
            'clip': '1.000',
            'oom': '0.000',
            'loss_scale': '128.000',
            'wall': '28',
            'train_wall': '27',
        },
        timestamp=1550279509.59)
    print(samples[0])
    print(golden)
    self.assertEqual(golden, samples[0])
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| apache-2.0 |
chanceraine/nupic | tests/unit/nupic/regions/image_sensor_test.py | 17 | 4293 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import unittest2 as unittest
import tempfile
import os
from PIL import Image, ImageDraw
from nupic.engine import Network
from nupic.regions.ImageSensor import ImageSensor
class ImageSensorTest(unittest.TestCase):
  """Tests for the py.ImageSensor region: getSelf, parameters, image loading."""

  def testGetSelf(self):
    # Create network
    net = Network()
    # Add sensor
    sensor = net.addRegion("sensor", "py.ImageSensor",
                           "{width: 100, height: 50}")
    pysensor = sensor.getSelf()

    # Verify set parameters
    self.assertEqual(type(pysensor), ImageSensor)
    self.assertEqual(pysensor.height, 50)
    self.assertEqual(pysensor.width, 100)

    # getParameter must reflect the live Python object's attributes.
    self.assertEqual(pysensor.width, sensor.getParameter('width'))
    self.assertEqual(pysensor.height, sensor.getParameter('height'))

    sensor.setParameter('width', 444)
    sensor.setParameter('height', 444)
    self.assertEqual(pysensor.width, 444)
    self.assertEqual(pysensor.height, 444)

    # Verify py object is not a copy: mutations through either handle must be
    # visible through the other.
    sensor.getSelf().height = 100
    sensor.getSelf().width = 200
    self.assertEqual(pysensor.height, 100)
    self.assertEqual(pysensor.width, 200)

    pysensor.height = 50
    pysensor.width = 100
    self.assertEqual(sensor.getSelf().height, 50)
    self.assertEqual(sensor.getSelf().width, 100)

  def testParameters(self):
    # Test setting and getting parameters
    net = Network()

    # Add sensor to the network
    sensor = net.addRegion("sensor", "py.ImageSensor",
                           "{width: 100, height: 50}")

    # Verify get parameters
    self.assertEqual(sensor.getParameter('height'), 50)
    self.assertEqual(sensor.getParameter('width'), 100)

    # Verify set parameters
    sensor.setParameter('width', 42)
    self.assertEqual(sensor.getParameter('width'), 42)

  def testLoadImages(self):
    # Create a simple network with an ImageSensor. You can't actually run
    # the network because the region isn't connected to anything
    net = Network()
    net.addRegion("sensor", "py.ImageSensor", "{width: 32, height: 32}")
    sensor = net.regions['sensor']

    # Create a dataset with two categories, one image in each category
    # Each image consists of a unique rectangle
    tmpDir = tempfile.mkdtemp()
    os.makedirs(os.path.join(tmpDir,'0'))
    os.makedirs(os.path.join(tmpDir,'1'))

    im0 = Image.new("L",(32,32))
    draw = ImageDraw.Draw(im0)
    draw.rectangle((10,10,20,20), outline=255)
    im0.save(os.path.join(tmpDir,'0','im0.png'))

    im1 = Image.new("L",(32,32))
    draw = ImageDraw.Draw(im1)
    draw.rectangle((15,15,25,25), outline=255)
    im1.save(os.path.join(tmpDir,'1','im1.png'))

    # Load the dataset and check we loaded the correct number
    sensor.executeCommand(["loadMultipleImages", tmpDir])
    numImages = sensor.getParameter('numImages')
    self.assertEqual(numImages, 2)

    # Load a single image (this will replace the previous images)
    sensor.executeCommand(["loadSingleImage",
                           os.path.join(tmpDir,'1','im1.png')])
    numImages = sensor.getParameter('numImages')
    self.assertEqual(numImages, 1)

    # Cleanup the temp files
    os.unlink(os.path.join(tmpDir,'0','im0.png'))
    os.unlink(os.path.join(tmpDir,'1','im1.png'))
    os.removedirs(os.path.join(tmpDir,'0'))
    os.removedirs(os.path.join(tmpDir,'1'))
# Allow running this test module directly.
if __name__ == "__main__":
  unittest.main()
| agpl-3.0 |
alxgu/ansible | test/integration/targets/old_style_cache_plugins/plugins/cache/redis.py | 83 | 4391 | # (c) 2014, Brian Coca, Josh Drake, et al
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: redis
short_description: Use Redis DB for cache
description:
- This cache uses JSON formatted, per host records saved in Redis.
version_added: "1.9"
requirements:
- redis>=2.4.5 (python lib)
options:
_uri:
description:
- A colon separated string of connection information for Redis.
required: True
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the DB entries
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout for the cache plugin data
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
section: defaults
type: integer
'''
import time
import json
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.cache import BaseCacheModule
try:
from redis import StrictRedis, VERSION
except ImportError:
raise AnsibleError("The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'")
class CacheModule(BaseCacheModule):
    """
    A caching module backed by redis.

    Keys are maintained in a zset with their score being the timestamp
    when they are inserted. This allows for the usage of 'zremrangebyscore'
    to expire keys. This mechanism is used or a pattern matched 'scan' for
    performance.
    """
    def __init__(self, *args, **kwargs):
        # Connection string is 'host:port[:...]' split into StrictRedis args.
        if C.CACHE_PLUGIN_CONNECTION:
            connection = C.CACHE_PLUGIN_CONNECTION.split(':')
        else:
            connection = []

        self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
        self._prefix = C.CACHE_PLUGIN_PREFIX
        # In-process read cache layered in front of redis.
        self._cache = {}
        self._db = StrictRedis(*connection)
        # Name of the zset tracking insertion timestamps for expiry.
        self._keys_set = 'ansible_cache_keys'

    def _make_key(self, key):
        # Full redis key: user-configured prefix + logical key.
        return self._prefix + key

    def get(self, key):
        if key not in self._cache:
            value = self._db.get(self._make_key(key))
            # guard against the key not being removed from the zset;
            # this could happen in cases where the timeout value is changed
            # between invocations
            if value is None:
                self.delete(key)
                raise KeyError
            self._cache[key] = json.loads(value)

        return self._cache.get(key)

    def set(self, key, value):
        value2 = json.dumps(value)
        if self._timeout > 0:  # a timeout of 0 is handled as meaning 'never expire'
            self._db.setex(self._make_key(key), int(self._timeout), value2)
        else:
            self._db.set(self._make_key(key), value2)

        # redis-py 2.x and 3.x use different zadd() signatures.
        if VERSION[0] == 2:
            self._db.zadd(self._keys_set, time.time(), key)
        else:
            self._db.zadd(self._keys_set, {key: time.time()})
        self._cache[key] = value

    def _expire_keys(self):
        # Drop zset entries older than the configured timeout; the data keys
        # themselves expire via setex.
        if self._timeout > 0:
            expiry_age = time.time() - self._timeout
            self._db.zremrangebyscore(self._keys_set, 0, expiry_age)

    def keys(self):
        self._expire_keys()
        return self._db.zrange(self._keys_set, 0, -1)

    def contains(self, key):
        self._expire_keys()
        return (self._db.zrank(self._keys_set, key) is not None)

    def delete(self, key):
        # Remove from the local cache, the redis value, and the expiry zset.
        if key in self._cache:
            del self._cache[key]
        self._db.delete(self._make_key(key))
        self._db.zrem(self._keys_set, key)

    def flush(self):
        for key in self.keys():
            self.delete(key)

    def copy(self):
        # TODO: there is probably a better way to do this in redis
        ret = dict()
        for key in self.keys():
            ret[key] = self.get(key)
        return ret

    def __getstate__(self):
        # The redis connection is not picklable; reconnect on unpickle instead.
        return dict()

    def __setstate__(self, data):
        self.__init__()
| gpl-3.0 |
abraxa/libsigrokdecode | decoders/usb_packet/pd.py | 10 | 12962 | ##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2011 Gareth McMullin <gareth@blacksphere.co.nz>
## Copyright (C) 2012-2014 Uwe Hermann <uwe@hermann-uwe.de>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
import sigrokdecode as srd
'''
OUTPUT_PYTHON format:
Packet:
[<ptype>, <pdata>]
<ptype>, <pdata>:
- 'SYNC', <sync>
- 'PID', <pid>
- 'ADDR', <addr>
- 'EP', <ep>
- 'CRC5', <crc5>
- 'CRC16', <crc16>
- 'EOP', <eop>
- 'FRAMENUM', <framenum>
- 'DATABYTE', <databyte>
- 'HUBADDR', <hubaddr>
- 'SC', <sc>
- 'PORT', <port>
- 'S', <s>
- 'E/U', <e/u>
- 'ET', <et>
- 'PACKET', [<pcategory>, <pname>, <pinfo>]
<pcategory>, <pname>, <pinfo>:
- 'TOKEN', 'OUT', [<sync>, <pid>, <addr>, <ep>, <crc5>, <eop>]
- 'TOKEN', 'IN', [<sync>, <pid>, <addr>, <ep>, <crc5>, <eop>]
- 'TOKEN', 'SOF', [<sync>, <pid>, <framenum>, <crc5>, <eop>]
- 'TOKEN', 'SETUP', [<sync>, <pid>, <addr>, <ep>, <crc5>, <eop>]
- 'DATA', 'DATA0', [<sync>, <pid>, <databytes>, <crc16>, <eop>]
- 'DATA', 'DATA1', [<sync>, <pid>, <databytes>, <crc16>, <eop>]
- 'DATA', 'DATA2', [<sync>, <pid>, <databytes>, <crc16>, <eop>]
- 'DATA', 'MDATA', [<sync>, <pid>, <databytes>, <crc16>, <eop>]
- 'HANDSHAKE', 'ACK', [<sync>, <pid>, <eop>]
- 'HANDSHAKE', 'NAK', [<sync>, <pid>, <eop>]
- 'HANDSHAKE', 'STALL', [<sync>, <pid>, <eop>]
- 'HANDSHAKE', 'NYET', [<sync>, <pid>, <eop>]
- 'SPECIAL', 'PRE', [<sync>, <pid>, <addr>, <ep>, <crc5>, <eop>]
- 'SPECIAL', 'ERR', [<sync>, <pid>, <eop>]
- 'SPECIAL', 'SPLIT',
[<sync>, <pid>, <hubaddr>, <sc>, <port>, <s>, <e/u>, <et>, <crc5>, <eop>]
- 'SPECIAL', 'PING', [<sync>, <pid>, <addr>, <ep>, <crc5>, <eop>]
- 'SPECIAL', 'Reserved', None
<sync>: SYNC field bitstring, normally '00000001' (8 chars).
<pid>: Packet ID bitstring, e.g. '11000011' for DATA0 (8 chars).
<addr>: Address field number, 0-127 (7 bits).
<ep>: Endpoint number, 0-15 (4 bits).
<crc5>: CRC-5 number (5 bits).
<crc16>: CRC-16 number (16 bits).
<eop>: End of packet marker. List of symbols, usually ['SE0', 'SE0', 'J'].
<framenum>: USB (micro)frame number, 0-2047 (11 bits).
<databyte>: A single data byte, e.g. 0x55.
<databytes>: List of data bytes, e.g. [0x55, 0xaa, 0x99] (0 - 1024 bytes).
<hubaddr>: TODO
<sc>: TODO
<port>: TODO
<s>: TODO
<e/u>: TODO
<et>: TODO
'''
# Packet IDs (PIDs).
# The first 4 bits are the 'packet type' field, the last 4 bits are the
# 'check field' (each bit in the check field must be the inverse of the resp.
# bit in the 'packet type' field; if not, that's a 'PID error').
# For the 4-bit strings, the left-most '1' or '0' is the LSB, i.e. it's sent
# to the bus first.
pids = {
    # Tokens
    '10000111': ['OUT', 'Address & EP number in host-to-function transaction'],
    '10010110': ['IN', 'Address & EP number in function-to-host transaction'],
    '10100101': ['SOF', 'Start-Of-Frame marker & frame number'],
    '10110100': ['SETUP', 'Address & EP number in host-to-function transaction for SETUP to a control pipe'],

    # Data
    # Note: DATA2 and MDATA are HS-only.
    '11000011': ['DATA0', 'Data packet PID even'],
    '11010010': ['DATA1', 'Data packet PID odd'],
    '11100001': ['DATA2', 'Data packet PID HS, high bandwidth isosynchronous transaction in a microframe'],
    '11110000': ['MDATA', 'Data packet PID HS for split and high-bandwidth isosynchronous transactions'],

    # Handshake
    '01001011': ['ACK', 'Receiver accepts error-free packet'],
    '01011010': ['NAK', 'Receiver cannot accept or transmitter cannot send'],
    '01111000': ['STALL', 'EP halted or control pipe request unsupported'],
    '01101001': ['NYET', 'No response yet from receiver'],

    # Special
    # NOTE: PRE and ERR share the same PID value. The original dict listed
    # '00111100' twice (once for PRE, once for ERR), so the PRE entry was
    # silently shadowed by the duplicate key; only the surviving ERR mapping
    # is kept here, which preserves the previous runtime behavior (PRE
    # packets are reported as ERR).
    '00111100': ['ERR', 'Split transaction error handshake'],
    '00011110': ['SPLIT', 'HS split transaction token'],
    '00101101': ['PING', 'HS flow control probe for a bulk/control EP'],
    '00001111': ['Reserved', 'Reserved PID'],
}
def get_category(pidname):
    """Return the packet category for a PID name.

    One of 'TOKEN', 'DATA', 'HANDSHAKE'; anything unrecognized falls into
    'SPECIAL' (PRE/ERR/SPLIT/PING/Reserved).
    """
    categories = (
        ('TOKEN', ('OUT', 'IN', 'SOF', 'SETUP')),
        ('DATA', ('DATA0', 'DATA1', 'DATA2', 'MDATA')),
        ('HANDSHAKE', ('ACK', 'NAK', 'STALL', 'NYET')),
    )
    for category, names in categories:
        if pidname in names:
            return category
    return 'SPECIAL'
def ann_index(pidname):
    """Map a PID name to its annotation-row index.

    Known PID names map to indices 11..27 (their position in the packet
    annotation list, offset by the 11 field annotations); unknown names map
    to 28 ('Packet: Invalid').
    """
    known = ('OUT', 'IN', 'SOF', 'SETUP', 'DATA0', 'DATA1', 'DATA2', 'MDATA',
             'ACK', 'NAK', 'STALL', 'NYET', 'PRE', 'ERR', 'SPLIT', 'PING',
             'Reserved')
    try:
        return known.index(pidname) + 11
    except ValueError:
        return 28
def bitstr_to_num(bitstr):
    """Convert an LSB-first bit string to an integer (empty string -> 0)."""
    if not bitstr:
        return 0
    # The string is LSB-first, so reverse it before parsing as binary.
    return int(bitstr[::-1], 2)
class Decoder(srd.Decoder):
    """Stacked decoder: turns usb_signalling SOP/BIT/EOP events into USB packets."""
    api_version = 2
    id = 'usb_packet'
    name = 'USB packet'
    longname = 'Universal Serial Bus (LS/FS) packet'
    desc = 'USB (low-speed and full-speed) packet protocol.'
    license = 'gplv2+'
    inputs = ['usb_signalling']
    outputs = ['usb_packet']
    options = (
        {'id': 'signalling', 'desc': 'Signalling',
            'default': 'full-speed', 'values': ('full-speed', 'low-speed')},
    )
    # Indices 0-10 are per-field annotations, 11-28 whole-packet annotations
    # (the offsets that ann_index() relies on).
    annotations = (
        ('sync-ok', 'SYNC'),
        ('sync-err', 'SYNC (error)'),
        ('pid', 'PID'),
        ('framenum', 'FRAMENUM'),
        ('addr', 'ADDR'),
        ('ep', 'EP'),
        ('crc5-ok', 'CRC5'),
        ('crc5-err', 'CRC5 (error)'),
        ('data', 'DATA'),
        ('crc16-ok', 'CRC16'),
        ('crc16-err', 'CRC16 (error)'),
        ('packet-out', 'Packet: OUT'),
        ('packet-in', 'Packet: IN'),
        ('packet-sof', 'Packet: SOF'),
        ('packet-setup', 'Packet: SETUP'),
        ('packet-data0', 'Packet: DATA0'),
        ('packet-data1', 'Packet: DATA1'),
        ('packet-data2', 'Packet: DATA2'),
        ('packet-mdata', 'Packet: MDATA'),
        ('packet-ack', 'Packet: ACK'),
        ('packet-nak', 'Packet: NAK'),
        ('packet-stall', 'Packet: STALL'),
        ('packet-nyet', 'Packet: NYET'),
        ('packet-pre', 'Packet: PRE'),
        ('packet-err', 'Packet: ERR'),
        ('packet-split', 'Packet: SPLIT'),
        ('packet-ping', 'Packet: PING'),
        ('packet-reserved', 'Packet: Reserved'),
        ('packet-invalid', 'Packet: Invalid'),
    )
    annotation_rows = (
        ('fields', 'Packet fields', tuple(range(10 + 1))),
        ('packet', 'Packets', tuple(range(11, 28 + 1))),
    )

    def __init__(self):
        # self.bits accumulates [bitstring, ss, es] triples between SOP and EOP.
        self.bits = []
        # Decoded field values of the packet in progress / summary text for it.
        self.packet = []
        self.packet_summary = ''
        # Current field start/end sample, and whole-packet start/end sample.
        self.ss = self.es = None
        self.ss_packet = self.es_packet = None
        self.state = 'WAIT FOR SOP'

    def putpb(self, data):
        """Emit Python output for the current field span."""
        self.put(self.ss, self.es, self.out_python, data)

    def putb(self, data):
        """Emit an annotation for the current field span."""
        self.put(self.ss, self.es, self.out_ann, data)

    def putpp(self, data):
        """Emit Python output for the whole-packet span."""
        self.put(self.ss_packet, self.es_packet, self.out_python, data)

    def putp(self, data):
        """Emit an annotation for the whole-packet span."""
        self.put(self.ss_packet, self.es_packet, self.out_ann, data)

    def start(self):
        self.out_python = self.register(srd.OUTPUT_PYTHON)
        self.out_ann = self.register(srd.OUTPUT_ANN)

    def handle_packet(self):
        """Decode the accumulated bits into SYNC/PID/fields and emit output."""
        packet = ''
        for (bit, ss, es) in self.bits:
            packet += bit

        # Bits[0:7]: SYNC
        sync = packet[:7 + 1]
        self.ss, self.es = self.bits[0][1], self.bits[7][2]
        # The SYNC pattern for low-speed/full-speed is KJKJKJKK (00000001).
        if sync != '00000001':
            self.putpb(['SYNC ERROR', sync])
            self.putb([1, ['SYNC ERROR: %s' % sync, 'SYNC ERR: %s' % sync,
                           'SYNC ERR', 'SE', 'S']])
        else:
            self.putpb(['SYNC', sync])
            self.putb([0, ['SYNC: %s' % sync, 'SYNC', 'S']])
        self.packet.append(sync)

        # Bits[8:15]: PID
        pid = packet[8:15 + 1]
        pidname = pids.get(pid, (pid, ''))[0]
        self.ss, self.es = self.bits[8][1], self.bits[15][2]
        self.putpb(['PID', pidname])
        self.putb([2, ['PID: %s' % pidname, pidname, pidname[0]]])
        self.packet.append(pid)
        self.packet_summary += pidname

        if pidname in ('OUT', 'IN', 'SOF', 'SETUP', 'PRE', 'PING'):
            if pidname == 'SOF':
                # Bits[16:26]: Framenum
                framenum = bitstr_to_num(packet[16:26 + 1])
                self.ss, self.es = self.bits[16][1], self.bits[26][2]
                self.putpb(['FRAMENUM', framenum])
                self.putb([3, ['Frame: %d' % framenum, 'Frame', 'Fr', 'F']])
                self.packet.append(framenum)
                self.packet_summary += ' %d' % framenum
            else:
                # Bits[16:22]: Addr
                addr = bitstr_to_num(packet[16:22 + 1])
                self.ss, self.es = self.bits[16][1], self.bits[22][2]
                self.putpb(['ADDR', addr])
                self.putb([4, ['Address: %d' % addr, 'Addr: %d' % addr,
                               'Addr', 'A']])
                self.packet.append(addr)
                self.packet_summary += ' ADDR %d' % addr

                # Bits[23:26]: EP
                ep = bitstr_to_num(packet[23:26 + 1])
                self.ss, self.es = self.bits[23][1], self.bits[26][2]
                self.putpb(['EP', ep])
                self.putb([5, ['Endpoint: %d' % ep, 'EP: %d' % ep, 'EP', 'E']])
                self.packet.append(ep)
                self.packet_summary += ' EP %d' % ep

            # Bits[27:31]: CRC5
            crc5 = bitstr_to_num(packet[27:31 + 1])
            self.ss, self.es = self.bits[27][1], self.bits[31][2]
            self.putpb(['CRC5', crc5])
            self.putb([6, ['CRC5: 0x%02X' % crc5, 'CRC5', 'C']])
            self.packet.append(crc5)
        elif pidname in ('DATA0', 'DATA1', 'DATA2', 'MDATA'):
            # Bits[16:packetlen-16]: Data
            data = packet[16:-16]
            # TODO: len(data) must be a multiple of 8.
            databytes = []
            self.packet_summary += ' ['
            # Decode one byte (8 LSB-first bits) at a time.
            for i in range(0, len(data), 8):
                db = bitstr_to_num(data[i:i + 8])
                self.ss, self.es = self.bits[16 + i][1], self.bits[23 + i][2]
                self.putpb(['DATABYTE', db])
                self.putb([8, ['Databyte: %02X' % db, 'Data: %02X' % db,
                               'DB: %02X' % db, '%02X' % db]])
                databytes.append(db)
                self.packet_summary += ' %02X' % db
                data = data[8:]
            self.packet_summary += ' ]'

            # Convenience Python output (no annotation) for all bytes together.
            self.ss, self.es = self.bits[16][1], self.bits[-16][2]
            self.putpb(['DATABYTES', databytes])
            self.packet.append(databytes)

            # Bits[packetlen-16:packetlen]: CRC16
            crc16 = bitstr_to_num(packet[-16:])
            self.ss, self.es = self.bits[-16][1], self.bits[-1][2]
            self.putpb(['CRC16', crc16])
            self.putb([9, ['CRC16: 0x%04X' % crc16, 'CRC16', 'C']])
            self.packet.append(crc16)
        elif pidname in ('ACK', 'NAK', 'STALL', 'NYET', 'ERR'):
            pass  # Nothing to do, these only have SYNC+PID+EOP fields.
        else:
            pass  # TODO: Handle 'SPLIT' and possibly 'Reserved' packets.

        # Output a (summary of) the whole packet.
        pcategory, pname, pinfo = get_category(pidname), pidname, self.packet
        self.putpp(['PACKET', [pcategory, pname, pinfo]])
        self.putp([ann_index(pidname), ['%s' % self.packet_summary]])

        self.packet, self.packet_summary = [], ''

    def decode(self, ss, es, data):
        (ptype, pdata) = data

        # We only care about certain packet types for now.
        if ptype not in ('SOP', 'BIT', 'EOP'):
            return

        # State machine: collect bits between SOP and EOP, then decode.
        if self.state == 'WAIT FOR SOP':
            if ptype != 'SOP':
                return
            self.ss_packet = ss
            self.state = 'GET BIT'
        elif self.state == 'GET BIT':
            if ptype == 'BIT':
                self.bits.append([pdata, ss, es])
            elif ptype == 'EOP':
                self.es_packet = es
                self.handle_packet()
                self.bits, self.state = [], 'WAIT FOR SOP'
            else:
                pass  # TODO: Error
| gpl-3.0 |
heke123/chromium-crosswalk | tools/grit/grit/tclib.py | 59 | 6839 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Adaptation of the extern.tclib classes for our needs.
'''
import re
import types
from grit import exception
from grit import lazy_re
import grit.extern.tclib
# Matches whitespace sequences which can be folded into a single whitespace
# character. This matches single characters so that non-spaces are replaced
# with spaces.
_FOLD_WHITESPACE = re.compile(r'\s+')
def Identity(i):
  '''Identity function: returns its argument unchanged.'''
  return i
class BaseMessage(object):
  '''Base class with methods shared by Message and Translation.
  '''
  def __init__(self, text='', placeholders=None, description='', meaning=''):
    '''Initializes the message.

    Args:
      text: The message text, possibly containing placeholder presentations.
      placeholders: A list of Placeholder objects, or None for none.
      description: A free-form description shown to translators.
      meaning: Disambiguates otherwise-identical messages (affects the id).
    '''
    # Use None instead of a shared mutable [] default so instances can never
    # accidentally alias (and mutate) the default list.
    if placeholders is None:
      placeholders = []
    self.parts = []
    self.placeholders = []
    self.meaning = meaning
    self.dirty = True  # True if self.id is (or might be) wrong
    self.id = 0
    self.SetDescription(description)
    if text != '':
      if not placeholders:
        self.AppendText(text)
      else:
        # Map each placeholder presentation to [placeholder, use_count].
        tag_map = {}
        for placeholder in placeholders:
          tag_map[placeholder.GetPresentation()] = [placeholder, 0]
        # This creates a regexp like '(TAG1|TAG2|TAG3)'.
        # The tags have to be sorted in order of decreasing length, so that
        # longer tags are substituted before shorter tags that happen to be
        # substrings of the longer tag.
        # E.g. "EXAMPLE_FOO_NAME" must be matched before "EXAMPLE_FOO",
        # otherwise "EXAMPLE_FOO" splits "EXAMPLE_FOO_NAME" too.
        tags = tag_map.keys()
        tags.sort(cmp=lambda x,y: len(x) - len(y) or cmp(x, y), reverse=True)
        tag_re = '(' + '|'.join(tags) + ')'
        chunked_text = re.split(tag_re, text)
        for chunk in chunked_text:
          if chunk: # ignore empty chunk
            if tag_map.has_key(chunk):
              self.AppendPlaceholder(tag_map[chunk][0])
              tag_map[chunk][1] += 1 # increase placeholder use count
            else:
              self.AppendText(chunk)
        # Every supplied placeholder must actually occur in the text.
        for key in tag_map.keys():
          assert tag_map[key][1] != 0
  def GetRealContent(self, escaping_function=Identity):
    '''Returns the original content, i.e. what your application and users
    will see.
    Specify a function to escape each translateable bit, if you like.
    '''
    bits = []
    for item in self.parts:
      if isinstance(item, types.StringTypes):
        bits.append(escaping_function(item))
      else:
        # Placeholders contribute their original (untranslated) text.
        bits.append(item.GetOriginal())
    return ''.join(bits)
  def GetPresentableContent(self):
    '''Returns the content with placeholders shown by their presentation
    names (e.g. USERNAME), as presented to translators.
    '''
    presentable_content = []
    for part in self.parts:
      if isinstance(part, Placeholder):
        presentable_content.append(part.GetPresentation())
      else:
        presentable_content.append(part)
    return ''.join(presentable_content)
  def AppendPlaceholder(self, placeholder):
    '''Appends a placeholder part; duplicates (same presentation) are only
    recorded once in self.placeholders but always appended to self.parts.
    '''
    assert isinstance(placeholder, Placeholder)
    dup = False
    for other in self.GetPlaceholders():
      if other.presentation == placeholder.presentation:
        # Same presentation must always mean the same original text.
        assert other.original == placeholder.original
        dup = True
    if not dup:
      self.placeholders.append(placeholder)
    self.parts.append(placeholder)
    self.dirty = True
  def AppendText(self, text):
    '''Appends a non-empty text part.'''
    assert isinstance(text, types.StringTypes)
    assert text != ''
    self.parts.append(text)
    self.dirty = True
  def GetContent(self):
    '''Returns the parts of the message. You may modify parts if you wish.
    Note that you must not call GetId() on this object until you have finished
    modifying the contents.
    '''
    self.dirty = True  # user might modify content
    return self.parts
  def GetDescription(self):
    return self.description
  def SetDescription(self, description):
    # Fold whitespace runs into single spaces so descriptions compare stably.
    self.description = _FOLD_WHITESPACE.sub(' ', description)
  def GetMeaning(self):
    return self.meaning
  def GetId(self):
    '''Returns the message id, regenerating it if the content changed.'''
    if self.dirty:
      self.id = self.GenerateId()
      self.dirty = False
    return self.id
  def GenerateId(self):
    # Must use a UTF-8 encoded version of the presentable content, along with
    # the meaning attribute, to match the TC.
    return grit.extern.tclib.GenerateMessageId(
      self.GetPresentableContent().encode('utf-8'), self.meaning)
  def GetPlaceholders(self):
    return self.placeholders
  def FillTclibBaseMessage(self, msg):
    '''Copies description, text parts and placeholders (UTF-8 encoded)
    into the given extern.tclib message.
    '''
    msg.SetDescription(self.description.encode('utf-8'))
    for part in self.parts:
      if isinstance(part, Placeholder):
        ph = grit.extern.tclib.Placeholder(
          part.presentation.encode('utf-8'),
          part.original.encode('utf-8'),
          part.example.encode('utf-8'))
        msg.AppendPlaceholder(ph)
      else:
        msg.AppendText(part.encode('utf-8'))
class Message(BaseMessage):
  '''A message.'''
  def __init__(self, text='', placeholders=None, description='', meaning='',
               assigned_id=None):
    # placeholders defaults to None (not a shared mutable []); the base class
    # treats None the same as an empty list.
    super(Message, self).__init__(text, placeholders, description, meaning)
    self.assigned_id = assigned_id
  def ToTclibMessage(self):
    '''Returns an extern.tclib.Message equivalent of this message.'''
    msg = grit.extern.tclib.Message('utf-8', meaning=self.meaning)
    self.FillTclibBaseMessage(msg)
    return msg
  def GetId(self):
    '''Use the assigned id if we have one.'''
    if self.assigned_id:
      return self.assigned_id
    return super(Message, self).GetId()
  def HasAssignedId(self):
    '''Returns True if this message has an assigned id.'''
    return bool(self.assigned_id)
class Translation(BaseMessage):
  '''A translation.'''
  def __init__(self, text='', id='', placeholders=None, description='', meaning=''):
    # placeholders defaults to None (not a shared mutable []).
    super(Translation, self).__init__(text, placeholders, description, meaning)
    self.id = id
  def GetId(self):
    # BUG FIX: the original asserted on the builtin id() function (always
    # truthy, so the check could never fire). Check the instance's id.
    assert self.id != '', "ID has not been set."
    return self.id
  def SetId(self, id):
    self.id = id
  def ToTclibMessage(self):
    '''Returns an extern.tclib.Message equivalent of this translation.'''
    msg = grit.extern.tclib.Message(
      'utf-8', id=self.id, meaning=self.meaning)
    self.FillTclibBaseMessage(msg)
    return msg
class Placeholder(grit.extern.tclib.Placeholder):
  '''Modifies constructor to accept a Unicode string
  '''
  # Must match placeholder presentation names
  _NAME_RE = lazy_re.compile('^[A-Za-z0-9_]+$')
  def __init__(self, presentation, original, example):
    '''Creates a new placeholder.
    Args:
      presentation: 'USERNAME'
      original: '%s'
      example: 'Joi'
    '''
    # All three pieces are mandatory and must be non-empty.
    for value in (presentation, original, example):
      assert value != ''
    if not self._NAME_RE.match(presentation):
      raise exception.InvalidPlaceholderName(presentation)
    self.presentation = presentation
    self.original = original
    self.example = example
  def GetPresentation(self):
    return self.presentation
  def GetOriginal(self):
    return self.original
  def GetExample(self):
    return self.example
| bsd-3-clause |
eyohansa/django | tests/template_tests/syntax_tests/test_cache.py | 299 | 6777 | from django.core.cache import cache
from django.template import Context, Engine, TemplateSyntaxError
from django.test import SimpleTestCase, override_settings
from ..utils import setup
class CacheTagTests(SimpleTestCase):
    # Template tag libraries made available to {% load %} in the templates
    # registered via the @setup decorator below.
    libraries = {
        'cache': 'django.templatetags.cache',
        'custom': 'template_tests.templatetags.custom',
    }
    def tearDown(self):
        # The tests share the default cache backend; clear it between tests.
        cache.clear()
    @setup({'cache03': '{% load cache %}{% cache 2 test %}cache03{% endcache %}'})
    def test_cache03(self):
        output = self.engine.render_to_string('cache03')
        self.assertEqual(output, 'cache03')
    @setup({
        'cache03': '{% load cache %}{% cache 2 test %}cache03{% endcache %}',
        'cache04': '{% load cache %}{% cache 2 test %}cache04{% endcache %}',
    })
    def test_cache04(self):
        # Same fragment name/timeout: the second render must hit the cached
        # content produced by the first.
        self.engine.render_to_string('cache03')
        output = self.engine.render_to_string('cache04')
        self.assertEqual(output, 'cache03')
    @setup({'cache05': '{% load cache %}{% cache 2 test foo %}cache05{% endcache %}'})
    def test_cache05(self):
        output = self.engine.render_to_string('cache05', {'foo': 1})
        self.assertEqual(output, 'cache05')
    @setup({'cache06': '{% load cache %}{% cache 2 test foo %}cache06{% endcache %}'})
    def test_cache06(self):
        output = self.engine.render_to_string('cache06', {'foo': 2})
        self.assertEqual(output, 'cache06')
    @setup({
        'cache05': '{% load cache %}{% cache 2 test foo %}cache05{% endcache %}',
        'cache07': '{% load cache %}{% cache 2 test foo %}cache07{% endcache %}',
    })
    def test_cache07(self):
        # Same vary-on value ('foo': 1) caches under the same key.
        context = {'foo': 1}
        self.engine.render_to_string('cache05', context)
        output = self.engine.render_to_string('cache07', context)
        self.assertEqual(output, 'cache05')
    @setup({
        'cache06': '{% load cache %}{% cache 2 test foo %}cache06{% endcache %}',
        'cache08': '{% load cache %}{% cache time test foo %}cache08{% endcache %}',
    })
    def test_cache08(self):
        """
        Allow first argument to be a variable.
        """
        context = {'foo': 2, 'time': 2}
        self.engine.render_to_string('cache06', context)
        output = self.engine.render_to_string('cache08', context)
        self.assertEqual(output, 'cache06')
    # Raise exception if we don't have at least 2 args, first one integer.
    @setup({'cache11': '{% load cache %}{% cache %}{% endcache %}'})
    def test_cache11(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cache11')
    @setup({'cache12': '{% load cache %}{% cache 1 %}{% endcache %}'})
    def test_cache12(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cache12')
    @setup({'cache13': '{% load cache %}{% cache foo bar %}{% endcache %}'})
    def test_cache13(self):
        # 'foo' is undefined in the context, so the timeout cannot resolve.
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('cache13')
    @setup({'cache14': '{% load cache %}{% cache foo bar %}{% endcache %}'})
    def test_cache14(self):
        # Timeout resolves to a non-numeric string.
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('cache14', {'foo': 'fail'})
    @setup({'cache15': '{% load cache %}{% cache foo bar %}{% endcache %}'})
    def test_cache15(self):
        # Timeout resolves to a non-numeric object (a list).
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('cache15', {'foo': []})
    @setup({'cache16': '{% load cache %}{% cache 1 foo bar %}{% endcache %}'})
    def test_cache16(self):
        """
        Regression test for #7460.
        """
        output = self.engine.render_to_string('cache16', {'foo': 'foo', 'bar': 'with spaces'})
        self.assertEqual(output, '')
    @setup({'cache17': '{% load cache %}{% cache 10 long_cache_key poem %}Some Content{% endcache %}'})
    def test_cache17(self):
        """
        Regression test for #11270.
        """
        output = self.engine.render_to_string('cache17', {'poem': 'Oh freddled gruntbuggly/'
                                                                  'Thy micturations are to me/'
                                                                  'As plurdled gabbleblotchits/'
                                                                  'On a lurgid bee/'
                                                                  'That mordiously hath bitled out/'
                                                                  'Its earted jurtles/'
                                                                  'Into a rancid festering/'
                                                                  'Or else I shall rend thee in the gobberwarts'
                                                                  'with my blurglecruncheon/'
                                                                  'See if I dont.'})
        self.assertEqual(output, 'Some Content')
    @setup({'cache18': '{% load cache custom %}{% cache 2|noop:"x y" cache18 %}cache18{% endcache %}'})
    def test_cache18(self):
        """
        Test whitespace in filter arguments
        """
        output = self.engine.render_to_string('cache18')
        self.assertEqual(output, 'cache18')
class CacheTests(SimpleTestCase):
    @classmethod
    def setUpClass(cls):
        # Standalone engine (not tied to settings.TEMPLATES) with just the
        # cache tag library loaded.
        cls.engine = Engine(libraries={'cache': 'django.templatetags.cache'})
        super(CacheTests, cls).setUpClass()
    def test_cache_regression_20130(self):
        # The CacheNode must record its fragment name (regression #20130).
        t = self.engine.from_string('{% load cache %}{% cache 1 regression_20130 %}foo{% endcache %}')
        cachenode = t.nodelist[1]
        self.assertEqual(cachenode.fragment_name, 'regression_20130')
    @override_settings(CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'default',
        },
        'template_fragments': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'fragments',
        },
    })
    def test_cache_fragment_cache(self):
        """
        When a cache called "template_fragments" is present, the cache tag
        will use it in preference to 'default'
        """
        t1 = self.engine.from_string('{% load cache %}{% cache 1 fragment %}foo{% endcache %}')
        t2 = self.engine.from_string('{% load cache %}{% cache 1 fragment using="default" %}bar{% endcache %}')
        ctx = Context()
        o1 = t1.render(ctx)
        o2 = t2.render(ctx)
        self.assertEqual(o1, 'foo')
        self.assertEqual(o2, 'bar')
    def test_cache_missing_backend(self):
        """
        When a cache that doesn't exist is specified, the cache tag will
        raise a TemplateSyntaxError
        """
        t = self.engine.from_string('{% load cache %}{% cache 1 backend using="unknown" %}bar{% endcache %}')
        ctx = Context()
        with self.assertRaises(TemplateSyntaxError):
            t.render(ctx)
| bsd-3-clause |
hheimbuerger/suds-gzip | suds/client.py | 2 | 28840 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{2nd generation} service proxy provides access to web services.
See I{README.txt}
"""
import suds
import suds.metrics as metrics
from cookielib import CookieJar
from suds import *
from suds.reader import DefinitionsReader
from suds.transport import TransportError, Request
from suds.transport.https import HttpAuthenticated
from suds.servicedefinition import ServiceDefinition
from suds import sudsobject
from sudsobject import Factory as InstFactory
from sudsobject import Object
from suds.resolver import PathResolver
from suds.builder import Builder
from suds.wsdl import Definitions
from suds.cache import ObjectCache
from suds.sax.document import Document
from suds.sax.parser import Parser
from suds.options import Options
from suds.properties import Unskin
from urlparse import urlparse
from copy import deepcopy
from suds.plugin import PluginContainer
from logging import getLogger
# Module-level logger shared by all classes in this module.
log = getLogger(__name__)
class Client(object):
    """
    A lightweight web services client.
    I{(2nd generation)} API.
    @ivar wsdl: The WSDL object.
    @type wsdl:L{Definitions}
    @ivar service: The service proxy used to invoke operations.
    @type service: L{Service}
    @ivar factory: The factory used to create objects.
    @type factory: L{Factory}
    @ivar sd: The service definition
    @type sd: L{ServiceDefinition}
    @ivar messages: The last sent/received messages.
    @type messages: str[2]
    """
    @classmethod
    def items(cls, sobject):
        """
        Extract the I{items} from a suds object much like the
        items() method works on I{dict}.
        @param sobject: A suds object
        @type sobject: L{Object}
        @return: A list of items contained in I{sobject}.
        @rtype: [(key, value),...]
        """
        return sudsobject.items(sobject)
    @classmethod
    def dict(cls, sobject):
        """
        Convert a sudsobject into a dictionary.
        @param sobject: A suds object
        @type sobject: L{Object}
        @return: A python dictionary containing the
            items contained in I{sobject}.
        @rtype: dict
        """
        return sudsobject.asdict(sobject)
    @classmethod
    def metadata(cls, sobject):
        """
        Extract the metadata from a suds object.
        @param sobject: A suds object
        @type sobject: L{Object}
        @return: The object's metadata
        @rtype: L{sudsobject.Metadata}
        """
        return sobject.__metadata__
    def __init__(self, url, **kwargs):
        """
        @param url: The URL for the WSDL.
        @type url: str
        @param kwargs: keyword arguments.
        @see: L{Options}
        """
        options = Options()
        options.transport = HttpAuthenticated()
        self.options = options
        # Default object cache (1 day); may be overridden via kwargs below.
        options.cache = ObjectCache(days=1)
        self.set_options(**kwargs)
        # Read and parse the WSDL, then notify plugins it is available.
        reader = DefinitionsReader(options, Definitions)
        self.wsdl = reader.open(url)
        plugins = PluginContainer(options.plugins)
        plugins.init.initialized(wsdl=self.wsdl)
        self.factory = Factory(self.wsdl)
        self.service = ServiceSelector(self, self.wsdl.services)
        # One ServiceDefinition per service declared in the WSDL.
        self.sd = []
        for s in self.wsdl.services:
            sd = ServiceDefinition(self.wsdl, s)
            self.sd.append(sd)
        self.messages = dict(tx=None, rx=None)
    def set_options(self, **kwargs):
        """
        Set options.
        @param kwargs: keyword arguments.
        @see: L{Options}
        """
        p = Unskin(self.options)
        p.update(kwargs)
    def add_prefix(self, prefix, uri):
        """
        Add I{static} mapping of an XML namespace prefix to a namespace.
        This is useful for cases when a wsdl and referenced schemas make heavy
        use of namespaces and those namespaces are subject to changed.
        @param prefix: An XML namespace prefix.
        @type prefix: str
        @param uri: An XML namespace URI.
        @type uri: str
        @raise Exception: when prefix is already mapped.
        """
        root = self.wsdl.root
        mapped = root.resolvePrefix(prefix, None)
        if mapped is None:
            root.addPrefix(prefix, uri)
            return
        # Re-mapping the same prefix to the same URI is a harmless no-op;
        # a different URI is an error.
        if mapped[1] != uri:
            raise Exception('"%s" already mapped as "%s"' % (prefix, mapped))
    def last_sent(self):
        """
        Get last sent I{soap} message.
        @return: The last sent I{soap} message.
        @rtype: L{Document}
        """
        return self.messages.get('tx')
    def last_received(self):
        """
        Get last received I{soap} message.
        @return: The last received I{soap} message.
        @rtype: L{Document}
        """
        return self.messages.get('rx')
    def clone(self):
        """
        Get a shallow clone of this object.
        The clone only shares the WSDL. All other attributes are
        unique to the cloned object including options.
        @return: A shallow clone.
        @rtype: L{Client}
        """
        # Uninitialized skips Client.__init__ (no WSDL re-download).
        class Uninitialized(Client):
            def __init__(self):
                pass
        clone = Uninitialized()
        clone.options = Options()
        cp = Unskin(clone.options)
        mp = Unskin(self.options)
        cp.update(deepcopy(mp))
        clone.wsdl = self.wsdl
        clone.factory = self.factory
        clone.service = ServiceSelector(clone, self.wsdl.services)
        clone.sd = self.sd
        clone.messages = dict(tx=None, rx=None)
        return clone
    def __str__(self):
        return unicode(self)
    def __unicode__(self):
        # Human-readable summary: suds version/build plus each service def.
        s = ['\n']
        build = suds.__build__.split()
        s.append('Suds ( https://fedorahosted.org/suds/ )')
        s.append('  version: %s' % suds.__version__)
        s.append(' %s  build: %s' % (build[0], build[1]))
        for sd in self.sd:
            s.append('\n\n%s' % unicode(sd))
        return ''.join(s)
class Factory:
    """
    A factory for instantiating types defined in the wsdl
    @ivar resolver: A schema type resolver.
    @type resolver: L{PathResolver}
    @ivar builder: A schema object builder.
    @type builder: L{Builder}
    """
    def __init__(self, wsdl):
        """
        @param wsdl: A schema object.
        @type wsdl: L{wsdl.Definitions}
        """
        self.wsdl = wsdl
        self.resolver = PathResolver(wsdl)
        self.builder = Builder(self.resolver)
    def create(self, name):
        """
        create a WSDL type by name
        @param name: The name of a type defined in the WSDL.
        @type name: str
        @return: The requested object.
        @rtype: L{Object}
        """
        timer = metrics.Timer()
        timer.start()
        type = self.resolver.find(name)
        if type is None:
            raise TypeNotFound(name)
        if type.enum():
            # Enumerations: an object whose attributes are the enumeration
            # value names (each attribute is set to its own name).
            result = InstFactory.object(name)
            for e, a in type.children():
                setattr(result, e.name, e.name)
        else:
            try:
                result = self.builder.build(type)
            except Exception, e:
                log.error("create '%s' failed", name, exc_info=True)
                raise BuildError(name, e)
        timer.stop()
        metrics.log.debug('%s created: %s', name, timer)
        return result
    def separator(self, ps):
        """
        Set the path separator.
        @param ps: The new path separator.
        @type ps: char
        """
        # A new resolver is needed because the separator is fixed at
        # construction time.
        self.resolver = PathResolver(self.wsdl, ps)
class ServiceSelector:
    """
    The B{service} selector is used to select a web service.
    In most cases, the wsdl only defines (1) service in which access
    by subscript is passed through to a L{PortSelector}.  This is also the
    behavior when a I{default} service has been specified.  In cases
    where multiple services have been defined and no default has been
    specified, the service is found by name (or index) and a L{PortSelector}
    for the service is returned.  In all cases, attribute access is
    forwarded to the L{PortSelector} for either the I{first} service or the
    I{default} service (when specified).
    @ivar __client: A suds client.
    @type __client: L{Client}
    @ivar __services: A list of I{wsdl} services.
    @type __services: list
    """
    def __init__(self, client, services):
        """
        @param client: A suds client.
        @type client: L{Client}
        @param services: A list of I{wsdl} services.
        @type services: list
        """
        self.__client = client
        self.__services = services
    def __getattr__(self, name):
        """
        Request to access an attribute is forwarded to the
        L{PortSelector} for either the I{first} service or the
        I{default} service (when specified).
        @param name: The name of a method.
        @type name: str
        @return: A L{PortSelector}.
        @rtype: L{PortSelector}.
        """
        # Prefer the configured default service; fall back to the first one.
        default = self.__ds()
        if default is None:
            port = self.__find(0)
        else:
            port = default
        return getattr(port, name)
    def __getitem__(self, name):
        """
        Provides selection of the I{service} by name (string) or
        index (integer).  In cases where only (1) service is defined
        or a I{default} has been specified, the request is forwarded
        to the L{PortSelector}.
        @param name: The name (or index) of a service.
        @type name: (int|str)
        @return: A L{PortSelector} for the specified service.
        @rtype: L{PortSelector}.
        """
        # With a single service (or a default) the subscript is interpreted
        # as a port selection, not a service selection.
        if len(self.__services) == 1:
            port = self.__find(0)
            return port[name]
        default = self.__ds()
        if default is not None:
            port = default
            return port[name]
        return self.__find(name)
    def __find(self, name):
        """
        Find a I{service} by name (string) or index (integer).
        @param name: The name (or index) of a service.
        @type name: (int|str)
        @return: A L{PortSelector} for the found service.
        @rtype: L{PortSelector}.
        """
        service = None
        if not len(self.__services):
            raise Exception, 'No services defined'
        if isinstance(name, int):
            try:
                service = self.__services[name]
                name = service.name
            except IndexError:
                raise ServiceNotFound, 'at [%d]' % name
        else:
            for s in self.__services:
                if name == s.name:
                    service = s
                    break
        if service is None:
            raise ServiceNotFound, name
        return PortSelector(self.__client, service.ports, name)
    def __ds(self):
        """
        Get the I{default} service if defined in the I{options}.
        @return: A L{PortSelector} for the I{default} service.
        @rtype: L{PortSelector}.
        """
        ds = self.__client.options.service
        if ds is None:
            return None
        else:
            return self.__find(ds)
class PortSelector:
    """
    The B{port} selector is used to select a I{web service} B{port}.
    In cases where multiple ports have been defined and no default has been
    specified, the port is found by name (or index) and a L{MethodSelector}
    for the port is returned.  In all cases, attribute access is
    forwarded to the L{MethodSelector} for either the I{first} port or the
    I{default} port (when specified).
    @ivar __client: A suds client.
    @type __client: L{Client}
    @ivar __ports: A list of I{service} ports.
    @type __ports: list
    @ivar __qn: The I{qualified} name of the port (used for logging).
    @type __qn: str
    """
    def __init__(self, client, ports, qn):
        """
        @param client: A suds client.
        @type client: L{Client}
        @param ports: A list of I{service} ports.
        @type ports: list
        @param qn: The name of the service.
        @type qn: str
        """
        self.__client = client
        self.__ports = ports
        self.__qn = qn
    def __getattr__(self, name):
        """
        Request to access an attribute is forwarded to the
        L{MethodSelector} for either the I{first} port or the
        I{default} port (when specified).
        @param name: The name of a method.
        @type name: str
        @return: A L{MethodSelector}.
        @rtype: L{MethodSelector}.
        """
        # Prefer the configured default port; fall back to the first one.
        default = self.__dp()
        if default is None:
            m = self.__find(0)
        else:
            m = default
        return getattr(m, name)
    def __getitem__(self, name):
        """
        Provides selection of the I{port} by name (string) or
        index (integer).  In cases where only (1) port is defined
        or a I{default} has been specified, the request is forwarded
        to the L{MethodSelector}.
        @param name: The name (or index) of a port.
        @type name: (int|str)
        @return: A L{MethodSelector} for the specified port.
        @rtype: L{MethodSelector}.
        """
        default = self.__dp()
        if default is None:
            return self.__find(name)
        else:
            return default
    def __find(self, name):
        """
        Find a I{port} by name (string) or index (integer).
        @param name: The name (or index) of a port.
        @type name: (int|str)
        @return: A L{MethodSelector} for the found port.
        @rtype: L{MethodSelector}.
        """
        port = None
        if not len(self.__ports):
            raise Exception, 'No ports defined: %s' % self.__qn
        if isinstance(name, int):
            qn = '%s[%d]' % (self.__qn, name)
            try:
                port = self.__ports[name]
            except IndexError:
                raise PortNotFound, qn
        else:
            qn = '.'.join((self.__qn, name))
            for p in self.__ports:
                if name == p.name:
                    port = p
                    break
        if port is None:
            raise PortNotFound, qn
        # Re-qualify with the resolved port name (covers index lookups).
        qn = '.'.join((self.__qn, port.name))
        return MethodSelector(self.__client, port.methods, qn)
    def __dp(self):
        """
        Get the I{default} port if defined in the I{options}.
        @return: A L{MethodSelector} for the I{default} port.
        @rtype: L{MethodSelector}.
        """
        dp = self.__client.options.port
        if dp is None:
            return None
        else:
            return self.__find(dp)
class MethodSelector:
    """
    The B{method} selector is used to select a B{method} by name.
    @ivar __client: A suds client.
    @type __client: L{Client}
    @ivar __methods: A dictionary of methods.
    @type __methods: dict
    @ivar __qn: The I{qualified} name of the method (used for logging).
    @type __qn: str
    """
    def __init__(self, client, methods, qn):
        """
        @param client: A suds client.
        @type client: L{Client}
        @param methods: A dictionary of methods.
        @type methods: dict
        @param qn: The I{qualified} name of the port.
        @type qn: str
        """
        self.__client = client
        self.__methods = methods
        self.__qn = qn
    def __getattr__(self, name):
        """
        Get a method by name and return it in an I{execution wrapper}.
        @param name: The name of a method.
        @type name: str
        @return: An I{execution wrapper} for the specified method name.
        @rtype: L{Method}
        """
        # Attribute access delegates to subscript access.
        return self[name]
    def __getitem__(self, name):
        """
        Get a method by name and return it in an I{execution wrapper}.
        @param name: The name of a method.
        @type name: str
        @return: An I{execution wrapper} for the specified method name.
        @rtype: L{Method}
        """
        m = self.__methods.get(name)
        if m is None:
            qn = '.'.join((self.__qn, name))
            raise MethodNotFound, qn
        return Method(self.__client, m)
class Method:
    """
    The I{method} (namespace) object.
    @ivar client: A client object.
    @type client: L{Client}
    @ivar method: A I{wsdl} method.
    @type I{wsdl} Method.
    """
    def __init__(self, client, method):
        """
        @param client: A client object.
        @type client: L{Client}
        @param method: A I{raw} method.
        @type I{raw} Method.
        """
        self.client = client
        self.method = method
    def __call__(self, *args, **kwargs):
        """
        Invoke the method.
        """
        clientclass = self.clientclass(kwargs)
        client = clientclass(self.client, self.method)
        if not self.faults():
            # faults option disabled: report the fault as a (status, fault)
            # tuple instead of raising.
            try:
                return client.invoke(args, kwargs)
            except WebFault, e:
                return (500, e)
        else:
            return client.invoke(args, kwargs)
    def faults(self):
        """ get faults option """
        return self.client.options.faults
    def clientclass(self, kwargs):
        """ get soap client class """
        # A simulated (injected) invocation uses the loopback client.
        if SimClient.simulation(kwargs):
            return SimClient
        else:
            return SoapClient
class SoapClient:
    """
    A lightweight soap based web client B{**not intended for external use}
    @ivar service: The target method.
    @type service: L{Service}
    @ivar method: A target method.
    @type method: L{Method}
    @ivar options: A dictonary of options.
    @type options: dict
    @ivar cookiejar: A cookie jar.
    @type cookiejar: libcookie.CookieJar
    """
    def __init__(self, client, method):
        """
        @param client: A suds client.
        @type client: L{Client}
        @param method: A target method.
        @type method: L{Method}
        """
        self.client = client
        self.method = method
        self.options = client.options
        self.cookiejar = CookieJar()
    def invoke(self, args, kwargs):
        """
        Send the required soap message to invoke the specified method
        @param args: A list of args for the method invoked.
        @type args: list
        @param kwargs: Named (keyword) args for the method invoked.
        @type kwargs: dict
        @return: The result of the method invocation.
        @rtype: I{builtin}|I{subclass of} L{Object}
        """
        timer = metrics.Timer()
        timer.start()
        result = None
        # Marshal args/kwargs into a soap envelope via the input binding.
        binding = self.method.binding.input
        soapenv = binding.get_message(self.method, args, kwargs)
        timer.stop()
        metrics.log.debug(
                "message for '%s' created: %s",
                self.method.name,
                timer)
        timer.start()
        result = self.send(soapenv)
        timer.stop()
        metrics.log.debug(
                "method '%s' invoked: %s",
                self.method.name,
                timer)
        return result
    def send(self, soapenv):
        """
        Send soap message.
        @param soapenv: A soap envelope to send.
        @type soapenv: L{Document}
        @return: The reply to the sent message.
        @rtype: I{builtin} or I{subclass of} L{Object}
        """
        result = None
        location = self.location()
        binding = self.method.binding.input
        transport = self.options.transport
        retxml = self.options.retxml
        nosend = self.options.nosend
        prettyxml = self.options.prettyxml
        timer = metrics.Timer()
        log.debug('sending to (%s)\nmessage:\n%s', location, soapenv)
        try:
            self.last_sent(soapenv)
            plugins = PluginContainer(self.options.plugins)
            plugins.message.marshalled(envelope=soapenv.root())
            # Serialize the envelope (pretty or plain) and encode to UTF-8.
            if prettyxml:
                soapenv = soapenv.str()
            else:
                soapenv = soapenv.plain()
            soapenv = soapenv.encode('utf-8')
            # Plugins may rewrite the outgoing envelope.
            ctx = plugins.message.sending(envelope=soapenv)
            soapenv = ctx.envelope
            if nosend:
                # nosend option: return the prepared request without sending.
                return RequestContext(self, binding, soapenv)
            request = Request(location, soapenv)
            request.headers = self.headers()
            timer.start()
            reply = transport.send(request)
            timer.stop()
            metrics.log.debug('waited %s on server reply', timer)
            # Plugins may rewrite the incoming reply.
            ctx = plugins.message.received(reply=reply.message)
            reply.message = ctx.reply
            if retxml:
                result = reply.message
            else:
                result = self.succeeded(binding, reply.message)
        except TransportError, e:
            # 202/204 mean "accepted / no content" and are not failures.
            if e.httpcode in (202,204):
                result = None
            else:
                log.error(self.last_sent())
                result = self.failed(binding, e)
        return result
    def headers(self):
        """
        Get http headers or the http/https request.
        @return: A dictionary of header/values.
        @rtype: dict
        """
        action = self.method.soap.action
        if isinstance(action, unicode):
            action = action.encode('utf-8')
        stock = { 'Content-Type' : 'text/xml; charset=utf-8', 'SOAPAction': action }
        # At this point the action was encoded, but the vanilla suds code takes all injected headers as they are,
        # potentially implicitly decoding the whole request into a unicode string, if there's any unicode in the
        # headers (e.g. because you're like me and trying to be clever and Python 3 compatible by using
        # unicode_literals. This causes all kinds of horrible pains, as I've had to repeatedly notice. We could
        # silently encode everything here, but I'll go the safer(?) route and just reject all unicode strings.
        for k, v in self.options.headers.items():
            if type(k) != str:
                raise ValueError("'%s' header has a non-string name, but only (encoded/non-unicode) strings are allowed" % repr(k))
            if type(v) != str:
                raise ValueError("'%s' header has a non-string value, but only (encoded/non-unicode) strings are allowed: %s" % (k, repr(v)))
        # User-supplied headers override the stock ones on key collision.
        result = dict(stock, **self.options.headers)
        log.debug('headers = %s', result)
        return result
    def succeeded(self, binding, reply):
        """
        Request succeeded, process the reply
        @param binding: The binding to be used to process the reply.
        @type binding: L{bindings.binding.Binding}
        @param reply: The raw reply text.
        @type reply: str
        @return: The method result.
        @rtype: I{builtin}, L{Object}
        @raise WebFault: On server.
        """
        log.debug('http succeeded:\n%s', reply)
        plugins = PluginContainer(self.options.plugins)
        if len(reply) > 0:
            reply, result = binding.get_reply(self.method, reply)
            self.last_received(reply)
        else:
            result = None
        ctx = plugins.message.unmarshalled(reply=result)
        result = ctx.reply
        # With faults disabled, wrap the result in an (http-status, result)
        # tuple for consistency with the failure path.
        if self.options.faults:
            return result
        else:
            return (200, result)
    def failed(self, binding, error):
        """
        Request failed, process reply based on reason
        @param binding: The binding to be used to process the reply.
        @type binding: L{suds.bindings.binding.Binding}
        @param error: The http error message
        @type error: L{transport.TransportError}
        """
        status, reason = (error.httpcode, tostr(error))
        reply = error.fp.read()
        log.debug('http failed:\n%s', reply)
        if status == 500:
            # HTTP 500 may carry a soap fault in the body.
            if len(reply) > 0:
                r, p = binding.get_fault(reply)
                self.last_received(r)
                return (status, p)
            else:
                return (status, None)
        if self.options.faults:
            raise TransportError(reason, status)
        else:
            return (status, None)
    def location(self):
        # The 'location' option (when set) overrides the WSDL-declared one.
        p = Unskin(self.options)
        return p.get('location', self.method.location)
    def last_sent(self, d=None):
        # Getter/setter: no arg reads, an arg records the last sent message.
        key = 'tx'
        messages = self.client.messages
        if d is None:
            return messages.get(key)
        else:
            messages[key] = d
    def last_received(self, d=None):
        # Getter/setter: no arg reads, an arg records the last received message.
        key = 'rx'
        messages = self.client.messages
        if d is None:
            return messages.get(key)
        else:
            messages[key] = d
class SimClient(SoapClient):
    """
    Loopback client used for message/reply simulation.
    The simulation payload is injected through the I{__inject} keyword
    argument: a dict with optional 'msg', 'reply' and 'fault' entries.
    """

    injkey = '__inject'

    @classmethod
    def simulation(cls, kwargs):
        """ get whether loopback has been specified in the I{kwargs}. """
        # 'in' replaces dict.has_key(), which was removed in python 3
        # (identical behavior on python 2).
        return SimClient.injkey in kwargs

    def invoke(self, args, kwargs):
        """
        Send the required soap message to invoke the specified method
        @param args: A list of args for the method invoked.
        @type args: list
        @param kwargs: Named (keyword) args for the method invoked.
        @type kwargs: dict
        @return: The result of the method invocation.
        @rtype: I{builtin} or I{subclass of} L{Object}
        """
        simulation = kwargs[self.injkey]
        msg = simulation.get('msg')
        reply = simulation.get('reply')
        fault = simulation.get('fault')
        if msg is None:
            # No request message supplied: the caller must provide a
            # canned reply or fault to be processed directly.
            if reply is not None:
                return self.__reply(reply, args, kwargs)
            if fault is not None:
                return self.__fault(fault)
            raise Exception('(reply|fault) expected when msg=None')
        sax = Parser()
        msg = sax.parse(string=msg)
        return self.send(msg)

    def __reply(self, reply, args, kwargs):
        """ simulate the reply """
        binding = self.method.binding.input
        msg = binding.get_message(self.method, args, kwargs)
        log.debug('inject (simulated) send message:\n%s', msg)
        binding = self.method.binding.output
        return self.succeeded(binding, reply)

    def __fault(self, reply):
        """ simulate the (fault) reply """
        binding = self.method.binding.output
        if self.options.faults:
            r, p = binding.get_fault(reply)
            self.last_received(r)
            return (500, p)
        else:
            return (500, None)
class RequestContext:
    """
    A request context, returned when the ``nosend`` option is specified.
    Lets the caller transmit the prepared soap envelope out-of-band and
    then re-enter the client for reply (or failure) processing.
    @ivar client: The suds client.
    @type client: L{Client}
    @ivar binding: The binding for this request.
    @type binding: I{Binding}
    @ivar envelope: The request soap envelope.
    @type envelope: str
    """

    def __init__(self, client, binding, envelope):
        """
        @param client: The suds client.
        @type client: L{Client}
        @param binding: The binding for this request.
        @type binding: I{Binding}
        @param envelope: The request soap envelope.
        @type envelope: str
        """
        self.client = client
        self.binding = binding
        self.envelope = envelope

    def succeeded(self, reply):
        """
        Re-entry for processing a successful reply.
        @param reply: The reply soap envelope.
        @type reply: str
        @return: The returned value for the invoked method.
        @rtype: object
        """
        opts = self.client.options
        container = PluginContainer(opts.plugins)
        processed = container.message.received(reply=reply).reply
        return self.client.succeeded(self.binding, processed)

    def failed(self, error):
        """
        Re-entry for processing a failure reply.
        @param error: The error returned by the transport.
        @type error: A suds I{TransportError}.
        """
        return self.client.failed(self.binding, error)
| lgpl-3.0 |
kisel/trex-core | scripts/stl/udp_1pkt_mac.py | 2 | 1115 | from trex_stl_lib.api import *
# 10 clients override the LSB of destination
# TRex stream profile: one continuous 64-byte UDP stream whose source
# MAC least-significant byte cycles through 1..10 (one value per packet).
class STLS1(object):
def __init__ (self):
self.fsize =64; # the size of the packet
def create_stream (self):
# Create base packet and pad it to size
size = self.fsize - 4; # HW will add 4 bytes ethernet FCS
base_pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)
pad = max(0, size - len(base_pkt)) * 'x'
# Field engine: an incrementing 1-byte variable written at packet
# offset 11 (last byte of the ethernet source MAC).
vm = STLScVmRaw( [ STLVmFlowVar(name="mac_src", min_value=1, max_value=10, size=1, op="inc"), # 1 byte varible, range 1-10
                   STLVmWrFlowVar(fv_name="mac_src", pkt_offset= 11) # write it to LSB of SRC
                  ]
                 )
# Continuous transmit mode at 10 packets/second.
return STLStream(packet = STLPktBuilder(pkt = base_pkt/pad,vm = vm),
                 mode = STLTXCont( pps=10 ))
def get_streams (self, direction = 0, **kwargs):
# create 1 stream
return [ self.create_stream() ]
# dynamic load - used for trex console or simulator
def register():
"""Entry point used by the TRex console/simulator to load this profile."""
return STLS1()
| apache-2.0 |
dl1ksv/gnuradio | gr-blocks/python/blocks/qa_peak_detector.py | 5 | 2088 | #!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr, gr_unittest, blocks
class test_peak_detector(gr_unittest.TestCase):
    """Unit tests for the peak_detector blocks (float/int/short flavors)."""

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def _run_peak_case(self, make_source, make_detector):
        """
        Drive a triangle ramp (0..10..0) through the given peak detector
        and check that exactly one peak is flagged, at the maximum sample.

        make_source: vector source factory matching the item type.
        make_detector: peak detector factory matching the item type.
        """
        data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
                9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
        expected_result = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        src = make_source(data, False)
        regen = make_detector()
        dst = blocks.vector_sink_b()
        self.tb.connect(src, regen)
        self.tb.connect(regen, dst)
        self.tb.run()
        self.assertEqual(expected_result, dst.data())

    def test_01(self):
        # float input
        self._run_peak_case(blocks.vector_source_f, blocks.peak_detector_fb)

    def test_02(self):
        # int input
        self._run_peak_case(blocks.vector_source_i, blocks.peak_detector_ib)

    def test_03(self):
        # short input
        self._run_peak_case(blocks.vector_source_s, blocks.peak_detector_sb)
# Run the test suite when executed directly.
if __name__ == '__main__':
gr_unittest.run(test_peak_detector)
| gpl-3.0 |
blockstack/blockstack-server | integration_tests/blockstack_integration_tests/scenarios/namespace_preorder_burn.py | 1 | 4021 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Blockstack
~~~~~
copyright: (c) 2014-2015 by Halfmoon Labs, Inc.
copyright: (c) 2016 by Blockstack.org
This file is part of Blockstack
Blockstack is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Blockstack is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Blockstack. If not, see <http://www.gnu.org/licenses/>.
"""
import testlib
import virtualchain
# Pre-funded test wallets: (WIF private key, initial balance in satoshis).
wallets = [
testlib.Wallet( "5JesPiN68qt44Hc2nT8qmyZ1JDwHebfoh9KQ52Lazb1m1LaKNj9", 100000000000 ),
testlib.Wallet( "5KHqsiU9qa77frZb6hQy9ocV7Sus9RWJcQGYYBJJBb2Efj1o77e", 100000000000 ),
testlib.Wallet( "5Kg5kJbQHvk1B64rJniEmgbD83FpZpbw2RjdAZEzTefs9ihN3Bz", 100000000000 ),
]
# Expected starting consensus hash for this scenario.
consensus = "17ac43c1d8549c3181b200f1bf97eb7d"
# Block heights recorded by scenario() and verified later by check().
preorder_block = None
reveal_block = None
def scenario( wallets, **kw ):
# Scenario: a NAMESPACE_PREORDER paying to the wrong burn address must
# be rejected by the indexer; a correct preorder/reveal must then work.
global reveal_block
global preorder_block
# Build (but do not send) a preorder transaction we can tamper with.
res = testlib.blockstack_namespace_preorder( "test", wallets[1].addr, wallets[0].privkey, tx_only=True, expect_fail=True)
ns_preorder_txhex = res['transaction']
# change the burn address
ns_preorder_tx = virtualchain.btc_tx_deserialize(ns_preorder_txhex)
ns_preorder_tx['outs'][2]['script'] = virtualchain.btc_make_payment_script(wallets[2].addr)
# Strip input scripts so the transaction can be re-signed below.
for i in ns_preorder_tx['ins']:
i['script'] = ''
utxos = testlib.get_utxos(wallets[0].addr)
ns_preorder_txhex = virtualchain.btc_tx_serialize(ns_preorder_tx)
ns_preorder_txhex_signed = virtualchain.tx_sign_all_unsigned_inputs(wallets[0].privkey, utxos, ns_preorder_txhex)
print ns_preorder_txhex_signed
res = testlib.broadcast_transaction(ns_preorder_txhex_signed)
if 'error' in res:
print res
return False
print res
testlib.next_block(**kw)
# NOTE(review): expects exactly 1 parsed op in the block - presumably
# the tampered preorder is parsed but not accepted; confirm semantics.
num_ops = virtualchain.lib.indexer.StateEngine.get_block_statistics(testlib.get_current_block(**kw))
if num_ops['num_parsed_ops'] != 1:
print 'processed ops: {}'.format(num_ops)
return False
# try again, but use the right burn address
testlib.blockstack_namespace_preorder( "test", wallets[1].addr, wallets[0].privkey )
preorder_block = testlib.get_current_block( **kw ) + 1
testlib.next_block( **kw )
testlib.blockstack_namespace_reveal( "test", wallets[1].addr, 52595, 250, 4, [6,5,4,3,2,1,0,0,0,0,0,0,0,0,0,0], 10, 10, wallets[0].privkey )
reveal_block = testlib.get_current_block( **kw ) + 1
testlib.next_block( **kw )
def check( state_engine ):
# Verify that the namespace was revealed with exactly the parameters
# passed to blockstack_namespace_reveal() in scenario() above.
global reveal_block, preorder_block
# the namespace has to have been revealed
ns = state_engine.get_namespace_reveal( "test" )
if ns is None:
return False
if ns["namespace_id"] != "test":
print "wrong namespace ID"
return False
if ns["lifetime"] != 52595:
print "wrong lifetime"
return False
if ns["coeff"] != 250:
print "wrong coeff"
return False
if ns["base"] != 4:
print "wrong base"
return False
if ns["buckets"] != [6,5,4,3,2,1,0,0,0,0,0,0,0,0,0,0]:
print "wrong buckets"
return False
if ns["no_vowel_discount"] != 10:
print "wrong no-vowel discount"
return False
if ns["nonalpha_discount"] != 10:
print "wrong nonalpha discount"
return False
# Block heights must match those recorded during the scenario.
if ns["reveal_block"] != reveal_block:
print "wrong reveal block (%s)" % reveal_block
return False
if ns["block_number"] != preorder_block:
print "wrong block number"
return False
return True
| gpl-3.0 |
hackersql/sq1map | LFI-Tools/fimap/src/report.py | 5 | 5778 | #
# This file is part of fimap.
#
# Copyright(c) 2009-2012 Iman Karim(ikarim2s@smail.inf.fh-brs.de).
# http://fimap.googlecode.com
#
# This file may be licensed under the terms of of the
# GNU General Public License Version 2 (the ``GPL'').
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the GPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the GPL along with this
# program. If not, go to http://www.gnu.org/licenses/gpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
__author__="Iman Karim(ikarim2s@smail.inf.fh-brs.de)"
__date__ ="$01.09.2009 09:56:24$"
class report:
"""
Value object describing a single file-inclusion finding: the
vulnerable URL/parameter, the prefix/suffix needed to exploit it,
target OS, language and transport (GET/POST/header) details.
"""
def __init__(self, URL, Params, VulnKey):
self.URL = URL
self.Prefix = None
self.Surfix = ""
self.Appendix = None
self.VulnKey = VulnKey
self.VulnKeyVal = None
self.Params = Params
self.SuffixBreakable = None
self.SuffixBreakTechName = None
self.ServerPath = None
self.ServerScript = None
self.RemoteInjectable = False
# NOTE(review): this attribute shadows the isLinux() method defined
# below - instance lookups resolve to the bool, not the method.
self.isLinux = True
self.BlindDiscovered = False
self.PostData = None
# NOTE(review): likewise shadows the isPost() method below; setPost()
# overwrites this int. Confirm which access style callers rely on.
self.isPost = 0
self.language = None
self.VulnHeaderKey = None
self.HeaderDict = None
def setVulnHeaderKey(self, headerkey):
self.VulnHeaderKey = headerkey
def setHeader(self, header):
self.HeaderDict = header
def setLanguage(self, lang):
self.language = lang
def getLanguage(self):
return(self.language)
def isLanguageSet(self):
return(self.language != None)
def setPostData(self, p):
self.PostData = p
def setPost(self, b):
self.isPost = b
def getPostData(self):
return(self.PostData)
def getVulnHeader(self):
# Never returns None; callers can concatenate the result safely.
if (self.VulnHeaderKey == None):
return("")
return(self.VulnHeaderKey)
def getHeader(self):
return(self.HeaderDict)
def isPost(self):
return(self.isPost)
def setWindows(self):
self.isLinux = False
def isWindows(self):
return(not self.isLinux)
def setLinux(self):
self.isLinux = True
def isLinux(self):
return(self.isLinux)
def isUnix(self):
return(self.isLinux)
def setVulnKeyVal(self, val):
self.VulnKeyVal = val
def getVulnKeyVal(self):
return(self.VulnKeyVal)
def setPrefix(self, path):
self.Prefix = path
def getPrefix(self):
return(self.Prefix)
def setSurfix(self, txt):
# First call also captures the appendix (original suffix) once.
if (self.Appendix == None):
self.Appendix = txt
self.Surfix = txt
def getSurfix(self):
return(self.Surfix)
def isBlindDiscovered(self):
return(self.BlindDiscovered)
def setBlindDiscovered(self, bd):
self.BlindDiscovered = bd
def setServerPath(self, sP):
self.ServerPath = sP
def getServerPath(self):
return(self.ServerPath)
def setServerScript(self, sP):
self.ServerScript = sP
def getServerScript(self):
return(self.ServerScript)
def getAppendix(self):
return(self.Appendix)
def isAbsoluteInjection(self):
# Empty prefix means the payload is used verbatim (absolute path).
return(self.getPrefix() == "")
def isRelativeInjection(self):
# Relative injections start with a directory-traversal sequence.
return(self.getPrefix().startswith("..") or self.getPrefix().startswith("/.."))
def getVulnKey(self):
return(self.VulnKey)
def getURL(self):
return(self.URL)
def isRemoteInjectable(self):
return(self.RemoteInjectable)
def setRemoteInjectable(self, ri):
self.RemoteInjectable = ri
def getParams(self):
return(self.Params)
def setSuffixBreakable(self, isPossible):
self.SuffixBreakable = isPossible
def isSuffixBreakable(self):
return(self.SuffixBreakable)
def setSuffixBreakTechName(self, name):
self.SuffixBreakTechName = name
def getSuffixBreakTechName(self):
return(self.SuffixBreakTechName)
def getType(self):
# Build a human-readable classification of the injection.
ret = ""
if (self.isBlindDiscovered()):
return("Blindly Identified")
if (self.getPrefix() == None):
return("Not checked.")
elif (self.isAbsoluteInjection()):
if (self.getAppendix() == ""):
ret = "Absolute Clean"
else:
ret = "Absolute with appendix '%s'" %(self.getAppendix())
elif (self.isRelativeInjection()):
if (self.getAppendix() == ""):
ret = "Relative Clean"
else:
ret = "Relative with appendix '%s'" %(self.getAppendix())
else:
return("Unknown (%s | %s | %s)" %(self.getPrefix(), self.isRelativeInjection(), self.isAbsoluteInjection()))
if (self.isRemoteInjectable()):
ret = ret + " + Remote injection"
return(ret)
def getDomain(self, url=None):
# Extract the host part of a URL ("scheme://host/..." -> "host").
if url==None:
url = self.URL
domain = url[url.find("//")+2:]
domain = domain[:domain.find("/")]
return(domain)
def getPath(self):
# Extract the path part (everything after the host) of the URL.
url = self.getURL()
url = url[url.find("//")+2:]
url = url[url.find("/"):]
return(url)
def autoDetectLanguageByExtention(self, languageSets):
# Pick the first language whose known extension appears in the URL.
for Name, langClass in languageSets.items():
exts = langClass.getExtentions()
for ext in exts:
if (self.URL.find(ext) != -1):
self.setLanguage(Name)
return(True)
return(False)
| gpl-3.0 |
eharney/nova | nova/api/openstack/compute/contrib/security_group_default_rules.py | 8 | 8249 | # Copyright 2013 Metacloud Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from webob import exc
from nova.api.openstack.compute.contrib import security_groups as sg
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import exception
from nova.network.security_group import openstack_driver
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import xmlutils
LOG = logging.getLogger(__name__)
# Policy check for the os-security-group-default-rules extension.
authorize = extensions.extension_authorizer('compute',
'security_group_default_rules')
# XML namespace map used by the template builders below.
sg_nsmap = {None: wsgi.XMLNS_V11}
def make_default_rule(elem):
# Populate an XML template element with the serializable fields of a
# security group default rule (id, protocol, port range, cidr).
elem.set('id')
proto = xmlutil.SubTemplateElement(elem, 'ip_protocol')
proto.text = 'ip_protocol'
from_port = xmlutil.SubTemplateElement(elem, 'from_port')
from_port.text = 'from_port'
to_port = xmlutil.SubTemplateElement(elem, 'to_port')
to_port.text = 'to_port'
ip_range = xmlutil.SubTemplateElement(elem, 'ip_range',
selector='ip_range')
cidr = xmlutil.SubTemplateElement(ip_range, 'cidr')
cidr.text = 'cidr'
class SecurityGroupDefaultRulesTemplate(xmlutil.TemplateBuilder):
# XML serializer template for a *list* of default rules.
def construct(self):
root = xmlutil.TemplateElement('security_group_default_rules')
elem = xmlutil.SubTemplateElement(root, 'security_group_default_rule',
selector='security_group_default_rules')
make_default_rule(elem)
return xmlutil.MasterTemplate(root, 1, nsmap=sg_nsmap)
class SecurityGroupDefaultRuleTemplate(xmlutil.TemplateBuilder):
# XML serializer template for a *single* default rule.
def construct(self):
root = xmlutil.TemplateElement('security_group_default_rule',
selector='security_group_default_rule')
make_default_rule(root)
return xmlutil.MasterTemplate(root, 1, nsmap=sg_nsmap)
class SecurityGroupDefaultRulesXMLDeserializer(wsgi.MetadataXMLDeserializer):
# Parses an XML request body into the dict shape expected by create().
def default(self, string):
dom = xmlutils.safe_minidom_parse_string(string)
security_group_rule = self._extract_security_group_default_rule(dom)
return {'body': {'security_group_default_rule': security_group_rule}}
def _extract_security_group_default_rule(self, node):
# Collect whichever of ip_protocol/from_port/to_port/cidr are
# present; missing children simply stay out of the dict.
sg_rule = {}
sg_rule_node = self.find_first_child_named(node,
'security_group_default_rule')
if sg_rule_node is not None:
ip_protocol_node = self.find_first_child_named(sg_rule_node,
"ip_protocol")
if ip_protocol_node is not None:
sg_rule['ip_protocol'] = self.extract_text(ip_protocol_node)
from_port_node = self.find_first_child_named(sg_rule_node,
"from_port")
if from_port_node is not None:
sg_rule['from_port'] = self.extract_text(from_port_node)
to_port_node = self.find_first_child_named(sg_rule_node, "to_port")
if to_port_node is not None:
sg_rule['to_port'] = self.extract_text(to_port_node)
cidr_node = self.find_first_child_named(sg_rule_node, "cidr")
if cidr_node is not None:
sg_rule['cidr'] = self.extract_text(cidr_node)
return sg_rule
class SecurityGroupDefaultRulesController(sg.SecurityGroupControllerBase):
# REST controller for CRUD on security group default rules.
def __init__(self):
self.security_group_api = (
openstack_driver.get_openstack_security_group_driver())
@wsgi.serializers(xml=SecurityGroupDefaultRuleTemplate)
@wsgi.deserializers(xml=SecurityGroupDefaultRulesXMLDeserializer)
def create(self, req, body):
# Create a new default rule; rejects malformed, incomplete or
# duplicate rules with 400.
context = sg._authorize_context(req)
authorize(context)
sg_rule = self._from_body(body, 'security_group_default_rule')
try:
values = self._rule_args_to_dict(to_port=sg_rule.get('to_port'),
from_port=sg_rule.get('from_port'),
ip_protocol=sg_rule.get('ip_protocol'),
cidr=sg_rule.get('cidr'))
except Exception as exp:
raise exc.HTTPBadRequest(explanation=unicode(exp))
if values is None:
msg = _('Not enough parameters to build a valid rule.')
raise exc.HTTPBadRequest(explanation=msg)
if self.security_group_api.default_rule_exists(context, values):
msg = _('This default rule already exists.')
raise exc.HTTPBadRequest(explanation=msg)
security_group_rule = self.security_group_api.add_default_rules(
context, [values])[0]
fmt_rule = self._format_security_group_default_rule(
security_group_rule)
return {'security_group_default_rule': fmt_rule}
def _rule_args_to_dict(self, to_port=None, from_port=None,
ip_protocol=None, cidr=None):
# Normalize raw request fields into an ingress rule dict.
cidr = self.security_group_api.parse_cidr(cidr)
return self.security_group_api.new_cidr_ingress_rule(
cidr, ip_protocol, from_port, to_port)
@wsgi.serializers(xml=SecurityGroupDefaultRuleTemplate)
def show(self, req, id):
# Fetch a single default rule by id; 404 when not found.
context = sg._authorize_context(req)
authorize(context)
id = self.security_group_api.validate_id(id)
LOG.debug(_("Showing security_group_default_rule with id %s") % id)
try:
rule = self.security_group_api.get_default_rule(context, id)
except exception.SecurityGroupDefaultRuleNotFound:
msg = _("security group default rule not found")
raise exc.HTTPNotFound(explanation=msg)
fmt_rule = self._format_security_group_default_rule(rule)
return {"security_group_default_rule": fmt_rule}
def delete(self, req, id):
# Remove a default rule; responds 204 on success.
context = sg._authorize_context(req)
authorize(context)
id = self.security_group_api.validate_id(id)
rule = self.security_group_api.get_default_rule(context, id)
self.security_group_api.remove_default_rules(context, [rule['id']])
return webob.Response(status_int=204)
@wsgi.serializers(xml=SecurityGroupDefaultRulesTemplate)
def index(self, req):
# List every default rule, formatted for the API response.
context = sg._authorize_context(req)
authorize(context)
ret = {'security_group_default_rules': []}
for rule in self.security_group_api.get_all_default_rules(context):
rule_fmt = self._format_security_group_default_rule(rule)
ret['security_group_default_rules'].append(rule_fmt)
return ret
def _format_security_group_default_rule(self, rule):
# Map a DB rule row to the API's response dict shape.
sg_rule = {}
sg_rule['id'] = rule['id']
sg_rule['ip_protocol'] = rule['protocol']
sg_rule['from_port'] = rule['from_port']
sg_rule['to_port'] = rule['to_port']
sg_rule['ip_range'] = {}
sg_rule['ip_range'] = {'cidr': rule['cidr']}
return sg_rule
class Security_group_default_rules(extensions.ExtensionDescriptor):
"""Default rules for security group support."""
name = "SecurityGroupDefaultRules"
alias = "os-security-group-default-rules"
namespace = ("http://docs.openstack.org/compute/ext/"
"securitygroupdefaultrules/api/v1.1")
updated = "2013-02-05T00:00:00+00:00"
def get_resources(self):
# Wire the controller into the API router under the extension alias.
resources = [
extensions.ResourceExtension('os-security-group-default-rules',
SecurityGroupDefaultRulesController(),
collection_actions={'create': 'POST',
'delete': 'DELETE',
'index': 'GET'},
member_actions={'show': 'GET'})]
return resources
| apache-2.0 |
LogicalDash/kivy | kivy/uix/behaviors/cover.py | 18 | 4443 | '''
Cover Behavior
==============
The :class:`~kivy.uix.behaviors.cover.CoverBehavior`
`mixin <https://en.wikipedia.org/wiki/Mixin>`_ is intended for rendering
textures to full widget size keeping the aspect ratio of the original texture.
Use cases are i.e. rendering full size background images or video content in
a dynamic layout.
For an overview of behaviors, please refer to the :mod:`~kivy.uix.behaviors`
documentation.
Example
-------
The following examples add cover behavior to an image:
In python:
.. code-block:: python
from kivy.app import App
from kivy.uix.behaviors import CoverBehavior
from kivy.uix.image import Image
class CoverImage(CoverBehavior, Image):
def __init__(self, **kwargs):
super(CoverImage, self).__init__(**kwargs)
texture = self._coreimage.texture
self.reference_size = texture.size
self.texture = texture
class MainApp(App):
def build(self):
return CoverImage(source='image.jpg')
MainApp().run()
In Kivy Language:
.. code-block:: kv
CoverImage:
source: 'image.png'
<CoverImage@CoverBehavior+Image>:
reference_size: self.texture_size
See :class:`~kivy.uix.behaviors.cover.CoverBehavior` for details.
'''
__all__ = ('CoverBehavior', )
from decimal import Decimal
from kivy.lang import Builder
from kivy.properties import ListProperty
# kv rules: stencil-clip to the widget's bounds, then draw the cover
# texture (possibly larger than the widget) inside that clipped area.
Builder.load_string("""
<-CoverBehavior>:
canvas.before:
StencilPush
Rectangle:
pos: self.pos
size: self.size
StencilUse
canvas:
Rectangle:
texture: self.texture
size: self.cover_size
pos: self.cover_pos
canvas.after:
StencilUnUse
Rectangle:
pos: self.pos
size: self.size
StencilPop
""")
class CoverBehavior(object):
'''The CoverBehavior `mixin <https://en.wikipedia.org/wiki/Mixin>`_
renders a texture covering the full widget size while keeping the
aspect ratio of the original texture.
.. versionadded:: 1.10.0
'''
reference_size = ListProperty([])
'''Reference size used for aspect ratio approximation calculation.
:attr:`reference_size` is a :class:`~kivy.properties.ListProperty` and
defaults to `[]`.
'''
cover_size = ListProperty([0, 0])
'''Size of the aspect ratio aware texture. Gets calculated in
``CoverBehavior.calculate_cover``.
:attr:`cover_size` is a :class:`~kivy.properties.ListProperty` and
defaults to `[0, 0]`.
'''
cover_pos = ListProperty([0, 0])
'''Position of the aspect ratio aware texture. Gets calculated in
``CoverBehavior.calculate_cover``.
:attr:`cover_pos` is a :class:`~kivy.properties.ListProperty` and
defaults to `[0, 0]`.
'''
def __init__(self, **kwargs):
super(CoverBehavior, self).__init__(**kwargs)
# bind covering
self.bind(
size=self.calculate_cover,
pos=self.calculate_cover
)
def _aspect_ratio_approximate(self, size):
# return a decimal approximation of an aspect ratio.
# Rounding to 2 decimals makes the equality comparison in
# calculate_cover tolerant of tiny float differences.
return Decimal('%.2f' % (float(size[0]) / size[1]))
def _scale_size(self, size, sizer):
# return scaled size based on sizer, where sizer (n, None) scales x
# to n and (None, n) scales y to n
size_new = list(sizer)
i = size_new.index(None)
j = i * -1 + 1
# NOTE(review): on python 2 this is integer division when both
# operands are ints - presumably acceptable for pixel sizes.
size_new[i] = (size_new[j] * size[i]) / size[j]
return tuple(size_new)
def calculate_cover(self, *args):
# Recompute cover_size/cover_pos so the texture fills the widget,
# scaling along one axis and centering the overflow on the other.
# return if no reference size yet
if not self.reference_size:
return
size = self.size
origin_appr = self._aspect_ratio_approximate(self.reference_size)
crop_appr = self._aspect_ratio_approximate(size)
# same aspect ratio
if origin_appr == crop_appr:
crop_size = self.size
offset = (0, 0)
# scale x
elif origin_appr < crop_appr:
crop_size = self._scale_size(self.reference_size, (size[0], None))
offset = (0, ((crop_size[1] - size[1]) / 2) * -1)
# scale y
else:
crop_size = self._scale_size(self.reference_size, (None, size[1]))
offset = (((crop_size[0] - size[0]) / 2) * -1, 0)
# set background size and position
self.cover_size = crop_size
self.cover_pos = offset
| mit |
xushuwei202/Vintageous | ex/parser/scanner_command_print.py | 9 | 1390 | from .state import EOF
from .tokens import TokenEof
from .tokens_base import TOKEN_COMMAND_PRINT
from .tokens_base import TokenOfCommand
from Vintageous import ex
@ex.command('print', 'p')
class TokenCommandPrint(TokenOfCommand):
# Token for the ex ':print' (alias ':p') command; addressable and
# usable inside :global.
def __init__(self, params, *args, **kwargs):
super().__init__(params,
TOKEN_COMMAND_PRINT,
'print', *args, **kwargs)
self.addressable = True
self.cooperates_with_global = True
self.target_command = 'ex_print'
def __str__(self):
# NOTE(review): self.content is presumably provided by the
# TokenOfCommand base class - confirm.
return "{0} {1} {2}".format(self.content, ''.join(self.flags), self.count).strip()
@property
def count(self):
return self.params['count']
@property
def flags(self):
return self.params['flags']
def scan_command_print(state):
# Scan the argument of ':print': an optional count followed by
# optional flags from [l#p]; ends at EOF.
params = {
'count': '',
'flags': [],
}
while True:
c = state.consume()
state.skip(' ')
state.ignore()
if c == EOF:
return None, [TokenCommandPrint(params), TokenEof()]
if c.isdigit():
state.match(r'\d*')
params['count'] = state.emit()
continue
m = state.expect_match(r'[l#p]+')
params['flags'] = list(m.group(0))
state.ignore()
state.expect(EOF)
break
return None, [TokenCommandPrint(params), TokenEof()]
| mit |
maistrovas/My-Courses-Solutions | MITx-6.00.1x/ProblemSet4/test_ps4a.py | 25 | 6880 | from ps4a import *
#
# Test code
# You don't need to understand how this test code works (but feel free to look it over!)
# To run these tests, simply run this file (open up in IDLE, then run the file as normal)
def test_getWordScore():
"""
Unit test for getWordScore
"""
failure=False
# dictionary of words and scores
# Keys are (word, hand_size) pairs; values are the expected scores.
words = {("", 7):0, ("it", 7):4, ("was", 7):18, ("scored", 7):54, ("waybill", 7):155, ("outgnaw", 7):127, ("fork", 7):44, ("fork", 4):94}
for (word, n) in words.keys():
score = getWordScore(word, n)
if score != words[(word, n)]:
print "FAILURE: test_getWordScore()"
print "\tExpected", words[(word, n)], "points but got '" + str(score) + "' for word '" + word + "', n=" + str(n)
failure=True
if not failure:
print "SUCCESS: test_getWordScore()"
# end of test_getWordScore
def test_updateHand():
"""
Unit test for updateHand
"""
# Each test accepts either a hand with zero-count letters removed or
# kept, and also verifies updateHand() did not mutate its input.
# test 1
handOrig = {'a':1, 'q':1, 'l':2, 'm':1, 'u':1, 'i':1}
handCopy = handOrig.copy()
word = "quail"
hand2 = updateHand(handCopy, word)
expectedHand1 = {'l':1, 'm':1}
expectedHand2 = {'a':0, 'q':0, 'l':1, 'm':1, 'u':0, 'i':0}
if hand2 != expectedHand1 and hand2 != expectedHand2:
print "FAILURE: test_updateHand('"+ word +"', " + str(handOrig) + ")"
print "\tReturned: ", hand2, "\n\t-- but expected:", expectedHand1, "or", expectedHand2
return # exit function
if handCopy != handOrig:
print "FAILURE: test_updateHand('"+ word +"', " + str(handOrig) + ")"
print "\tOriginal hand was", handOrig
print "\tbut implementation of updateHand mutated the original hand!"
print "\tNow the hand looks like this:", handCopy
return # exit function
# test 2
handOrig = {'e':1, 'v':2, 'n':1, 'i':1, 'l':2}
handCopy = handOrig.copy()
word = "evil"
hand2 = updateHand(handCopy, word)
expectedHand1 = {'v':1, 'n':1, 'l':1}
expectedHand2 = {'e':0, 'v':1, 'n':1, 'i':0, 'l':1}
if hand2 != expectedHand1 and hand2 != expectedHand2:
print "FAILURE: test_updateHand('"+ word +"', " + str(handOrig) + ")"
print "\tReturned: ", hand2, "\n\t-- but expected:", expectedHand1, "or", expectedHand2
return # exit function
if handCopy != handOrig:
print "FAILURE: test_updateHand('"+ word +"', " + str(handOrig) + ")"
print "\tOriginal hand was", handOrig
print "\tbut implementation of updateHand mutated the original hand!"
print "\tNow the hand looks like this:", handCopy
return # exit function
# test 3
handOrig = {'h': 1, 'e': 1, 'l': 2, 'o': 1}
handCopy = handOrig.copy()
word = "hello"
hand2 = updateHand(handCopy, word)
expectedHand1 = {}
expectedHand2 = {'h': 0, 'e': 0, 'l': 0, 'o': 0}
if hand2 != expectedHand1 and hand2 != expectedHand2:
print "FAILURE: test_updateHand('"+ word +"', " + str(handOrig) + ")"
print "\tReturned: ", hand2, "\n\t-- but expected:", expectedHand1, "or", expectedHand2
return # exit function
if handCopy != handOrig:
print "FAILURE: test_updateHand('"+ word +"', " + str(handOrig) + ")"
print "\tOriginal hand was", handOrig
print "\tbut implementation of updateHand mutated the original hand!"
print "\tNow the hand looks like this:", handCopy
return # exit function
print "SUCCESS: test_updateHand()"
# end of test_updateHand
def test_isValidWord(wordList):
"""
Unit test for isValidWord
"""
failure=False
# test 1
# Valid word fully covered by the hand; repeated to catch mutation
# of the hand or wordList by isValidWord().
word = "hello"
handOrig = getFrequencyDict(word)
handCopy = handOrig.copy()
if not isValidWord(word, handCopy, wordList):
print "FAILURE: test_isValidWord()"
print "\tExpected True, but got False for word: '" + word + "' and hand:", handOrig
failure = True
# Test a second time to see if wordList or hand has been modified
if not isValidWord(word, handCopy, wordList):
print "FAILURE: test_isValidWord()"
if handCopy != handOrig:
print "\tTesting word", word, "for a second time - be sure you're not modifying hand."
print "\tAt this point, hand ought to be", handOrig, "but it is", handCopy
else:
print "\tTesting word", word, "for a second time - have you modified wordList?"
wordInWL = word in wordList
print "The word", word, "should be in wordList - is it?", wordInWL
print "\tExpected True, but got False for word: '" + word + "' and hand:", handCopy
failure = True
# test 2
# Word not in the dictionary must be rejected.
hand = {'r': 1, 'a': 3, 'p': 2, 'e': 1, 't': 1, 'u':1}
word = "rapture"
if isValidWord(word, hand, wordList):
print "FAILURE: test_isValidWord()"
print "\tExpected False, but got True for word: '" + word + "' and hand:", hand
failure = True
# test 3
hand = {'n': 1, 'h': 1, 'o': 1, 'y': 1, 'd':1, 'w':1, 'e': 2}
word = "honey"
if not isValidWord(word, hand, wordList):
print "FAILURE: test_isValidWord()"
print "\tExpected True, but got False for word: '"+ word +"' and hand:", hand
failure = True
# test 4
# Valid dictionary word, but the hand lacks the needed letters.
hand = {'r': 1, 'a': 3, 'p': 2, 't': 1, 'u':2}
word = "honey"
if isValidWord(word, hand, wordList):
print "FAILURE: test_isValidWord()"
print "\tExpected False, but got True for word: '" + word + "' and hand:", hand
failure = True
# test 5
hand = {'e':1, 'v':2, 'n':1, 'i':1, 'l':2}
word = "evil"
if not isValidWord(word, hand, wordList):
print "FAILURE: test_isValidWord()"
print "\tExpected True, but got False for word: '" + word + "' and hand:", hand
failure = True
# test 6
word = "even"
if isValidWord(word, hand, wordList):
print "FAILURE: test_isValidWord()"
print "\tExpected False, but got True for word: '" + word + "' and hand:", hand
print "\t(If this is the only failure, make sure isValidWord() isn't mutating its inputs)"
failure = True
if not failure:
print "SUCCESS: test_isValidWord()"
# Driver: load the dictionary once and run every unit test in order.
wordList = loadWords()
print "----------------------------------------------------------------------"
print "Testing getWordScore..."
test_getWordScore()
print "----------------------------------------------------------------------"
print "Testing updateHand..."
test_updateHand()
print "----------------------------------------------------------------------"
print "Testing isValidWord..."
test_isValidWord(wordList)
print "----------------------------------------------------------------------"
print "All done!"
| mit |
frederick623/pb | fa_collateral_upload/HTI_CashEntry_Report.py | 2 | 16805 | import acm
import ael
import FHTI_EDD_OTC_Util
import HTI_ExcelReport2
import HTI_Util
import HTI_FeedTrade_EDD_Util
import os
from shutil import copyfile
# Tooltip strings shown next to the corresponding ael_variables dialog fields.
ttSaveToFile = "Check this to save the report instead of showing it."
ttCSV = "Check this to export the report in CSV format"
ttFileName = "File name and path of the report. YYYYMMDD in the file name will be replaced by the valuation date."
ttSendMail = "Send report as email attachment."
# Column indices into the rows returned by the report SQL (see getReportSql
# and the SELECT list built in genCashEntryRpt):
#   cpty.ptyid, cpty.fullname, t.trdnbr, t.time, p.type, c.insid, p.amount, ext_ref
Client_Code = 0
Client_Name = 1
Trade_Reference = 2
Trade_Date = 3
Type = 4
Currency = 5
Amount = 6
EXT_REF = 7
def getFx(dt, fm_ccy, to_ccy, currclspricemkt, histclspricemkt):
    """Return the FX rate from fm_ccy to to_ccy for date dt, crossed via USD.

    Uses the 'Last' price on the current closing-price market when dt is
    today, otherwise the historical 'Close' price on the historical market.
    Returns 0.0 when either leg of the cross cannot be priced.
    """
    ins_fm_ccy = ael.Instrument[fm_ccy]
    ins_to_ccy = ael.Instrument[to_ccy]
    ins_usd = ael.Instrument['USD']
    try:
        if dt == ael.date_today():
            fm_usd_rate = ins_fm_ccy.used_price(ael.date_today(), ins_usd.insid, 'Last', 0, currclspricemkt)
            to_usd_rate = ins_usd.used_price(ael.date_today(), ins_to_ccy.insid, 'Last', 0, currclspricemkt)
            fx_rate = fm_usd_rate * to_usd_rate
        else:
            fm_usd_rate = ins_fm_ccy.used_price(dt, ins_usd.insid, 'Close', 0, histclspricemkt)
            to_usd_rate = ins_usd.used_price(dt, ins_to_ccy.insid, 'Close', 0, histclspricemkt)
            fx_rate = fm_usd_rate * to_usd_rate
    except Exception:
        # Pricing failed for one leg (missing instrument/price): fall back to
        # 0.0 rather than aborting the report.  (Was a bare `except:`, which
        # also swallowed KeyboardInterrupt/SystemExit.)
        fx_rate = 0.0
    return fx_rate
def report_compare(x, y):
    # Placeholder comparator passed to list.sort() (Python 2 cmp-style).
    # Always returning 1 leaves the report rows in an essentially arbitrary
    # order.  The original multi-key comparison (contract date, contract no,
    # counterparty) is kept below, disabled, for reference.
    return 1
    '''
    if x[Contract_Date] == '':
        return -1
    if y[Contract_Date] == '':
        return 1
    if ael.date(x[Contract_Date]).to_string('%Y%m%d') > ael.date(y[Contract_Date]).to_string('%Y%m%d'):
        return 1
    elif ael.date(x[Contract_Date]).to_string('%Y%m%d') < ael.date(y[Contract_Date]).to_string('%Y%m%d'):
        return -1
    if x[Contract_No] > y[Contract_No]:
        return 1
    elif x[Contract_No] < y[Contract_No]:
        return -1
    if x[Counterparty] > y[Counterparty]:
        return 1
    elif x[Counterparty] < y[Counterparty]:
        return -1
    return 0
    '''
def disable_variables(variables, enable = 0):
    """Set the 'enabled' flag (index 9) of each named ael_variables row.

    `variables` is an iterable of field names; `enable` is the new flag
    value (0 disables the dialog field, non-zero enables it).
    """
    for name in variables:
        for row in ael_variables:
            if row[0] == name:
                row[9] = enable
def cb(index, fieldValues):
    """Dialog callback: when 'saveToFile' changes, mirror its value onto the
    enabled-state of the 'fileName' field."""
    changed_field = ael_variables[index][0]
    if changed_field == 'saveToFile':
        disable_variables(('fileName',), fieldValues[index])
    return fieldValues
def cb2(index, fieldValues):
    """Dialog callback: when 'sendEmail' changes, mirror its value onto the
    enabled-state of the 'emaillist' and 'subject' fields."""
    if ael_variables[index][0] == 'sendEmail':
        new_state = fieldValues[index]
        for dependent in ('emaillist', 'subject'):
            disable_variables((dependent,), new_state)
    return fieldValues
def cb3(index, fieldValues):
    """Dialog callback: when 'saveToCSV' changes, mirror its value onto the
    enabled-state of the 'fileName' field.

    NOTE(review): cb3 is not wired into ael_variables (the 'saveToCSV' row
    has a None callback) -- looks unused; confirm before removing.
    """
    triggered = ael_variables[index][0] == 'saveToCSV'
    if triggered:
        disable_variables(('fileName',), fieldValues[index])
    return fieldValues
def cb4(index, fieldValues):
    """Dialog callback: when 'saveToNetwork' changes, mirror its value onto
    the enabled-state of the 'networkDriveLocation' field."""
    if ael_variables[index][0] != 'saveToNetwork':
        return fieldValues
    disable_variables(('networkDriveLocation',), fieldValues[index])
    return fieldValues
# ael GUI field definitions.  Each row is:
#   [name, label, type, candidate values, default, mandatory, multi-select,
#    tooltip, change-callback, enabled]
# Index 9 ('enabled') is what disable_variables() / the cb* callbacks toggle.
# (No inline comments here: the rows end in line-continuation backslashes.)
ael_variables = [['asofdate', 'Date', 'string', [str(ael.date_today()), 'Today'], 'Today', 1, 0, 'Report Date', None, 1], \
['saveToFile', 'Save to file', 'int', [1, 0], 0, 0, 0, ttSaveToFile, cb, None], \
['fileName', 'File name', 'string', None, 'c:\\temp\\SecurityLoan_YYYYMMDD', 0, 0, ttFileName, None, 0], \
['sendEmail', 'Send mail', 'int', [1, 0], 0, 0, 0, ttSendMail, cb2, None], \
['emaillist', 'Email', 'string', None, 'louis.ck.wong@htisec.com', 0, 0, 'Email List', None, 0], \
['acquirers', 'Acquirer(s)', 'string', HTI_Util.getAllAcquirers(), None, 1, 1, 'Acquirer(s)', None, 1], \
['counterparties', 'Counterparty(s)', 'string', HTI_Util.getAllParties(), None, 0, 1, 'Counterparty(s)', None, 1], \
['portfolio', 'Portfolio', 'string', HTI_Util.getAllPortfolios(), None, 1, 1, 'Portfolio', None, 1], \
['subject', 'Email subject', 'string', None, 'FA4 (PROD) EDD Security Loan Report (TRS) asof @date', 1, 0, 'Email Subject', None, 0], \
['saveToCSV', 'CSV format', 'int', [1, 0], 0, 0, 0, ttCSV, None, None], \
['title', 'Report title', 'string', None, 'Cash Entry Report as of @date', 1, 0, 'Report Title', None, 1],
['period', 'Report Period', 'string', ['Inception', 'MTD'], 'Inception', 1, 0, 'Report Period', None, 1],
['currclspricemkt', 'Current Closing Price Market', 'string', None, '', 1, 0, 'Current Closing Price Market', None, 1],
['histclspricemkt', 'Historical Closing Price Market', 'string', None, '', 1, 0, 'Historical Closing Price Market', None, 1],
['base_ccy', 'Base Ccy', 'string', None, '', 1, 0, 'Base Ccy', None, 1], \
['fileperpty', 'Separte File for Counterparty', 'int', [1, 0], 0, 0, 0, 'Separte File for Counterparty', None, None], \
['saveToNetwork', 'Copy to Network', 'int', [1, 0], 0, 0, 0, 'Copy to Network Drive', cb4, None], \
['networkDriveLocation', 'Network Drive Location', 'string', None, 'C:\\temp\\PositionReport', 0, 0, 'Network Drive Location', None, 0]]
def ael_main(dict):
    # Entry point called by the ael GUI with the dialog field values.
    # NOTE: the parameter name 'dict' shadows the builtin -- kept, as the
    # name is part of the ael_main calling convention.
    asofdate = dict['asofdate']
    if asofdate == 'Today':
        asofdate = ael.date_today()
    asofdate = ael.date(asofdate)
    title = dict['title'].replace('@date', str(asofdate))
    period = dict['period']
    subject = dict['subject'].replace('@date', str(asofdate))
    saveToFile = dict['saveToFile']
    saveToCSV = dict['saveToCSV']
    fileName = dict['fileName']
    sendEmail = dict['sendEmail']
    emailList = dict['emaillist']
    recipients = emailList.split(',')
    fileName = fileName.replace("YYYYMMDD", asofdate.to_string('%Y%m%d'))
    fileperpty = dict['fileperpty']
    saveToNetwork = dict['saveToNetwork']
    networkDriveLocation = dict['networkDriveLocation']
    # Portfolios: expand each selection to its leaf children (if any) and
    # build a quoted, comma-separated id list for the SQL in (...) clause.
    portfolios = dict['portfolio']
    portfolioList2 = []
    pf_list = ''
    portfolioList2.extend(portfolios)
    for port in portfolioList2:
        prfid = port
        pfarr = []
        pPf = ael.Portfolio[prfid]
        HTI_FeedTrade_EDD_Util.getChildPortfolio(pPf, pfarr)
        if len(pfarr) > 0:
            for pf in pfarr:
                if len(pf_list) != 0:
                    pf_list = pf_list + ','
                pf_list = pf_list + "'" + pf + "'"
        else:
            if len(pf_list) != 0:
                pf_list = pf_list + ','
            pf_list = pf_list + "'" + prfid + "'"
    # Acquirers: quoted, comma-separated list for the SQL.
    acq_array_list = dict['acquirers']
    acq_list = ''
    for acq in acq_array_list:
        if acq_list == '':
            acq_list = "'" + acq + "'"
        else:
            acq_list = acq_list + ",'" + acq + "'"
    # Counterparties: quoted, comma-separated list (may stay empty = all).
    pty_array_list = dict['counterparties']
    pty_list = ''
    for pty in pty_array_list:
        if pty_list == '':
            pty_list = "'" + pty + "'"
        else:
            pty_list = pty_list + ",'" + pty + "'"
    currclspricemkt = dict['currclspricemkt']
    histclspricemkt = dict['histclspricemkt']
    base_ccy = dict['base_ccy']
    if not fileperpty:
        # Single combined report for all selected counterparties.
        genCashEntryRpt(asofdate, pf_list, acq_list, pty_list, base_ccy, currclspricemkt, histclspricemkt, title, subject, saveToFile, saveToCSV, sendEmail, emailList, fileName, fileperpty, period)
    else:
        # One report file per counterparty: first query the distinct
        # counterparty ids, then generate one report per id.
        strSql = getReportSql()
        strSql = strSql.replace('@dt', asofdate.add_days(1).to_string('%Y-%m-%d'))
        strSql = strSql.replace('@portfolio_list', pf_list)
        strSql = strSql.replace('@accquirer_list', acq_list)
        if pty_list != '':
            counterparty_list_sql = 'and cpty.ptyid in (@counterparty_list)'
            counterparty_list_sql = counterparty_list_sql.replace("@counterparty_list", pty_list)
            strSql = strSql.replace("@counterparty_list_sql", counterparty_list_sql)
        else:
            strSql = strSql.replace("@counterparty_list_sql", ' ')
        '''
        if period == 'MTD':
            fm_date = asofdate.first_day_of_month()
            print 'fm_date', fm_date, ael.date(fm_date).to_string('%Y-%m-%d')
            strSql = strSql.replace("@start_date", "and t.time >= '%s'" % (ael.date(fm_date).to_string('%Y-%m-%d')))
        else:
            strSql = strSql.replace("@start_date", ' ')
        '''
        strSql = strSql.replace("@start_date", ' ')
        strSql = "select distinct cpty.ptyid " + strSql
        print strSql
        rs = ael.asql(strSql)
        columns, buf = rs
        rptContent = []
        for table in buf:
            for row in table:
                ptyid = row[Client_Code]
                # '[ptyid]' in the configured file name is replaced with the
                # counterparty id so each party gets its own file.
                ptyfileName = fileName.replace('[ptyid]', ptyid)
                print 'ptyfileName', ptyfileName
                dir_path = os.path.dirname(os.path.realpath(ptyfileName))
                if os.path.exists(dir_path) == False:
                    os.mkdir(dir_path)
                exact_ptyfileName = os.path.basename(ptyfileName)
                print 'exact_ptyfileName',exact_ptyfileName
                genCashEntryRpt(asofdate, pf_list, acq_list, "'"+ptyid+"'", base_ccy, currclspricemkt, histclspricemkt, title, subject, saveToFile, saveToCSV, sendEmail, emailList, ptyfileName, fileperpty, period)
                #print ptyfileName, destination_fileName
                if saveToNetwork:
                    # Mirror the generated .xlsx onto the network drive.
                    destination_fileName = networkDriveLocation + '\\' + exact_ptyfileName
                    #print 'destination_fileName', destination_fileName
                    destination_fileName = destination_fileName.replace('[ptyid]', ptyid)
                    #print 'destination_fileName', destination_fileName
                    dir_path = os.path.dirname(os.path.realpath(destination_fileName))
                    #print 'dir_path', dir_path
                    if os.path.exists(dir_path) == False:
                        #print 'dir_path exist', dir_path
                        os.mkdir(dir_path)
                    copyfile(ptyfileName+'.xlsx', destination_fileName+'.xlsx')
def monthCode(intMonth):
    """Return the three-letter English abbreviation for a month number.

    Returns "" for any value outside 1..12 (matching dict.get semantics of
    the original implementation for arbitrary hashable inputs).
    """
    abbreviations = ("Jan", "Feb", "Mar", "Apr", "May", "Jun",
                     "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
    lookup = dict((number + 1, abbr) for number, abbr in enumerate(abbreviations))
    return lookup.get(intMonth, "")
def getReportSql():
    """Return the shared FROM/WHERE clause for the cash-entry query.

    Callers prepend their own SELECT list and substitute the placeholders
    @dt, @portfolio_list, @accquirer_list, @counterparty_list_sql and
    @start_date before executing.  The old '!!!...' debug banner print was
    removed: both callers (ael_main, genCashEntryRpt) already print the
    final SQL themselves, so the banner only duplicated output.
    """
    strSql = """from trade t, instrument i, payment p, party cpty, party a, portfolio pf, instrument c
where t.insaddr = i.insaddr
and i.instype = 'Curr'
and t.type = 'Cash Entry'
and t.trdnbr = p.trdnbr
and t.counterparty_ptynbr = cpty.ptynbr
and t.acquirer_ptynbr = a.ptynbr
and p.curr = c.insaddr
and a.ptyid in (@accquirer_list)
@counterparty_list_sql
and t.prfnbr = pf.prfnbr
and pf.prfid in (@portfolio_list)
@start_date
and t.time < '@dt'
and t.status not in ('Void', 'Simulated')
    """
    return strSql
def genCashEntryRpt(asofdate, pf_list, acq_list, pty_list, base_ccy, currclspricemkt, histclspricemkt, title, subject, saveToFile, saveToCSV, sendEmail, emailList, fileName, fileperpty, period):
report = ReportLayout(title)
font = HTI_ExcelReport2.Font()
font.bold = True
reportData = HTI_ExcelReport2.ReportData()
reportData.newSheet = True
reportData.headerText = ['Trade Date', 'Client Code', 'Client Name', 'Trade Reference', 'Type', 'Currency', 'Amount', 'External Reference']
strSql = getReportSql()
strSql = "select cpty.ptyid, cpty.fullname, t.trdnbr, t.time, p.type, c.insid, p.amount, add_info(t, 'External Reference') 'ext_ref' " + strSql
strSql = strSql.replace('@dt', asofdate.add_days(1).to_string('%Y-%m-%d'))
strSql = strSql.replace('@portfolio_list', pf_list)
strSql = strSql.replace('@accquirer_list', acq_list)
if pty_list != '':
counterparty_list_sql = 'and cpty.ptyid in (@counterparty_list)'
counterparty_list_sql = counterparty_list_sql.replace("@counterparty_list", pty_list)
strSql = strSql.replace("@counterparty_list_sql", counterparty_list_sql)
else:
strSql = strSql.replace("@counterparty_list_sql", ' ')
if period == 'MTD':
fm_date = asofdate.first_day_of_month()
strSql = strSql.replace("@start_date", "and t.time >= '%s'" % (ael.date(fm_date).to_string('%Y-%m-%d')))
else:
strSql = strSql.replace("@start_date", ' ')
print strSql
rs = ael.asql(strSql)
columns, buf = rs
rptContent = []
for table in buf:
for row in table:
client_code = row[Client_Code]
client_name = row[Client_Name]
trade_ref = row[Trade_Reference]
type = row[Type]
currency = row[Currency]
amt = row[Amount]
ext_ref = row[EXT_REF]
acm_trd = acm.FTrade[trade_ref]
if acm_trd != None:
print acm_trd
trade_date = acm.Time.DateFromTime(acm_trd.TradeTime())
rptRow = [str(trade_date), client_code, client_name, str(trade_ref), type, currency, str(amt), ext_ref]
rptContent.append(rptRow)
rptContent.sort(report_compare)
reportData.rows = rptContent
report.addReportData(reportData, {'SUM': [], 'COL_TEXT': [], 'CUSTOM_TEXT': {'COL': [], 'TEXT': []}})
if saveToFile:
if not saveToCSV:
try:
fileName = fileName + '.xlsx'
if os.path.exists(fileName):
os.remove(fileName)
except:
pass
if sendEmail and len(emailList) != 0:
report.saveNoQuit(fileName)
else:
report.save(fileName)
else:
fileName = fileName + '.csv'
if os.path.isfile(fileName):
os.remove(fileName)
csvData = []
csvData.append(reportData.headerText)
csvData = csvData + reportData.rows
print fileName
try:
outPutFile = open(fileName,'wb')
csvWriter = csv.writer(outPutFile, delimiter=',', quotechar='"')
for row in csvData:
csvWriter.writerow(row)
outPutFile.flush()
finally:
outPutFile.close()
else:
report.show()
class ReportLayout(HTI_ExcelReport2.CommonLayoutReport):
    """Excel layout for the cash-entry report: adds a bold 12pt title row and
    formats the Amount column as '#,##0.00' on top of the common layout."""
    title = ''  # report title text; overwritten per-instance in __init__
    Amount = 7  # 1-based Excel column index of the 'Amount' column
    def __init__(self, title):
        self.title = title
        HTI_ExcelReport2.CommonLayoutReport.__init__(self)
    def reportHeader(self, currentRow, reportIndex, excelApp):
        # Write title
        excelApp.Cells(currentRow[self.ROW], 1).Value = self.title
        excelApp.Cells(currentRow[self.ROW], 1).Font.Bold = True
        excelApp.Cells(currentRow[self.ROW], 1).Font.Size = 12
        currentRow[self.ROW] = currentRow[self.ROW] + 1
        HTI_ExcelReport2.CommonLayoutReport.reportHeader(self, currentRow, reportIndex, excelApp)
        # Thousands separator / 2 decimal places on the Amount column.
        excelApp.Columns(self.Amount).NumberFormat = "#,##0.00"
    def groupFooter(self, currentRow, reportIndex, group, excelApp):
        # No extra footer content; defer entirely to the base layout.
        HTI_ExcelReport2.CommonLayoutReport.groupFooter(self, currentRow, reportIndex, group, excelApp)
    def reportEnd(self, excelApp):
        HTI_ExcelReport2.CommonLayoutReport.reportEnd(self, excelApp)
        #excelApp.Columns(self.Trade_Date).Select()
        #excelApp.Selection.HorizontalAlignment = HTI_ExcelReport2.ExcelConstant.xlLeft
        excelApp.Cells(1, 1).Select()
# This file is automatically generated by tools/idna-data
# IDNA table data (machine-generated; regenerate with tools/idna-data
# rather than editing by hand).
__version__ = "11.0.0"
# Unicode script name -> tuple of codepoint ranges, each range packed into a
# single int.  The packing appears to be (range_start << 32) | range_end,
# end exclusive -- e.g. 0x37000000374 covers U+0370..U+0373.  TODO confirm
# against the idna-data generator before relying on this encoding.
scripts = {
    'Greek': (
        0x37000000374,
        0x37500000378,
        0x37a0000037e,
        0x37f00000380,
        0x38400000385,
        0x38600000387,
        0x3880000038b,
        0x38c0000038d,
        0x38e000003a2,
        0x3a3000003e2,
        0x3f000000400,
        0x1d2600001d2b,
        0x1d5d00001d62,
        0x1d6600001d6b,
        0x1dbf00001dc0,
        0x1f0000001f16,
        0x1f1800001f1e,
        0x1f2000001f46,
        0x1f4800001f4e,
        0x1f5000001f58,
        0x1f5900001f5a,
        0x1f5b00001f5c,
        0x1f5d00001f5e,
        0x1f5f00001f7e,
        0x1f8000001fb5,
        0x1fb600001fc5,
        0x1fc600001fd4,
        0x1fd600001fdc,
        0x1fdd00001ff0,
        0x1ff200001ff5,
        0x1ff600001fff,
        0x212600002127,
        0xab650000ab66,
        0x101400001018f,
        0x101a0000101a1,
        0x1d2000001d246,
    ),
    'Han': (
        0x2e8000002e9a,
        0x2e9b00002ef4,
        0x2f0000002fd6,
        0x300500003006,
        0x300700003008,
        0x30210000302a,
        0x30380000303c,
        0x340000004db6,
        0x4e0000009ff0,
        0xf9000000fa6e,
        0xfa700000fada,
        0x200000002a6d7,
        0x2a7000002b735,
        0x2b7400002b81e,
        0x2b8200002cea2,
        0x2ceb00002ebe1,
        0x2f8000002fa1e,
    ),
    'Hebrew': (
        0x591000005c8,
        0x5d0000005eb,
        0x5ef000005f5,
        0xfb1d0000fb37,
        0xfb380000fb3d,
        0xfb3e0000fb3f,
        0xfb400000fb42,
        0xfb430000fb45,
        0xfb460000fb50,
    ),
    'Hiragana': (
        0x304100003097,
        0x309d000030a0,
        0x1b0010001b11f,
        0x1f2000001f201,
    ),
    'Katakana': (
        0x30a1000030fb,
        0x30fd00003100,
        0x31f000003200,
        0x32d0000032ff,
        0x330000003358,
        0xff660000ff70,
        0xff710000ff9e,
        0x1b0000001b001,
    ),
}
joining_types = {
0x600: 85,
0x601: 85,
0x602: 85,
0x603: 85,
0x604: 85,
0x605: 85,
0x608: 85,
0x60b: 85,
0x620: 68,
0x621: 85,
0x622: 82,
0x623: 82,
0x624: 82,
0x625: 82,
0x626: 68,
0x627: 82,
0x628: 68,
0x629: 82,
0x62a: 68,
0x62b: 68,
0x62c: 68,
0x62d: 68,
0x62e: 68,
0x62f: 82,
0x630: 82,
0x631: 82,
0x632: 82,
0x633: 68,
0x634: 68,
0x635: 68,
0x636: 68,
0x637: 68,
0x638: 68,
0x639: 68,
0x63a: 68,
0x63b: 68,
0x63c: 68,
0x63d: 68,
0x63e: 68,
0x63f: 68,
0x640: 67,
0x641: 68,
0x642: 68,
0x643: 68,
0x644: 68,
0x645: 68,
0x646: 68,
0x647: 68,
0x648: 82,
0x649: 68,
0x64a: 68,
0x66e: 68,
0x66f: 68,
0x671: 82,
0x672: 82,
0x673: 82,
0x674: 85,
0x675: 82,
0x676: 82,
0x677: 82,
0x678: 68,
0x679: 68,
0x67a: 68,
0x67b: 68,
0x67c: 68,
0x67d: 68,
0x67e: 68,
0x67f: 68,
0x680: 68,
0x681: 68,
0x682: 68,
0x683: 68,
0x684: 68,
0x685: 68,
0x686: 68,
0x687: 68,
0x688: 82,
0x689: 82,
0x68a: 82,
0x68b: 82,
0x68c: 82,
0x68d: 82,
0x68e: 82,
0x68f: 82,
0x690: 82,
0x691: 82,
0x692: 82,
0x693: 82,
0x694: 82,
0x695: 82,
0x696: 82,
0x697: 82,
0x698: 82,
0x699: 82,
0x69a: 68,
0x69b: 68,
0x69c: 68,
0x69d: 68,
0x69e: 68,
0x69f: 68,
0x6a0: 68,
0x6a1: 68,
0x6a2: 68,
0x6a3: 68,
0x6a4: 68,
0x6a5: 68,
0x6a6: 68,
0x6a7: 68,
0x6a8: 68,
0x6a9: 68,
0x6aa: 68,
0x6ab: 68,
0x6ac: 68,
0x6ad: 68,
0x6ae: 68,
0x6af: 68,
0x6b0: 68,
0x6b1: 68,
0x6b2: 68,
0x6b3: 68,
0x6b4: 68,
0x6b5: 68,
0x6b6: 68,
0x6b7: 68,
0x6b8: 68,
0x6b9: 68,
0x6ba: 68,
0x6bb: 68,
0x6bc: 68,
0x6bd: 68,
0x6be: 68,
0x6bf: 68,
0x6c0: 82,
0x6c1: 68,
0x6c2: 68,
0x6c3: 82,
0x6c4: 82,
0x6c5: 82,
0x6c6: 82,
0x6c7: 82,
0x6c8: 82,
0x6c9: 82,
0x6ca: 82,
0x6cb: 82,
0x6cc: 68,
0x6cd: 82,
0x6ce: 68,
0x6cf: 82,
0x6d0: 68,
0x6d1: 68,
0x6d2: 82,
0x6d3: 82,
0x6d5: 82,
0x6dd: 85,
0x6ee: 82,
0x6ef: 82,
0x6fa: 68,
0x6fb: 68,
0x6fc: 68,
0x6ff: 68,
0x70f: 84,
0x710: 82,
0x712: 68,
0x713: 68,
0x714: 68,
0x715: 82,
0x716: 82,
0x717: 82,
0x718: 82,
0x719: 82,
0x71a: 68,
0x71b: 68,
0x71c: 68,
0x71d: 68,
0x71e: 82,
0x71f: 68,
0x720: 68,
0x721: 68,
0x722: 68,
0x723: 68,
0x724: 68,
0x725: 68,
0x726: 68,
0x727: 68,
0x728: 82,
0x729: 68,
0x72a: 82,
0x72b: 68,
0x72c: 82,
0x72d: 68,
0x72e: 68,
0x72f: 82,
0x74d: 82,
0x74e: 68,
0x74f: 68,
0x750: 68,
0x751: 68,
0x752: 68,
0x753: 68,
0x754: 68,
0x755: 68,
0x756: 68,
0x757: 68,
0x758: 68,
0x759: 82,
0x75a: 82,
0x75b: 82,
0x75c: 68,
0x75d: 68,
0x75e: 68,
0x75f: 68,
0x760: 68,
0x761: 68,
0x762: 68,
0x763: 68,
0x764: 68,
0x765: 68,
0x766: 68,
0x767: 68,
0x768: 68,
0x769: 68,
0x76a: 68,
0x76b: 82,
0x76c: 82,
0x76d: 68,
0x76e: 68,
0x76f: 68,
0x770: 68,
0x771: 82,
0x772: 68,
0x773: 82,
0x774: 82,
0x775: 68,
0x776: 68,
0x777: 68,
0x778: 82,
0x779: 82,
0x77a: 68,
0x77b: 68,
0x77c: 68,
0x77d: 68,
0x77e: 68,
0x77f: 68,
0x7ca: 68,
0x7cb: 68,
0x7cc: 68,
0x7cd: 68,
0x7ce: 68,
0x7cf: 68,
0x7d0: 68,
0x7d1: 68,
0x7d2: 68,
0x7d3: 68,
0x7d4: 68,
0x7d5: 68,
0x7d6: 68,
0x7d7: 68,
0x7d8: 68,
0x7d9: 68,
0x7da: 68,
0x7db: 68,
0x7dc: 68,
0x7dd: 68,
0x7de: 68,
0x7df: 68,
0x7e0: 68,
0x7e1: 68,
0x7e2: 68,
0x7e3: 68,
0x7e4: 68,
0x7e5: 68,
0x7e6: 68,
0x7e7: 68,
0x7e8: 68,
0x7e9: 68,
0x7ea: 68,
0x7fa: 67,
0x840: 82,
0x841: 68,
0x842: 68,
0x843: 68,
0x844: 68,
0x845: 68,
0x846: 82,
0x847: 82,
0x848: 68,
0x849: 82,
0x84a: 68,
0x84b: 68,
0x84c: 68,
0x84d: 68,
0x84e: 68,
0x84f: 68,
0x850: 68,
0x851: 68,
0x852: 68,
0x853: 68,
0x854: 82,
0x855: 68,
0x856: 85,
0x857: 85,
0x858: 85,
0x860: 68,
0x861: 85,
0x862: 68,
0x863: 68,
0x864: 68,
0x865: 68,
0x866: 85,
0x867: 82,
0x868: 68,
0x869: 82,
0x86a: 82,
0x8a0: 68,
0x8a1: 68,
0x8a2: 68,
0x8a3: 68,
0x8a4: 68,
0x8a5: 68,
0x8a6: 68,
0x8a7: 68,
0x8a8: 68,
0x8a9: 68,
0x8aa: 82,
0x8ab: 82,
0x8ac: 82,
0x8ad: 85,
0x8ae: 82,
0x8af: 68,
0x8b0: 68,
0x8b1: 82,
0x8b2: 82,
0x8b3: 68,
0x8b4: 68,
0x8b6: 68,
0x8b7: 68,
0x8b8: 68,
0x8b9: 82,
0x8ba: 68,
0x8bb: 68,
0x8bc: 68,
0x8bd: 68,
0x8e2: 85,
0x1806: 85,
0x1807: 68,
0x180a: 67,
0x180e: 85,
0x1820: 68,
0x1821: 68,
0x1822: 68,
0x1823: 68,
0x1824: 68,
0x1825: 68,
0x1826: 68,
0x1827: 68,
0x1828: 68,
0x1829: 68,
0x182a: 68,
0x182b: 68,
0x182c: 68,
0x182d: 68,
0x182e: 68,
0x182f: 68,
0x1830: 68,
0x1831: 68,
0x1832: 68,
0x1833: 68,
0x1834: 68,
0x1835: 68,
0x1836: 68,
0x1837: 68,
0x1838: 68,
0x1839: 68,
0x183a: 68,
0x183b: 68,
0x183c: 68,
0x183d: 68,
0x183e: 68,
0x183f: 68,
0x1840: 68,
0x1841: 68,
0x1842: 68,
0x1843: 68,
0x1844: 68,
0x1845: 68,
0x1846: 68,
0x1847: 68,
0x1848: 68,
0x1849: 68,
0x184a: 68,
0x184b: 68,
0x184c: 68,
0x184d: 68,
0x184e: 68,
0x184f: 68,
0x1850: 68,
0x1851: 68,
0x1852: 68,
0x1853: 68,
0x1854: 68,
0x1855: 68,
0x1856: 68,
0x1857: 68,
0x1858: 68,
0x1859: 68,
0x185a: 68,
0x185b: 68,
0x185c: 68,
0x185d: 68,
0x185e: 68,
0x185f: 68,
0x1860: 68,
0x1861: 68,
0x1862: 68,
0x1863: 68,
0x1864: 68,
0x1865: 68,
0x1866: 68,
0x1867: 68,
0x1868: 68,
0x1869: 68,
0x186a: 68,
0x186b: 68,
0x186c: 68,
0x186d: 68,
0x186e: 68,
0x186f: 68,
0x1870: 68,
0x1871: 68,
0x1872: 68,
0x1873: 68,
0x1874: 68,
0x1875: 68,
0x1876: 68,
0x1877: 68,
0x1878: 68,
0x1880: 85,
0x1881: 85,
0x1882: 85,
0x1883: 85,
0x1884: 85,
0x1885: 84,
0x1886: 84,
0x1887: 68,
0x1888: 68,
0x1889: 68,
0x188a: 68,
0x188b: 68,
0x188c: 68,
0x188d: 68,
0x188e: 68,
0x188f: 68,
0x1890: 68,
0x1891: 68,
0x1892: 68,
0x1893: 68,
0x1894: 68,
0x1895: 68,
0x1896: 68,
0x1897: 68,
0x1898: 68,
0x1899: 68,
0x189a: 68,
0x189b: 68,
0x189c: 68,
0x189d: 68,
0x189e: 68,
0x189f: 68,
0x18a0: 68,
0x18a1: 68,
0x18a2: 68,
0x18a3: 68,
0x18a4: 68,
0x18a5: 68,
0x18a6: 68,
0x18a7: 68,
0x18a8: 68,
0x18aa: 68,
0x200c: 85,
0x200d: 67,
0x202f: 85,
0x2066: 85,
0x2067: 85,
0x2068: 85,
0x2069: 85,
0xa840: 68,
0xa841: 68,
0xa842: 68,
0xa843: 68,
0xa844: 68,
0xa845: 68,
0xa846: 68,
0xa847: 68,
0xa848: 68,
0xa849: 68,
0xa84a: 68,
0xa84b: 68,
0xa84c: 68,
0xa84d: 68,
0xa84e: 68,
0xa84f: 68,
0xa850: 68,
0xa851: 68,
0xa852: 68,
0xa853: 68,
0xa854: 68,
0xa855: 68,
0xa856: 68,
0xa857: 68,
0xa858: 68,
0xa859: 68,
0xa85a: 68,
0xa85b: 68,
0xa85c: 68,
0xa85d: 68,
0xa85e: 68,
0xa85f: 68,
0xa860: 68,
0xa861: 68,
0xa862: 68,
0xa863: 68,
0xa864: 68,
0xa865: 68,
0xa866: 68,
0xa867: 68,
0xa868: 68,
0xa869: 68,
0xa86a: 68,
0xa86b: 68,
0xa86c: 68,
0xa86d: 68,
0xa86e: 68,
0xa86f: 68,
0xa870: 68,
0xa871: 68,
0xa872: 76,
0xa873: 85,
0x10ac0: 68,
0x10ac1: 68,
0x10ac2: 68,
0x10ac3: 68,
0x10ac4: 68,
0x10ac5: 82,
0x10ac6: 85,
0x10ac7: 82,
0x10ac8: 85,
0x10ac9: 82,
0x10aca: 82,
0x10acb: 85,
0x10acc: 85,
0x10acd: 76,
0x10ace: 82,
0x10acf: 82,
0x10ad0: 82,
0x10ad1: 82,
0x10ad2: 82,
0x10ad3: 68,
0x10ad4: 68,
0x10ad5: 68,
0x10ad6: 68,
0x10ad7: 76,
0x10ad8: 68,
0x10ad9: 68,
0x10ada: 68,
0x10adb: 68,
0x10adc: 68,
0x10add: 82,
0x10ade: 68,
0x10adf: 68,
0x10ae0: 68,
0x10ae1: 82,
0x10ae2: 85,
0x10ae3: 85,
0x10ae4: 82,
0x10aeb: 68,
0x10aec: 68,
0x10aed: 68,
0x10aee: 68,
0x10aef: 82,
0x10b80: 68,
0x10b81: 82,
0x10b82: 68,
0x10b83: 82,
0x10b84: 82,
0x10b85: 82,
0x10b86: 68,
0x10b87: 68,
0x10b88: 68,
0x10b89: 82,
0x10b8a: 68,
0x10b8b: 68,
0x10b8c: 82,
0x10b8d: 68,
0x10b8e: 82,
0x10b8f: 82,
0x10b90: 68,
0x10b91: 82,
0x10ba9: 82,
0x10baa: 82,
0x10bab: 82,
0x10bac: 82,
0x10bad: 68,
0x10bae: 68,
0x10baf: 85,
0x10d00: 76,
0x10d01: 68,
0x10d02: 68,
0x10d03: 68,
0x10d04: 68,
0x10d05: 68,
0x10d06: 68,
0x10d07: 68,
0x10d08: 68,
0x10d09: 68,
0x10d0a: 68,
0x10d0b: 68,
0x10d0c: 68,
0x10d0d: 68,
0x10d0e: 68,
0x10d0f: 68,
0x10d10: 68,
0x10d11: 68,
0x10d12: 68,
0x10d13: 68,
0x10d14: 68,
0x10d15: 68,
0x10d16: 68,
0x10d17: 68,
0x10d18: 68,
0x10d19: 68,
0x10d1a: 68,
0x10d1b: 68,
0x10d1c: 68,
0x10d1d: 68,
0x10d1e: 68,
0x10d1f: 68,
0x10d20: 68,
0x10d21: 68,
0x10d22: 82,
0x10d23: 68,
0x10f30: 68,
0x10f31: 68,
0x10f32: 68,
0x10f33: 82,
0x10f34: 68,
0x10f35: 68,
0x10f36: 68,
0x10f37: 68,
0x10f38: 68,
0x10f39: 68,
0x10f3a: 68,
0x10f3b: 68,
0x10f3c: 68,
0x10f3d: 68,
0x10f3e: 68,
0x10f3f: 68,
0x10f40: 68,
0x10f41: 68,
0x10f42: 68,
0x10f43: 68,
0x10f44: 68,
0x10f45: 85,
0x10f51: 68,
0x10f52: 68,
0x10f53: 68,
0x10f54: 82,
0x110bd: 85,
0x110cd: 85,
0x1e900: 68,
0x1e901: 68,
0x1e902: 68,
0x1e903: 68,
0x1e904: 68,
0x1e905: 68,
0x1e906: 68,
0x1e907: 68,
0x1e908: 68,
0x1e909: 68,
0x1e90a: 68,
0x1e90b: 68,
0x1e90c: 68,
0x1e90d: 68,
0x1e90e: 68,
0x1e90f: 68,
0x1e910: 68,
0x1e911: 68,
0x1e912: 68,
0x1e913: 68,
0x1e914: 68,
0x1e915: 68,
0x1e916: 68,
0x1e917: 68,
0x1e918: 68,
0x1e919: 68,
0x1e91a: 68,
0x1e91b: 68,
0x1e91c: 68,
0x1e91d: 68,
0x1e91e: 68,
0x1e91f: 68,
0x1e920: 68,
0x1e921: 68,
0x1e922: 68,
0x1e923: 68,
0x1e924: 68,
0x1e925: 68,
0x1e926: 68,
0x1e927: 68,
0x1e928: 68,
0x1e929: 68,
0x1e92a: 68,
0x1e92b: 68,
0x1e92c: 68,
0x1e92d: 68,
0x1e92e: 68,
0x1e92f: 68,
0x1e930: 68,
0x1e931: 68,
0x1e932: 68,
0x1e933: 68,
0x1e934: 68,
0x1e935: 68,
0x1e936: 68,
0x1e937: 68,
0x1e938: 68,
0x1e939: 68,
0x1e93a: 68,
0x1e93b: 68,
0x1e93c: 68,
0x1e93d: 68,
0x1e93e: 68,
0x1e93f: 68,
0x1e940: 68,
0x1e941: 68,
0x1e942: 68,
0x1e943: 68,
}
codepoint_classes = {
'PVALID': (
0x2d0000002e,
0x300000003a,
0x610000007b,
0xdf000000f7,
0xf800000100,
0x10100000102,
0x10300000104,
0x10500000106,
0x10700000108,
0x1090000010a,
0x10b0000010c,
0x10d0000010e,
0x10f00000110,
0x11100000112,
0x11300000114,
0x11500000116,
0x11700000118,
0x1190000011a,
0x11b0000011c,
0x11d0000011e,
0x11f00000120,
0x12100000122,
0x12300000124,
0x12500000126,
0x12700000128,
0x1290000012a,
0x12b0000012c,
0x12d0000012e,
0x12f00000130,
0x13100000132,
0x13500000136,
0x13700000139,
0x13a0000013b,
0x13c0000013d,
0x13e0000013f,
0x14200000143,
0x14400000145,
0x14600000147,
0x14800000149,
0x14b0000014c,
0x14d0000014e,
0x14f00000150,
0x15100000152,
0x15300000154,
0x15500000156,
0x15700000158,
0x1590000015a,
0x15b0000015c,
0x15d0000015e,
0x15f00000160,
0x16100000162,
0x16300000164,
0x16500000166,
0x16700000168,
0x1690000016a,
0x16b0000016c,
0x16d0000016e,
0x16f00000170,
0x17100000172,
0x17300000174,
0x17500000176,
0x17700000178,
0x17a0000017b,
0x17c0000017d,
0x17e0000017f,
0x18000000181,
0x18300000184,
0x18500000186,
0x18800000189,
0x18c0000018e,
0x19200000193,
0x19500000196,
0x1990000019c,
0x19e0000019f,
0x1a1000001a2,
0x1a3000001a4,
0x1a5000001a6,
0x1a8000001a9,
0x1aa000001ac,
0x1ad000001ae,
0x1b0000001b1,
0x1b4000001b5,
0x1b6000001b7,
0x1b9000001bc,
0x1bd000001c4,
0x1ce000001cf,
0x1d0000001d1,
0x1d2000001d3,
0x1d4000001d5,
0x1d6000001d7,
0x1d8000001d9,
0x1da000001db,
0x1dc000001de,
0x1df000001e0,
0x1e1000001e2,
0x1e3000001e4,
0x1e5000001e6,
0x1e7000001e8,
0x1e9000001ea,
0x1eb000001ec,
0x1ed000001ee,
0x1ef000001f1,
0x1f5000001f6,
0x1f9000001fa,
0x1fb000001fc,
0x1fd000001fe,
0x1ff00000200,
0x20100000202,
0x20300000204,
0x20500000206,
0x20700000208,
0x2090000020a,
0x20b0000020c,
0x20d0000020e,
0x20f00000210,
0x21100000212,
0x21300000214,
0x21500000216,
0x21700000218,
0x2190000021a,
0x21b0000021c,
0x21d0000021e,
0x21f00000220,
0x22100000222,
0x22300000224,
0x22500000226,
0x22700000228,
0x2290000022a,
0x22b0000022c,
0x22d0000022e,
0x22f00000230,
0x23100000232,
0x2330000023a,
0x23c0000023d,
0x23f00000241,
0x24200000243,
0x24700000248,
0x2490000024a,
0x24b0000024c,
0x24d0000024e,
0x24f000002b0,
0x2b9000002c2,
0x2c6000002d2,
0x2ec000002ed,
0x2ee000002ef,
0x30000000340,
0x34200000343,
0x3460000034f,
0x35000000370,
0x37100000372,
0x37300000374,
0x37700000378,
0x37b0000037e,
0x39000000391,
0x3ac000003cf,
0x3d7000003d8,
0x3d9000003da,
0x3db000003dc,
0x3dd000003de,
0x3df000003e0,
0x3e1000003e2,
0x3e3000003e4,
0x3e5000003e6,
0x3e7000003e8,
0x3e9000003ea,
0x3eb000003ec,
0x3ed000003ee,
0x3ef000003f0,
0x3f3000003f4,
0x3f8000003f9,
0x3fb000003fd,
0x43000000460,
0x46100000462,
0x46300000464,
0x46500000466,
0x46700000468,
0x4690000046a,
0x46b0000046c,
0x46d0000046e,
0x46f00000470,
0x47100000472,
0x47300000474,
0x47500000476,
0x47700000478,
0x4790000047a,
0x47b0000047c,
0x47d0000047e,
0x47f00000480,
0x48100000482,
0x48300000488,
0x48b0000048c,
0x48d0000048e,
0x48f00000490,
0x49100000492,
0x49300000494,
0x49500000496,
0x49700000498,
0x4990000049a,
0x49b0000049c,
0x49d0000049e,
0x49f000004a0,
0x4a1000004a2,
0x4a3000004a4,
0x4a5000004a6,
0x4a7000004a8,
0x4a9000004aa,
0x4ab000004ac,
0x4ad000004ae,
0x4af000004b0,
0x4b1000004b2,
0x4b3000004b4,
0x4b5000004b6,
0x4b7000004b8,
0x4b9000004ba,
0x4bb000004bc,
0x4bd000004be,
0x4bf000004c0,
0x4c2000004c3,
0x4c4000004c5,
0x4c6000004c7,
0x4c8000004c9,
0x4ca000004cb,
0x4cc000004cd,
0x4ce000004d0,
0x4d1000004d2,
0x4d3000004d4,
0x4d5000004d6,
0x4d7000004d8,
0x4d9000004da,
0x4db000004dc,
0x4dd000004de,
0x4df000004e0,
0x4e1000004e2,
0x4e3000004e4,
0x4e5000004e6,
0x4e7000004e8,
0x4e9000004ea,
0x4eb000004ec,
0x4ed000004ee,
0x4ef000004f0,
0x4f1000004f2,
0x4f3000004f4,
0x4f5000004f6,
0x4f7000004f8,
0x4f9000004fa,
0x4fb000004fc,
0x4fd000004fe,
0x4ff00000500,
0x50100000502,
0x50300000504,
0x50500000506,
0x50700000508,
0x5090000050a,
0x50b0000050c,
0x50d0000050e,
0x50f00000510,
0x51100000512,
0x51300000514,
0x51500000516,
0x51700000518,
0x5190000051a,
0x51b0000051c,
0x51d0000051e,
0x51f00000520,
0x52100000522,
0x52300000524,
0x52500000526,
0x52700000528,
0x5290000052a,
0x52b0000052c,
0x52d0000052e,
0x52f00000530,
0x5590000055a,
0x56000000587,
0x58800000589,
0x591000005be,
0x5bf000005c0,
0x5c1000005c3,
0x5c4000005c6,
0x5c7000005c8,
0x5d0000005eb,
0x5ef000005f3,
0x6100000061b,
0x62000000640,
0x64100000660,
0x66e00000675,
0x679000006d4,
0x6d5000006dd,
0x6df000006e9,
0x6ea000006f0,
0x6fa00000700,
0x7100000074b,
0x74d000007b2,
0x7c0000007f6,
0x7fd000007fe,
0x8000000082e,
0x8400000085c,
0x8600000086b,
0x8a0000008b5,
0x8b6000008be,
0x8d3000008e2,
0x8e300000958,
0x96000000964,
0x96600000970,
0x97100000984,
0x9850000098d,
0x98f00000991,
0x993000009a9,
0x9aa000009b1,
0x9b2000009b3,
0x9b6000009ba,
0x9bc000009c5,
0x9c7000009c9,
0x9cb000009cf,
0x9d7000009d8,
0x9e0000009e4,
0x9e6000009f2,
0x9fc000009fd,
0x9fe000009ff,
0xa0100000a04,
0xa0500000a0b,
0xa0f00000a11,
0xa1300000a29,
0xa2a00000a31,
0xa3200000a33,
0xa3500000a36,
0xa3800000a3a,
0xa3c00000a3d,
0xa3e00000a43,
0xa4700000a49,
0xa4b00000a4e,
0xa5100000a52,
0xa5c00000a5d,
0xa6600000a76,
0xa8100000a84,
0xa8500000a8e,
0xa8f00000a92,
0xa9300000aa9,
0xaaa00000ab1,
0xab200000ab4,
0xab500000aba,
0xabc00000ac6,
0xac700000aca,
0xacb00000ace,
0xad000000ad1,
0xae000000ae4,
0xae600000af0,
0xaf900000b00,
0xb0100000b04,
0xb0500000b0d,
0xb0f00000b11,
0xb1300000b29,
0xb2a00000b31,
0xb3200000b34,
0xb3500000b3a,
0xb3c00000b45,
0xb4700000b49,
0xb4b00000b4e,
0xb5600000b58,
0xb5f00000b64,
0xb6600000b70,
0xb7100000b72,
0xb8200000b84,
0xb8500000b8b,
0xb8e00000b91,
0xb9200000b96,
0xb9900000b9b,
0xb9c00000b9d,
0xb9e00000ba0,
0xba300000ba5,
0xba800000bab,
0xbae00000bba,
0xbbe00000bc3,
0xbc600000bc9,
0xbca00000bce,
0xbd000000bd1,
0xbd700000bd8,
0xbe600000bf0,
0xc0000000c0d,
0xc0e00000c11,
0xc1200000c29,
0xc2a00000c3a,
0xc3d00000c45,
0xc4600000c49,
0xc4a00000c4e,
0xc5500000c57,
0xc5800000c5b,
0xc6000000c64,
0xc6600000c70,
0xc8000000c84,
0xc8500000c8d,
0xc8e00000c91,
0xc9200000ca9,
0xcaa00000cb4,
0xcb500000cba,
0xcbc00000cc5,
0xcc600000cc9,
0xcca00000cce,
0xcd500000cd7,
0xcde00000cdf,
0xce000000ce4,
0xce600000cf0,
0xcf100000cf3,
0xd0000000d04,
0xd0500000d0d,
0xd0e00000d11,
0xd1200000d45,
0xd4600000d49,
0xd4a00000d4f,
0xd5400000d58,
0xd5f00000d64,
0xd6600000d70,
0xd7a00000d80,
0xd8200000d84,
0xd8500000d97,
0xd9a00000db2,
0xdb300000dbc,
0xdbd00000dbe,
0xdc000000dc7,
0xdca00000dcb,
0xdcf00000dd5,
0xdd600000dd7,
0xdd800000de0,
0xde600000df0,
0xdf200000df4,
0xe0100000e33,
0xe3400000e3b,
0xe4000000e4f,
0xe5000000e5a,
0xe8100000e83,
0xe8400000e85,
0xe8700000e89,
0xe8a00000e8b,
0xe8d00000e8e,
0xe9400000e98,
0xe9900000ea0,
0xea100000ea4,
0xea500000ea6,
0xea700000ea8,
0xeaa00000eac,
0xead00000eb3,
0xeb400000eba,
0xebb00000ebe,
0xec000000ec5,
0xec600000ec7,
0xec800000ece,
0xed000000eda,
0xede00000ee0,
0xf0000000f01,
0xf0b00000f0c,
0xf1800000f1a,
0xf2000000f2a,
0xf3500000f36,
0xf3700000f38,
0xf3900000f3a,
0xf3e00000f43,
0xf4400000f48,
0xf4900000f4d,
0xf4e00000f52,
0xf5300000f57,
0xf5800000f5c,
0xf5d00000f69,
0xf6a00000f6d,
0xf7100000f73,
0xf7400000f75,
0xf7a00000f81,
0xf8200000f85,
0xf8600000f93,
0xf9400000f98,
0xf9900000f9d,
0xf9e00000fa2,
0xfa300000fa7,
0xfa800000fac,
0xfad00000fb9,
0xfba00000fbd,
0xfc600000fc7,
0x10000000104a,
0x10500000109e,
0x10d0000010fb,
0x10fd00001100,
0x120000001249,
0x124a0000124e,
0x125000001257,
0x125800001259,
0x125a0000125e,
0x126000001289,
0x128a0000128e,
0x1290000012b1,
0x12b2000012b6,
0x12b8000012bf,
0x12c0000012c1,
0x12c2000012c6,
0x12c8000012d7,
0x12d800001311,
0x131200001316,
0x13180000135b,
0x135d00001360,
0x138000001390,
0x13a0000013f6,
0x14010000166d,
0x166f00001680,
0x16810000169b,
0x16a0000016eb,
0x16f1000016f9,
0x17000000170d,
0x170e00001715,
0x172000001735,
0x174000001754,
0x17600000176d,
0x176e00001771,
0x177200001774,
0x1780000017b4,
0x17b6000017d4,
0x17d7000017d8,
0x17dc000017de,
0x17e0000017ea,
0x18100000181a,
0x182000001879,
0x1880000018ab,
0x18b0000018f6,
0x19000000191f,
0x19200000192c,
0x19300000193c,
0x19460000196e,
0x197000001975,
0x1980000019ac,
0x19b0000019ca,
0x19d0000019da,
0x1a0000001a1c,
0x1a2000001a5f,
0x1a6000001a7d,
0x1a7f00001a8a,
0x1a9000001a9a,
0x1aa700001aa8,
0x1ab000001abe,
0x1b0000001b4c,
0x1b5000001b5a,
0x1b6b00001b74,
0x1b8000001bf4,
0x1c0000001c38,
0x1c4000001c4a,
0x1c4d00001c7e,
0x1cd000001cd3,
0x1cd400001cfa,
0x1d0000001d2c,
0x1d2f00001d30,
0x1d3b00001d3c,
0x1d4e00001d4f,
0x1d6b00001d78,
0x1d7900001d9b,
0x1dc000001dfa,
0x1dfb00001e00,
0x1e0100001e02,
0x1e0300001e04,
0x1e0500001e06,
0x1e0700001e08,
0x1e0900001e0a,
0x1e0b00001e0c,
0x1e0d00001e0e,
0x1e0f00001e10,
0x1e1100001e12,
0x1e1300001e14,
0x1e1500001e16,
0x1e1700001e18,
0x1e1900001e1a,
0x1e1b00001e1c,
0x1e1d00001e1e,
0x1e1f00001e20,
0x1e2100001e22,
0x1e2300001e24,
0x1e2500001e26,
0x1e2700001e28,
0x1e2900001e2a,
0x1e2b00001e2c,
0x1e2d00001e2e,
0x1e2f00001e30,
0x1e3100001e32,
0x1e3300001e34,
0x1e3500001e36,
0x1e3700001e38,
0x1e3900001e3a,
0x1e3b00001e3c,
0x1e3d00001e3e,
0x1e3f00001e40,
0x1e4100001e42,
0x1e4300001e44,
0x1e4500001e46,
0x1e4700001e48,
0x1e4900001e4a,
0x1e4b00001e4c,
0x1e4d00001e4e,
0x1e4f00001e50,
0x1e5100001e52,
0x1e5300001e54,
0x1e5500001e56,
0x1e5700001e58,
0x1e5900001e5a,
0x1e5b00001e5c,
0x1e5d00001e5e,
0x1e5f00001e60,
0x1e6100001e62,
0x1e6300001e64,
0x1e6500001e66,
0x1e6700001e68,
0x1e6900001e6a,
0x1e6b00001e6c,
0x1e6d00001e6e,
0x1e6f00001e70,
0x1e7100001e72,
0x1e7300001e74,
0x1e7500001e76,
0x1e7700001e78,
0x1e7900001e7a,
0x1e7b00001e7c,
0x1e7d00001e7e,
0x1e7f00001e80,
0x1e8100001e82,
0x1e8300001e84,
0x1e8500001e86,
0x1e8700001e88,
0x1e8900001e8a,
0x1e8b00001e8c,
0x1e8d00001e8e,
0x1e8f00001e90,
0x1e9100001e92,
0x1e9300001e94,
0x1e9500001e9a,
0x1e9c00001e9e,
0x1e9f00001ea0,
0x1ea100001ea2,
0x1ea300001ea4,
0x1ea500001ea6,
0x1ea700001ea8,
0x1ea900001eaa,
0x1eab00001eac,
0x1ead00001eae,
0x1eaf00001eb0,
0x1eb100001eb2,
0x1eb300001eb4,
0x1eb500001eb6,
0x1eb700001eb8,
0x1eb900001eba,
0x1ebb00001ebc,
0x1ebd00001ebe,
0x1ebf00001ec0,
0x1ec100001ec2,
0x1ec300001ec4,
0x1ec500001ec6,
0x1ec700001ec8,
0x1ec900001eca,
0x1ecb00001ecc,
0x1ecd00001ece,
0x1ecf00001ed0,
0x1ed100001ed2,
0x1ed300001ed4,
0x1ed500001ed6,
0x1ed700001ed8,
0x1ed900001eda,
0x1edb00001edc,
0x1edd00001ede,
0x1edf00001ee0,
0x1ee100001ee2,
0x1ee300001ee4,
0x1ee500001ee6,
0x1ee700001ee8,
0x1ee900001eea,
0x1eeb00001eec,
0x1eed00001eee,
0x1eef00001ef0,
0x1ef100001ef2,
0x1ef300001ef4,
0x1ef500001ef6,
0x1ef700001ef8,
0x1ef900001efa,
0x1efb00001efc,
0x1efd00001efe,
0x1eff00001f08,
0x1f1000001f16,
0x1f2000001f28,
0x1f3000001f38,
0x1f4000001f46,
0x1f5000001f58,
0x1f6000001f68,
0x1f7000001f71,
0x1f7200001f73,
0x1f7400001f75,
0x1f7600001f77,
0x1f7800001f79,
0x1f7a00001f7b,
0x1f7c00001f7d,
0x1fb000001fb2,
0x1fb600001fb7,
0x1fc600001fc7,
0x1fd000001fd3,
0x1fd600001fd8,
0x1fe000001fe3,
0x1fe400001fe8,
0x1ff600001ff7,
0x214e0000214f,
0x218400002185,
0x2c3000002c5f,
0x2c6100002c62,
0x2c6500002c67,
0x2c6800002c69,
0x2c6a00002c6b,
0x2c6c00002c6d,
0x2c7100002c72,
0x2c7300002c75,
0x2c7600002c7c,
0x2c8100002c82,
0x2c8300002c84,
0x2c8500002c86,
0x2c8700002c88,
0x2c8900002c8a,
0x2c8b00002c8c,
0x2c8d00002c8e,
0x2c8f00002c90,
0x2c9100002c92,
0x2c9300002c94,
0x2c9500002c96,
0x2c9700002c98,
0x2c9900002c9a,
0x2c9b00002c9c,
0x2c9d00002c9e,
0x2c9f00002ca0,
0x2ca100002ca2,
0x2ca300002ca4,
0x2ca500002ca6,
0x2ca700002ca8,
0x2ca900002caa,
0x2cab00002cac,
0x2cad00002cae,
0x2caf00002cb0,
0x2cb100002cb2,
0x2cb300002cb4,
0x2cb500002cb6,
0x2cb700002cb8,
0x2cb900002cba,
0x2cbb00002cbc,
0x2cbd00002cbe,
0x2cbf00002cc0,
0x2cc100002cc2,
0x2cc300002cc4,
0x2cc500002cc6,
0x2cc700002cc8,
0x2cc900002cca,
0x2ccb00002ccc,
0x2ccd00002cce,
0x2ccf00002cd0,
0x2cd100002cd2,
0x2cd300002cd4,
0x2cd500002cd6,
0x2cd700002cd8,
0x2cd900002cda,
0x2cdb00002cdc,
0x2cdd00002cde,
0x2cdf00002ce0,
0x2ce100002ce2,
0x2ce300002ce5,
0x2cec00002ced,
0x2cee00002cf2,
0x2cf300002cf4,
0x2d0000002d26,
0x2d2700002d28,
0x2d2d00002d2e,
0x2d3000002d68,
0x2d7f00002d97,
0x2da000002da7,
0x2da800002daf,
0x2db000002db7,
0x2db800002dbf,
0x2dc000002dc7,
0x2dc800002dcf,
0x2dd000002dd7,
0x2dd800002ddf,
0x2de000002e00,
0x2e2f00002e30,
0x300500003008,
0x302a0000302e,
0x303c0000303d,
0x304100003097,
0x30990000309b,
0x309d0000309f,
0x30a1000030fb,
0x30fc000030ff,
0x310500003130,
0x31a0000031bb,
0x31f000003200,
0x340000004db6,
0x4e0000009ff0,
0xa0000000a48d,
0xa4d00000a4fe,
0xa5000000a60d,
0xa6100000a62c,
0xa6410000a642,
0xa6430000a644,
0xa6450000a646,
0xa6470000a648,
0xa6490000a64a,
0xa64b0000a64c,
0xa64d0000a64e,
0xa64f0000a650,
0xa6510000a652,
0xa6530000a654,
0xa6550000a656,
0xa6570000a658,
0xa6590000a65a,
0xa65b0000a65c,
0xa65d0000a65e,
0xa65f0000a660,
0xa6610000a662,
0xa6630000a664,
0xa6650000a666,
0xa6670000a668,
0xa6690000a66a,
0xa66b0000a66c,
0xa66d0000a670,
0xa6740000a67e,
0xa67f0000a680,
0xa6810000a682,
0xa6830000a684,
0xa6850000a686,
0xa6870000a688,
0xa6890000a68a,
0xa68b0000a68c,
0xa68d0000a68e,
0xa68f0000a690,
0xa6910000a692,
0xa6930000a694,
0xa6950000a696,
0xa6970000a698,
0xa6990000a69a,
0xa69b0000a69c,
0xa69e0000a6e6,
0xa6f00000a6f2,
0xa7170000a720,
0xa7230000a724,
0xa7250000a726,
0xa7270000a728,
0xa7290000a72a,
0xa72b0000a72c,
0xa72d0000a72e,
0xa72f0000a732,
0xa7330000a734,
0xa7350000a736,
0xa7370000a738,
0xa7390000a73a,
0xa73b0000a73c,
0xa73d0000a73e,
0xa73f0000a740,
0xa7410000a742,
0xa7430000a744,
0xa7450000a746,
0xa7470000a748,
0xa7490000a74a,
0xa74b0000a74c,
0xa74d0000a74e,
0xa74f0000a750,
0xa7510000a752,
0xa7530000a754,
0xa7550000a756,
0xa7570000a758,
0xa7590000a75a,
0xa75b0000a75c,
0xa75d0000a75e,
0xa75f0000a760,
0xa7610000a762,
0xa7630000a764,
0xa7650000a766,
0xa7670000a768,
0xa7690000a76a,
0xa76b0000a76c,
0xa76d0000a76e,
0xa76f0000a770,
0xa7710000a779,
0xa77a0000a77b,
0xa77c0000a77d,
0xa77f0000a780,
0xa7810000a782,
0xa7830000a784,
0xa7850000a786,
0xa7870000a789,
0xa78c0000a78d,
0xa78e0000a790,
0xa7910000a792,
0xa7930000a796,
0xa7970000a798,
0xa7990000a79a,
0xa79b0000a79c,
0xa79d0000a79e,
0xa79f0000a7a0,
0xa7a10000a7a2,
0xa7a30000a7a4,
0xa7a50000a7a6,
0xa7a70000a7a8,
0xa7a90000a7aa,
0xa7af0000a7b0,
0xa7b50000a7b6,
0xa7b70000a7b8,
0xa7b90000a7ba,
0xa7f70000a7f8,
0xa7fa0000a828,
0xa8400000a874,
0xa8800000a8c6,
0xa8d00000a8da,
0xa8e00000a8f8,
0xa8fb0000a8fc,
0xa8fd0000a92e,
0xa9300000a954,
0xa9800000a9c1,
0xa9cf0000a9da,
0xa9e00000a9ff,
0xaa000000aa37,
0xaa400000aa4e,
0xaa500000aa5a,
0xaa600000aa77,
0xaa7a0000aac3,
0xaadb0000aade,
0xaae00000aaf0,
0xaaf20000aaf7,
0xab010000ab07,
0xab090000ab0f,
0xab110000ab17,
0xab200000ab27,
0xab280000ab2f,
0xab300000ab5b,
0xab600000ab66,
0xabc00000abeb,
0xabec0000abee,
0xabf00000abfa,
0xac000000d7a4,
0xfa0e0000fa10,
0xfa110000fa12,
0xfa130000fa15,
0xfa1f0000fa20,
0xfa210000fa22,
0xfa230000fa25,
0xfa270000fa2a,
0xfb1e0000fb1f,
0xfe200000fe30,
0xfe730000fe74,
0x100000001000c,
0x1000d00010027,
0x100280001003b,
0x1003c0001003e,
0x1003f0001004e,
0x100500001005e,
0x10080000100fb,
0x101fd000101fe,
0x102800001029d,
0x102a0000102d1,
0x102e0000102e1,
0x1030000010320,
0x1032d00010341,
0x103420001034a,
0x103500001037b,
0x103800001039e,
0x103a0000103c4,
0x103c8000103d0,
0x104280001049e,
0x104a0000104aa,
0x104d8000104fc,
0x1050000010528,
0x1053000010564,
0x1060000010737,
0x1074000010756,
0x1076000010768,
0x1080000010806,
0x1080800010809,
0x1080a00010836,
0x1083700010839,
0x1083c0001083d,
0x1083f00010856,
0x1086000010877,
0x108800001089f,
0x108e0000108f3,
0x108f4000108f6,
0x1090000010916,
0x109200001093a,
0x10980000109b8,
0x109be000109c0,
0x10a0000010a04,
0x10a0500010a07,
0x10a0c00010a14,
0x10a1500010a18,
0x10a1900010a36,
0x10a3800010a3b,
0x10a3f00010a40,
0x10a6000010a7d,
0x10a8000010a9d,
0x10ac000010ac8,
0x10ac900010ae7,
0x10b0000010b36,
0x10b4000010b56,
0x10b6000010b73,
0x10b8000010b92,
0x10c0000010c49,
0x10cc000010cf3,
0x10d0000010d28,
0x10d3000010d3a,
0x10f0000010f1d,
0x10f2700010f28,
0x10f3000010f51,
0x1100000011047,
0x1106600011070,
0x1107f000110bb,
0x110d0000110e9,
0x110f0000110fa,
0x1110000011135,
0x1113600011140,
0x1114400011147,
0x1115000011174,
0x1117600011177,
0x11180000111c5,
0x111c9000111cd,
0x111d0000111db,
0x111dc000111dd,
0x1120000011212,
0x1121300011238,
0x1123e0001123f,
0x1128000011287,
0x1128800011289,
0x1128a0001128e,
0x1128f0001129e,
0x1129f000112a9,
0x112b0000112eb,
0x112f0000112fa,
0x1130000011304,
0x113050001130d,
0x1130f00011311,
0x1131300011329,
0x1132a00011331,
0x1133200011334,
0x113350001133a,
0x1133b00011345,
0x1134700011349,
0x1134b0001134e,
0x1135000011351,
0x1135700011358,
0x1135d00011364,
0x113660001136d,
0x1137000011375,
0x114000001144b,
0x114500001145a,
0x1145e0001145f,
0x11480000114c6,
0x114c7000114c8,
0x114d0000114da,
0x11580000115b6,
0x115b8000115c1,
0x115d8000115de,
0x1160000011641,
0x1164400011645,
0x116500001165a,
0x11680000116b8,
0x116c0000116ca,
0x117000001171b,
0x1171d0001172c,
0x117300001173a,
0x118000001183b,
0x118c0000118ea,
0x118ff00011900,
0x11a0000011a3f,
0x11a4700011a48,
0x11a5000011a84,
0x11a8600011a9a,
0x11a9d00011a9e,
0x11ac000011af9,
0x11c0000011c09,
0x11c0a00011c37,
0x11c3800011c41,
0x11c5000011c5a,
0x11c7200011c90,
0x11c9200011ca8,
0x11ca900011cb7,
0x11d0000011d07,
0x11d0800011d0a,
0x11d0b00011d37,
0x11d3a00011d3b,
0x11d3c00011d3e,
0x11d3f00011d48,
0x11d5000011d5a,
0x11d6000011d66,
0x11d6700011d69,
0x11d6a00011d8f,
0x11d9000011d92,
0x11d9300011d99,
0x11da000011daa,
0x11ee000011ef7,
0x120000001239a,
0x1248000012544,
0x130000001342f,
0x1440000014647,
0x1680000016a39,
0x16a4000016a5f,
0x16a6000016a6a,
0x16ad000016aee,
0x16af000016af5,
0x16b0000016b37,
0x16b4000016b44,
0x16b5000016b5a,
0x16b6300016b78,
0x16b7d00016b90,
0x16e6000016e80,
0x16f0000016f45,
0x16f5000016f7f,
0x16f8f00016fa0,
0x16fe000016fe2,
0x17000000187f2,
0x1880000018af3,
0x1b0000001b11f,
0x1b1700001b2fc,
0x1bc000001bc6b,
0x1bc700001bc7d,
0x1bc800001bc89,
0x1bc900001bc9a,
0x1bc9d0001bc9f,
0x1da000001da37,
0x1da3b0001da6d,
0x1da750001da76,
0x1da840001da85,
0x1da9b0001daa0,
0x1daa10001dab0,
0x1e0000001e007,
0x1e0080001e019,
0x1e01b0001e022,
0x1e0230001e025,
0x1e0260001e02b,
0x1e8000001e8c5,
0x1e8d00001e8d7,
0x1e9220001e94b,
0x1e9500001e95a,
0x200000002a6d7,
0x2a7000002b735,
0x2b7400002b81e,
0x2b8200002cea2,
0x2ceb00002ebe1,
),
'CONTEXTJ': (
0x200c0000200e,
),
'CONTEXTO': (
0xb7000000b8,
0x37500000376,
0x5f3000005f5,
0x6600000066a,
0x6f0000006fa,
0x30fb000030fc,
),
}
| gpl-3.0 |
ilyaraz/FALCONN | external/googletest/googletest/test/gtest_xml_test_utils.py | 364 | 8872 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test utilities for gtest_xml_output"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import re
from xml.dom import minidom, Node
import gtest_test_utils
GTEST_OUTPUT_FLAG = '--gtest_output'
GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml'
class GTestXMLTestCase(gtest_test_utils.TestCase):
  """
  Base class for tests of Google Test's XML output functionality.
  """

  def AssertEquivalentNodes(self, expected_node, actual_node):
    """
    Asserts that actual_node (a DOM node object) is equivalent to
    expected_node (another DOM node object), in that either both of
    them are CDATA nodes and have the same value, or both are DOM
    elements and actual_node meets all of the following conditions:

    * It has the same tag name as expected_node.
    * It has the same set of attributes as expected_node, each with
      the same value as the corresponding attribute of expected_node.
      Exceptions are any attribute named "time", which needs only be
      convertible to a floating-point number and any attribute named
      "type_param" which only has to be non-empty.
    * It has an equivalent set of child nodes (including elements and
      CDATA sections) as expected_node.  Note that we ignore the
      order of the children as they are not guaranteed to be in any
      particular order.
    """
    # CDATA nodes carry no attributes or children; compare by value only.
    if expected_node.nodeType == Node.CDATA_SECTION_NODE:
      self.assertEquals(Node.CDATA_SECTION_NODE, actual_node.nodeType)
      self.assertEquals(expected_node.nodeValue, actual_node.nodeValue)
      return

    self.assertEquals(Node.ELEMENT_NODE, actual_node.nodeType)
    self.assertEquals(Node.ELEMENT_NODE, expected_node.nodeType)
    self.assertEquals(expected_node.tagName, actual_node.tagName)

    # Attribute sets must match in size, and every expected attribute
    # must be present on the actual node with an equal value.
    expected_attributes = expected_node.attributes
    actual_attributes = actual_node .attributes
    self.assertEquals(
        expected_attributes.length, actual_attributes.length,
        'attribute numbers differ in element %s:\nExpected: %r\nActual: %r' % (
            actual_node.tagName, expected_attributes.keys(),
            actual_attributes.keys()))
    for i in range(expected_attributes.length):
      expected_attr = expected_attributes.item(i)
      actual_attr = actual_attributes.get(expected_attr.name)
      self.assert_(
          actual_attr is not None,
          'expected attribute %s not found in element %s' %
          (expected_attr.name, actual_node.tagName))
      self.assertEquals(
          expected_attr.value, actual_attr.value,
          ' values of attribute %s in element %s differ: %s vs %s' %
          (expected_attr.name, actual_node.tagName,
           expected_attr.value, actual_attr.value))

    # Children are keyed by their identifying attribute (see
    # _GetChildren), which makes this comparison order-insensitive.
    expected_children = self._GetChildren(expected_node)
    actual_children = self._GetChildren(actual_node)
    self.assertEquals(
        len(expected_children), len(actual_children),
        'number of child elements differ in element ' + actual_node.tagName)
    for child_id, child in expected_children.items():
      self.assert_(child_id in actual_children,
                   '<%s> is not in <%s> (in element %s)' %
                   (child_id, actual_children, actual_node.tagName))
      self.AssertEquivalentNodes(child, actual_children[child_id])

  # Maps each recognized element tag to the attribute that uniquely
  # identifies one of its children among its siblings.
  identifying_attribute = {
      'testsuites': 'name',
      'testsuite': 'name',
      'testcase': 'name',
      'failure': 'message',
      }

  def _GetChildren(self, element):
    """
    Fetches all of the child nodes of element, a DOM Element object.
    Returns them as the values of a dictionary keyed by the IDs of the
    children.  For <testsuites>, <testsuite> and <testcase> elements, the ID
    is the value of their "name" attribute; for <failure> elements, it is
    the value of the "message" attribute; CDATA sections and non-whitespace
    text nodes are concatenated into a single CDATA section with ID
    "detail".  An exception is raised if any element other than the above
    four is encountered, if two child elements with the same identifying
    attributes are encountered, or if any other type of node is encountered.
    """
    children = {}
    for child in element.childNodes:
      if child.nodeType == Node.ELEMENT_NODE:
        self.assert_(child.tagName in self.identifying_attribute,
                     'Encountered unknown element <%s>' % child.tagName)
        childID = child.getAttribute(self.identifying_attribute[child.tagName])
        # Two siblings must never share an ID, or one would be silently lost.
        self.assert_(childID not in children)
        children[childID] = child
      elif child.nodeType in [Node.TEXT_NODE, Node.CDATA_SECTION_NODE]:
        if 'detail' not in children:
          # Skip pure-whitespace text nodes; start the "detail" entry once
          # a CDATA section or non-whitespace text is seen.
          if (child.nodeType == Node.CDATA_SECTION_NODE or
              not child.nodeValue.isspace()):
            children['detail'] = child.ownerDocument.createCDATASection(
                child.nodeValue)
        else:
          children['detail'].nodeValue += child.nodeValue
      else:
        self.fail('Encountered unexpected node type %d' % child.nodeType)
    return children

  def NormalizeXml(self, element):
    """
    Normalizes Google Test's XML output to eliminate references to transient
    information that may change from run to run.

    * The "time" attribute of <testsuites>, <testsuite> and <testcase>
      elements is replaced with a single asterisk, if it contains
      only digit characters.
    * The "timestamp" attribute of <testsuites> elements is replaced with a
      single asterisk, if it contains a valid ISO8601 datetime value.
    * The "type_param" attribute of <testcase> elements is replaced with a
      single asterisk (if it is non-empty) as it is the type name returned
      by the compiler and is platform dependent.
    * The line info reported in the first line of the "message"
      attribute and CDATA section of <failure> elements is replaced with the
      file's basename and a single asterisk for the line number.
    * The directory names in file paths are removed.
    * The stack traces are removed.
    """
    if element.tagName == 'testsuites':
      timestamp = element.getAttributeNode('timestamp')
      timestamp.value = re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d$',
                               '*', timestamp.value)
    if element.tagName in ('testsuites', 'testsuite', 'testcase'):
      time = element.getAttributeNode('time')
      time.value = re.sub(r'^\d+(\.\d+)?$', '*', time.value)
      type_param = element.getAttributeNode('type_param')
      if type_param and type_param.value:
        type_param.value = '*'
    elif element.tagName == 'failure':
      # Matches "path/to/file.cc:" plus a line number at the start of a line.
      source_line_pat = r'^.*[/\\](.*:)\d+\n'
      # Replaces the source line information with a normalized form.
      message = element.getAttributeNode('message')
      message.value = re.sub(source_line_pat, '\\1*\n', message.value)
      for child in element.childNodes:
        if child.nodeType == Node.CDATA_SECTION_NODE:
          # Replaces the source line information with a normalized form.
          cdata = re.sub(source_line_pat, '\\1*\n', child.nodeValue)
          # Removes the actual stack trace.
          child.nodeValue = re.sub(r'\nStack trace:\n(.|\n)*',
                                   '', cdata)
    # Recurse into child elements to normalize the whole subtree.
    for child in element.childNodes:
      if child.nodeType == Node.ELEMENT_NODE:
        self.NormalizeXml(child)
| mit |
Tsjerk/MartiniTools | gmx/trr/trr.py | 1 | 3966 |
"""
Handle Gromacs' TRR file format - read in stuff as numpy arrays
(c)2014 Tsjerk A. Wassenaar
"""
import struct, sys, numpy
from frame import TRRFrame
class TrrReadError(Exception):
    """Raised when a TRR stream is malformed or opened incorrectly."""

    def __init__(self, msg):
        # Forward the message to Exception so that e.args, repr() and
        # pickling behave normally (the original left args empty).
        super(TrrReadError, self).__init__(msg)
        # Kept for backward compatibility with callers reading .msg.
        self.msg = msg

    def __str__(self):
        return self.msg
class TRR:
    """Class for reading Gromacs' TRR trajectory files.

    Iterating over an instance yields one TRRFrame per trajectory frame.
    The stream must be an unbuffered binary file object (or a path, which
    is opened as one); frames are located by seeking, so buffering would
    desynchronize the bookkeeping in self.pos.
    """

    # First 24 bytes of each frame should be
    # GMXMAGIC LINEFEED TITLELEN TITLE(12)
    #   |-----1993-----||-----13-----||------12------||----------|
    # _tag = b'\x00\x00\x07\xc9\x00\x00\x00\r\x00\x00\x00\x0cGMX_trn_file'
    # _tagLen = len(_tag)

    def __init__(self, stream, offset=0, dim=3):
        # Accept either a filename or an already-open stream.
        if type(stream) == str:
            self.stream = open(stream, 'rb', buffering=0)
        else:
            self.stream = stream

        # Check if the stream is opened correctly: binary mode and
        # unbuffered (buffered readers expose read1()).
        if not 'b' in self.stream.mode or hasattr(self.stream, "read1"):
            raise TrrReadError("TRR file stream should be opened with unbuffered binary mode.")

        # Check format and read some info about trajectory.
        # Read in a chunk to make sure we have a complete title.
        header = self.stream.read(1024)
        if header[:8] != b'\x00\x00\x07\xc9\x00\x00\x00\r':  # 1993\r
            raise TrrReadError("Invalid magic number. Probably not a TRR file, or corrupted.")

        # Tag = magic (12 bytes) + title string; its length is stored at byte 8.
        self.taglen = 12 + struct.unpack('>l', header[8:12])[0]
        self.tag = header[:self.taglen]

        # 13 big-endian longs follow the tag: array byte sizes and counts.
        stuff = struct.unpack('>lllllllllllll', header[self.taglen:self.taglen+52])
        box = stuff[2]           # byte size of the box matrix (dim*dim floats)
        self.atoms = stuff[10]   # number of atoms per frame
        # Deduce float width (4 or 8 bytes) from the box size.
        # NOTE(review): the fallback term references self.x/self.v/self.f and
        # self.block, which are never assigned on this object — if box == 0
        # this raises AttributeError. Presumably it was meant to use the
        # x/v/f byte sizes from `stuff` divided by the atom count; verify
        # against upstream before relying on box-less TRR files.
        self.float = box//(dim*dim) or (self.x or self.v or self.f)//self.block
        self.dtype = (self.float == 4 and "f") or (self.float == 8 and "d")
        self.dtypeE = '>' + self.dtype                 # With Endianness
        self.hstr = ">"+13*"l"+2*self.dtype            # >lllllllllllllff
        self.hsize = 52 + 2*self.float                 # header size after the tag

        self.index = []     # TRRFrame objects read so far
        self.nframes = 0    # cached frame count (filled lazily by __len__)
        self.dim = dim

        # Find the end
        self.stream.seek(0, 2)
        self.size = self.stream.tell()

        # Wind the trajectory to the offset
        self.stream.seek(offset, 0)
        self.pos = offset

    def __len__(self):
        # Lazily count frames by iterating once, then restore the read
        # position and discard the index built during counting.
        if not self.nframes:
            start = self.pos
            for i in self:
                self.nframes += 1
            self.stream.seek(start, 0)
            self.pos = start
            self.index = []
        return self.nframes

    def __del__(self):
        """Close file if iterator is stopped. Allows: ref = TRR().next()"""
        self.stream.close()

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol: raise StopIteration at end of file.
        if self.pos >= self.size:
            raise StopIteration

        offset = self.pos

        # Re-seek in case something else moved the file pointer.
        if self.stream.tell() != self.pos:
            self.stream.seek(self.pos)

        hsize = self.taglen+self.hsize
        header = self.stream.read(hsize)
        if header[:8] != b'\x00\x00\x07\xc9\x00\x00\x00\r':  # 1993\r
            # Not a proper tag. Broken frame?
            raise StopIteration

        stuff = struct.unpack(self.hstr, header[-self.hsize:])
        bytesize = self.taglen + 52 + sum(stuff[:10]) + 2*self.float  # Size of complete frame
        time, lmb = stuff[-2:]

        # Lengths and positions of arrays: each entry is
        # (number of values, absolute byte offset in the file).
        box = (self.dim**2, self.pos+hsize+sum(stuff[:2]))
        x = (stuff[7]//self.float, self.pos+hsize+sum(stuff[:7]))
        v = (stuff[8]//self.float, self.pos+hsize+sum(stuff[:8]))
        f = (stuff[9]//self.float, self.pos+hsize+sum(stuff[:9]))

        self.pos += bytesize

        # Frames are created lazily: TRRFrame stores offsets, not data.
        self.index.append(TRRFrame(self, nr=len(self.index), offset=offset, time=time, lmb=lmb, box=box, x=x, v=v, f=f))

        # Go to next frame
        self.stream.seek(self.pos)

        return self.index[-1]

    def close(self):
        self.stream.close()
| gpl-2.0 |
bdh1011/wau | venv/lib/python2.7/site-packages/requests/packages/chardet/langhungarianmodel.py | 2763 | 12536 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
Latin2_HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
win1250HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences:5.2623%
# rest sequences: 0.8894%
# negative sequences: 0.0009%
HungarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)
# Language model for Hungarian text encoded as ISO-8859-2 (Latin-2):
# bundles the character-to-frequency-order map with the shared bigram
# precedence matrix used by the charset prober.
Latin2HungarianModel = {
  'charToOrderMap': Latin2_HungarianCharToOrderMap,
  'precedenceMatrix': HungarianLangModel,
  'mTypicalPositiveRatio': 0.947368,
  'keepEnglishLetter': True,
  'charsetName': "ISO-8859-2"
}
# Same Hungarian language model, but with the character-order map for
# the windows-1250 code page; shares HungarianLangModel with the
# Latin-2 variant above.
Win1250HungarianModel = {
  'charToOrderMap': win1250HungarianCharToOrderMap,
  'precedenceMatrix': HungarianLangModel,
  'mTypicalPositiveRatio': 0.947368,
  'keepEnglishLetter': True,
  'charsetName': "windows-1250"
}
# flake8: noqa
| mit |
zzxuanyuan/root-compressor-dummy | tutorials/pyroot/tornado.py | 49 | 1487 | ## \file
## \ingroup tutorial_pyroot
## Tornado example.
## \notebook
##
## \macro_image
## \macro_code
##
## \author Wim Lavrijsen
from ROOT import TCanvas, TView, TPolyMarker3D, TPaveText
from ROOT import gROOT, gBenchmark
from math import cos, sin, pi
gBenchmark.Start( 'tornado' )

# tornado geometry parameters
d = 16
numberOfPoints = 200
numberOfCircles = 40

# create and open a canvas
sky = TCanvas( 'sky', 'Tornado', 300, 10, 700, 500 )
sky.SetFillColor( 14 )

# creating view
view = TView.CreateView()
rng = numberOfCircles * d
view.SetRange( 0, 0, 0, 4.0*rng, 2.0*rng, rng )

polymarkers = []
for j in range( d, numberOfCircles * d, d ):
    # create a PolyMarker3D
    pm3d = TPolyMarker3D( numberOfPoints )

    # set points (note: point index 0 is left at its default position)
    for i in range( 1, numberOfPoints ):
        csin = sin( 2*pi / numberOfPoints * i ) + 1
        ccos = cos( 2*pi / numberOfPoints * i ) + 1
        esin = sin( 2*pi / (numberOfCircles*d) * j ) + 1
        # stray C-style trailing semicolons removed
        x = j * ( csin + esin )
        y = j * ccos
        z = j
        pm3d.SetPoint( i, x, y, z )

    # set marker size, color & style
    pm3d.SetMarkerSize( 1 )
    pm3d.SetMarkerColor( 2 + ( d == ( j & d ) ) )
    pm3d.SetMarkerStyle( 3 )

    # draw
    pm3d.Draw()

    # keep a python reference so the marker is not garbage collected
    polymarkers.append( pm3d )

gBenchmark.Show( 'tornado' )
ct = gBenchmark.GetCpuTime( 'tornado' )
timeStr = 'Execution time: %g sec.' % ct

text = TPaveText( 0.1, 0.81, 0.9, 0.97 )
text.SetFillColor( 42 )
text.AddText( 'PyROOT example: tornado.py' )
text.AddText( timeStr )
text.Draw()

sky.Update()
| lgpl-2.1 |
rldhont/Quantum-GIS | python/plugins/processing/algs/qgis/ui/RasterCalculatorWidgets.py | 25 | 11647 | # -*- coding: utf-8 -*-
"""
***************************************************************************
RasterCalculatorWidgets.py
---------------------
Date : November 2016
Copyright : (C) 2016 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2016'
__copyright__ = '(C) 2016, Victor Olaya'
import os
from functools import partial
import re
import json
from qgis.utils import iface
from qgis.PyQt import uic
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtGui import QTextCursor
from qgis.PyQt.QtWidgets import (QLineEdit, QPushButton, QLabel,
QComboBox, QSpacerItem, QSizePolicy,
QListWidgetItem)
from qgis.core import (QgsProcessingUtils,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterLayer,
QgsProcessingOutputRasterLayer,
QgsProject)
from processing.gui.wrappers import WidgetWrapper, DIALOG_STANDARD, DIALOG_BATCH
from processing.gui.BatchInputSelectionPanel import BatchInputSelectionPanel
from processing.tools import dataobjects
from processing.tools.system import userFolder
from processing.gui.wrappers import InvalidParameterValue
from qgis.analysis import QgsRasterCalculatorEntry, QgsRasterCalcNode
# Directory holding this plugin's .ui resource files.
pluginPath = os.path.dirname(__file__)
WIDGET_ADD_NEW, BASE_ADD_NEW = uic.loadUiType(
    os.path.join(pluginPath, 'AddNewExpressionDialog.ui'))
class AddNewExpressionDialog(BASE_ADD_NEW, WIDGET_ADD_NEW):
    """Modal dialog for naming and saving a raster-calculator expression.

    After exec_() returns, ``self.name``/``self.expression`` hold the
    entered values, or stay None if the dialog was cancelled.
    """
    def __init__(self, expression):
        super(AddNewExpressionDialog, self).__init__()
        self.setupUi(self)
        # Results; remain None until the user accepts the dialog.
        self.name = None
        self.expression = None
        self.txtExpression.setPlainText(expression)
        self.buttonBox.rejected.connect(self.cancelPressed)
        self.buttonBox.accepted.connect(self.okPressed)
    def cancelPressed(self):
        """Close without storing a result (name/expression stay None)."""
        self.close()
    def okPressed(self):
        """Capture the entered name and expression, then close."""
        self.name = self.txtName.text()
        self.expression = self.txtExpression.toPlainText()
        self.close()
WIDGET_DLG, BASE_DLG = uic.loadUiType(
    os.path.join(pluginPath, 'PredefinedExpressionDialog.ui'))
class PredefinedExpressionDialog(BASE_DLG, WIDGET_DLG):
    """Dialog that binds the "[placeholder]" variables of a predefined
    expression to concrete layer names chosen from combo boxes.

    After exec_(), ``self.filledExpression`` holds the substituted
    expression, or None if the dialog was cancelled.
    """
    def __init__(self, expression, options):
        super(PredefinedExpressionDialog, self).__init__()
        self.setupUi(self)
        self.filledExpression = None
        # options: display name -> expression fragment to substitute
        self.options = options
        self.expression = expression
        # Every "[...]" placeholder in the expression gets one combo box.
        self.variables = set(re.findall(r'\[.*?\]', expression))
        self.comboBoxes = {}
        for variable in self.variables:
            label = QLabel(variable[1:-1])
            combo = QComboBox()
            for opt in self.options.keys():
                combo.addItem(opt)
            self.comboBoxes[variable] = combo
            self.groupBox.layout().addWidget(label)
            self.groupBox.layout().addWidget(combo)
        verticalSpacer = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
        self.groupBox.layout().addItem(verticalSpacer)
        self.buttonBox.rejected.connect(self.cancelPressed)
        self.buttonBox.accepted.connect(self.okPressed)
    def cancelPressed(self):
        """Close without producing a result (filledExpression stays None)."""
        self.close()
    def okPressed(self):
        """Replace each placeholder with the chosen option and close."""
        self.filledExpression = self.expression
        for name, combo in self.comboBoxes.items():
            self.filledExpression = self.filledExpression.replace(name,
                                                                  self.options[combo.currentText()])
        self.close()
WIDGET, BASE = uic.loadUiType(
    os.path.join(pluginPath, 'RasterCalculatorWidget.ui'))
class ExpressionWidget(BASE, WIDGET):
    """Editor widget for raster-calculator expressions.

    Combines a plain-text editor, an operator button grid, a list of
    available layers (double-click inserts them), and a user-extensible
    set of predefined expressions persisted as JSON in the user folder.
    """
    # Built-in predefined expressions, always available.
    _expressions = {"NDVI": "([NIR] - [Red]) / ([NIR] + [Red])"}
    def __init__(self, options):
        super(ExpressionWidget, self).__init__(None)
        self.setupUi(self)
        self.setList(options)
        def doubleClicked(item):
            # Insert the layer reference for the double-clicked list entry.
            self.text.insertPlainText('"{}"'.format(self.options[item.text()]))
        def addButtonText(text):
            # Lowercase letters indicate a function name -> append "()" and
            # place the cursor between the parentheses; otherwise insert the
            # operator padded with spaces.
            if any(c for c in text if c.islower()):
                self.text.insertPlainText(" {}()".format(text))
                self.text.moveCursor(QTextCursor.PreviousCharacter, QTextCursor.MoveAnchor)
            else:
                self.text.insertPlainText(" {} ".format(text))
        buttons = [b for b in self.buttonsGroupBox.children()if isinstance(b, QPushButton)]
        for button in buttons:
            button.clicked.connect(partial(addButtonText, button.text()))
        self.listWidget.itemDoubleClicked.connect(doubleClicked)
        # Merge persisted user expressions with the built-in ones; built-ins
        # win on name collisions because they are applied last.
        self.expressions = {}
        if os.path.exists(self.expsFile()):
            with open(self.expsFile()) as f:
                self.expressions.update(json.load(f))
        self.expressions.update(self._expressions)
        self.fillPredefined()
        self.buttonAddPredefined.clicked.connect(self.addPredefined)
        self.buttonSavePredefined.clicked.connect(self.savePredefined)
        self.text.textChanged.connect(self.expressionValid)
    def expressionValid(self):
        """Validate the current expression and update the status label.

        Returns True when QgsRasterCalcNode can parse the text.
        """
        errorString = ''
        testNode = QgsRasterCalcNode.parseRasterCalcString(self.text.toPlainText(), errorString)
        if not self.text.toPlainText():
            self.expressionErrorLabel.setText(self.tr('Expression is empty'))
            self.expressionErrorLabel.setStyleSheet("QLabel { color: black; }")
            return False
        if testNode:
            self.expressionErrorLabel.setText(self.tr('Expression is valid'))
            self.expressionErrorLabel.setStyleSheet("QLabel { color: green; font-weight: bold; }")
            return True
        self.expressionErrorLabel.setText(self.tr('Expression is not valid ') + errorString)
        self.expressionErrorLabel.setStyleSheet("QLabel { color : red; font-weight: bold; }")
        return False
    def expsFile(self):
        """Path of the JSON file storing user-defined expressions."""
        return os.path.join(userFolder(), 'rastercalcexpressions.json')
    def addPredefined(self):
        """Let the user fill the placeholders of the selected predefined
        expression and insert the result into the editor."""
        expression = self.expressions[self.comboPredefined.currentText()]
        dlg = PredefinedExpressionDialog(expression, self.options)
        dlg.exec_()
        if dlg.filledExpression:
            self.text.setPlainText(dlg.filledExpression)
    def savePredefined(self):
        """Save the current expression as a reusable template.

        Concrete layer references are replaced by generic [a], [b], ...
        placeholders before the expression is persisted to expsFile().
        """
        exp = self.text.toPlainText()
        used = [v for v in self.options.values() if v in exp]
        for i, v in enumerate(used):
            exp = exp.replace(v, f'[{chr(97 + i)}]')
        dlg = AddNewExpressionDialog(exp)
        dlg.exec_()
        if dlg.name:
            self.expressions[dlg.name] = dlg.expression
            with open(self.expsFile(), "w") as f:
                f.write(json.dumps(self.expressions))
    def fillPredefined(self):
        """Repopulate the predefined-expressions combo box."""
        self.comboPredefined.clear()
        for expression in self.expressions:
            self.comboPredefined.addItem(expression)
    def setList(self, options):
        """Fill the layer list widget.

        ``options`` maps display names to expression fragments; the raster
        source path (when resolvable) is attached as a tooltip.
        """
        self.options = options
        self.listWidget.clear()
        entries = QgsRasterCalculatorEntry.rasterEntries()
        def _find_source(name):
            # Resolve a layer reference to its file source for the tooltip.
            for entry in entries:
                if entry.ref == name:
                    return entry.raster.source()
            return ''
        for name in options.keys():
            item = QListWidgetItem(name, self.listWidget)
            tooltip = _find_source(name)
            if tooltip:
                item.setData(Qt.ToolTipRole, tooltip)
            self.listWidget.addItem(item)
    def setValue(self, value):
        """Set the editor contents (wrapper API)."""
        self.text.setPlainText(value)
    def value(self):
        """Return the current expression text (wrapper API)."""
        return self.text.toPlainText()
class ExpressionWidgetWrapper(WidgetWrapper):
    """Processing parameter wrapper exposing ExpressionWidget in the
    standard and modeler dialogs, and a plain QLineEdit in batch mode."""
    def _panel(self, options):
        """Build the expression editor for the given layer options."""
        return ExpressionWidget(options)
    def _get_options(self):
        """Map each available raster entry reference to itself."""
        entries = QgsRasterCalculatorEntry.rasterEntries()
        options = {}
        for entry in entries:
            options[entry.ref] = entry.ref
        return options
    def createWidget(self):
        if self.dialogType == DIALOG_STANDARD:
            # Keep the layer list in sync with the layer tree.
            if iface is not None and iface.layerTreeView() is not None and iface.layerTreeView().layerTreeModel() is not None:
                iface.layerTreeView().layerTreeModel().dataChanged.connect(self.refresh)
            return self._panel(self._get_options())
        elif self.dialogType == DIALOG_BATCH:
            return QLineEdit()
        else:
            # Modeler dialog: offer "<description>@1" references for every
            # compatible raster input/output of the model.
            layers = self.dialog.getAvailableValuesOfType([QgsProcessingParameterRasterLayer], [QgsProcessingOutputRasterLayer])
            options = {self.dialog.resolveValueDescription(lyr): "{}@1".format(self.dialog.resolveValueDescription(lyr)) for lyr in layers}
            self.widget = self._panel(options)
            return self.widget
    def refresh(self, *args):
        # NOTE(review): relies on self.widget being populated by the base
        # wrapper after createWidget() in the standard dialog — confirm.
        self.widget.setList(self._get_options())
    def setValue(self, value):
        """Push ``value`` into whichever widget variant is active."""
        if self.dialogType == DIALOG_STANDARD:
            pass  # TODO
        elif self.dialogType == DIALOG_BATCH:
            return self.widget.setText(value)
        else:
            self.widget.setValue(value)
    def value(self):
        """Read the expression from whichever widget variant is active."""
        if self.dialogType == DIALOG_STANDARD:
            return self.widget.value()
        elif self.dialogType == DIALOG_BATCH:
            return self.widget.text()
        else:
            return self.widget.value()
class LayersListWidgetWrapper(WidgetWrapper):
    """Wrapper for the multi-layer input parameter of the raster
    calculator; only batch mode builds its own widget."""
    def createWidget(self):
        if self.dialogType == DIALOG_BATCH:
            widget = BatchInputSelectionPanel(self.parameterDefinition(), self.row, self.col, self.dialog)
            widget.valueChanged.connect(lambda: self.widgetValueHasChanged.emit(self))
            return widget
        else:
            # Standard/modeler dialogs use the default widget from the base
            # class (None -> no custom widget).
            return None
    def setValue(self, value):
        if self.dialogType == DIALOG_BATCH:
            return self.widget.setText(value)
    def value(self):
        """Resolve the selected options into concrete layer values.

        Raises InvalidParameterValue in the modeler when a non-optional
        parameter has no selection.
        """
        if self.dialogType == DIALOG_STANDARD:
            if self.param.datatype == dataobjects.TYPE_FILE:
                return self.param.setValue(self.widget.selectedoptions)
            else:
                # Map selected indices back onto the compatible-layer list
                # for the parameter's datatype.
                if self.param.datatype == dataobjects.TYPE_RASTER:
                    options = QgsProcessingUtils.compatibleRasterLayers(QgsProject.instance(), False)
                elif self.param.datatype == dataobjects.TYPE_VECTOR_ANY:
                    options = QgsProcessingUtils.compatibleVectorLayers(QgsProject.instance(), [], False)
                else:
                    options = QgsProcessingUtils.compatibleVectorLayers(QgsProject.instance(), [self.param.datatype], False)
                return [options[i] for i in self.widget.selectedoptions]
        elif self.dialogType == DIALOG_BATCH:
            return self.widget.getText()
        else:
            options = self._getOptions()
            values = [options[i] for i in self.widget.selectedoptions]
            if len(values) == 0 and not self.parameterDefinition().flags() & QgsProcessingParameterDefinition.FlagOptional:
                raise InvalidParameterValue()
            return values
| gpl-2.0 |
laumann/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/headerparserhandler.py | 638 | 9836 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""PythonHeaderParserHandler for mod_pywebsocket.
Apache HTTP Server and mod_python must be configured such that this
function is called to handle WebSocket request.
"""
import logging
from mod_python import apache
from mod_pywebsocket import common
from mod_pywebsocket import dispatch
from mod_pywebsocket import handshake
from mod_pywebsocket import util
# PythonOption to specify the handler root directory.
_PYOPT_HANDLER_ROOT = 'mod_pywebsocket.handler_root'
# PythonOption to specify the handler scan directory.
# This must be a directory under the root directory.
# The default is the root directory.
_PYOPT_HANDLER_SCAN = 'mod_pywebsocket.handler_scan'
# PythonOption to allow handlers whose canonical path is
# not under the root directory. It's disallowed by default.
# Set this option with value of 'yes' to allow.
_PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT = (
    'mod_pywebsocket.allow_handlers_outside_root_dir')
# Map from option string values to their boolean meanings.
# 'Yes' and 'No' are allowed just for compatibility.
_PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT_DEFINITION = {
    'off': False, 'no': False, 'on': True, 'yes': True}
# (Obsolete option. Ignored.)
# PythonOption to specify to allow handshake defined in Hixie 75 version
# protocol. The default is None (Off)
_PYOPT_ALLOW_DRAFT75 = 'mod_pywebsocket.allow_draft75'
# Map from option string values to their boolean meanings.
_PYOPT_ALLOW_DRAFT75_DEFINITION = {'off': False, 'on': True}
class ApacheLogHandler(logging.Handler):
    """Wrapper logging.Handler to emit log message to apache's error.log."""
    # Translation from Python logging levels to Apache log levels.
    _LEVELS = {
        logging.DEBUG: apache.APLOG_DEBUG,
        logging.INFO: apache.APLOG_INFO,
        logging.WARNING: apache.APLOG_WARNING,
        logging.ERROR: apache.APLOG_ERR,
        logging.CRITICAL: apache.APLOG_CRIT,
    }
    def __init__(self, request=None):
        """Create the handler; log via ``request.log_error`` when a
        mod_python request is given, otherwise via ``apache.log_error``."""
        logging.Handler.__init__(self)
        self._log_error = apache.log_error
        if request is not None:
            self._log_error = request.log_error
        # Time and level will be printed by Apache.
        self._formatter = logging.Formatter('%(name)s: %(message)s')
    def emit(self, record):
        """Format ``record`` and forward it to Apache's error log at the
        corresponding Apache log level (default: debug)."""
        apache_level = apache.APLOG_DEBUG
        if record.levelno in ApacheLogHandler._LEVELS:
            apache_level = ApacheLogHandler._LEVELS[record.levelno]
        msg = self._formatter.format(record)
        # "server" parameter must be passed to have "level" parameter work.
        # If only "level" parameter is passed, nothing shows up on Apache's
        # log. However, at this point, we cannot get the server object of the
        # virtual host which will process WebSocket requests. The only server
        # object we can get here is apache.main_server. But Wherever (server
        # configuration context or virtual host context) we put
        # PythonHeaderParserHandler directive, apache.main_server just points
        # the main server instance (not any of virtual server instance). Then,
        # Apache follows LogLevel directive in the server configuration context
        # to filter logs. So, we need to specify LogLevel in the server
        # configuration context. Even if we specify "LogLevel debug" in the
        # virtual host context which actually handles WebSocket connections,
        # DEBUG level logs never show up unless "LogLevel debug" is specified
        # in the server configuration context.
        #
        # TODO(tyoshino): Provide logging methods on request object. When
        # request is mp_request object (when used together with Apache), the
        # methods call request.log_error indirectly. When request is
        # _StandaloneRequest, the methods call Python's logging facility which
        # we create in standalone.py.
        self._log_error(msg, apache_level, apache.main_server)
def _configure_logging():
    """Route all Python logging output to Apache's error log."""
    logger = logging.getLogger()
    # Logs are filtered by Apache based on LogLevel directive in Apache
    # configuration file. We must just pass logs for all levels to
    # ApacheLogHandler.
    logger.setLevel(logging.DEBUG)
    logger.addHandler(ApacheLogHandler())
# Module-import side effect: install the Apache log handler once.
_configure_logging()
_LOGGER = logging.getLogger(__name__)
def _parse_option(name, value, definition):
if value is None:
return False
meaning = definition.get(value.lower())
if meaning is None:
raise Exception('Invalid value for PythonOption %s: %r' %
(name, value))
return meaning
def _create_dispatcher():
    """Build the WebSocket handler Dispatcher from Apache PythonOptions.

    Raises Exception when the mandatory handler-root option is missing;
    source-loading warnings are forwarded to Apache's error log.
    """
    _LOGGER.info('Initializing Dispatcher')
    options = apache.main_server.get_options()
    handler_root = options.get(_PYOPT_HANDLER_ROOT, None)
    if not handler_root:
        raise Exception('PythonOption %s is not defined' % _PYOPT_HANDLER_ROOT,
                        apache.APLOG_ERR)
    # Scan directory defaults to the handler root itself.
    handler_scan = options.get(_PYOPT_HANDLER_SCAN, handler_root)
    allow_handlers_outside_root = _parse_option(
        _PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT,
        options.get(_PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT),
        _PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT_DEFINITION)
    dispatcher = dispatch.Dispatcher(
        handler_root, handler_scan, allow_handlers_outside_root)
    for warning in dispatcher.source_warnings():
        apache.log_error(
            'mod_pywebsocket: Warning in source loading: %s' % warning,
            apache.APLOG_WARNING)
    return dispatcher
# Initialize
# Module-level singleton shared by all requests in this process.
_dispatcher = _create_dispatcher()
def headerparserhandler(request):
    """Handle request.
    Args:
        request: mod_python request.
    This function is named headerparserhandler because it is the default
    name for a PythonHeaderParserHandler.

    Returns an Apache status code: DECLINED to fall back to other
    handlers, DONE when the WebSocket connection was fully handled, or
    an HTTP error status on handshake failure.
    """
    # Tracks whether the WebSocket handshake completed; errors after that
    # point must not be retried by other Apache handlers.
    handshake_is_done = False
    try:
        # Fallback to default http handler for request paths for which
        # we don't have request handlers.
        if not _dispatcher.get_handler_suite(request.uri):
            request.log_error(
                'mod_pywebsocket: No handler for resource: %r' % request.uri,
                apache.APLOG_INFO)
            request.log_error(
                'mod_pywebsocket: Fallback to Apache', apache.APLOG_INFO)
            return apache.DECLINED
    except dispatch.DispatchException, e:
        request.log_error(
            'mod_pywebsocket: Dispatch failed for error: %s' % e,
            apache.APLOG_INFO)
        # NOTE(review): handshake_is_done is always False here, so this
        # always returns e.status — confirm the guard is intentional.
        if not handshake_is_done:
            return e.status
    try:
        # Obsolete Hixie-75 option; parsed only for compatibility.
        allow_draft75 = _parse_option(
            _PYOPT_ALLOW_DRAFT75,
            apache.main_server.get_options().get(_PYOPT_ALLOW_DRAFT75),
            _PYOPT_ALLOW_DRAFT75_DEFINITION)
        try:
            handshake.do_handshake(
                request, _dispatcher, allowDraft75=allow_draft75)
        except handshake.VersionException, e:
            # Client requested an unsupported protocol version: advertise
            # the supported versions per RFC 6455 and reject.
            request.log_error(
                'mod_pywebsocket: Handshake failed for version error: %s' % e,
                apache.APLOG_INFO)
            request.err_headers_out.add(common.SEC_WEBSOCKET_VERSION_HEADER,
                                        e.supported_versions)
            return apache.HTTP_BAD_REQUEST
        except handshake.HandshakeException, e:
            # Handshake for ws/wss failed.
            # Send http response with error status.
            request.log_error(
                'mod_pywebsocket: Handshake failed for error: %s' % e,
                apache.APLOG_INFO)
            return e.status
        handshake_is_done = True
        request._dispatcher = _dispatcher
        # Run the application handler; blocks for the connection lifetime.
        _dispatcher.transfer_data(request)
    except handshake.AbortedByUserException, e:
        request.log_error('mod_pywebsocket: Aborted: %s' % e, apache.APLOG_INFO)
    except Exception, e:
        # DispatchException can also be thrown if something is wrong in
        # pywebsocket code. It's caught here, then.
        request.log_error('mod_pywebsocket: Exception occurred: %s\n%s' %
                          (e, util.get_stack_trace()),
                          apache.APLOG_ERR)
        # Unknown exceptions before handshake mean Apache must handle its
        # request with another handler.
        if not handshake_is_done:
            return apache.DECLINED
    # Set assbackwards to suppress response header generation by Apache.
    request.assbackwards = 1
    return apache.DONE  # Return DONE such that no other handlers are invoked.
| mpl-2.0 |
tepperly/MixDown | md/utilityFunctions.py | 1 | 11238 | # Copyright (c) 2010-2014, Lawrence Livermore National Security, LLC
# Produced at Lawrence Livermore National Laboratory
# LLNL-CODE-462894
# All rights reserved.
#
# This file is part of MixDown. Please read the COPYRIGHT file
# for Our Notice and the LICENSE file for the GNU Lesser General Public
# License.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License (as published by
# the Free Software Foundation) version 3 dated June 2007.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import fileinput
import os, Queue, re, shutil, subprocess, sys, tarfile, tempfile, urllib, urllib2, zipfile
def boolToStr(s):
    """Interpret the string ``s`` as a boolean.

    Returns True iff ``s`` equals "true" case-insensitively; any other
    value (including "1", "yes") yields False.  Note: despite the name,
    this converts str -> bool.
    """
    # Direct boolean expression instead of the verbose if/return pair.
    return s.lower() == "true"
def downloadFile(URL, downloadDir):
    """Download ``URL`` into ``downloadDir``.

    Returns the local file path on success, or "" when the file did not
    appear on disk.  Performs live network I/O via urllib.urlretrieve.
    """
    filePath = os.path.join(downloadDir, URLToFilename(URL))
    if not os.path.exists(downloadDir):
        # NOTE(review): os.mkdir fails if intermediate directories are
        # missing — os.makedirs may be intended; confirm with callers.
        os.mkdir(downloadDir)
    urllib.urlretrieve(URL, filePath)
    if not os.path.exists(filePath):
        filePath = ""
    return filePath
def executeCommand(command, args="", workingDirectory="", verbose=False, exitOnError=False):
    """Run ``command + args`` through os.system, optionally from another
    working directory; the original cwd is always restored.

    When ``exitOnError`` is set, a non-zero exit status terminates the
    process via printErrorAndExit.  Note: the caller is responsible for
    any separator between ``command`` and ``args`` (they are concatenated
    verbatim) and for the shell-safety of the string.
    """
    try:
        lastcwd = os.getcwd()
        if workingDirectory != "":
            os.chdir(workingDirectory)
        fullCommand = command + args
        if verbose:
            print "Executing: " + fullCommand + ": Working Directory: " + workingDirectory
        errorCode = os.system(fullCommand)
        if exitOnError and errorCode != 0:
            printErrorAndExit("Command '" + fullCommand + "': exited with error code " + str(errorCode))
    finally:
        # Restore the caller's working directory even on error/exit paths.
        os.chdir(lastcwd)
def executeSubProcess(command, workingDirectory=tempfile.gettempdir(), outFileHandle=1, verbose=False, exitOnError=False):
    """Run ``command`` in a subprocess shell, waiting for completion.

    stdout/stderr both go to ``outFileHandle`` (default: fd 1, i.e. this
    process's stdout).  Returns the subprocess's exit code; when
    ``exitOnError`` is set, a non-zero code terminates via
    printErrorAndExit instead.
    """
    if verbose:
        print "Executing: " + command + ": Working Directory: " + workingDirectory
    #***************************************************************************************************************
    #Note: Even though python's documentation says that "shell=True" opens up a computer for malicious shell commands,
    # it is needed to allow users to fully utilize shell commands, such as cd. Also it does not open up any additional
    # vulnerabilities that did not already exist by running any other build tools, such as Make or configure.
    #***************************************************************************************************************
    process = subprocess.Popen(command, stdout=outFileHandle, stderr=outFileHandle, shell=True, cwd=workingDirectory)
    process.wait()
    if exitOnError and process.returncode != 0:
        printErrorAndExit("Command '" + command + "': exited with error code " + str(process.returncode))
    return process.returncode
def findFilesWithExtension(path, extension):
    """Return the paths of regular files directly inside ``path`` whose
    extension equals ``extension`` (dot included, e.g. ".txt").

    Subdirectories are not recursed into.
    """
    matches = []
    for entry in os.listdir(path):
        candidate = os.path.join(path, entry)
        if os.path.isfile(candidate) and os.path.splitext(candidate)[1] == extension:
            matches.append(candidate)
    return matches
def findShallowestFile(startPath, fileList):
    """Breadth-first search under ``startPath`` for the first file whose
    name appears in ``fileList``.

    Returns the file's full path, or None when nothing matches.  The FIFO
    traversal guarantees a shallower match is preferred over a deeper one.
    """
    # Plain list used as a FIFO; also avoids the Python-2-only Queue module.
    pending = [startPath]
    while pending:
        currPath = pending.pop(0)
        for item in os.listdir(currPath):
            itemPath = os.path.join(currPath, item)
            if os.path.isdir(itemPath):
                pending.append(itemPath)
            elif item in fileList:
                # Fix: was ``currPath + item``, which dropped the path
                # separator and returned a bogus path.
                return itemPath
    return None
def getBasename(path):
    """Return the base name of ``path``.

    For non-directories the name is truncated at the first interior '.'
    (so "foo.tar.gz" -> "foo") while a leading dot is preserved
    (".bashrc" -> ".bashrc").  Directory paths are returned untouched.
    """
    basename = os.path.basename(path)
    if os.path.isdir(path):
        return basename
    # First '.' at index >= 1; a leading dot does not count as a separator.
    cut = basename.find('.', 1)
    return basename if cut == -1 else basename[:cut]
def haveWriteAccess(path):
    """Return True if the (possibly not-yet-existing) ``path`` could be
    created: climbs toward the filesystem root until an existing ancestor
    is found and checks write permission on it."""
    highestExistingDir = path
    while highestExistingDir != os.sep:
        if os.path.exists(highestExistingDir):
            break
        # Strip a trailing separator, then drop the last path component.
        if highestExistingDir[len(highestExistingDir)-1] == os.sep:
            highestExistingDir = highestExistingDir[:-1]
        highestExistingDir = highestExistingDir[:highestExistingDir.rfind(os.sep)+1]
    if os.access(highestExistingDir, os.W_OK):
        return True
    return False
def isURL(url):
    """Return True when ``url`` can actually be opened (live network
    request via urllib2.urlopen), False otherwise."""
    try:
        f = urllib2.urlopen(url)
        # Close the handle on success instead of leaking the connection.
        f.close()
        return True
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return False
def __pathExists(directory, basename):
    """Case-sensitive existence check: True iff ``basename`` appears as an
    exact entry of ``directory`` (compares against os.listdir, bypassing
    any case-insensitive filesystem semantics)."""
    listDir = os.listdir(directory)
    if basename in listDir:
        return True
    return False
def pathExists(path, forceCaseSensitive=False):
    """Like os.path.exists, but with an optional case-sensitive mode that
    validates every component by exact directory-listing comparison.

    NOTE(review): the case-sensitive walk starts at the hard-coded "/"
    root, so it is POSIX-only — confirm Windows is out of scope.
    """
    if not forceCaseSensitive:
        return os.path.exists(path)
    fullPath = os.path.abspath(path)
    fullPathList = fullPath.split(os.path.sep)
    directory = "/"
    # Verify each component of the absolute path with an exact-case match.
    for basename in fullPathList[1:]:
        if not __pathExists(directory, basename):
            return False
        directory = os.path.join(directory, basename)
    return True
def prettyPrintList(list, header="", headerIndent="", itemIndent=""):
    """Format a list of strings for display.

    A single item is appended inline after the header; multiple items go
    one per line, each prefixed with ``itemIndent``.  An empty list yields
    just the (indented) header.
    """
    result = headerIndent + header
    if len(list) == 1:
        return result + list[0]
    # Zero items: loop is a no-op and only the header is returned.
    for currItem in list:
        result = result + "\n" + itemIndent + currItem
    return result
def printErrorAndExit(errorStr, filePath="", lineNumber=0):
    """Write an error message to stderr and terminate the process.

    The message format includes ``filePath`` and ``lineNumber`` when they
    are provided.  Never returns: ends with sys.exit().
    """
    # Flush stdout first so interleaved output appears in order.
    sys.stdout.flush()
    if filePath == "" and lineNumber == 0:
        sys.stderr.write("Error: %s\n" % (errorStr))
    elif lineNumber == 0:
        sys.stderr.write("Error: %s: %s\n" % (filePath, errorStr))
    else:
        sys.stderr.write("Error: %s (line %d): %s\n" % (filePath, lineNumber, errorStr))
    sys.stderr.flush()
    sys.exit()
def removeDir(path):
    """Recursively delete ``path`` if it exists.

    Raises IOError when ``path`` ends with '/' but the name (without the
    slash) refers to a file rather than a directory.
    """
    # Refuse to treat "name/" as a directory when "name" is actually a file.
    if path.endswith('/') and os.path.isfile(path[:-1]):
        raise IOError("Error: Cannot clean directory '" + path + "' : File (not directory) exists by the same name.")
    if os.path.exists(path):
        shutil.rmtree(path)
def removeDuplicatesFromList(myList):
    """Return a sorted copy of ``myList`` with duplicates removed.

    The input is left untouched.  Items only need to be mutually
    comparable; hashability is not required (so no set() shortcut).
    """
    result = []
    for item in sorted(myList):
        # After sorting, duplicates are adjacent: keep an item only when it
        # differs from the last one kept.
        if not result or result[-1] != item:
            result.append(item)
    return result
def splitFileName(fileName):
    """Split an archive file name into (package name, version string).

    Strips a trailing path separator and a recognized archive extension,
    takes the base name, then splits at the first digit; a single '-'
    immediately before the version is dropped.  Returns version "" when
    no digit is present, and name "" when the name starts with a digit.
    """
    if fileName.endswith(os.path.sep):
        basename = fileName[:-1]
    else:
        basename = fileName
    version = ""
    # Strip one recognized archive extension (longest patterns first so
    # ".tar.gz"/".tar.bz2" win over ".tar").
    for extension in (".tar.bz2", ".tar.gz", ".zip", ".tar", ".tgz",
                      ".tbz", ".tb2"):
        if basename.endswith(extension):
            basename = basename[:-len(extension)]
            break
    basename = os.path.basename(basename)
    # Split "name-1.2.3" into ("name", "1.2.3") at the first digit.
    for i, c in enumerate(basename):
        if c in '1234567890':
            version = basename[i:]
            basename = basename[:i]
            # Fix: guard the empty prefix — names starting with a digit
            # (e.g. "7zip.tar") used to raise IndexError on basename[-1].
            if basename.endswith('-'):
                basename = basename[:-1]
            break
    return basename, version
def stripItemsInList(value):
    """Return a new list with surrounding whitespace stripped from every
    item of ``value``; the input list is not modified."""
    return [str.strip(item) for item in value]
def untar(tarPath, outPath="", stripDir=False):
    """Extract ``tarPath`` into ``outPath``.

    With ``stripDir`` the archive is first extracted into a temp dir and,
    when it contains a single top-level directory, that directory's
    contents are moved to ``outPath`` (stripping the wrapper directory).
    """
    if stripDir:
        unTarOutpath = tempfile.mkdtemp()
    else:
        unTarOutpath = outPath
    tar = tarfile.open(tarPath, "r")
    try:
        for item in tar:
            # TODO: check for relative path's
            # NOTE(review): members with absolute or ".." paths are not
            # filtered — path-traversal risk on untrusted archives.
            tar.extract(item, unTarOutpath)
    finally:
        # Fix: the archive handle was never closed (resource leak).
        tar.close()
    if stripDir:
        dirList = os.listdir(unTarOutpath)
        if len(dirList) == 1:
            src = os.path.join(unTarOutpath, dirList[0])
        else:
            src = unTarOutpath
        shutil.move(src, outPath)
def unzip(zipPath, outPath="", stripDir=False):
    """Extract ``zipPath`` into ``outPath``.

    With ``stripDir`` the archive is first extracted into a temp dir and,
    when it contains a single top-level directory, that directory's
    contents are moved to ``outPath`` (stripping the wrapper directory).
    """
    if stripDir:
        unZipOutpath = tempfile.mkdtemp()
    else:
        unZipOutpath = outPath
    z = zipfile.ZipFile(zipPath)
    try:
        z.extractall(unZipOutpath)
    finally:
        # Fix: the archive handle was never closed (resource leak).
        z.close()
    if stripDir:
        dirList = os.listdir(unZipOutpath)
        if len(dirList) == 1:
            src = os.path.join(unZipOutpath, dirList[0])
        else:
            src = unZipOutpath
        shutil.move(src, outPath)
def URLToFilename(url):
    """Derive a local file name from a download URL.

    Handles sourceforge-style URLs where the archive name is not the last
    path component (".../foo.tar.gz/download").  Returns "_" when no
    usable name can be derived.
    """
    if url.endswith(os.sep):
        # Fix: was ``url[:1]``, which truncated the URL to its first char.
        url = url[:-1]
    #This works around sourceforge not having the filename last in the url
    pattern = r"https?://(www\.)?((sf)|(sourceforge))\.net/.*/(?P<filename>[^/]+((\.tar.gz)|(\.tar)|(\.tar.bz2)|(\.tgz)|(\.tbz)|(\.tb2)|(\.zip)))/download"
    regex = re.compile(pattern)
    match = regex.search(url)
    if match is not None:
        fileName = match.group("filename")
        if fileName is not None and fileName != "":
            return fileName
    fileName = url[(url.rfind(os.sep)+1):]
    # Fix: the original tested the builtin ``file`` (always truthy) instead
    # of ``fileName``, so empty names were never caught.
    if url == "" or fileName == "":
        return "_"
    return fileName
def hasTarFileExtension(path):
    """Return True when ``path`` ends with a recognized tar-archive
    extension (.tar, .tar.gz, .tar.bz2, .tgz, .tbz, .tb2)."""
    # str.endswith accepts a tuple of suffixes — one call covers them all.
    return path.endswith((".tar.gz", ".tar.bz2", ".tar", ".tgz",
                          ".tbz", ".tb2"))
def validateCompressedFile(path, logger=None):
    """Return True when ``path`` is a readable tar or zip archive.

    On failure, writes a diagnostic (corrupt vs. unsupported format) to
    ``logger`` when one is supplied, and returns False.
    """
    if tarfile.is_tarfile(path):
        return True
    elif zipfile.is_zipfile(path):
        return True
    elif logger:
        # Distinguish "looks like an archive but unreadable" from
        # "not an archive at all" for a clearer error message.
        if hasTarFileExtension(path):
            logger.writeError("Given tar file '" + path +"' not understood by python's tarfile package and possibly corrupt")
        elif path.endswith(".zip"):
            logger.writeError("Given zip file '" + path +"' not understood by python's zipfile package and possibly corrupt")
        else:
            logger.writeError("Given file '" + path + "' cannot be uncompressed")
    return False
def setVariables(filename, variableList):
    """Rewrite ``filename`` in place, replacing every line that starts
    with a key of ``variableList`` by "<key> = <value>".

    Returns True on success, False when any exception occurs (the error
    is printed).  fileinput's inplace mode redirects sys.stdout into the
    file, which is why matches are emitted with print/stdout.write.
    """
    isWritten = False
    try:
        for i, line in enumerate(fileinput.input(filename, inplace = 1)):
            isWritten = False
            # NOTE(review): strip() removes leading whitespace from every
            # line written back, so indented lines lose their indentation —
            # confirm the target files are flat variable lists.
            line = line.strip()
            for variable,value in variableList.iteritems():
                if(line.startswith(variable)):
                    sys.stdout.write(variable+" = "+value+"\n")
                    isWritten = True
            if not isWritten:
                sys.stdout.write(line+"\n")
        return True
    except Exception, e:
        print e
        return False
def is_exe(fpath):
    """Check if a file is an executable accessible by the user"""
    return os.path.exists(fpath) and os.access(fpath, os.X_OK)
def isInstalled(program):
    """Checks if a program is installed by searching for executable files in the PATH.
    See: http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python#377028
    """
    directory, _ = os.path.split(program)
    if directory:
        # An explicit path was given: test it directly.
        return is_exe(program)
    # Bare program name: probe every PATH entry.
    for entry in os.environ["PATH"].split(os.pathsep):
        if is_exe(os.path.join(entry, program)):
            return True
    return False
| lgpl-3.0 |
w1kke/pylearn2 | pylearn2/termination_criteria/__init__.py | 12 | 9143 | """
Termination criteria used to determine when to stop running a training
algorithm.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
import functools
import numpy as np
class TerminationCriterion(object):
    """
    Abstract base for objects that decide whether a TrainingAlgorithm
    should keep running.
    """
    def continue_learning(self, model):
        """
        Report whether training should continue for this model.

        Parameters
        ----------
        model : Model
            The model being trained.

        Returns
        -------
        bool
            True to keep training, False to stop.
        """
        raise NotImplementedError("%s does not implement continue_learning."
                                  % type(self))
class MonitorBased(TerminationCriterion):
    """
    Early-stopping criterion driven by a monitor channel.

    Training continues as long as the tracked channel value has improved
    by at least ``prop_decrease`` (relative to the best value seen) at
    some point within the last ``N`` epochs.

    Parameters
    ----------
    prop_decrease : float
        Minimum relative improvement over the best value that counts as
        progress and resets the patience countdown.
    N : int
        Number of epochs of patience before stopping.
    channel_name : string, optional
        Monitor channel to track; when None, the 'objective' channel is
        used.
    """
    def __init__(self, prop_decrease=.01, N=5, channel_name=None):
        self._channel_name = channel_name
        self.prop_decrease = prop_decrease
        self.N = N
        # Remaining patience; reset to N on sufficient improvement.
        self.countdown = N
        # Best (lowest) channel value observed so far.
        self.best_value = np.inf
    def continue_learning(self, model):
        """
        Decide whether to keep training based on the model's monitor.

        Parameters
        ----------
        model : Model
            Model whose monitor provides the tracked channel.

        Returns
        -------
        bool
            True while the patience countdown is positive.
        """
        channels = model.monitor.channels
        key = 'objective' if self._channel_name is None else self._channel_name
        history = channels[key].val_record
        latest = history[-1]
        # A sufficiently large relative improvement over the best value
        # resets the countdown; otherwise one epoch of patience is spent.
        if latest < (1. - self.prop_decrease) * self.best_value:
            self.countdown = self.N
        else:
            self.countdown = self.countdown - 1
        if latest < self.best_value:
            self.best_value = latest
        # Stop once N epochs have passed without enough improvement.
        return self.countdown > 0
class MatchChannel(TerminationCriterion):
    """
    Stop training when a cost function reaches the same value as a cost
    function from a previous training run.

    (Useful for getting training likelihood on the entire training set to
    match validation likelihood from an earlier early-stopping run.)

    Parameters
    ----------
    channel_name : str
        Channel of the current run whose value is compared to the target.
    prev_channel_name : str
        Channel from the previous run whose final value becomes the target.
    prev_monitor_name : str
        Name of the model attribute holding the previous run's monitor.
    """
    def __init__(self, channel_name, prev_channel_name, prev_monitor_name):
        # Explicit assignments instead of self.__dict__.update(locals()),
        # which also stored a spurious circular ``self.self`` reference.
        self.channel_name = channel_name
        self.prev_channel_name = prev_channel_name
        self.prev_monitor_name = prev_monitor_name
        # Target value, resolved lazily on the first continue_learning call.
        self.target = None
    @functools.wraps(TerminationCriterion.continue_learning)
    def continue_learning(self, model):
        if self.target is None:
            # First call: capture the final value of the previous run's
            # channel as the target to match.
            prev_monitor = getattr(model, self.prev_monitor_name)
            prev_channel = prev_monitor.channels[self.prev_channel_name]
            self.target = prev_channel.val_record[-1]
        current = model.monitor.channels[self.channel_name].val_record[-1]
        # Keep training while the current value is still above the target.
        return current > self.target
class ChannelTarget(TerminationCriterion):
    """
    Stop training when a cost function reaches some target value.
    Parameters
    ----------
    channel_name : str
        The name of the channel to track
    target : float
        Quit training after the channel is below this value
    """
    def __init__(self, channel_name, target):
        # Explicit assignments rather than self.__dict__.update(locals()),
        # which also stored ``self`` in its own __dict__ (reference cycle).
        self.channel_name = channel_name
        # Coerce eagerly so a non-numeric target fails at construction
        # time rather than on the first comparison.
        self.target = float(target)

    @functools.wraps(TerminationCriterion.continue_learning)
    def continue_learning(self, model):
        # Continue while the most recent channel value is above the target.
        channel = model.monitor.channels[self.channel_name]
        return channel.val_record[-1] > self.target
class ChannelInf(TerminationCriterion):
    """
    Stop training when a channel value reaches Inf or -inf.
    Parameters
    ----------
    channel_name : str
        The channel to track.
    """
    def __init__(self, channel_name):
        # Explicit assignment; self.__dict__.update(locals()) also stored
        # ``self`` in its own __dict__ (reference cycle).
        self.channel_name = channel_name

    @functools.wraps(TerminationCriterion.continue_learning)
    def continue_learning(self, model):
        # Continue only while the latest value is still finite.  The
        # original returned np.isinf(...) directly, i.e. it stopped on
        # every finite value and continued on Inf -- the inverse of the
        # behaviour documented in the class docstring.
        channel = model.monitor.channels[self.channel_name]
        return not np.isinf(channel.val_record[-1])
class EpochCounter(TerminationCriterion):
    """
    Learn for a fixed number of epochs.
    A termination criterion that uses internal state to trigger termination
    after a fixed number of calls (epochs).
    Parameters
    ----------
    max_epochs : int
        Number of epochs (i.e. calls to this object's `__call__`
        method) after which this termination criterion should
        return `False`.
    new_epochs : bool, optional
        If True, epoch counter starts from 0. Otherwise it
        starts from model.monitor.get_epochs_seen()
    """
    def __init__(self, max_epochs, new_epochs=True):
        self._max_epochs = max_epochs
        self._new_epochs = new_epochs
    def initialize(self, model):
        # Lazily set the starting epoch count on the first call, so the
        # criterion can be constructed before the model/monitor exists.
        if self._new_epochs:
            self._epochs_done = 0
        else:
            # epochs_seen = 1 on first continue_learning() call
            self._epochs_done = model.monitor.get_epochs_seen() - 1
    @functools.wraps(TerminationCriterion.continue_learning)
    def continue_learning(self, model):
        # Each call counts as one completed epoch; _epochs_done is created
        # on first use (see initialize()).
        if not hasattr(self, "_epochs_done"):
            self.initialize(model)
        self._epochs_done += 1
        return self._epochs_done < self._max_epochs
class And(TerminationCriterion):
    """
    Keep learning until any of a set of criteria wants to stop.
    Termination criterion representing the logical conjunction
    of several individual criteria. Optimization continues only
    if every constituent criterion returns `True`.
    Parameters
    ----------
    criteria : iterable
        A sequence of callables representing termination criteria,
        with a return value of True indicating that training
        should continue.
    """
    def __init__(self, criteria):
        # Materialize the iterable exactly once: the original called
        # list(criteria) twice, so a one-shot iterator would be exhausted
        # by the assert and self._criteria would silently end up empty.
        criteria = list(criteria)
        assert all(isinstance(x, TerminationCriterion) for x in criteria)
        self._criteria = criteria

    @functools.wraps(TerminationCriterion.continue_learning)
    def continue_learning(self, model):
        # Continue only while every sub-criterion agrees to continue.
        return all(criterion.continue_learning(model)
                   for criterion in self._criteria)
class Or(TerminationCriterion):
    """
    Keep learning as long as any of some set of criteria say to do so.
    Termination criterion representing the logical disjunction
    of several individual criteria. Optimization continues if
    any of the constituent criteria return `True`.
    Parameters
    ----------
    criteria : iterable
        A sequence of callables representing termination criteria,
        with a return value of True indicating that gradient
        descent should continue.
    """
    def __init__(self, criteria):
        # Materialize the iterable exactly once: the original called
        # list(criteria) twice, so a one-shot iterator would be exhausted
        # by the assert and self._criteria would silently end up empty.
        criteria = list(criteria)
        assert all(isinstance(x, TerminationCriterion) for x in criteria)
        self._criteria = criteria

    @functools.wraps(TerminationCriterion.continue_learning)
    def continue_learning(self, model):
        # Continue while at least one sub-criterion wants to continue.
        return any(criterion.continue_learning(model)
                   for criterion in self._criteria)
| bsd-3-clause |
feigaochn/leetcode | p1_two_sum.py | 2 | 1360 | # author: Fei Gao
#
# Two Sum
#
# Given an array of integers, find two numbers such that they add up to a
# specific target number.
# The function twoSum should return indices of the two numbers such that they
# add up to the target, where index1 must be less than index2. Please note
# that your returned answers (both index1 and index2) are not zero-based.
# You may assume that each input would have exactly one solution.
# Input: numbers={2, 7, 11, 15}, target=9
# Output: index1=1, index2=2
class Solution:
    # @return a tuple, (index1, index2)
    def twoSum(self, num, target):
        """Return 1-based indices (index1, index2), index1 < index2, such
        that num[index1 - 1] + num[index2 - 1] == target.

        One-pass hash map, O(n) time / O(n) space.  Replaces the original
        sort + repeated list.index() scans, which cost O(n) per lookup
        (O(n^2) overall in the worst case).  Assumes exactly one solution
        exists, as the problem statement guarantees; returns None if no
        pair sums to target.
        """
        target = int(target)
        seen = {}  # value -> earliest 0-based index at which it occurs
        for j, value in enumerate(num):
            complement = target - value
            if complement in seen:
                # seen[complement] < j by construction, so the returned
                # pair is already ordered (index1 < index2).
                return seen[complement] + 1, j + 1
            # Keep only the first occurrence so index1 is minimal, which
            # also handles duplicate values (e.g. [0, 4, 3, 0], target 0).
            seen.setdefault(value, j)
def main():
    """Exercise Solution.twoSum on a couple of sample inputs and print
    each input alongside its result."""
    cases = [([2, 7, 11, 15], 9), ([0, 4, 3, 0], 0)]
    solver = Solution()
    for numbers, target in cases:
        answer = solver.twoSum(numbers, target)
        print((numbers, target))
        print(' ->')
        print(answer)
        print('~' * 10)


if __name__ == '__main__':
    main()
| mit |
chandlercr/aima-python | submissions/Porter/myNN.py | 16 | 6217 | from sklearn import datasets
from sklearn.neural_network import MLPClassifier
import traceback
from submissions.aartiste import election
from submissions.aartiste import county_demographics
class DataFrame:
    # Lightweight stand-in for sklearn's dataset "Bunch" objects: parallel
    # attributes holding rows, column names, labels, and label names.
    # NOTE(review): these are mutable class-level attributes; the script
    # below reassigns them per instance rather than mutating them in place
    # -- confirm before appending to them on a fresh instance, as that
    # would mutate state shared across all instances.
    data = []
    feature_names = []
    target = []
    target_names = []
trumpECHP = DataFrame()
'''
Extract data from the CORGIS elections, and merge it with the
CORGIS demographics. Both data sets are organized by county and state.
'''
joint = {}
elections = election.get_results()
for county in elections:
try:
st = county['Location']['State Abbreviation']
countyST = county['Location']['County'] + st
trump = county['Vote Data']['Donald Trump']['Percent of Votes']
joint[countyST] = {}
joint[countyST]['ST']= st
joint[countyST]['Trump'] = trump
except:
traceback.print_exc()
demographics = county_demographics.get_all_counties()
for county in demographics:
try:
countyNames = county['County'].split()
cName = ' '.join(countyNames[:-1])
st = county['State']
countyST = cName + st
# elderly =
# college =
# home =
# poverty =
if countyST in joint:
joint[countyST]['Elderly'] = county['Age']["Percent 65 and Older"]
joint[countyST]['HighSchool'] = county['Education']["High School or Higher"]
joint[countyST]['College'] = county['Education']["Bachelor's Degree or Higher"]
joint[countyST]['White'] = county['Ethnicities']["White Alone, not Hispanic or Latino"]
joint[countyST]['Persons'] = county['Housing']["Persons per Household"]
joint[countyST]['Home'] = county['Housing']["Homeownership Rate"]
joint[countyST]['Income'] = county['Income']["Median Houseold Income"]
joint[countyST]['Poverty'] = county['Income']["Persons Below Poverty Level"]
joint[countyST]['Sales'] = county['Sales']["Retail Sales per Capita"]
except:
traceback.print_exc()
'''
Remove the counties that did not appear in both samples.
'''
intersection = {}
for countyST in joint:
if 'College' in joint[countyST]:
intersection[countyST] = joint[countyST]
trumpECHP.data = []
'''
Build the input frame, row by row.
'''
for countyST in intersection:
# choose the input values
row = []
for key in intersection[countyST]:
if key in ['ST', 'Trump']:
continue
row.append(intersection[countyST][key])
trumpECHP.data.append(row)
firstCounty = next(iter(intersection.keys()))
firstRow = intersection[firstCounty]
trumpECHP.feature_names = list(firstRow.keys())
trumpECHP.feature_names.remove('ST')
trumpECHP.feature_names.remove('Trump')
'''
Build the target list,
one entry for each row in the input frame.
The Naive Bayesian network is a classifier,
i.e. it sorts data points into bins.
The best it can do to estimate a continuous variable
is to break the domain into segments, and predict
the segment into which the variable's value will fall.
In this example, I'm breaking Trump's % into two
arbitrary segments.
'''
trumpECHP.target = []
def trumpTarget(percentage):
    """Bin a vote share into class 1 (strictly above 45%) or class 0."""
    return 1 if percentage > 45 else 0
for countyST in intersection:
# choose the target
tt = trumpTarget(intersection[countyST]['Trump'])
trumpECHP.target.append(tt)
trumpECHP.target_names = [
'Trump <= 45%',
'Trump > 45%',
]
'''
Make a custom classifier.
'''
mlpc = MLPClassifier(
hidden_layer_sizes = (100, 50, ),
# activation = 'relu',
solver='sgd', # 'adam',
# alpha = 0.0001,
# batch_size='auto',
learning_rate = 'adaptive', # 'constant',
# power_t = 0.5,
max_iter = 1000, # 200,
# shuffle = True,
# random_state = None,
# tol = 1e-4,
# verbose = False,
# warm_start = False,
# momentum = 0.9,
# nesterovs_momentum = True,
# early_stopping = False,
# validation_fraction = 0.1,
# beta_1 = 0.9,
# beta_2 = 0.999,
# epsilon = 1e-8,
)
'''
Try scaling the data.
'''
trumpScaled = DataFrame()
def setupScales(grid):
    """Record the per-column minima and maxima of *grid* for scaleGrid().

    Results are stored in the module-level ``scale_min``/``scale_max``
    lists.  (The original bound the globals ``min`` and ``max``, shadowing
    the Python builtins for the entire module.)
    """
    global scale_min, scale_max
    scale_min = list(grid[0])
    scale_max = list(grid[0])
    for row in range(1, len(grid)):
        for col in range(len(grid[row])):
            cell = grid[row][col]
            if cell < scale_min[col]:
                scale_min[col] = cell
            if cell > scale_max[col]:
                scale_max[col] = cell

def scaleGrid(grid):
    """Return *grid* min-max scaled to [0, 1] using the recorded extrema.

    A column whose min equals its max would divide by zero; as in the
    original code, the broad except silently drops such cells from the
    output row rather than raising.
    """
    newGrid = []
    for row in range(len(grid)):
        newRow = []
        for col in range(len(grid[row])):
            try:
                cell = grid[row][col]
                scaled = (cell - scale_min[col]) \
                         / (scale_max[col] - scale_min[col])
                newRow.append(scaled)
            except:
                pass
        newGrid.append(newRow)
    return newGrid
setupScales(trumpECHP.data)
trumpScaled.data = scaleGrid(trumpECHP.data)
trumpScaled.feature_names = trumpECHP.feature_names
trumpScaled.target = trumpECHP.target
trumpScaled.target_names = trumpECHP.target_names
'''
Teach a Neural net to count 2
'''
count22 = DataFrame()
count22.data = [[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1],
[1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]]
count22.feature_names = ['a', 'b', 'c']
count22.target = [0, 0, 0, 1,
0, 1, 1, 0]
count22.target_names = ['Two']
countMLPC = MLPClassifier(
hidden_layer_sizes = (3,), # (100,),
# activation = 'relu',
solver='sgd', # 'adam',
# alpha = 0.0001,
# batch_size='auto',
# learning_rate = 'constant',
# power_t = 0.5,
max_iter = 10, # 200,
# shuffle = True,
# random_state = None,
# tol = 1e-4,
verbose = True # False,
# warm_start = False,
# momentum = 0.9,
# nesterovs_momentum = True,
# early_stopping = False,
# validation_fraction = 0.1,
# beta_1 = 0.9,
# beta_2 = 0.999,
# epsilon = 1e-8,
)
Examples = {
# 'TrumpDefault': {
# 'frame': trumpECHP,
# },
# 'TrumpSGD': {
# 'frame': trumpECHP,
# 'mlpc': mlpc
# },
# 'TrumpScaled': {
# 'frame': trumpScaled,
# },
'Count to 2': {
'frame': count22,
'mlpc': countMLPC
}
} | mit |
Stanford-Online/edx-platform | cms/djangoapps/contentstore/features/problem-editor.py | 1 | 12940 | # disable missing docstring
# pylint: disable=missing-docstring
import json
from lettuce import step, world
from nose.tools import assert_equal, assert_true
from advanced_settings import ADVANCED_MODULES_KEY, change_value
from common import open_new_course, type_in_codemirror
from course_import import import_file
DISPLAY_NAME = "Display Name"
MAXIMUM_ATTEMPTS = "Maximum Attempts"
PROBLEM_WEIGHT = "Problem Weight"
RANDOMIZATION = 'Randomization'
SHOW_ANSWER = "Show Answer"
SHOW_ANSWER_AFTER_SOME_NUMBER_OF_ATTEMPTS = 'Show Answer: Number of Attempts'
SHOW_RESET_BUTTON = "Show Reset Button"
TIMER_BETWEEN_ATTEMPTS = "Timer Between Attempts"
MATLAB_API_KEY = "Matlab API key"
@step('I have created a Blank Common Problem$')
def i_created_blank_common_problem(step):
step.given('I am in Studio editing a new unit')
step.given("I have created another Blank Common Problem")
@step('I have created a unit with advanced module "(.*)"$')
def i_created_unit_with_advanced_module(step, advanced_module):
    """Create a new unit after enabling *advanced_module* in the course's
    Advanced Settings, then navigate back to the unit page."""
    step.given('I am in Studio editing a new unit')
    # Remember the unit URL: editing Advanced Settings navigates away.
    url = world.browser.url
    step.given("I select the Advanced Settings")
    change_value(step, ADVANCED_MODULES_KEY, '["{}"]'.format(advanced_module))
    # Return to the unit and wait for the XModule JS to finish loading.
    world.visit(url)
    world.wait_for_xmodule()
@step('I have created an advanced component "(.*)" of type "(.*)"')
def i_create_new_advanced_component(step, component_type, advanced_component):
world.create_component_instance(
step=step,
category='advanced',
component_type=component_type,
advanced_component=advanced_component
)
@step('I have created another Blank Common Problem$')
def i_create_new_common_problem(step):
world.create_component_instance(
step=step,
category='problem',
component_type='Blank Common Problem'
)
@step('when I mouseover on "(.*)"')
def i_mouseover_on_html_component(step, element_class):
action_css = '.{}'.format(element_class)
world.trigger_event(action_css, event='mouseover')
@step(u'I can see Reply to Annotation link$')
def i_see_reply_to_annotation_link(_step):
css_selector = 'a.annotatable-reply'
world.wait_for_visible(css_selector)
@step(u'I see that page has scrolled "(.*)" when I click on "(.*)" link$')
def i_see_annotation_problem_page_scrolls(_step, scroll_direction, link_css):
    """Click the anchor matching *link_css* and assert the window scrolled
    in *scroll_direction* ("up" or "down")."""
    scroll_js = "$(window).scrollTop();"
    scroll_height_before = world.browser.evaluate_script(scroll_js)
    world.css_click("a.{}".format(link_css))
    scroll_height_after = world.browser.evaluate_script(scroll_js)
    # "up" means a smaller scrollTop value afterwards, "down" a larger one.
    if scroll_direction == "up":
        assert scroll_height_after < scroll_height_before
    elif scroll_direction == "down":
        assert scroll_height_after > scroll_height_before
@step('I have created an advanced problem of type "(.*)"$')
def i_create_new_advanced_problem(step, component_type):
world.create_component_instance(
step=step,
category='problem',
component_type=component_type,
is_advanced=True
)
@step('I edit and select Settings$')
def i_edit_and_select_settings(_step):
world.edit_component_and_select_settings()
@step('I see the advanced settings and their expected values$')
def i_see_advanced_settings_with_values(step):
world.verify_all_setting_entries(
[
[DISPLAY_NAME, "Blank Common Problem", True],
[MATLAB_API_KEY, "", False],
[MAXIMUM_ATTEMPTS, "", False],
[PROBLEM_WEIGHT, "", False],
[RANDOMIZATION, "Never", False],
[SHOW_ANSWER, "Finished", False],
[SHOW_ANSWER_AFTER_SOME_NUMBER_OF_ATTEMPTS, '0', False],
[SHOW_RESET_BUTTON, "False", False],
[TIMER_BETWEEN_ATTEMPTS, "0", False],
])
@step('I can modify the display name')
def i_can_modify_the_display_name(_step):
# Verifying that the display name can be a string containing a floating point value
# (to confirm that we don't throw an error because it is of the wrong type).
index = world.get_setting_entry_index(DISPLAY_NAME)
world.set_field_value(index, '3.4')
verify_modified_display_name()
@step('my display name change is persisted on save')
def my_display_name_change_is_persisted_on_save(step):
world.save_component_and_reopen(step)
verify_modified_display_name()
@step('the problem display name is "(.*)"$')
def verify_problem_display_name(step, name):
"""
name is uppercased because the heading styles are uppercase in css
"""
assert_equal(name, world.browser.find_by_css('.problem-header').text)
@step('I can specify special characters in the display name')
def i_can_modify_the_display_name_with_special_chars(_step):
index = world.get_setting_entry_index(DISPLAY_NAME)
world.set_field_value(index, "updated ' \" &")
verify_modified_display_name_with_special_chars()
@step('I can specify html in the display name and save')
def i_can_modify_the_display_name_with_html(_step):
"""
If alert appear on save then UnexpectedAlertPresentException
will occur and test will fail.
"""
index = world.get_setting_entry_index(DISPLAY_NAME)
world.set_field_value(index, "<script>alert('test')</script>")
verify_modified_display_name_with_html()
world.save_component()
@step('my special characters and persisted on save')
def special_chars_persisted_on_save(step):
world.save_component_and_reopen(step)
verify_modified_display_name_with_special_chars()
@step('I can revert the display name to unset')
def can_revert_display_name_to_unset(_step):
world.revert_setting_entry(DISPLAY_NAME)
verify_unset_display_name()
@step('my display name is unset on save')
def my_display_name_is_persisted_on_save(step):
world.save_component_and_reopen(step)
verify_unset_display_name()
@step('I can select Per Student for Randomization')
def i_can_select_per_student_for_randomization(_step):
world.browser.select(RANDOMIZATION, "Per Student")
verify_modified_randomization()
@step('my change to randomization is persisted')
def my_change_to_randomization_is_persisted(step):
world.save_component_and_reopen(step)
verify_modified_randomization()
@step('I can revert to the default value for randomization')
def i_can_revert_to_default_for_randomization(step):
world.revert_setting_entry(RANDOMIZATION)
world.save_component_and_reopen(step)
world.verify_setting_entry(world.get_setting_entry(RANDOMIZATION), RANDOMIZATION, "Never", False)
@step('I can set the weight to "(.*)"?')
def i_can_set_weight(_step, weight):
set_weight(weight)
verify_modified_weight()
@step('my change to weight is persisted')
def my_change_to_weight_is_persisted(step):
world.save_component_and_reopen(step)
verify_modified_weight()
@step('I can revert to the default value of unset for weight')
def i_can_revert_to_default_for_unset_weight(step):
world.revert_setting_entry(PROBLEM_WEIGHT)
world.save_component_and_reopen(step)
world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", False)
@step('if I set the weight to "(.*)", it remains unset')
def set_the_weight_to_abc(step, bad_weight):
set_weight(bad_weight)
# We show the clear button immediately on type, hence the "True" here.
world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", True)
world.save_component_and_reopen(step)
# But no change was actually ever sent to the model, so on reopen, explicitly_set is False
world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", False)
@step('if I set the max attempts to "(.*)", it will persist as a valid integer$')
def set_the_max_attempts(step, max_attempts_set):
    """Type *max_attempts_set* into Maximum Attempts, save and reopen the
    component, and check the persisted value is a non-negative integer."""
    # on firefox with selenium, the behavior is different.
    # eg 2.34 displays as 2.34 and is persisted as 2
    index = world.get_setting_entry_index(MAXIMUM_ATTEMPTS)
    world.set_field_value(index, max_attempts_set)
    world.save_component_and_reopen(step)
    value = world.css_value('input.setting-input', index=index)
    assert value != "", "max attempts is blank"
    # int() raises ValueError if the persisted value is not an integer.
    assert int(value) >= 0
@step('Edit High Level Source is not visible')
def edit_high_level_source_not_visible(step):
verify_high_level_source_links(step, False)
@step('Edit High Level Source is visible')
def edit_high_level_source_links_visible(step):
verify_high_level_source_links(step, True)
@step('If I press Cancel my changes are not persisted')
def cancel_does_not_save_changes(step):
world.cancel_component(step)
step.given("I edit and select Settings")
step.given("I see the advanced settings and their expected values")
@step('I have enabled latex compiler')
def enable_latex_compiler(step):
url = world.browser.url
step.given("I select the Advanced Settings")
change_value(step, 'Enable LaTeX Compiler', 'true')
world.visit(url)
world.wait_for_xmodule()
@step('I have created a LaTeX Problem')
def create_latex_problem(step):
step.given('I am in Studio editing a new unit')
step.given('I have enabled latex compiler')
world.create_component_instance(
step=step,
category='problem',
component_type='Problem Written in LaTeX',
is_advanced=True
)
@step('I edit and compile the High Level Source')
def edit_latex_source(_step):
open_high_level_source()
type_in_codemirror(1, "hi")
world.css_click('.hls-compile')
@step('my change to the High Level Source is persisted')
def high_level_source_persisted(_step):
def verify_text(driver):
css_sel = '.problem div>span'
return world.css_text(css_sel) == 'hi'
world.wait_for(verify_text, timeout=10)
@step('I view the High Level Source I see my changes')
def high_level_source_in_editor(_step):
open_high_level_source()
assert_equal('hi', world.css_value('.source-edit-box'))
@step(u'I have an empty course')
def i_have_empty_course(step):
open_new_course()
@step(u'I import the file "([^"]*)"$')
def i_import_the_file(_step, filename):
import_file(filename)
@step(u'I go to the vertical "([^"]*)"$')
def i_go_to_vertical(_step, vertical):
world.css_click("span:contains('{0}')".format(vertical))
@step(u'I go to the unit "([^"]*)"$')
def i_go_to_unit(_step, unit):
loc = "window.location = $(\"span:contains('{0}')\").closest('a').attr('href')".format(unit)
world.browser.execute_script(loc)
@step(u'I see a message that says "([^"]*)"$')
def i_can_see_message(_step, msg):
msg = json.dumps(msg) # escape quotes
world.css_has_text("h2.title", msg)
@step(u'I can edit the problem$')
def i_can_edit_problem(_step):
world.edit_component()
@step(u'I edit first blank advanced problem for annotation response$')
def i_edit_blank_problem_for_annotation_response(_step):
world.edit_component(1)
text = """
<problem>
<annotationresponse>
<annotationinput><text>Text of annotation</text></annotationinput>
</annotationresponse>
</problem>"""
type_in_codemirror(0, text)
world.save_component()
@step(u'I can see cheatsheet$')
def verify_cheat_sheet_displaying(_step):
world.css_click(".cheatsheet-toggle")
css_selector = '.simple-editor-cheatsheet'
world.wait_for_visible(css_selector)
def verify_high_level_source_links(step, visible):
    """Assert the presence (visible=True) or absence (visible=False) of
    the LaTeX 'launch compiler' button, then close the component editor."""
    if visible:
        assert_true(world.is_css_present('.launch-latex-compiler'),
                    msg="Expected to find the latex button but it is not present.")
    else:
        assert_true(world.is_css_not_present('.launch-latex-compiler'),
                    msg="Expected not to find the latex button but it is present.")
    # Always close the editor so the next step starts from a clean state.
    world.cancel_component(step)
def verify_modified_weight():
world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "3.5", True)
def verify_modified_randomization():
world.verify_setting_entry(world.get_setting_entry(RANDOMIZATION), RANDOMIZATION, "Per Student", True)
def verify_modified_display_name():
world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, '3.4', True)
def verify_modified_display_name_with_special_chars():
world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, "updated ' \" &", True)
def verify_modified_display_name_with_html():
world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, "<script>alert('test')</script>", True)
def verify_unset_display_name():
world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, 'Blank Advanced Problem', False)
def set_weight(weight):
    """Type *weight* into the Problem Weight settings field."""
    index = world.get_setting_entry_index(PROBLEM_WEIGHT)
    world.set_field_value(index, weight)
def open_high_level_source():
    """Open the component editor and launch the LaTeX high-level-source
    editing view."""
    world.edit_component()
    world.css_click('.launch-latex-compiler > a')
| agpl-3.0 |
drzaeus77/pyroute2 | pyroute2/netlink/taskstats/__init__.py | 7 | 5936 | '''
TaskStats module
================
All that you should know about TaskStats, is that you should not
use it. But if you have to, ok::
import os
from pyroute2 import TaskStats
ts = TaskStats()
ts.get_pid_stat(os.getpid())
It is not implemented normally yet, but some methods are already
usable.
'''
from pyroute2.netlink import NLM_F_REQUEST
from pyroute2.netlink import nla
from pyroute2.netlink import genlmsg
from pyroute2.netlink.generic import GenericNetlinkSocket
TASKSTATS_CMD_UNSPEC = 0 # Reserved
TASKSTATS_CMD_GET = 1 # user->kernel request/get-response
TASKSTATS_CMD_NEW = 2
class tcmd(genlmsg):
    # Request message for the TASKSTATS generic netlink family.  The
    # attribute map mirrors enum TASKSTATS_CMD_ATTR_* from the kernel's
    # <linux/taskstats.h>; positions in the tuple are the attribute ids.
    nla_map = (('TASKSTATS_CMD_ATTR_UNSPEC', 'none'),
               ('TASKSTATS_CMD_ATTR_PID', 'uint32'),
               ('TASKSTATS_CMD_ATTR_TGID', 'uint32'),
               ('TASKSTATS_CMD_ATTR_REGISTER_CPUMASK', 'asciiz'),
               ('TASKSTATS_CMD_ATTR_DEREGISTER_CPUMASK', 'asciiz'))
class tstats(nla):
    # Binary payload of a taskstats reply: field-for-field layout of the
    # kernel's ``struct taskstats`` (see <linux/taskstats.h>), decoded with
    # struct format codes.  The inline comments track byte sizes and
    # running offsets, including the 3-byte pad that realigns the struct
    # after the ac_sched byte.
    pack = "struct"
    fields = (('version', 'H'),                         # 2
              ('ac_exitcode', 'I'),                     # 4
              ('ac_flag', 'B'),                         # 1
              ('ac_nice', 'B'),                         # 1 --- 10
              ('cpu_count', 'Q'),                       # 8
              ('cpu_delay_total', 'Q'),                 # 8
              ('blkio_count', 'Q'),                     # 8
              ('blkio_delay_total', 'Q'),               # 8
              ('swapin_count', 'Q'),                    # 8
              ('swapin_delay_total', 'Q'),              # 8
              ('cpu_run_real_total', 'Q'),              # 8
              ('cpu_run_virtual_total', 'Q'),           # 8
              ('ac_comm', '32s'),                       # 32 +++ 112
              ('ac_sched', 'B'),                        # 1
              ('__pad', '3x'),                          # 1 --- 8 (!)
              ('ac_uid', 'I'),                          # 4 +++ 120
              ('ac_gid', 'I'),                          # 4
              ('ac_pid', 'I'),                          # 4
              ('ac_ppid', 'I'),                         # 4
              ('ac_btime', 'I'),                        # 4 +++ 136
              ('ac_etime', 'Q'),                        # 8 +++ 144
              ('ac_utime', 'Q'),                        # 8
              ('ac_stime', 'Q'),                        # 8
              ('ac_minflt', 'Q'),                       # 8
              ('ac_majflt', 'Q'),                       # 8
              ('coremem', 'Q'),                         # 8
              ('virtmem', 'Q'),                         # 8
              ('hiwater_rss', 'Q'),                     # 8
              ('hiwater_vm', 'Q'),                      # 8
              ('read_char', 'Q'),                       # 8
              ('write_char', 'Q'),                      # 8
              ('read_syscalls', 'Q'),                   # 8
              ('write_syscalls', 'Q'),                  # 8
              ('read_bytes', 'Q'),                      # ...
              ('write_bytes', 'Q'),
              ('cancelled_write_bytes', 'Q'),
              ('nvcsw', 'Q'),
              ('nivcsw', 'Q'),
              ('ac_utimescaled', 'Q'),
              ('ac_stimescaled', 'Q'),
              ('cpu_scaled_run_real_total', 'Q'))
    def decode(self):
        # Decode the raw struct, then trim the fixed-width command name at
        # the first NUL terminator.
        # NOTE(review): if no NUL is present, find() returns -1 and the
        # slice drops the last character -- confirm ac_comm is always
        # NUL-terminated by the kernel.
        nla.decode(self)
        self['ac_comm'] = self['ac_comm'][:self['ac_comm'].find('\0')]
class taskstatsmsg(genlmsg):
    # Reply message from the TASKSTATS family.  The attribute map mirrors
    # enum TASKSTATS_TYPE_*; stats payloads arrive either bare or wrapped
    # in an AGGR_PID/AGGR_TGID aggregate attribute.
    nla_map = (('TASKSTATS_TYPE_UNSPEC', 'none'),
               ('TASKSTATS_TYPE_PID', 'uint32'),
               ('TASKSTATS_TYPE_TGID', 'uint32'),
               ('TASKSTATS_TYPE_STATS', 'stats'),
               ('TASKSTATS_TYPE_AGGR_PID', 'aggr_pid'),
               ('TASKSTATS_TYPE_AGGR_TGID', 'aggr_tgid'))
    class stats(tstats):
        pass  # FIXME: optimize me!
    class aggr_id(nla):
        # Aggregate wrapper: a pid/tgid attribute followed by the stats.
        nla_map = (('TASKSTATS_TYPE_UNSPEC', 'none'),
                   ('TASKSTATS_TYPE_PID', 'uint32'),
                   ('TASKSTATS_TYPE_TGID', 'uint32'),
                   ('TASKSTATS_TYPE_STATS', 'stats'))
        class stats(tstats):
            pass
    class aggr_pid(aggr_id):
        pass
    class aggr_tgid(aggr_id):
        pass
class TaskStats(GenericNetlinkSocket):
    # Thin client for the kernel's TASKSTATS generic netlink interface.
    def __init__(self):
        GenericNetlinkSocket.__init__(self)
    def bind(self):
        # Resolve the 'TASKSTATS' generic netlink family id and register
        # taskstatsmsg as the parser for its replies.
        GenericNetlinkSocket.bind(self, 'TASKSTATS', taskstatsmsg)
    def get_pid_stat(self, pid):
        '''
        Get taskstats for a process. Pid should be an integer.
        '''
        msg = tcmd()
        msg['cmd'] = TASKSTATS_CMD_GET
        msg['version'] = 1
        msg['attrs'].append(['TASKSTATS_CMD_ATTR_PID', pid])
        return self.nlm_request(msg,
                                self.prid,
                                msg_flags=NLM_F_REQUEST)
    def _register_mask(self, cmd, mask):
        # Shared helper for register_mask()/deregister_mask(): sends a
        # TASKSTATS_CMD_GET carrying the given cpumask attribute.
        msg = tcmd()
        msg['cmd'] = TASKSTATS_CMD_GET
        msg['version'] = 1
        msg['attrs'].append([cmd, mask])
        # there is no response to this request
        self.put(msg,
                 self.prid,
                 msg_flags=NLM_F_REQUEST)
    def register_mask(self, mask):
        '''
        Start the accounting for a processors by a mask. Mask is
        a string, e.g.::
            0,1 -- first two CPUs
            0-4,6-10 -- CPUs from 0 to 4 and from 6 to 10
        Though the kernel has a procedure, that cleans up accounting,
        when it is not used, it is recommended to run deregister_mask()
        before process exit.
        '''
        self._register_mask('TASKSTATS_CMD_ATTR_REGISTER_CPUMASK',
                            mask)
    def deregister_mask(self, mask):
        '''
        Stop the accounting.
        '''
        self._register_mask('TASKSTATS_CMD_ATTR_DEREGISTER_CPUMASK',
                            mask)
| apache-2.0 |
Mhynlo/SickRage | lib/hachoir_core/field/bit_field.py | 95 | 1777 | """
Bit sized classes:
- Bit: Single bit, value is False or True ;
- Bits: Integer with a size in bits ;
- RawBits: unknown content with a size in bits.
"""
from hachoir_core.field import Field
from hachoir_core.i18n import _
from hachoir_core import config
class RawBits(Field):
    """
    Unknown content with a size in bits.
    """
    # Field protocol hook: called with the constructor arguments, it must
    # return the field size; here that is simply the ``size`` argument
    # (positional index 1 after ``parent``).
    static_size = staticmethod(lambda *args, **kw: args[1])

    def __init__(self, parent, name, size, description=None):
        """
        Constructor: see L{Field.__init__} for parameter description
        """
        Field.__init__(self, parent, name, size, description)

    def hasValue(self):
        # Raw bits always have a (lazily computed) value.
        return True

    def createValue(self):
        # Read self._size bits from the parent's stream at this field's
        # absolute bit address, honouring the parent's endianness.
        return self._parent.stream.readBits(
            self.absolute_address, self._size, self._parent.endian)

    def createDisplay(self):
        if self._size < config.max_bit_length:
            return unicode(self.value)
        else:
            # Too large to render as a number: show a placeholder with the
            # class name and bit size instead.
            return _("<%s size=%u>" %
                     (self.__class__.__name__, self._size))
    # The raw display is identical to the human-readable one.
    createRawDisplay = createDisplay
class Bits(RawBits):
    """
    Positive integer with a size in bits
    @see: L{Bit}
    @see: L{RawBits}
    """
    # Behaviour is inherited unchanged from RawBits; the subclass exists
    # to give integer-valued fields a more precise type name.
    pass
class Bit(RawBits):
    """
    Single bit: value can be False or True, and size is exactly one bit.
    @see: L{Bits}
    """
    # A Bit always occupies exactly one bit of the stream.
    static_size = 1

    def __init__(self, parent, name, description=None):
        """
        Constructor: see L{Field.__init__} for parameter description
        """
        RawBits.__init__(self, parent, name, 1, description=description)

    def createValue(self):
        # Read a single bit and map 1 -> True, 0 -> False.
        return 1 == self._parent.stream.readBits(
            self.absolute_address, 1, self._parent.endian)

    def createRawDisplay(self):
        # Raw display renders the bit as u"0"/u"1" rather than True/False.
        return unicode(int(self.value))
| gpl-3.0 |
JurassicWordExcel/core | wizards/com/sun/star/wizards/web/FTPDialogResources.py | 7 | 3303 | #
# This file is part of the LibreOffice project.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# This file incorporates work covered by the following license notice:
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed
# with this work for additional information regarding copyright
# ownership. The ASF licenses this file to you under the Apache
# License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0 .
class FTPDialogResources(object):
    """String resources for the FTP publishing dialog, fetched from the
    wizard's resource bundle by numeric resource id.

    Parameters
    ----------
    oWizardResource
        Resource accessor exposing ``getResText(rid)``; each attribute
        below is loaded with a fixed offset from one of the two id bases.
    """

    RID_FTPDIALOG_START = 4200  # base id of the FTP-dialog strings
    RID_COMMON_START = 500      # base id of the shared wizard strings

    def __init__(self, oWizardResource):
        # Bug fix: the original called
        #   super(FTPDialogResources, self).__init__(xmsf, self.MODULE_NAME)
        # where neither ``xmsf`` nor ``MODULE_NAME`` is defined, raising
        # NameError on every construction (and object.__init__ takes no
        # extra arguments anyway), so the call is removed.
        get = oWizardResource.getResText
        self.resFTPDialog_title = get(self.RID_FTPDIALOG_START + 0)
        self.reslblUsername_value = get(self.RID_FTPDIALOG_START + 1)
        self.reslblPassword_value = get(self.RID_FTPDIALOG_START + 2)
        self.resbtnConnect_value = get(self.RID_FTPDIALOG_START + 3)
        self.resln1_value = get(self.RID_FTPDIALOG_START + 4)
        self.reslblFTPAddress_value = get(self.RID_FTPDIALOG_START + 5)
        self.resln2_value = get(self.RID_FTPDIALOG_START + 6)
        self.resln3_value = get(self.RID_FTPDIALOG_START + 7)
        self.resbtnDir_value = get(self.RID_FTPDIALOG_START + 8)
        self.resFTPDisconnected = get(self.RID_FTPDIALOG_START + 9)
        self.resFTPConnected = get(self.RID_FTPDIALOG_START + 10)
        self.resFTPUserPwdWrong = get(self.RID_FTPDIALOG_START + 11)
        self.resFTPServerNotFound = get(self.RID_FTPDIALOG_START + 12)
        self.resFTPRights = get(self.RID_FTPDIALOG_START + 13)
        self.resFTPHostUnreachable = get(self.RID_FTPDIALOG_START + 14)
        self.resFTPUnknownError = get(self.RID_FTPDIALOG_START + 15)
        self.resFTPDirectory = get(self.RID_FTPDIALOG_START + 16)
        self.resIllegalFolder = get(self.RID_FTPDIALOG_START + 17)
        self.resConnecting = get(self.RID_FTPDIALOG_START + 18)
        # Shared button captions from the common resource range.
        self.resbtnCancel_value = get(self.RID_COMMON_START + 11)
        self.resbtnOK_value = get(self.RID_COMMON_START + 18)
        self.resbtnHelp_value = get(self.RID_COMMON_START + 15)
        # The initial remote directory is hard-coded, not a resource.
        self.restxtDir_value = "/"
peterfpeterson/mantid | scripts/test/Calibration/test_tube_calib.py | 3 | 6550 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
# Standard and third-party
import numpy as np
from numpy.testing import assert_allclose
from os import path
import unittest
# Mantid import
from mantid import config
from mantid.api import AnalysisDataService, mtd
from mantid.simpleapi import DeleteWorkspaces, LoadNexusProcessed
# Calibration imports
from Calibration.tube_calib import correct_tube_to_ideal_tube, getCalibratedPixelPositions
class TestTubeCalib(unittest.TestCase):
    """Tests for tube_calib's quadratic pixel correction and calibrated pixel positions.

    Uses a synthetic quadratic Y(pixel) profile for one 256-pixel CORELLI tube,
    plus a processed Nexus file for CORELLI bank 20 fetched from the unit-test
    data directories.
    """

    @classmethod
    def setUpClass(cls):  # called only before running all tests in the test case
        cls.workspaces_temporary = list()  # workspaces to be deleted at tear-down

        # Single tube data. Tube dimensions appropriate for a CORELLI tube
        def y_quad(n: float) -> float:
            r"""
            Example quadratic function, returning the Y-coordinate (meters) versus pixel index `n`
            y_quad(n) = c0 + c1 * n + c2 * n^2.
            Coefficients c0, c1, and c2 obtained by solving the following equations:
                y(0) = -0.502
                y(128) = 0.001
                y(255) = 0.393  # assume a tube with 256 pixels
            Obtaining:
                c0 = -0.502
                c1 = 0.00435287724834028
                c2 = -3.306169908908442e-06
            :param n: pixel coordinate
            """
            return -0.502 + 0.00435287724834028 * n -3.306169908908442e-06 * n * n

        # assume 11 slits(wires) casting 11 peaks(shadows) onto the tube at the following pixel numbers
        tube_points = np.linspace(5, 245, 11, endpoint=True)  # 5, 29, 53,...,221, 245
        # assume the Y-coordinates of the peaks(shadows) given by our quadratic example function
        ideal_tube_coordinates = [y_quad(n) for n in tube_points]
        # fixture shared by the tests below; 'coefficients' are the expected fit results
        cls.y_quad_data = {'detector_count': 256,
                           'peak_count': 11,
                           'y_quad': y_quad,
                           'coefficients': {'A0': -0.502, 'A1': 0.00435287724834028, 'A2': -3.306169908908442e-06},
                           'tube_points': tube_points,
                           'ideal_tube_coordinates': ideal_tube_coordinates}

        # Load a CORELLI file containing data for bank number 20 (16 tubes)
        config.appendDataSearchSubDir('CORELLI/calibration')
        # locate the unit-test data directory among the configured search dirs
        for directory in config.getDataSearchDirs():
            if 'UnitTest' in directory:
                data_dir = path.join(directory, 'CORELLI', 'calibration')
                break
        workspace = 'CORELLI_123455_bank20'
        LoadNexusProcessed(Filename=path.join(data_dir, workspace + '.nxs'), OutputWorkspace=workspace)
        assert AnalysisDataService.doesExist(workspace)
        cls.workspaces_temporary.append(workspace)  # delete workspace at tear-down
        cls.corelli = {'tube_length': 0.900466,  # in meters
                       'pixels_per_tube': 256,
                       'workspace': workspace}

    @classmethod
    def tearDownClass(cls) -> None:  # called only after all tests in the test case have run
        r"""Delete the workspaces associated to the test cases"""
        if len(cls.workspaces_temporary) > 0:
            DeleteWorkspaces(cls.workspaces_temporary)

    def test_correct_tube_to_ideal_tube(self):
        # Verify the quadratic fit works
        data = self.y_quad_data
        # fit the Y-coordinates to the pixel positions with a default quadratic function
        fitted_coordinates = correct_tube_to_ideal_tube(data['tube_points'],
                                                        data['ideal_tube_coordinates'],
                                                        data['detector_count'],
                                                        parameters_table='parameters')
        # Verify the fitted coordinates are the ideal_tube_coordinates
        assert_allclose([fitted_coordinates[int(n)] for n in data['tube_points']],
                        data['ideal_tube_coordinates'], atol=0.0001)
        # Compare fitting coefficients
        assert AnalysisDataService.doesExist('parameters')
        # here retrieve the fitting coefficients from the 'parameters' table and compare to the expected values
        expected = data['coefficients']
        for row in mtd['parameters']:
            if row['Name'] in expected:
                self.assertAlmostEqual(row['Value'], expected[row['Name']], places=6)
        # a bit of clean-up
        DeleteWorkspaces(['PolyFittingWorkspace', 'QF_NormalisedCovarianceMatrix',
                          'QF_Parameters', 'QF_Workspace', 'parameters'])

    def test_getCalibratedPixelPositions(self):
        data = self.y_quad_data
        # calibrate the first tube of bank 20 in the corelli input workspace
        detector_ids, detector_positions = \
            getCalibratedPixelPositions(self.corelli['workspace'],
                                        data['tube_points'],
                                        data['ideal_tube_coordinates'],
                                        range(0, self.corelli['pixels_per_tube']),  # first 256 workspace indexes
                                        parameters_table='parameters')
        # 77824 is the detector ID for the first pixel in bank 20
        self.assertEqual(detector_ids, list(range(77824, 77824 + self.corelli['pixels_per_tube'])))
        # Assert the detector positions were adjusted to the input quadratic function
        y_quad = data['y_quad']
        # -1.30686 is presumably the Y-coordinate of the tube's first pixel in the
        # instrument -- TODO confirm against the CORELLI instrument definition
        expected_y = (-1.30686 - y_quad(0)) + np.array([y_quad(n) for n in range(0, self.corelli['pixels_per_tube'])])
        assert_allclose([xyz[1] for xyz in detector_positions], expected_y, atol=0.0001)
        # here retrieve the fitting coefficients from the 'parameters' table and compare to the expected values
        assert AnalysisDataService.doesExist('parameters')
        expected = data['coefficients']
        for row in mtd['parameters']:
            if row['Name'] in expected:
                self.assertAlmostEqual(row['Value'], expected[row['Name']], places=6)
        # a bit of clean-up
        DeleteWorkspaces(['parameters', 'PolyFittingWorkspace', 'QF_NormalisedCovarianceMatrix',
                          'QF_Parameters', 'QF_Workspace'])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
henryfjordan/django | tests/m2m_through_regress/models.py | 273 | 2771 | from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# Forward declared intermediate model
@python_2_unicode_compatible
class Membership(models.Model):
    """Intermediate ('through') model for Group.members, declared before Person/Group."""
    person = models.ForeignKey('Person', models.CASCADE)
    group = models.ForeignKey('Group', models.CASCADE)
    price = models.IntegerField(default=100)  # extra data carried on the relation

    def __str__(self):
        return "%s is a member of %s" % (self.person.name, self.group.name)
# using custom id column to test ticket #11107
@python_2_unicode_compatible
class UserMembership(models.Model):
    """Intermediate model for Group.user_members, with a custom-named PK column."""
    id = models.AutoField(db_column='usermembership_id', primary_key=True)
    user = models.ForeignKey(User, models.CASCADE)
    group = models.ForeignKey('Group', models.CASCADE)
    price = models.IntegerField(default=100)  # extra data carried on the relation

    def __str__(self):
        return "%s is a user and member of %s" % (self.user.username, self.group.name)
@python_2_unicode_compatible
class Person(models.Model):
    """A person that can belong to a Group through Membership."""
    name = models.CharField(max_length=128)

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Group(models.Model):
    """A group with two M2M relations that go through explicit intermediate models."""
    name = models.CharField(max_length=128)
    # Membership object defined as a class
    members = models.ManyToManyField(Person, through=Membership)
    user_members = models.ManyToManyField(User, through='UserMembership')

    def __str__(self):
        return self.name
# A set of models that use an non-abstract inherited model as the 'through' model.
class A(models.Model):
    # One end of the B.a_list relation (through the inherited Through model).
    a_text = models.CharField(max_length=20)
class ThroughBase(models.Model):
    # Concrete (non-abstract) base that holds both FKs; Through inherits from it.
    a = models.ForeignKey(A, models.CASCADE)
    b = models.ForeignKey('B', models.CASCADE)
class Through(ThroughBase):
    # The actual 'through' model: inherits its FKs from ThroughBase.
    extra = models.CharField(max_length=20)
class B(models.Model):
    # Other end of the relation that uses the inherited Through model.
    b_text = models.CharField(max_length=20)
    a_list = models.ManyToManyField(A, through=Through)
# Using to_field on the through model
@python_2_unicode_compatible
class Car(models.Model):
    """Car related to Driver through CarDriver, which targets non-PK fields."""
    make = models.CharField(max_length=20, unique=True, null=True)  # to_field target
    drivers = models.ManyToManyField('Driver', through='CarDriver')

    def __str__(self):
        return "%s" % self.make
@python_2_unicode_compatible
class Driver(models.Model):
    """Driver side of the Car M2M; 'name' is the to_field target in CarDriver."""
    name = models.CharField(max_length=20, unique=True, null=True)

    def __str__(self):
        return "%s" % self.name

    class Meta:
        ordering = ('name',)
@python_2_unicode_compatible
class CarDriver(models.Model):
    """Through model whose FKs reference non-PK unique fields via to_field."""
    car = models.ForeignKey('Car', models.CASCADE, to_field='make')
    driver = models.ForeignKey('Driver', models.CASCADE, to_field='name')

    def __str__(self):
        return "pk=%s car=%s driver=%s" % (str(self.pk), self.car, self.driver)
| bsd-3-clause |
midori1/midorinoblog | site-packages/django/contrib/messages/storage/fallback.py | 704 | 2172 | from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import CookieStorage
from django.contrib.messages.storage.session import SessionStorage
class FallbackStorage(BaseStorage):
    """
    Tries to store all messages in the first backend, storing any unstored
    messages in each subsequent backend.
    """
    # Order matters: CookieStorage is tried first, SessionStorage catches overflow.
    storage_classes = (CookieStorage, SessionStorage)

    def __init__(self, *args, **kwargs):
        super(FallbackStorage, self).__init__(*args, **kwargs)
        # One instance per backend class, all sharing the same request args.
        self.storages = [storage_class(*args, **kwargs)
                         for storage_class in self.storage_classes]
        self._used_storages = set()

    def _get(self, *args, **kwargs):
        """
        Gets a single list of messages from all storage backends.
        """
        # NOTE(review): if ``storage_classes`` were empty, ``all_retrieved``
        # below would be unbound -- harmless with the default two backends.
        all_messages = []
        for storage in self.storages:
            messages, all_retrieved = storage._get()
            # If the backend hasn't been used, no more retrieval is necessary.
            if messages is None:
                break
            if messages:
                self._used_storages.add(storage)
            all_messages.extend(messages)
            # If this storage class contained all the messages, no further
            # retrieval is necessary
            if all_retrieved:
                break
        return all_messages, all_retrieved

    def _store(self, messages, response, *args, **kwargs):
        """
        Stores the messages, returning any unstored messages after trying all
        backends.

        For each storage backend, any messages not stored are passed on to the
        next backend.
        """
        for storage in self.storages:
            if messages:
                # remove_oldest=False: overflow falls through to the next
                # backend instead of being dropped.
                messages = storage._store(messages, response,
                                          remove_oldest=False)
            # Even if there are no more messages, continue iterating to ensure
            # storages which contained messages are flushed.
            elif storage in self._used_storages:
                storage._store([], response)
                self._used_storages.remove(storage)
        return messages
| apache-2.0 |
druuu/django | tests/template_tests/filter_tests/test_unordered_list.py | 204 | 8179 | from django.template.defaultfilters import unordered_list
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import python_2_unicode_compatible
from django.utils.safestring import mark_safe
from ..utils import setup
class UnorderedListTests(SimpleTestCase):
    """Template-level tests of the |unordered_list filter, with and without autoescape."""

    @setup({'unordered_list01': '{{ a|unordered_list }}'})
    def test_unordered_list01(self):
        output = self.engine.render_to_string('unordered_list01', {'a': ['x>', ['<y']]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li>&lt;y</li>\n\t</ul>\n\t</li>')

    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'unordered_list02': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
    def test_unordered_list02(self):
        output = self.engine.render_to_string('unordered_list02', {'a': ['x>', ['<y']]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')

    @setup({'unordered_list03': '{{ a|unordered_list }}'})
    def test_unordered_list03(self):
        # mark_safe suppresses escaping even with autoescape on
        output = self.engine.render_to_string('unordered_list03', {'a': ['x>', [mark_safe('<y')]]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')

    @setup({'unordered_list04': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
    def test_unordered_list04(self):
        output = self.engine.render_to_string('unordered_list04', {'a': ['x>', [mark_safe('<y')]]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')

    @setup({'unordered_list05': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
    def test_unordered_list05(self):
        output = self.engine.render_to_string('unordered_list05', {'a': ['x>', ['<y']]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@ignore_warnings(category=RemovedInDjango110Warning)
class DeprecatedUnorderedListSyntaxTests(SimpleTestCase):
    """Same template tests using the legacy [item, [children, []]] nesting syntax."""

    @setup({'unordered_list01': '{{ a|unordered_list }}'})
    def test_unordered_list01(self):
        output = self.engine.render_to_string('unordered_list01', {'a': ['x>', [['<y', []]]]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li>&lt;y</li>\n\t</ul>\n\t</li>')

    @setup({'unordered_list02': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
    def test_unordered_list02(self):
        output = self.engine.render_to_string('unordered_list02', {'a': ['x>', [['<y', []]]]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')

    @setup({'unordered_list03': '{{ a|unordered_list }}'})
    def test_unordered_list03(self):
        output = self.engine.render_to_string('unordered_list03', {'a': ['x>', [[mark_safe('<y'), []]]]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')

    @setup({'unordered_list04': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
    def test_unordered_list04(self):
        output = self.engine.render_to_string('unordered_list04', {'a': ['x>', [[mark_safe('<y'), []]]]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')

    @setup({'unordered_list05': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
    def test_unordered_list05(self):
        output = self.engine.render_to_string('unordered_list05', {'a': ['x>', [['<y', []]]]})
        self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
class FunctionTests(SimpleTestCase):
    """Direct (non-template) tests of the unordered_list filter function."""

    def test_list(self):
        self.assertEqual(unordered_list(['item 1', 'item 2']), '\t<li>item 1</li>\n\t<li>item 2</li>')

    def test_nested(self):
        self.assertEqual(
            unordered_list(['item 1', ['item 1.1']]),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>',
        )

    def test_nested2(self):
        self.assertEqual(
            unordered_list(['item 1', ['item 1.1', 'item1.2'], 'item 2']),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t\t<li>item1.2'
            '</li>\n\t</ul>\n\t</li>\n\t<li>item 2</li>',
        )

    def test_nested3(self):
        self.assertEqual(
            unordered_list(['item 1', 'item 2', ['item 2.1']]),
            '\t<li>item 1</li>\n\t<li>item 2\n\t<ul>\n\t\t<li>item 2.1'
            '</li>\n\t</ul>\n\t</li>',
        )

    def test_nested_multiple(self):
        self.assertEqual(
            unordered_list(['item 1', ['item 1.1', ['item 1.1.1', ['item 1.1.1.1']]]]),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1\n\t\t<ul>\n\t\t\t<li>'
            'item 1.1.1\n\t\t\t<ul>\n\t\t\t\t<li>item 1.1.1.1</li>\n\t\t\t'
            '</ul>\n\t\t\t</li>\n\t\t</ul>\n\t\t</li>\n\t</ul>\n\t</li>',
        )

    def test_nested_multiple2(self):
        self.assertEqual(
            unordered_list(['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]),
            '\t<li>States\n\t<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>'
            'Lawrence</li>\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>'
            '\n\t\t<li>Illinois</li>\n\t</ul>\n\t</li>',
        )

    def test_autoescape(self):
        self.assertEqual(
            unordered_list(['<a>item 1</a>', 'item 2']),
            '\t<li>&lt;a&gt;item 1&lt;/a&gt;</li>\n\t<li>item 2</li>',
        )

    def test_autoescape_off(self):
        self.assertEqual(
            unordered_list(['<a>item 1</a>', 'item 2'], autoescape=False),
            '\t<li><a>item 1</a></li>\n\t<li>item 2</li>',
        )

    def test_ulitem(self):
        # items may be arbitrary objects; their str() is escaped
        @python_2_unicode_compatible
        class ULItem(object):
            def __init__(self, title):
                self.title = title

            def __str__(self):
                return 'ulitem-%s' % str(self.title)

        a = ULItem('a')
        b = ULItem('b')
        c = ULItem('<a>c</a>')
        self.assertEqual(
            unordered_list([a, b, c]),
            '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>\n\t<li>ulitem-&lt;a&gt;c&lt;/a&gt;</li>',
        )

        def item_generator():
            yield a
            yield b
            yield c

        self.assertEqual(
            unordered_list(item_generator()),
            '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>\n\t<li>ulitem-&lt;a&gt;c&lt;/a&gt;</li>',
        )

    def test_ulitem_autoescape_off(self):
        @python_2_unicode_compatible
        class ULItem(object):
            def __init__(self, title):
                self.title = title

            def __str__(self):
                return 'ulitem-%s' % str(self.title)

        a = ULItem('a')
        b = ULItem('b')
        c = ULItem('<a>c</a>')
        self.assertEqual(
            unordered_list([a, b, c], autoescape=False),
            '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>\n\t<li>ulitem-<a>c</a></li>',
        )

        def item_generator():
            yield a
            yield b
            yield c

        self.assertEqual(
            unordered_list(item_generator(), autoescape=False),
            '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>\n\t<li>ulitem-<a>c</a></li>',
        )

    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_legacy(self):
        """
        Old format for unordered lists should still work
        """
        self.assertEqual(unordered_list(['item 1', []]), '\t<li>item 1</li>')

        self.assertEqual(
            unordered_list(['item 1', [['item 1.1', []]]]),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>',
        )

        self.assertEqual(
            unordered_list(['item 1', [['item 1.1', []],
                           ['item 1.2', []]]]), '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1'
            '</li>\n\t\t<li>item 1.2</li>\n\t</ul>\n\t</li>',
        )

        self.assertEqual(
            unordered_list(['States', [['Kansas', [['Lawrence', []], ['Topeka', []]]], ['Illinois', []]]]),
            '\t<li>States\n\t<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>Lawrence</li>'
            '\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>\n\t\t<li>Illinois</li>\n\t</ul>\n\t</li>',
        )
| bsd-3-clause |
jostep/tensorflow | tensorflow/contrib/bayesflow/python/kernel_tests/stochastic_graph_test.py | 76 | 10239 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for stochastic graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import distributions as distributions_lib
from tensorflow.contrib.bayesflow.python.ops import stochastic_graph_impl
from tensorflow.contrib.bayesflow.python.ops import stochastic_tensor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
# Short aliases used throughout the tests below.
st = stochastic_tensor
sg = stochastic_graph_impl
distributions = distributions_lib
class NormalNotParam(distributions.Normal):
    """Normal distribution that reports itself as NOT reparameterized.

    Forces surrogate_loss to add score-function (log-prob) terms instead of
    relying on pathwise derivatives.
    """

    @property
    def reparameterization_type(self):
        return distributions.NOT_REPARAMETERIZED
class TestSurrogateLosses(test.TestCase):
    """Tests for sg.surrogate_loss: which score-function terms get added and when."""

    def testPathwiseDerivativeDoesNotAddSurrogateLosses(self):
        with self.test_session():
            mu = [0.0, 0.1, 0.2]
            sigma = constant_op.constant([1.1, 1.2, 1.3])
            with st.value_type(st.SampleValue()):
                prior = st.StochasticTensor(distributions.Normal(loc=mu, scale=sigma))
                likelihood = st.StochasticTensor(
                    distributions.Normal(
                        loc=prior, scale=sigma))
                self.assertEqual(
                    prior.distribution.reparameterization_type,
                    distributions.FULLY_REPARAMETERIZED)
                self.assertEqual(
                    likelihood.distribution.reparameterization_type,
                    distributions.FULLY_REPARAMETERIZED)

            loss = math_ops.square(array_ops.identity(likelihood) - [0.0, 0.1, 0.2])
            sum_loss = math_ops.reduce_sum(loss)
            surrogate_loss = sg.surrogate_loss([loss])
            # scalar losses are rejected; loss tensors must have rank >= 1
            with self.assertRaisesRegexp(ValueError, "dimensionality 1 or greater"):
                _ = sg.surrogate_loss([sum_loss])
            surrogate_from_both = sg.surrogate_loss(
                [loss, sum_loss * array_ops.ones_like(loss)])

        # Pathwise derivative terms do not require add'l surrogate loss terms.
        with self.test_session() as sess:
            self.assertAllClose(*sess.run([loss, surrogate_loss]))
            self.assertAllClose(*sess.run([(loss + sum_loss), surrogate_from_both]))

    def _testSurrogateLoss(self, session, losses, expected_addl_terms, xs):
        """Assert surrogate_loss(losses) == sum(losses) + expected_addl_terms, values and grads."""
        surrogate_loss = sg.surrogate_loss(losses)
        expected_surrogate_loss = math_ops.add_n(losses + expected_addl_terms)
        self.assertAllClose(*session.run([surrogate_loss, expected_surrogate_loss]))

        # Test backprop
        expected_grads = gradients_impl.gradients(ys=expected_surrogate_loss, xs=xs)
        surrogate_grads = gradients_impl.gradients(ys=surrogate_loss, xs=xs)
        self.assertEqual(len(expected_grads), len(surrogate_grads))
        grad_values = session.run(expected_grads + surrogate_grads)
        n_grad = len(expected_grads)
        self.assertAllClose(grad_values[:n_grad], grad_values[n_grad:])

    def testSurrogateLoss(self):
        with self.test_session() as sess:
            mu = constant_op.constant([0.0, 0.1, 0.2])
            sigma = constant_op.constant([1.1, 1.2, 1.3])
            with st.value_type(st.SampleValue()):
                prior = st.StochasticTensor(NormalNotParam(loc=mu, scale=sigma))
                likelihood = st.StochasticTensor(NormalNotParam(loc=prior, scale=sigma))
                prior_2 = st.StochasticTensor(NormalNotParam(loc=mu, scale=sigma))

            loss = math_ops.square(array_ops.identity(likelihood) - mu)
            part_loss = math_ops.square(array_ops.identity(prior) - mu)
            sum_loss = math_ops.reduce_sum(loss)
            loss_nodeps = math_ops.square(array_ops.identity(prior_2) - mu)

            # For ground truth, use the stop-gradient versions of the losses
            loss_nograd = array_ops.stop_gradient(loss)
            loss_nodeps_nograd = array_ops.stop_gradient(loss_nodeps)
            sum_loss_nograd = array_ops.stop_gradient(sum_loss)

            # These score functions should ignore prior_2
            self._testSurrogateLoss(
                session=sess,
                losses=[loss],
                expected_addl_terms=[
                    likelihood.distribution.log_prob(
                        likelihood.value()) * loss_nograd,
                    prior.distribution.log_prob(prior.value()) * loss_nograd
                ],
                xs=[mu, sigma])

            self._testSurrogateLoss(
                session=sess,
                losses=[loss, part_loss],
                expected_addl_terms=[
                    likelihood.distribution.log_prob(
                        likelihood.value()) * loss_nograd,
                    (prior.distribution.log_prob(prior.value()) *
                     array_ops.stop_gradient(part_loss + loss))
                ],
                xs=[mu, sigma])

            self._testSurrogateLoss(
                session=sess,
                losses=[sum_loss * array_ops.ones_like(loss)],
                expected_addl_terms=[(
                    likelihood.distribution.log_prob(likelihood.value()) *
                    sum_loss_nograd), prior.distribution.log_prob(prior.value()) *
                                      sum_loss_nograd],
                xs=[mu, sigma])

            self._testSurrogateLoss(
                session=sess,
                losses=[loss, sum_loss * array_ops.ones_like(loss)],
                expected_addl_terms=[(
                    likelihood.distribution.log_prob(likelihood.value()) *
                    array_ops.stop_gradient(loss + sum_loss)),
                    (prior.distribution.log_prob(prior.value()) *
                     array_ops.stop_gradient(loss + sum_loss))],
                xs=[mu, sigma])

            # These score functions should ignore prior and likelihood
            self._testSurrogateLoss(
                session=sess,
                losses=[loss_nodeps],
                expected_addl_terms=[(prior_2.distribution.log_prob(prior_2.value()) *
                                      loss_nodeps_nograd)],
                xs=[mu, sigma])

            # These score functions should include all terms selectively
            self._testSurrogateLoss(
                session=sess,
                losses=[loss, loss_nodeps],
                # We can't guarantee ordering of output losses in this case.
                expected_addl_terms=[(
                    likelihood.distribution.log_prob(likelihood.value()) *
                    loss_nograd), prior.distribution.log_prob(prior.value()) *
                                  loss_nograd,
                    (prior_2.distribution.log_prob(prior_2.value()) *
                     loss_nodeps_nograd)],
                xs=[mu, sigma])

    def testNoSurrogateLoss(self):
        with self.test_session():
            mu = constant_op.constant([0.0, 0.1, 0.2])
            sigma = constant_op.constant([1.1, 1.2, 1.3])
            with st.value_type(st.SampleValue()):
                # loss_fn=None disables the surrogate-loss contribution entirely
                dt = st.StochasticTensor(
                    NormalNotParam(
                        loc=mu, scale=sigma), loss_fn=None)
                self.assertEqual(None, dt.loss(constant_op.constant([2.0])))

    def testExplicitStochasticTensors(self):
        with self.test_session() as sess:
            mu = constant_op.constant([0.0, 0.1, 0.2])
            sigma = constant_op.constant([1.1, 1.2, 1.3])
            with st.value_type(st.SampleValue()):
                dt1 = st.StochasticTensor(NormalNotParam(loc=mu, scale=sigma))
                dt2 = st.StochasticTensor(NormalNotParam(loc=mu, scale=sigma))
                loss = math_ops.square(array_ops.identity(dt1)) + 10. + dt2

                # stochastic_tensors=... restricts which tensors contribute terms
                sl_all = sg.surrogate_loss([loss])
                sl_dt1 = sg.surrogate_loss([loss], stochastic_tensors=[dt1])
                sl_dt2 = sg.surrogate_loss([loss], stochastic_tensors=[dt2])

                dt1_term = dt1.distribution.log_prob(dt1) * loss
                dt2_term = dt2.distribution.log_prob(dt2) * loss

                self.assertAllClose(*sess.run(
                    [sl_all, sum([loss, dt1_term, dt2_term])]))
                self.assertAllClose(*sess.run([sl_dt1, sum([loss, dt1_term])]))
                self.assertAllClose(*sess.run([sl_dt2, sum([loss, dt2_term])]))
class StochasticDependenciesMapTest(test.TestCase):
    """Tests for sg._stochastic_dependencies_map: StochasticTensor -> downstream losses."""

    def testBuildsMapOfUpstreamNodes(self):
        dt1 = st.StochasticTensor(distributions.Normal(loc=0., scale=1.))
        dt2 = st.StochasticTensor(distributions.Normal(loc=0., scale=1.))
        out1 = dt1.value() + 1.
        out2 = dt2.value() + 2.
        x = out1 + out2
        y = out2 * 3.
        dep_map = sg._stochastic_dependencies_map([x, y])
        self.assertEqual(dep_map[dt1], set([x]))
        self.assertEqual(dep_map[dt2], set([x, y]))

    def testHandlesStackedStochasticNodes(self):
        # dt2's distribution is parameterized by dt1's value
        dt1 = st.StochasticTensor(distributions.Normal(loc=0., scale=1.))
        out1 = dt1.value() + 1.
        dt2 = st.StochasticTensor(distributions.Normal(loc=out1, scale=1.))
        x = dt2.value() + 2.
        dt3 = st.StochasticTensor(distributions.Normal(loc=0., scale=1.))
        y = dt3.value() * 3.
        dep_map = sg._stochastic_dependencies_map([x, y])
        self.assertEqual(dep_map[dt1], set([x]))
        self.assertEqual(dep_map[dt2], set([x]))
        self.assertEqual(dep_map[dt3], set([y]))

    def testTraversesControlInputs(self):
        dt1 = st.StochasticTensor(distributions.Normal(loc=0., scale=1.))
        logits = dt1.value() * 3.
        dt2 = st.StochasticTensor(distributions.Bernoulli(logits=logits))
        dt3 = st.StochasticTensor(distributions.Normal(loc=0., scale=1.))
        x = dt3.value()
        y = array_ops.ones((2, 2)) * 4.
        z = array_ops.ones((2, 2)) * 3.
        # dt2 only reaches `out` through cond's control-flow inputs
        out = control_flow_ops.cond(
            math_ops.cast(dt2, dtypes.bool), lambda: math_ops.add(x, y),
            lambda: math_ops.square(z))

        out += 5.

        dep_map = sg._stochastic_dependencies_map([out])
        self.assertEqual(dep_map[dt1], set([out]))
        self.assertEqual(dep_map[dt2], set([out]))
        self.assertEqual(dep_map[dt3], set([out]))
# Allow running this test module directly.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
anpingli/openshift-ansible | roles/lib_openshift/src/class/oc_group.py | 16 | 4507 | # pylint: skip-file
# flake8: noqa
class OCGroup(OpenShiftCLI):
    ''' Class to wrap the oc command line tools '''
    kind = 'group'  # OpenShift resource kind passed to the oc verbs

    def __init__(self,
                 config,
                 verbose=False):
        ''' Constructor for OCGroup

        :param config: GroupConfig holding name, namespace, kubeconfig and data
        :param verbose: currently unused here; accepted for interface parity
        '''
        super(OCGroup, self).__init__(config.namespace, config.kubeconfig)
        self.config = config
        self.namespace = config.namespace
        self._group = None  # lazily populated by get()

    @property
    def group(self):
        ''' property function service'''
        if not self._group:
            self.get()
        return self._group

    @group.setter
    def group(self, data):
        ''' setter function for yedit var '''
        self._group = data

    def exists(self):
        ''' return whether a group exists '''
        if self.group:
            return True

        return False

    def get(self):
        '''return group information '''
        result = self._get(self.kind, self.config.name)
        if result['returncode'] == 0:
            self.group = Group(content=result['results'][0])
        elif 'groups \"{}\" not found'.format(self.config.name) in result['stderr']:
            # "not found" is a normal outcome, not an error: report success
            # with an empty result so callers can branch on existence.
            result['returncode'] = 0
            result['results'] = [{}]

        return result

    def delete(self):
        '''delete the object'''
        return self._delete(self.kind, self.config.name)

    def create(self):
        '''create the object'''
        return self._create_from_content(self.config.name, self.config.data)

    def update(self):
        '''update the object'''
        return self._replace_content(self.kind, self.config.name, self.config.data)

    def needs_update(self):
        ''' verify an update is needed '''
        # 'users' is managed separately (oc adm groups), so exclude it from the diff
        return not Utils.check_def_equal(self.config.data, self.group.yaml_dict, skip_keys=['users'], debug=True)

    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def run_ansible(params, check_mode=False):
        '''run the idempotent ansible code

        :param params: ansible module params (name, namespace, kubeconfig, state, debug)
        :param check_mode: when True, report what would change without changing it
        :returns: ansible result dict with 'changed'/'results'/'state' or 'failed'/'msg'
        '''
        gconfig = GroupConfig(params['name'],
                              params['namespace'],
                              params['kubeconfig'],
                             )
        oc_group = OCGroup(gconfig, verbose=params['debug'])

        state = params['state']

        api_rval = oc_group.get()
        if api_rval['returncode'] != 0:
            return {'failed': True, 'msg': api_rval}

        #####
        # Get
        #####
        if state == 'list':
            return {'changed': False, 'results': api_rval['results'], 'state': state}

        ########
        # Delete
        ########
        if state == 'absent':
            if oc_group.exists():

                if check_mode:
                    return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a delete.'}

                api_rval = oc_group.delete()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}

                return {'changed': True, 'results': api_rval, 'state': state}

            return {'changed': False, 'state': state}

        if state == 'present':
            ########
            # Create
            ########
            if not oc_group.exists():

                if check_mode:
                    return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a create.'}

                # Create it here
                api_rval = oc_group.create()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}

                # return the created object
                api_rval = oc_group.get()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}

                return {'changed': True, 'results': api_rval, 'state': state}

            ########
            # Update
            ########
            if oc_group.needs_update():
                api_rval = oc_group.update()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}

                # return the created object
                api_rval = oc_group.get()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}

                return {'changed': True, 'results': api_rval, 'state': state}

            return {'changed': False, 'results': api_rval, 'state': state}

        return {'failed': True, 'msg': 'Unknown state passed. {}'.format(state)}
| apache-2.0 |
quarckster/cfme_tests | cfme/utils/ansible.py | 7 | 12206 | import tempfile
from os import listdir, mkdir, makedirs, path
from shutil import copy, copyfile, rmtree
from subprocess import check_output, CalledProcessError, STDOUT
import sys
from fauxfactory import gen_alphanumeric
from cfme.utils import conf
from cfme.utils.providers import providers_data
from cfme.utils.appliance import current_appliance
from git import Repo
from yaml import load, dump
# Scratch checkout directory name for the manageiq ansible module repo.
local_git_repo = "manageiq_ansible_module"
yml_path = path.join(path.dirname(__file__), local_git_repo)
# Directory holding the *_basic_script.yml templates shipped with this package.
yml_templates_path = path.join(path.dirname(__file__), 'ansible_conf')
basic_script = "basic_script.yml"
yml = ".yml"  # file extension appended when building script paths
# Random fixtures used by the ansible tests.
random_token = str(gen_alphanumeric(906))
random_miq_user = str(gen_alphanumeric(8))
pulled_repo_library_path = path.join(local_git_repo, 'library')
remote_git_repo_url = "git://github.com/dkorn/manageiq-ansible-module.git"
def create_tmp_directory():
    """Create a scratch tree <tmp>/ansible_conf/library and publish its paths.

    Side effects: sets the module globals ``lib_path``,
    ``library_path_to_copy_to`` and ``basic_yml_path``, which the other
    helpers in this module consume.
    """
    global lib_path
    lib_path = tempfile.mkdtemp()
    lib_sub_path = 'ansible_conf'
    lib_sub_path_library = path.join(lib_sub_path, 'library')
    makedirs(path.join((lib_path), lib_sub_path_library))
    global library_path_to_copy_to
    global basic_yml_path
    library_path_to_copy_to = path.join(lib_path, lib_sub_path_library)
    basic_yml_path = path.join(lib_path, lib_sub_path)
def fetch_miq_ansible_module():
    """Clone the manageiq ansible module repo and copy its library files.

    Clones ``remote_git_repo_url`` into a fresh local checkout, copies every
    file from its 'library' subdirectory into ``library_path_to_copy_to``
    (set by create_tmp_directory), then removes the checkout.
    """
    # start from clean local copies of both directories
    if path.isdir(local_git_repo):
        rmtree(local_git_repo)
    mkdir(local_git_repo)
    if path.isdir(library_path_to_copy_to):
        rmtree(library_path_to_copy_to)
    mkdir(library_path_to_copy_to)
    Repo.clone_from(remote_git_repo_url, local_git_repo)
    src_files = listdir(pulled_repo_library_path)
    for file_name in src_files:
        full_file_name = path.join(pulled_repo_library_path, file_name)
        if path.isfile(full_file_name):
            copy(full_file_name, library_path_to_copy_to)
    rmtree(local_git_repo)
def get_values_for_providers_test(provider):
    """Build the manageiq_provider task parameters for *provider* from conf."""
    return {
        'name': provider.name,
        'state': 'present',
        'miq_url': config_formatter(),
        'miq_username': conf.credentials['default'].username,
        'miq_password': conf.credentials['default'].password,
        'provider_api_hostname': providers_data[provider.name]['endpoints']['default'].hostname,
        'provider_api_port': providers_data[provider.name]['endpoints']['default'].api_port,
        'provider_api_auth_token': providers_data[provider.name]['endpoints']['default'].token,
        'monitoring_hostname': providers_data[provider.name]['endpoints']['hawkular'].hostname,
        'monitoring_port': providers_data[provider.name]['endpoints']['hawkular'].api_port
    }
def get_values_for_users_test():
    """Build the manageiq_user task parameters (fixed test user) from conf."""
    return {
        'fullname': 'MIQUser',
        'name': 'MIQU',
        'password': 'smartvm',
        'state': 'present',
        'miq_url': config_formatter(),
        'miq_username': conf.credentials['default'].username,
        'miq_password': conf.credentials['default'].password,
    }
def get_values_for_custom_attributes_test(provider):
    """Build the manageiq_custom_attributes task parameters for *provider*."""
    return {
        'entity_type': 'provider',
        'entity_name': conf.cfme_data.get('management_systems', {})
        [provider.key].get('name', []),
        'miq_url': config_formatter(),
        'miq_username': conf.credentials['default'].username,
        'miq_password': conf.credentials['default'].password,
    }
def get_values_for_tags_test(provider):
    """Build the manageiq_tag_assignment task parameters for *provider*."""
    return {
        'resource': 'provider',
        'resource_name': provider.name,
        'miq_url': config_formatter(),
        'miq_username': conf.credentials['default'].username,
        'miq_password': conf.credentials['default'].password,
    }
def get_values_from_conf(provider, script_type):
    """Return the config-derived values for *script_type*.

    Unknown script types yield ``None``, matching the original
    fall-through behaviour.
    """
    # Lazy builders so only the requested value set is computed.
    builders = {
        'providers': lambda: get_values_for_providers_test(provider),
        'users': lambda: get_values_for_users_test(),
        'custom_attributes': lambda: get_values_for_custom_attributes_test(provider),
        'tags': lambda: get_values_for_tags_test(provider),
    }
    builder = builders.get(script_type)
    return builder() if builder is not None else None
# TODO Avoid reading files every time
def read_yml(script, value):
    # Read one manageiq_provider task parameter from <yml_path><script><yml>.
    # NOTE(review): yaml.load() without an explicit Loader is deprecated and
    # unsafe on untrusted input; presumably these are trusted test fixtures,
    # but confirm before keeping plain load().  Also near-duplicate of
    # get_yml_value (different base path) -- candidates for merging.
    with open(yml_path + script + yml, 'r') as f:
        doc = load(f)
    return doc[0]['tasks'][0]['manageiq_provider'][value]
def get_yml_value(script, value):
    # Same as read_yml but resolves the script under basic_yml_path.
    # NOTE(review): yaml.load() without a Loader -- see read_yml.
    with open(path.join(basic_yml_path, script) + yml, 'r') as f:
        doc = load(f)
    return doc[0]['tasks'][0]['manageiq_provider'][value]
def setup_basic_script(provider, script_type):
    """Create the basic playbook for *script_type* with real config values.

    Copies the template playbook into the working directory, fills the
    matching manageiq_* task with values derived from the test
    configuration, then writes the result back in place.
    """
    # Ansible module name inside the playbook task, keyed by script type.
    module_keys = {
        'providers': 'manageiq_provider',
        'users': 'manageiq_user',
        'custom_attributes': 'manageiq_custom_attributes',
        'tags': 'manageiq_tag_assignment',
    }
    script_path_source = path.join(yml_templates_path, script_type + "_" + basic_script)
    script_path = path.join(basic_yml_path, script_type + "_" + basic_script)
    copyfile(script_path_source, script_path)
    # BUG FIX: 'rw' is not a valid open() mode in Python 3 (raises
    # ValueError); the file is only read here, so open it read-only.
    with open(script_path, 'r') as f:
        doc = load(f)
    values_dict = get_values_from_conf(provider, script_type)
    module_key = module_keys[script_type]
    for key, value in values_dict.items():
        doc[0]['tasks'][0][module_key][key] = value
    with open(script_path, 'w') as f:
        f.write(dump(doc))
def open_yml(script, script_type):
    """Copy the basic *script_type* playbook to ``<script>`` + yml and parse it.

    Returns the parsed YAML document (a list with a single play).
    """
    copyfile((path.join(basic_yml_path, script_type + "_" + basic_script)),
             path.join(basic_yml_path, script + yml))
    # BUG FIX: 'rw' is not a valid open() mode in Python 3; the file is
    # only read here, so open it read-only.
    with open(path.join(basic_yml_path, script + yml), 'r') as f:
        return load(f)
def write_yml(script, doc):
    # Serialize *doc* back to <basic_yml_path>/<script><yml>, overwriting
    # whatever open_yml previously copied there.
    with open(path.join(basic_yml_path, script + yml), 'w') as f:
        f.write(dump(doc))
def setup_ansible_script(provider, script, script_type=None, values_to_update=None):
    """Prepare the ansible playbook for one test *script* scenario.

    This function prepares the ansible scripts to work with the correct
    appliance configs that will be received from Jenkins.  The basic
    playbook for *script_type* is regenerated, copied to ``<script>.yml``
    and then patched according to the scenario named by *script*.

    ``values_to_update`` is scenario-specific: a mapping of module
    parameters (provider/user updates), a sequence (custom attributes or
    tag dicts with 'category'/'name' keys), or a plain user name for
    ``delete_user``.
    """
    setup_basic_script(provider, script_type)
    doc = open_yml(script, script_type)
    task = doc[0]['tasks'][0]  # every generated playbook has a single task

    def _apply(module, values):
        # Copy scenario overrides into the task's module parameters.
        for key, value in values.items():
            task[module][key] = value

    def _set_tags(state):
        # Rewrite each tag entry positionally and force the requested state.
        # (Replaces the original redundant while+for double loop.)
        for idx, tag in enumerate(values_to_update):
            task['manageiq_tag_assignment']['tags'][idx]['category'] = tag['category']
            task['manageiq_tag_assignment']['tags'][idx]['name'] = tag['name']
        task['manageiq_tag_assignment']['state'] = state

    if script == 'add_provider':
        pass  # the basic playbook already adds the provider as-is
    elif script == 'add_provider_ssl':
        task['manageiq_provider']['provider_verify_ssl'] = 'True'
    elif script == 'update_provider':
        _apply('manageiq_provider', values_to_update)
    elif script == 'remove_provider':
        task['manageiq_provider']['state'] = 'absent'
    elif script == 'remove_non_existing_provider':
        task['manageiq_provider']['state'] = 'absent'
        task['manageiq_provider']['name'] = random_miq_user
    elif script == 'remove_provider_bad_user':
        task['manageiq_provider']['miq_username'] = random_miq_user
    elif script == 'add_provider_bad_token':
        task['manageiq_provider']['provider_api_auth_token'] = random_token
    elif script == 'add_provider_bad_user':
        task['manageiq_provider']['miq_username'] = random_miq_user
    elif script == 'update_non_existing_provider':
        task['manageiq_provider']['provider_api_hostname'] = random_miq_user
    elif script == 'update_provider_bad_user':
        _apply('manageiq_provider', values_to_update)
        task['manageiq_provider']['miq_username'] = random_miq_user
    elif script in ('create_user', 'update_user'):
        _apply('manageiq_user', values_to_update)
    elif script == 'create_user_bad_user_name':
        task['manageiq_user']['miq_username'] = random_miq_user
        _apply('manageiq_user', values_to_update)
    elif script == 'delete_user':
        task['manageiq_user']['name'] = values_to_update
        task['manageiq_user']['state'] = 'absent'
    elif script == 'add_custom_attributes':
        # Each entry replaces the attribute at the matching position.
        for idx, attr in enumerate(values_to_update):
            task['manageiq_custom_attributes']['custom_attributes'][idx] = attr
    elif script == 'add_custom_attributes_bad_user':
        task['manageiq_custom_attributes']['miq_username'] = str(random_miq_user)
    elif script == 'remove_custom_attributes':
        task['manageiq_custom_attributes']['state'] = 'absent'
        for idx, attr in enumerate(values_to_update):
            task['manageiq_custom_attributes']['custom_attributes'][idx] = attr
    elif script == 'add_tags':
        _set_tags('present')
    elif script == 'remove_tags':
        _set_tags('absent')
    else:
        # Unknown scenario: leave <script>.yml as the unpatched copy, just
        # as the original code did (it skipped the final write).
        return
    write_yml(script, doc)
def run_ansible(script):
    """Execute the generated ``<script>.yml`` playbook and return its output."""
    playbook = path.join(basic_yml_path, script + ".yml")
    # Pin ansible to the interpreter running this test session so module
    # imports resolve against the same environment.
    command = 'ansible-playbook -e ansible_python_interpreter={} {}'.format(
        sys.executable, playbook)
    return run_cmd(command)
def run_cmd(cmd):
    """Run *cmd* in a shell and return its combined stdout/stderr (bytes).

    On failure the captured output of the failed command is printed and
    returned instead of raising, so callers can inspect ansible's error
    text either way.
    """
    try:
        response = check_output(cmd, shell=True, stderr=STDOUT)
    except CalledProcessError as exc:
        print("Status : FAIL", exc.returncode, exc.output)
        return exc.output
    else:
        print("Output: \n{}\n".format(response))
        # BUG FIX: the success path previously fell off the end and
        # returned None, so callers could never see successful output.
        return response
# TODO For further usage with reply statuses test. Not being used at the moment
def reply_status(reply):
    """Summarize an ansible JSON *reply* as ``(outcome, message, ok_marker)``.

    NOTE(review): ``ok`` is compared against the string '0'; if the stats
    counters are ints this branch can never pick 'Failed' -- confirm the
    reply schema before relying on the marker.
    """
    stats = reply['stats']['localhost']
    message = reply['plays'][0]['tasks'][2]['hosts']['localhost']['result']['msg']
    ok_marker = 'Failed' if stats['ok'] == '0' else 'OK'
    # Precedence mirrors the original: changed beats skipped beats failed.
    if stats['changed']:
        outcome = 'Changed'
    elif stats['skipped']:
        outcome = 'Skipped'
    elif stats['failures']:
        outcome = 'Failed'
    else:
        outcome = 'No Change'
    return outcome, message, ok_marker
def config_formatter(appliance=None):
    """Return the URL of *appliance*, defaulting to the current appliance."""
    # `or` (not an explicit None test) preserves the original behaviour for
    # any falsy appliance value.
    target = appliance or current_appliance()
    return target.url
def remove_tmp_files():
    # Best-effort cleanup of the temporary library directory; a missing
    # path is silently ignored.
    rmtree(lib_path, ignore_errors=True)
| gpl-2.0 |
jcftang/ansible | contrib/inventory/openshift.py | 196 | 3274 | #!/usr/bin/env python
# (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
inventory: openshift
short_description: Openshift gears external inventory script
description:
- Generates inventory of Openshift gears using the REST interface
- this permit to reuse playbook to setup an Openshift gear
version_added: None
author: Michael Scherer
'''
try:
import json
except ImportError:
import simplejson as json
import os
import os.path
import sys
import ConfigParser
import StringIO
from ansible.module_utils.urls import open_url
configparser = None
def get_from_rhc_config(variable):
    """Return *variable* from ``~/.openshift/express.conf``, or ``None``.

    The parsed config is cached in the module-level ``configparser`` so
    the file is read at most once per process.
    """
    global configparser
    CONF_FILE = os.path.expanduser('~/.openshift/express.conf')
    if os.path.exists(CONF_FILE):
        if not configparser:
            # BUG FIX: close the config file instead of leaking the handle.
            # A [root] section header is prepended because rhc writes a
            # section-less ini file that ConfigParser cannot parse as-is.
            with open(CONF_FILE, 'r') as conf_file:
                ini_str = '[root]\n' + conf_file.read()
            configparser = ConfigParser.SafeConfigParser()
            configparser.readfp(StringIO.StringIO(ini_str))
        try:
            return configparser.get('root', variable)
        except ConfigParser.NoOptionError:
            return None
def get_config(env_var, config_var):
    """Resolve a setting from the environment, falling back to rhc config.

    Exits the inventory script with an ansible-style failure message when
    the value is defined in neither place.
    """
    value = os.getenv(env_var)
    if value:
        return value
    value = get_from_rhc_config(config_var)
    if value:
        return value
    sys.exit("failed=True msg='missing %s'" % env_var)
def get_json_from_api(url, username, password):
    # Query the Openshift broker REST API (pinned to version 1.5) with
    # HTTP basic auth and return the 'data' payload of the JSON response.
    headers = {'Accept': 'application/json; version=1.5'}
    response = open_url(url, headers=headers, url_username=username, url_password=password)
    return json.loads(response.read())['data']
# Resolve broker credentials/location from env vars or ~/.openshift config.
username = get_config('ANSIBLE_OPENSHIFT_USERNAME', 'default_rhlogin')
password = get_config('ANSIBLE_OPENSHIFT_PASSWORD', 'password')
broker_url = 'https://%s/broker/rest/' % get_config('ANSIBLE_OPENSHIFT_BROKER', 'libra_server')
# Fetch the first domain, then every application inside it.
response = get_json_from_api(broker_url + '/domains', username, password)
response = get_json_from_api("%s/domains/%s/applications" %
                             (broker_url, response[0]['id']), username, password)
# Build one inventory group per application, keyed by the app name taken
# from the gear's hostname.
result = {}
for app in response:
    # ssh://520311404832ce3e570000ff@blog-johndoe.example.org
    (user, host) = app['ssh_url'][6:].split('@')
    app_name = host.split('-')[0]
    result[app_name] = {}
    result[app_name]['hosts'] = []
    result[app_name]['hosts'].append(host)
    result[app_name]['vars'] = {}
    result[app_name]['vars']['ansible_ssh_user'] = user
# Standard dynamic-inventory CLI contract: --list dumps all groups,
# --host returns per-host vars (none defined here).
if len(sys.argv) == 2 and sys.argv[1] == '--list':
    print(json.dumps(result))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
    print(json.dumps({}))
else:
    print("Need an argument, either --list or --host <host>")
| gpl-3.0 |
davidharrigan/django | tests/template_tests/filter_tests/test_make_list.py | 345 | 1611 | from django.template.defaultfilters import make_list
from django.test import SimpleTestCase
from django.test.utils import str_prefix
from django.utils.safestring import mark_safe
from ..utils import setup
class MakeListTests(SimpleTestCase):
    """
    The make_list filter can destroy existing escaping, so the results are
    escaped.
    """

    @setup({'make_list01': '{% autoescape off %}{{ a|make_list }}{% endautoescape %}'})
    def test_make_list01(self):
        # autoescape off: the safe "&" appears unescaped inside the list repr.
        output = self.engine.render_to_string('make_list01', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))

    @setup({'make_list02': '{{ a|make_list }}'})
    def test_make_list02(self):
        # autoescape on: the list repr itself is escaped, so & -> &amp;
        output = self.engine.render_to_string('make_list02', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&amp;']"))

    @setup({'make_list03':
        '{% autoescape off %}{{ a|make_list|stringformat:"s"|safe }}{% endautoescape %}'})
    def test_make_list03(self):
        # stringformat + safe keeps the raw "&" with autoescape off.
        output = self.engine.render_to_string('make_list03', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))

    @setup({'make_list04': '{{ a|make_list|stringformat:"s"|safe }}'})
    def test_make_list04(self):
        # |safe suppresses the escaping even with autoescape on.
        output = self.engine.render_to_string('make_list04', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))
class FunctionTests(SimpleTestCase):
    # Direct unit tests for the make_list filter function itself.

    def test_string(self):
        # A string becomes the list of its characters.
        self.assertEqual(make_list('abc'), ['a', 'b', 'c'])

    def test_integer(self):
        # Non-string input is coerced with str() first.
        self.assertEqual(make_list(1234), ['1', '2', '3', '4'])
| bsd-3-clause |
astropy/astropy | astropy/table/pprint.py | 2 | 28489 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import sys
import re
import fnmatch
import numpy as np
from astropy import log
from astropy.utils.console import Getch, color_print, terminal_size, conf
from astropy.utils.data_info import dtype_info_name
__all__ = []
def default_format_func(format_, val):
    """Fallback formatter: decode bytes as UTF-8, otherwise use ``str``.

    The ``format_`` argument is ignored; this is the formatter used when a
    column has no format set.
    """
    if not isinstance(val, bytes):
        return str(val)
    return val.decode('utf-8', errors='replace')
# The first three functions are helpers for _auto_format_func
def _use_str_for_masked_values(format_func):
"""Wrap format function to trap masked values.
String format functions and most user functions will not be able to deal
with masked values, so we wrap them to ensure they are passed to str().
"""
return lambda format_, val: (str(val) if val is np.ma.masked
else format_func(format_, val))
def _possible_string_format_functions(format_):
"""Iterate through possible string-derived format functions.
A string can either be a format specifier for the format built-in,
a new-style format string, or an old-style format string.
"""
yield lambda format_, val: format(val, format_)
yield lambda format_, val: format_.format(val)
yield lambda format_, val: format_ % val
def get_auto_format_func(
col=None,
possible_string_format_functions=_possible_string_format_functions):
"""
Return a wrapped ``auto_format_func`` function which is used in
formatting table columns. This is primarily an internal function but
gets used directly in other parts of astropy, e.g. `astropy.io.ascii`.
Parameters
----------
col_name : object, optional
Hashable object to identify column like id or name. Default is None.
possible_string_format_functions : func, optional
Function that yields possible string formatting functions
(defaults to internal function to do this).
Returns
-------
Wrapped ``auto_format_func`` function
"""
def _auto_format_func(format_, val):
"""Format ``val`` according to ``format_`` for a plain format specifier,
old- or new-style format strings, or using a user supplied function.
More importantly, determine and cache (in _format_funcs) a function
that will do this subsequently. In this way this complicated logic is
only done for the first value.
Returns the formatted value.
"""
if format_ is None:
return default_format_func(format_, val)
if format_ in col.info._format_funcs:
return col.info._format_funcs[format_](format_, val)
if callable(format_):
format_func = lambda format_, val: format_(val) # noqa
try:
out = format_func(format_, val)
if not isinstance(out, str):
raise ValueError('Format function for value {} returned {} '
'instead of string type'
.format(val, type(val)))
except Exception as err:
# For a masked element, the format function call likely failed
# to handle it. Just return the string representation for now,
# and retry when a non-masked value comes along.
if val is np.ma.masked:
return str(val)
raise ValueError(f'Format function for value {val} failed: {err}')
# If the user-supplied function handles formatting masked elements, use
# it directly. Otherwise, wrap it in a function that traps them.
try:
format_func(format_, np.ma.masked)
except Exception:
format_func = _use_str_for_masked_values(format_func)
else:
# For a masked element, we cannot set string-based format functions yet,
# as all tests below will fail. Just return the string representation
# of masked for now, and retry when a non-masked value comes along.
if val is np.ma.masked:
return str(val)
for format_func in possible_string_format_functions(format_):
try:
# Does this string format method work?
out = format_func(format_, val)
# Require that the format statement actually did something.
if out == format_:
raise ValueError('the format passed in did nothing.')
except Exception:
continue
else:
break
else:
# None of the possible string functions passed muster.
raise ValueError('unable to parse format string {} for its '
'column.'.format(format_))
# String-based format functions will fail on masked elements;
# wrap them in a function that traps them.
format_func = _use_str_for_masked_values(format_func)
col.info._format_funcs[format_] = format_func
return out
return _auto_format_func
def _get_pprint_include_names(table):
"""Get the set of names to show in pprint from the table pprint_include_names
and pprint_exclude_names attributes.
These may be fnmatch unix-style globs.
"""
def get_matches(name_globs, default):
match_names = set()
if name_globs: # For None or () use the default
for name in table.colnames:
for name_glob in name_globs:
if fnmatch.fnmatch(name, name_glob):
match_names.add(name)
break
else:
match_names.update(default)
return match_names
include_names = get_matches(table.pprint_include_names(), table.colnames)
exclude_names = get_matches(table.pprint_exclude_names(), [])
return include_names - exclude_names
class TableFormatter:
@staticmethod
def _get_pprint_size(max_lines=None, max_width=None):
"""Get the output size (number of lines and character width) for Column and
Table pformat/pprint methods.
If no value of ``max_lines`` is supplied then the height of the
screen terminal is used to set ``max_lines``. If the terminal
height cannot be determined then the default will be determined
using the ``astropy.table.conf.max_lines`` configuration item. If a
negative value of ``max_lines`` is supplied then there is no line
limit applied.
The same applies for max_width except the configuration item is
``astropy.table.conf.max_width``.
Parameters
----------
max_lines : int or None
Maximum lines of output (header + data rows)
max_width : int or None
Maximum width (characters) output
Returns
-------
max_lines, max_width : int
"""
if max_lines is None:
max_lines = conf.max_lines
if max_width is None:
max_width = conf.max_width
if max_lines is None or max_width is None:
lines, width = terminal_size()
if max_lines is None:
max_lines = lines
elif max_lines < 0:
max_lines = sys.maxsize
if max_lines < 8:
max_lines = 8
if max_width is None:
max_width = width
elif max_width < 0:
max_width = sys.maxsize
if max_width < 10:
max_width = 10
return max_lines, max_width
def _pformat_col(self, col, max_lines=None, show_name=True, show_unit=None,
show_dtype=False, show_length=None, html=False, align=None):
"""Return a list of formatted string representation of column values.
Parameters
----------
max_lines : int
Maximum lines of output (header + data rows)
show_name : bool
Include column name. Default is True.
show_unit : bool
Include a header row for unit. Default is to show a row
for units only if one or more columns has a defined value
for the unit.
show_dtype : bool
Include column dtype. Default is False.
show_length : bool
Include column length at end. Default is to show this only
if the column is not shown completely.
html : bool
Output column as HTML
align : str
Left/right alignment of columns. Default is '>' (right) for all
columns. Other allowed values are '<', '^', and '0=' for left,
centered, and 0-padded, respectively.
Returns
-------
lines : list
List of lines with formatted column values
outs : dict
Dict which is used to pass back additional values
defined within the iterator.
"""
if show_unit is None:
show_unit = col.info.unit is not None
outs = {} # Some values from _pformat_col_iter iterator that are needed here
col_strs_iter = self._pformat_col_iter(col, max_lines, show_name=show_name,
show_unit=show_unit,
show_dtype=show_dtype,
show_length=show_length,
outs=outs)
col_strs = list(col_strs_iter)
if len(col_strs) > 0:
col_width = max(len(x) for x in col_strs)
if html:
from astropy.utils.xml.writer import xml_escape
n_header = outs['n_header']
for i, col_str in enumerate(col_strs):
# _pformat_col output has a header line '----' which is not needed here
if i == n_header - 1:
continue
td = 'th' if i < n_header else 'td'
val = f'<{td}>{xml_escape(col_str.strip())}</{td}>'
row = ('<tr>' + val + '</tr>')
if i < n_header:
row = ('<thead>' + row + '</thead>')
col_strs[i] = row
if n_header > 0:
# Get rid of '---' header line
col_strs.pop(n_header - 1)
col_strs.insert(0, '<table>')
col_strs.append('</table>')
# Now bring all the column string values to the same fixed width
else:
col_width = max(len(x) for x in col_strs) if col_strs else 1
# Center line header content and generate dashed headerline
for i in outs['i_centers']:
col_strs[i] = col_strs[i].center(col_width)
if outs['i_dashes'] is not None:
col_strs[outs['i_dashes']] = '-' * col_width
# Format columns according to alignment. `align` arg has precedent, otherwise
# use `col.format` if it starts as a legal alignment string. If neither applies
# then right justify.
re_fill_align = re.compile(r'(?P<fill>.?)(?P<align>[<^>=])')
match = None
if align:
# If there is an align specified then it must match
match = re_fill_align.match(align)
if not match:
raise ValueError("column align must be one of '<', '^', '>', or '='")
elif isinstance(col.info.format, str):
# col.info.format need not match, in which case rjust gets used
match = re_fill_align.match(col.info.format)
if match:
fill_char = match.group('fill')
align_char = match.group('align')
if align_char == '=':
if fill_char != '0':
raise ValueError("fill character must be '0' for '=' align")
fill_char = '' # str.zfill gets used which does not take fill char arg
else:
fill_char = ''
align_char = '>'
justify_methods = {'<': 'ljust', '^': 'center', '>': 'rjust', '=': 'zfill'}
justify_method = justify_methods[align_char]
justify_args = (col_width, fill_char) if fill_char else (col_width,)
for i, col_str in enumerate(col_strs):
col_strs[i] = getattr(col_str, justify_method)(*justify_args)
if outs['show_length']:
col_strs.append(f'Length = {len(col)} rows')
return col_strs, outs
def _pformat_col_iter(self, col, max_lines, show_name, show_unit, outs,
show_dtype=False, show_length=None):
"""Iterator which yields formatted string representation of column values.
Parameters
----------
max_lines : int
Maximum lines of output (header + data rows)
show_name : bool
Include column name. Default is True.
show_unit : bool
Include a header row for unit. Default is to show a row
for units only if one or more columns has a defined value
for the unit.
outs : dict
Must be a dict which is used to pass back additional values
defined within the iterator.
show_dtype : bool
Include column dtype. Default is False.
show_length : bool
Include column length at end. Default is to show this only
if the column is not shown completely.
"""
max_lines, _ = self._get_pprint_size(max_lines, -1)
multidims = getattr(col, 'shape', [0])[1:]
if multidims:
multidim0 = tuple(0 for n in multidims)
multidim1 = tuple(n - 1 for n in multidims)
trivial_multidims = np.prod(multidims) == 1
i_dashes = None
i_centers = [] # Line indexes where content should be centered
n_header = 0
if show_name:
i_centers.append(n_header)
# Get column name (or 'None' if not set)
col_name = str(col.info.name)
if multidims:
col_name += f" [{','.join(str(n) for n in multidims)}]"
n_header += 1
yield col_name
if show_unit:
i_centers.append(n_header)
n_header += 1
yield str(col.info.unit or '')
if show_dtype:
i_centers.append(n_header)
n_header += 1
try:
dtype = dtype_info_name(col.dtype)
except AttributeError:
dtype = 'object'
yield str(dtype)
if show_unit or show_name or show_dtype:
i_dashes = n_header
n_header += 1
yield '---'
max_lines -= n_header
n_print2 = max_lines // 2
n_rows = len(col)
# This block of code is responsible for producing the function that
# will format values for this column. The ``format_func`` function
# takes two args (col_format, val) and returns the string-formatted
# version. Some points to understand:
#
# - col_format could itself be the formatting function, so it will
# actually end up being called with itself as the first arg. In
# this case the function is expected to ignore its first arg.
#
# - auto_format_func is a function that gets called on the first
# column value that is being formatted. It then determines an
# appropriate formatting function given the actual value to be
# formatted. This might be deterministic or it might involve
# try/except. The latter allows for different string formatting
# options like %f or {:5.3f}. When auto_format_func is called it:
# 1. Caches the function in the _format_funcs dict so for subsequent
# values the right function is called right away.
# 2. Returns the formatted value.
#
# - possible_string_format_functions is a function that yields a
# succession of functions that might successfully format the
# value. There is a default, but Mixin methods can override this.
# See Quantity for an example.
#
# - get_auto_format_func() returns a wrapped version of auto_format_func
# with the column id and possible_string_format_functions as
# enclosed variables.
col_format = col.info.format or getattr(col.info, 'default_format',
None)
pssf = (getattr(col.info, 'possible_string_format_functions', None)
or _possible_string_format_functions)
auto_format_func = get_auto_format_func(col, pssf)
format_func = col.info._format_funcs.get(col_format, auto_format_func)
if len(col) > max_lines:
if show_length is None:
show_length = True
i0 = n_print2 - (1 if show_length else 0)
i1 = n_rows - n_print2 - max_lines % 2
indices = np.concatenate([np.arange(0, i0 + 1),
np.arange(i1 + 1, len(col))])
else:
i0 = -1
indices = np.arange(len(col))
def format_col_str(idx):
if multidims:
# Prevents columns like Column(data=[[(1,)],[(2,)]], name='a')
# with shape (n,1,...,1) from being printed as if there was
# more than one element in a row
if trivial_multidims:
return format_func(col_format, col[(idx,) + multidim0])
else:
left = format_func(col_format, col[(idx,) + multidim0])
right = format_func(col_format, col[(idx,) + multidim1])
return f'{left} .. {right}'
else:
return format_func(col_format, col[idx])
# Add formatted values if within bounds allowed by max_lines
for idx in indices:
if idx == i0:
yield '...'
else:
try:
yield format_col_str(idx)
except ValueError:
raise ValueError(
'Unable to parse format string "{}" for entry "{}" '
'in column "{}"'.format(col_format, col[idx],
col.info.name))
outs['show_length'] = show_length
outs['n_header'] = n_header
outs['i_centers'] = i_centers
outs['i_dashes'] = i_dashes
def _pformat_table(self, table, max_lines=None, max_width=None,
show_name=True, show_unit=None, show_dtype=False,
html=False, tableid=None, tableclass=None, align=None):
"""Return a list of lines for the formatted string representation of
the table.
Parameters
----------
max_lines : int or None
Maximum number of rows to output
max_width : int or None
Maximum character width of output
show_name : bool
Include a header row for column names. Default is True.
show_unit : bool
Include a header row for unit. Default is to show a row
for units only if one or more columns has a defined value
for the unit.
show_dtype : bool
Include a header row for column dtypes. Default is False.
html : bool
Format the output as an HTML table. Default is False.
tableid : str or None
An ID tag for the table; only used if html is set. Default is
"table{id}", where id is the unique integer id of the table object,
id(table)
tableclass : str or list of str or None
CSS classes for the table; only used if html is set. Default is
none
align : str or list or tuple
Left/right alignment of columns. Default is '>' (right) for all
columns. Other allowed values are '<', '^', and '0=' for left,
centered, and 0-padded, respectively. A list of strings can be
provided for alignment of tables with multiple columns.
Returns
-------
rows : list
Formatted table as a list of strings
outs : dict
Dict which is used to pass back additional values
defined within the iterator.
"""
# "Print" all the values into temporary lists by column for subsequent
# use and to determine the width
max_lines, max_width = self._get_pprint_size(max_lines, max_width)
cols = []
if show_unit is None:
show_unit = any(col.info.unit for col in table.columns.values())
# Coerce align into a correctly-sized list of alignments (if possible)
n_cols = len(table.columns)
if align is None or isinstance(align, str):
align = [align] * n_cols
elif isinstance(align, (list, tuple)):
if len(align) != n_cols:
raise ValueError('got {} alignment values instead of '
'the number of columns ({})'
.format(len(align), n_cols))
else:
raise TypeError('align keyword must be str or list or tuple (got {})'
.format(type(align)))
# Process column visibility from table pprint_include_names and
# pprint_exclude_names attributes and get the set of columns to show.
pprint_include_names = _get_pprint_include_names(table)
for align_, col in zip(align, table.columns.values()):
if col.info.name not in pprint_include_names:
continue
lines, outs = self._pformat_col(col, max_lines, show_name=show_name,
show_unit=show_unit, show_dtype=show_dtype,
align=align_)
if outs['show_length']:
lines = lines[:-1]
cols.append(lines)
if not cols:
return ['<No columns>'], {'show_length': False}
# Use the values for the last column since they are all the same
n_header = outs['n_header']
n_rows = len(cols[0])
def outwidth(cols):
return sum(len(c[0]) for c in cols) + len(cols) - 1
dots_col = ['...'] * n_rows
middle = len(cols) // 2
while outwidth(cols) > max_width:
if len(cols) == 1:
break
if len(cols) == 2:
cols[1] = dots_col
break
if cols[middle] is dots_col:
cols.pop(middle)
middle = len(cols) // 2
cols[middle] = dots_col
# Now "print" the (already-stringified) column values into a
# row-oriented list.
rows = []
if html:
from astropy.utils.xml.writer import xml_escape
if tableid is None:
tableid = f'table{id(table)}'
if tableclass is not None:
if isinstance(tableclass, list):
tableclass = ' '.join(tableclass)
rows.append(f'<table id="{tableid}" class="{tableclass}">')
else:
rows.append(f'<table id="{tableid}">')
for i in range(n_rows):
# _pformat_col output has a header line '----' which is not needed here
if i == n_header - 1:
continue
td = 'th' if i < n_header else 'td'
vals = (f'<{td}>{xml_escape(col[i].strip())}</{td}>'
for col in cols)
row = ('<tr>' + ''.join(vals) + '</tr>')
if i < n_header:
row = ('<thead>' + row + '</thead>')
rows.append(row)
rows.append('</table>')
else:
for i in range(n_rows):
row = ' '.join(col[i] for col in cols)
rows.append(row)
return rows, outs
def _more_tabcol(self, tabcol, max_lines=None, max_width=None,
show_name=True, show_unit=None, show_dtype=False):
"""Interactive "more" of a table or column.
Parameters
----------
max_lines : int or None
Maximum number of rows to output
max_width : int or None
Maximum character width of output
show_name : bool
Include a header row for column names. Default is True.
show_unit : bool
Include a header row for unit. Default is to show a row
for units only if one or more columns has a defined value
for the unit.
show_dtype : bool
Include a header row for column dtypes. Default is False.
"""
allowed_keys = 'f br<>qhpn'
# Count the header lines
n_header = 0
if show_name:
n_header += 1
if show_unit:
n_header += 1
if show_dtype:
n_header += 1
if show_name or show_unit or show_dtype:
n_header += 1
# Set up kwargs for pformat call. Only Table gets max_width.
kwargs = dict(max_lines=-1, show_name=show_name, show_unit=show_unit,
show_dtype=show_dtype)
if hasattr(tabcol, 'columns'): # tabcol is a table
kwargs['max_width'] = max_width
# If max_lines is None (=> query screen size) then increase by 2.
# This is because get_pprint_size leaves 6 extra lines so that in
# ipython you normally see the last input line.
max_lines1, max_width = self._get_pprint_size(max_lines, max_width)
if max_lines is None:
max_lines1 += 2
delta_lines = max_lines1 - n_header
# Set up a function to get a single character on any platform
inkey = Getch()
i0 = 0 # First table/column row to show
showlines = True
while True:
i1 = i0 + delta_lines # Last table/col row to show
if showlines: # Don't always show the table (e.g. after help)
try:
os.system('cls' if os.name == 'nt' else 'clear')
except Exception:
pass # No worries if clear screen call fails
lines = tabcol[i0:i1].pformat(**kwargs)
colors = ('red' if i < n_header else 'default'
for i in range(len(lines)))
for color, line in zip(colors, lines):
color_print(line, color)
showlines = True
print()
print("-- f, <space>, b, r, p, n, <, >, q h (help) --", end=' ')
# Get a valid key
while True:
try:
key = inkey().lower()
except Exception:
print("\n")
log.error('Console does not support getting a character'
' as required by more(). Use pprint() instead.')
return
if key in allowed_keys:
break
print(key)
if key.lower() == 'q':
break
elif key == ' ' or key == 'f':
i0 += delta_lines
elif key == 'b':
i0 = i0 - delta_lines
elif key == 'r':
pass
elif key == '<':
i0 = 0
elif key == '>':
i0 = len(tabcol)
elif key == 'p':
i0 -= 1
elif key == 'n':
i0 += 1
elif key == 'h':
showlines = False
print("""
Browsing keys:
f, <space> : forward one page
b : back one page
r : refresh same page
n : next row
p : previous row
< : go to beginning
> : go to end
q : quit browsing
h : print this help""", end=' ')
if i0 < 0:
i0 = 0
if i0 >= len(tabcol) - delta_lines:
i0 = len(tabcol) - delta_lines
print("\n")
| bsd-3-clause |
shsingh/ansible | lib/ansible/plugins/callback/default.py | 16 | 18632 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Plugin documentation consumed by ansible-doc; fixed typo ("beggining").
DOCUMENTATION = '''
    callback: default
    type: stdout
    short_description: default Ansible screen output
    version_added: historical
    description:
        - This is the default output callback for ansible-playbook.
    extends_documentation_fragment:
      - default_callback
    requirements:
      - set as stdout in configuration
    options:
      check_mode_markers:
        name: Show markers when running in check mode
        description:
        - "Toggle to control displaying markers when running in check mode. The markers are C(DRY RUN)
          at the beginning and ending of playbook execution (when calling C(ansible-playbook --check))
          and C(CHECK MODE) as a suffix at every play and task that is run in check mode."
        type: bool
        default: no
        version_added: 2.9
        env:
          - name: ANSIBLE_CHECK_MODE_MARKERS
        ini:
          - key: check_mode_markers
            section: defaults
'''
# NOTE: check_mode_markers functionality is also implemented in the following derived plugins:
# debug.py, yaml.py, dense.py. Maybe their documentation needs updating, too.
from ansible import constants as C
from ansible import context
from ansible.playbook.task_include import TaskInclude
from ansible.plugins.callback import CallbackBase
from ansible.utils.color import colorize, hostcolor
# These values use ansible.constants for historical reasons, mostly to allow
# unmodified derivative plugins to work. However, newer options added to the
# plugin are not also added to ansible.constants, so authors of derivative
# callback plugins will eventually need to add a reference to the common docs
# fragment for the 'default' callback plugin
# these are used to provide backwards compat with old plugins that subclass from default
# but still don't use the new config system and/or fail to document the options
# TODO: Change the default of check_mode_markers to True in a future release (2.13)
# (option_name, fallback_value) pairs consumed by CallbackModule.set_options():
# when a legacy subclass predates the config system and get_option() raises,
# the value on the right is used instead.
COMPAT_OPTIONS = (('display_skipped_hosts', C.DISPLAY_SKIPPED_HOSTS),
                  ('display_ok_hosts', True),
                  ('show_custom_stats', C.SHOW_CUSTOM_STATS),
                  ('display_failed_stderr', False),
                  ('check_mode_markers', False),)
class CallbackModule(CallbackBase):

    '''
    This is the default callback interface, which simply prints messages
    to stdout when new callback events are received.
    '''

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'stdout'
    CALLBACK_NAME = 'default'

    def __init__(self):
        self._play = None
        self._last_task_banner = None   # uuid of the task whose banner was printed last
        self._last_task_name = None     # cached task name (vars may be unavailable later)
        self._task_type_cache = {}      # task uuid -> banner prefix ('TASK', 'RUNNING HANDLER', ...)
        super(CallbackModule, self).__init__()

    def set_options(self, task_keys=None, var_options=None, direct=None):
        """Resolve plugin options, falling back to legacy constants for
        derived plugins that predate the config system (see COMPAT_OPTIONS)."""
        super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)

        # for backwards compat with plugins subclassing default, fallback to constants
        for option, constant in COMPAT_OPTIONS:
            try:
                value = self.get_option(option)
            except (AttributeError, KeyError):
                value = constant
            setattr(self, option, value)

    def v2_runner_on_failed(self, result, ignore_errors=False):
        """Print a 'fatal: ... FAILED!' message for a failed task result."""
        delegated_vars = result._result.get('_ansible_delegated_vars', None)
        self._clean_results(result._result, result._task.action)

        if self._last_task_banner != result._task._uuid:
            self._print_task_banner(result._task)

        self._handle_exception(result._result, use_stderr=self.display_failed_stderr)
        self._handle_warnings(result._result)

        if result._task.loop and 'results' in result._result:
            self._process_items(result)
        else:
            if delegated_vars:
                self._display.display("fatal: [%s -> %s]: FAILED! => %s" % (result._host.get_name(), delegated_vars['ansible_host'],
                                                                            self._dump_results(result._result)),
                                      color=C.COLOR_ERROR, stderr=self.display_failed_stderr)
            else:
                self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)),
                                      color=C.COLOR_ERROR, stderr=self.display_failed_stderr)

        if ignore_errors:
            self._display.display("...ignoring", color=C.COLOR_SKIP)

    def v2_runner_on_ok(self, result):
        """Print an 'ok:'/'changed:' line for a successful task result."""
        delegated_vars = result._result.get('_ansible_delegated_vars', None)

        if isinstance(result._task, TaskInclude):
            return
        elif result._result.get('changed', False):
            if self._last_task_banner != result._task._uuid:
                self._print_task_banner(result._task)

            if delegated_vars:
                msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
            else:
                msg = "changed: [%s]" % result._host.get_name()
            color = C.COLOR_CHANGED
        else:
            if not self.display_ok_hosts:
                return

            if self._last_task_banner != result._task._uuid:
                self._print_task_banner(result._task)

            if delegated_vars:
                msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
            else:
                msg = "ok: [%s]" % result._host.get_name()
            color = C.COLOR_OK

        self._handle_warnings(result._result)

        if result._task.loop and 'results' in result._result:
            self._process_items(result)
        else:
            self._clean_results(result._result, result._task.action)

            if self._run_is_verbose(result):
                msg += " => %s" % (self._dump_results(result._result),)
            self._display.display(msg, color=color)

    def v2_runner_on_skipped(self, result):
        """Print a 'skipping:' line, unless skipped results are suppressed."""
        if self.display_skipped_hosts:
            self._clean_results(result._result, result._task.action)

            if self._last_task_banner != result._task._uuid:
                self._print_task_banner(result._task)

            if result._task.loop and 'results' in result._result:
                self._process_items(result)
            else:
                msg = "skipping: [%s]" % result._host.get_name()
                if self._run_is_verbose(result):
                    msg += " => %s" % self._dump_results(result._result)
                self._display.display(msg, color=C.COLOR_SKIP)

    def v2_runner_on_unreachable(self, result):
        """Print a 'fatal: ... UNREACHABLE!' line for an unreachable host."""
        if self._last_task_banner != result._task._uuid:
            self._print_task_banner(result._task)

        delegated_vars = result._result.get('_ansible_delegated_vars', None)
        if delegated_vars:
            msg = "fatal: [%s -> %s]: UNREACHABLE! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result))
        else:
            msg = "fatal: [%s]: UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result))
        self._display.display(msg, color=C.COLOR_UNREACHABLE, stderr=self.display_failed_stderr)

    def v2_playbook_on_no_hosts_matched(self):
        self._display.display("skipping: no hosts matched", color=C.COLOR_SKIP)

    def v2_playbook_on_no_hosts_remaining(self):
        self._display.banner("NO MORE HOSTS LEFT")

    def v2_playbook_on_task_start(self, task, is_conditional):
        self._task_start(task, prefix='TASK')

    def _task_start(self, task, prefix=None):
        """Record task metadata and print the task banner when appropriate."""
        # Cache output prefix for task if provided
        # This is needed to properly display 'RUNNING HANDLER' and similar
        # when hiding skipped/ok task results
        if prefix is not None:
            self._task_type_cache[task._uuid] = prefix

        # Preserve task name, as all vars may not be available for templating
        # when we need it later
        if self._play.strategy == 'free':
            # Explicitly set to None for strategy 'free' to account for any cached
            # task title from a previous non-free play
            self._last_task_name = None
        else:
            self._last_task_name = task.get_name().strip()

        # Display the task banner immediately if we're not doing any filtering based on task result
        if self.display_skipped_hosts and self.display_ok_hosts:
            self._print_task_banner(task)

    def _print_task_banner(self, task):
        """Print the 'TASK [name]' banner and remember the task uuid so the
        banner is emitted at most once per task."""
        # args can be specified as no_log in several places: in the task or in
        # the argument spec.  We can check whether the task is no_log but the
        # argument spec can't be because that is only run on the target
        # machine and we haven't run it there yet at this time.
        #
        # So we give people a config option to affect display of the args so
        # that they can secure this if they feel that their stdout is insecure
        # (shoulder surfing, logging stdout straight to a file, etc).
        args = ''
        if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT:
            args = u', '.join(u'%s=%s' % a for a in task.args.items())
            args = u' %s' % args

        prefix = self._task_type_cache.get(task._uuid, 'TASK')

        # Use cached task name
        task_name = self._last_task_name
        if task_name is None:
            task_name = task.get_name().strip()

        if task.check_mode and self.check_mode_markers:
            checkmsg = " [CHECK MODE]"
        else:
            checkmsg = ""
        self._display.banner(u"%s [%s%s]%s" % (prefix, task_name, args, checkmsg))

        if self._display.verbosity >= 2:
            path = task.get_path()
            if path:
                self._display.display(u"task path: %s" % path, color=C.COLOR_DEBUG)

        self._last_task_banner = task._uuid

    def v2_playbook_on_cleanup_task_start(self, task):
        self._task_start(task, prefix='CLEANUP TASK')

    def v2_playbook_on_handler_task_start(self, task):
        self._task_start(task, prefix='RUNNING HANDLER')

    def v2_runner_on_start(self, host, task):
        if self.get_option('show_per_host_start'):
            self._display.display(" [started %s on %s]" % (task, host), color=C.COLOR_OK)

    def v2_playbook_on_play_start(self, play):
        """Print the 'PLAY [name]' banner and remember the current play."""
        name = play.get_name().strip()
        if play.check_mode and self.check_mode_markers:
            checkmsg = " [CHECK MODE]"
        else:
            checkmsg = ""
        if not name:
            msg = u"PLAY%s" % checkmsg
        else:
            msg = u"PLAY [%s]%s" % (name, checkmsg)

        self._play = play

        self._display.banner(msg)

    def v2_on_file_diff(self, result):
        """Print file diffs for changed results (per item when looping)."""
        if result._task.loop and 'results' in result._result:
            for res in result._result['results']:
                if 'diff' in res and res['diff'] and res.get('changed', False):
                    diff = self._get_diff(res['diff'])
                    if diff:
                        if self._last_task_banner != result._task._uuid:
                            self._print_task_banner(result._task)
                        self._display.display(diff)
        elif 'diff' in result._result and result._result['diff'] and result._result.get('changed', False):
            diff = self._get_diff(result._result['diff'])
            if diff:
                if self._last_task_banner != result._task._uuid:
                    self._print_task_banner(result._task)
                self._display.display(diff)

    def v2_runner_item_on_ok(self, result):
        """Print an 'ok:'/'changed:' line for a single successful loop item."""
        delegated_vars = result._result.get('_ansible_delegated_vars', None)

        if isinstance(result._task, TaskInclude):
            return
        elif result._result.get('changed', False):
            if self._last_task_banner != result._task._uuid:
                self._print_task_banner(result._task)

            msg = 'changed'
            color = C.COLOR_CHANGED
        else:
            if not self.display_ok_hosts:
                return

            if self._last_task_banner != result._task._uuid:
                self._print_task_banner(result._task)

            msg = 'ok'
            color = C.COLOR_OK

        if delegated_vars:
            msg += ": [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
        else:
            msg += ": [%s]" % result._host.get_name()

        msg += " => (item=%s)" % (self._get_item_label(result._result),)

        self._clean_results(result._result, result._task.action)
        if self._run_is_verbose(result):
            msg += " => %s" % self._dump_results(result._result)
        self._display.display(msg, color=color)

    def v2_runner_item_on_failed(self, result):
        """Print a 'failed:' line for a single failed loop item.

        Honors the display_failed_stderr option, for consistency with
        v2_runner_on_failed and v2_runner_on_unreachable above.
        """
        if self._last_task_banner != result._task._uuid:
            self._print_task_banner(result._task)

        delegated_vars = result._result.get('_ansible_delegated_vars', None)
        self._clean_results(result._result, result._task.action)
        self._handle_exception(result._result, use_stderr=self.display_failed_stderr)

        msg = "failed: "
        if delegated_vars:
            msg += "[%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
        else:
            msg += "[%s]" % (result._host.get_name())

        self._handle_warnings(result._result)
        self._display.display(msg + " (item=%s) => %s" % (self._get_item_label(result._result), self._dump_results(result._result)),
                              color=C.COLOR_ERROR, stderr=self.display_failed_stderr)

    def v2_runner_item_on_skipped(self, result):
        """Print a 'skipping:' line for a single skipped loop item."""
        if self.display_skipped_hosts:
            if self._last_task_banner != result._task._uuid:
                self._print_task_banner(result._task)

            self._clean_results(result._result, result._task.action)
            msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), self._get_item_label(result._result))
            if self._run_is_verbose(result):
                msg += " => %s" % self._dump_results(result._result)
            self._display.display(msg, color=C.COLOR_SKIP)

    def v2_playbook_on_include(self, included_file):
        """Announce which hosts an included file applies to."""
        msg = 'included: %s for %s' % (included_file._filename, ", ".join([h.name for h in included_file._hosts]))
        if 'item' in included_file._args:
            msg += " => (item=%s)" % (self._get_item_label(included_file._args),)
        self._display.display(msg, color=C.COLOR_SKIP)

    def v2_playbook_on_stats(self, stats):
        """Print the PLAY RECAP summary table, plus custom stats if enabled."""
        self._display.banner("PLAY RECAP")

        hosts = sorted(stats.processed.keys())
        for h in hosts:
            t = stats.summarize(h)

            # Colorized line for the screen, plain line for the log file.
            self._display.display(
                u"%s : %s %s %s %s %s %s %s" % (
                    hostcolor(h, t),
                    colorize(u'ok', t['ok'], C.COLOR_OK),
                    colorize(u'changed', t['changed'], C.COLOR_CHANGED),
                    colorize(u'unreachable', t['unreachable'], C.COLOR_UNREACHABLE),
                    colorize(u'failed', t['failures'], C.COLOR_ERROR),
                    colorize(u'skipped', t['skipped'], C.COLOR_SKIP),
                    colorize(u'rescued', t['rescued'], C.COLOR_OK),
                    colorize(u'ignored', t['ignored'], C.COLOR_WARN),
                ),
                screen_only=True
            )

            self._display.display(
                u"%s : %s %s %s %s %s %s %s" % (
                    hostcolor(h, t, False),
                    colorize(u'ok', t['ok'], None),
                    colorize(u'changed', t['changed'], None),
                    colorize(u'unreachable', t['unreachable'], None),
                    colorize(u'failed', t['failures'], None),
                    colorize(u'skipped', t['skipped'], None),
                    colorize(u'rescued', t['rescued'], None),
                    colorize(u'ignored', t['ignored'], None),
                ),
                log_only=True
            )

        self._display.display("", screen_only=True)

        # print custom stats if required
        if stats.custom and self.show_custom_stats:
            self._display.banner("CUSTOM STATS: ")
            # per host
            # TODO: come up with 'pretty format'
            for k in sorted(stats.custom.keys()):
                if k == '_run':
                    continue
                self._display.display('\t%s: %s' % (k, self._dump_results(stats.custom[k], indent=1).replace('\n', '')))

            # print per run custom stats
            if '_run' in stats.custom:
                self._display.display("", screen_only=True)
                self._display.display('\tRUN: %s' % self._dump_results(stats.custom['_run'], indent=1).replace('\n', ''))
            self._display.display("", screen_only=True)

        if context.CLIARGS['check'] and self.check_mode_markers:
            self._display.banner("DRY RUN")

    def v2_playbook_on_start(self, playbook):
        """Print the PLAYBOOK banner and CLI arguments at high verbosity."""
        if self._display.verbosity > 1:
            from os.path import basename
            self._display.banner("PLAYBOOK: %s" % basename(playbook._file_name))

        # show CLI arguments
        if self._display.verbosity > 3:
            if context.CLIARGS.get('args'):
                self._display.display('Positional arguments: %s' % ' '.join(context.CLIARGS['args']),
                                      color=C.COLOR_VERBOSE, screen_only=True)

            for argument in (a for a in context.CLIARGS if a != 'args'):
                val = context.CLIARGS[argument]
                if val:
                    self._display.display('%s: %s' % (argument, val), color=C.COLOR_VERBOSE, screen_only=True)

        if context.CLIARGS['check'] and self.check_mode_markers:
            self._display.banner("DRY RUN")

    def v2_runner_retry(self, result):
        """Print a FAILED - RETRYING notice with the remaining retry count."""
        task_name = result.task_name or result._task
        msg = "FAILED - RETRYING: %s (%d retries left)." % (task_name, result._result['retries'] - result._result['attempts'])
        if self._run_is_verbose(result, verbosity=2):
            msg += "Result was: %s" % self._dump_results(result._result)
        self._display.display(msg, color=C.COLOR_DEBUG)

    def v2_playbook_on_notify(self, handler, host):
        if self._display.verbosity > 1:
            self._display.display("NOTIFIED HANDLER %s for %s" % (handler.get_name(), host), color=C.COLOR_VERBOSE, screen_only=True)
| gpl-3.0 |
sanghinitin/golismero | golismero/api/text/text_utils.py | 8 | 7440 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Text manipulation utilities.
"""
__license__ = """
GoLismero 2.0 - The web knife - Copyright (C) 2011-2014
Golismero project site: https://github.com/golismero
Golismero project mail: contact@golismero-project.com
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
# Explicit public API: star-imports from this module pick up only these names.
__all__ = [
    "char_count", "line_count", "word_count", "generate_random_string",
    "uncamelcase", "hexdump", "to_utf8", "split_first",
]
import re
from random import choice
from re import finditer
from string import ascii_letters, digits, printable
#------------------------------------------------------------------------------
def char_count(text):
    """
    :param text: Text.
    :type text: str

    :returns: Number of word characters (regex ``\\w``) in the text.
    :rtype: int

    :raises: TypeError
    """
    # Count every single match of the \w class (letters, digits, underscore).
    return len(re.findall(r"\w", text))
#------------------------------------------------------------------------------
def line_count(text):
    """
    :param text: Text.
    :type text: str

    :returns: Number of lines in text.
    :rtype: int

    :raises: TypeError
    """
    if not isinstance(text, basestring):
        raise TypeError("Expected basestring, got '%s' instead" % type(text))
    newlines = text.count("\n")
    # A trailing newline does not begin an additional line.
    return newlines if text.endswith("\n") else newlines + 1
#------------------------------------------------------------------------------
def word_count(text):
    """
    :param text: Text.
    :type text: str

    :returns: Number of words in text.
    :rtype: int

    :raises: TypeError
    """
    # Each maximal run of word characters counts as one word.
    return len(re.findall(r"\w+", text))
#------------------------------------------------------------------------------
def generate_random_string(length = 30):
    """
    Generates a random string of the specified length.

    The key space used to generate random strings are:

    - ASCII letters (both lowercase and uppercase).
    - Digits (0-9).

    >>> from golismero.api.text.text_utils import generate_random_string
    >>> generate_random_string(10)
    Asi91Ujsn5
    >>> generate_random_string(30)
    8KNLs981jc0h1ls8b2ks01bc7slgu2

    :param length: Desired string length.
    :type length: int

    :returns: Random string of the requested length.
    :rtype: str

    :raises: TypeError
    """
    m_available_chars = ascii_letters + digits
    # range() (rather than xrange()) works on both Python 2 and Python 3.
    return "".join(choice(m_available_chars) for _ in range(length))
#------------------------------------------------------------------------------
# Adapted from: http://stackoverflow.com/a/2560017/426293
# Insert a break (matched by the substitution below) at every camel-case
# boundary: Upper->Upper+lower, nonUpper->Upper, letter->non-letter.
__uncamelcase_re = re.compile("%s|%s|%s" % (
    r"(?<=[A-Z])(?=[A-Z][a-z])",
    r"(?<=[^A-Z])(?=[A-Z])",
    r"(?<=[A-Za-z])(?=[^A-Za-z])",
))
def uncamelcase(string):
    """
    Converts a CamelCase string into a human-readable string.

    Examples::
        >>> uncamelcase("lowercase")
        'lowercase'
        >>> uncamelcase("MyClass")
        'My Class'
        >>> uncamelcase("PDFLoader")
        'PDF Loader'
        >>> uncamelcase("SimpleXMLParser")
        'Simple XML Parser'
        >>> uncamelcase("GL11Version")
        'GL 11 Version'
        >>> uncamelcase("BFG9000")
        'BFG 9000'

    :param string: CamelCase string.
    :type string: str

    :returns: Human-readable string.
    :rtype: str

    :raises: TypeError
    """
    if not isinstance(string, basestring):
        raise TypeError("Expected basestring, got '%s' instead" % type(string))
    string = string.replace("_", " ")
    string = __uncamelcase_re.sub(" ", string)
    # Collapse runs of spaces into a single space. The replacement must look
    # for a DOUBLE space: replacing a single space with itself never changes
    # the string, so the previous single-space loop could never terminate.
    while "  " in string:
        string = string.replace("  ", " ")
    return string
#------------------------------------------------------------------------------
def hexdump(s):
    """
    Produce an hexadecimal output from a binary string.

    :param s: Binary string to dump.
    :type s: str

    :returns: Hexadecimal output.
    :rtype: str

    :raises: TypeError
    """
    if not isinstance(s, basestring):
        raise TypeError("Expected basestring, got '%s' instead" % type(s))
    a = []
    # Walk the input 16 bytes at a time; each output row shows two groups of
    # 8 hex-encoded bytes followed by a printable-ASCII rendering of the row.
    for i in xrange(0, len(s), 16):
        h1 = " ".join("%.2x" % ord(c) for c in s[i:i+8])
        h2 = " ".join("%.2x" % ord(c) for c in s[i+8:i+16])
        # Non-printable bytes are rendered as "." in the ASCII column.
        d = "".join(c if c in printable else "." for c in s[i:i+16])
        # %-32s pads each hex group so the ASCII column lines up vertically.
        a.append("%-32s-%-32s %s\n" % (h1, h2, d))
    return "".join(a)
#------------------------------------------------------------------------------
def to_utf8(s):
    """
    Convert the given Unicode string into an UTF-8 encoded string.

    If the argument is already a normal Python string, nothing is done.
    So this function can be used as a filter to normalize string arguments.

    :param s: Unicode string to convert.
    :type s: basestring

    :returns: Converted string.
    :rtype: str
    """
    if isinstance(s, unicode):
        return s.encode("UTF-8")
    # This branch matches str *subclasses* only (type is not exactly str but
    # isinstance still passes) and downcasts them to a plain str instance.
    if type(s) is not str and isinstance(s, str):
        return str(s)
    # Anything else (already a plain str, or a non-string) is passed through.
    return s
#------------------------------------------------------------------------------
# This function was borrowed from the urllib3 project.
#
# Urllib3 is copyright 2008-2012 Andrey Petrov and contributors (see
# CONTRIBUTORS.txt) and is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
# http://raw.github.com/shazow/urllib3/master/CONTRIBUTORS.txt
#
def split_first(s, delims):
    """
    Given a string and a set of candidate delimiter characters, split on
    whichever delimiter occurs first in the string, returning both halves
    and the delimiter that matched.

    If no delimiter is found, the first part is the full input string.

    Example: ::
        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delimiters.
    Not ideal for a large number of delimiters.

    .. warning: This function was borrowed from the urllib3 project.
                It may be removed in future versions of GoLismero.

    :param s: string to delimit to.
    :type s: str

    :param delims: string with delimits characters
    :type delims: str

    :return: a tuple as format: (FIRST_OCCURRENCE, REST_OF_TEXT, MATCHING_CHAR)
    :rtype: (str, str, str|None)

    :raises: TypeError
    """
    best_idx = None
    best_delim = None
    # Find the delimiter with the smallest index of first occurrence.
    for delim in delims:
        pos = s.find(delim)
        if pos >= 0 and (best_idx is None or pos < best_idx):
            best_idx = pos
            best_delim = delim

    if best_idx is None:
        return s, '', None
    return s[:best_idx], s[best_idx + 1:], best_delim
| gpl-2.0 |
hortonworks/hortonworks-sandbox | desktop/core/ext-py/Django-1.2.3/tests/modeltests/many_to_one_null/models.py | 30 | 3262 | """
16. Many-to-one relationships that can be null
To define a many-to-one relationship that can have a null foreign key, use
``ForeignKey()`` with ``null=True`` .
"""
from django.db import models
class Reporter(models.Model):
    # Display name; Article rows reference a Reporter via a nullable FK.
    name = models.CharField(max_length=30)
    def __unicode__(self):
        # Unicode representation shown in the doctest output below.
        return self.name
class Article(models.Model):
    headline = models.CharField(max_length=100)
    # null=True: an Article may exist with no assigned Reporter at all.
    reporter = models.ForeignKey(Reporter, null=True)
    class Meta:
        # Default queryset ordering; the doctests below rely on it.
        ordering = ('headline',)
    def __unicode__(self):
        return self.headline
__test__ = {'API_TESTS':"""
# Create a Reporter.
>>> r = Reporter(name='John Smith')
>>> r.save()
# Create an Article.
>>> a = Article(headline="First", reporter=r)
>>> a.save()
>>> a.reporter.id
1
>>> a.reporter
<Reporter: John Smith>
# Article objects have access to their related Reporter objects.
>>> r = a.reporter
# Create an Article via the Reporter object.
>>> a2 = r.article_set.create(headline="Second")
>>> a2
<Article: Second>
>>> a2.reporter.id
1
# Reporter objects have access to their related Article objects.
>>> r.article_set.all()
[<Article: First>, <Article: Second>]
>>> r.article_set.filter(headline__startswith='Fir')
[<Article: First>]
>>> r.article_set.count()
2
# Create an Article with no Reporter by passing "reporter=None".
>>> a3 = Article(headline="Third", reporter=None)
>>> a3.save()
>>> a3.id
3
>>> print a3.reporter
None
# Need to re-fetch a3 so the cached reporter relation is refreshed
>>> a3 = Article.objects.get(pk=3)
>>> print a3.reporter.id
Traceback (most recent call last):
...
AttributeError: 'NoneType' object has no attribute 'id'
# Accessing an article's 'reporter' attribute returns None
# if the reporter is set to None.
>>> print a3.reporter
None
# To retrieve the articles with no reporters set, use "reporter__isnull=True".
>>> Article.objects.filter(reporter__isnull=True)
[<Article: Third>]
# We can achieve the same thing by filtering for the case where the reporter is
# None.
>>> Article.objects.filter(reporter=None)
[<Article: Third>]
# Set the reporter for the Third article
>>> r.article_set.add(a3)
>>> r.article_set.all()
[<Article: First>, <Article: Second>, <Article: Third>]
# Remove an article from the set, and check that it was removed.
>>> r.article_set.remove(a3)
>>> r.article_set.all()
[<Article: First>, <Article: Second>]
>>> Article.objects.filter(reporter__isnull=True)
[<Article: Third>]
# Create another article and reporter
>>> r2 = Reporter(name='Paul Jones')
>>> r2.save()
>>> a4 = r2.article_set.create(headline='Fourth')
>>> r2.article_set.all()
[<Article: Fourth>]
# Try to remove a4 from a set it does not belong to
>>> r.article_set.remove(a4)
Traceback (most recent call last):
...
DoesNotExist: <Article: Fourth> is not related to <Reporter: John Smith>.
>>> r2.article_set.all()
[<Article: Fourth>]
# Use descriptor assignment to allocate ForeignKey. Null is legal, so
# existing members of set that are not in the assignment set are set null
>>> r2.article_set = [a2, a3]
>>> r2.article_set.all()
[<Article: Second>, <Article: Third>]
# Clear the rest of the set
>>> r.article_set.clear()
>>> r.article_set.all()
[]
>>> Article.objects.filter(reporter__isnull=True)
[<Article: First>, <Article: Fourth>]
"""}
| apache-2.0 |
cschenck/blender_sim | fluid_sim_deps/blender-2.69/2.69/python/lib/python3.3/site-packages/numpy/core/__init__.py | 1 | 1855 |
from .info import __doc__
from numpy.version import version as __version__
from . import multiarray
from . import umath
from . import _internal # for freeze programs
from . import numerictypes as nt
multiarray.set_typeDict(nt.sctypeDict)
from . import numeric
from .numeric import *
from . import fromnumeric
from .fromnumeric import *
from . import defchararray as char
from . import records as rec
from .records import *
from .memmap import *
from .defchararray import chararray
from . import scalarmath
from . import function_base
from .function_base import *
from . import machar
from .machar import *
from . import getlimits
from .getlimits import *
from . import shape_base
from .shape_base import *
del nt
from .fromnumeric import amax as max, amin as min, \
round_ as round
from .numeric import absolute as abs
# Assemble the public API re-exported by numpy.core from its submodules.
__all__ = ['char','rec','memmap']
__all__ += numeric.__all__
__all__ += fromnumeric.__all__
__all__ += rec.__all__
__all__ += ['chararray']
__all__ += function_base.__all__
__all__ += machar.__all__
__all__ += getlimits.__all__
__all__ += shape_base.__all__
from numpy.testing import Tester
# Convenience entry points: numpy.core.test() / numpy.core.bench().
test = Tester().test
bench = Tester().bench
# Make it possible so that ufuncs can be pickled
# Here are the loading and unloading functions
# The name numpy.core._ufunc_reconstruct must be
# available for unpickling to work.
def _ufunc_reconstruct(module, name):
mod = __import__(module)
return getattr(mod, name)
def _ufunc_reduce(func):
    """Pickle reducer: map *func* to (_ufunc_reconstruct, (module, name))."""
    from pickle import whichmodule
    fname = func.__name__
    # whichmodule() locates the module that exposes the function by name.
    return _ufunc_reconstruct, (whichmodule(func, fname), fname)
import sys
if sys.version_info[0] < 3:
    # Python 2 names this module copy_reg; alias it to the Python 3 name.
    # (Importing "copyreg" here would raise ImportError on Python 2.)
    import copy_reg as copyreg
else:
    import copyreg
# Register the reducer so ufunc instances can be pickled.
copyreg.pickle(ufunc, _ufunc_reduce, _ufunc_reconstruct)
# Unclutter namespace (must keep _ufunc_reconstruct for unpickling)
del copyreg
del sys
del _ufunc_reduce
| gpl-3.0 |
victorzhao/miniblink49 | third_party/WebKit/Tools/Scripts/webkitpy/common/webkit_finder.py | 39 | 5149 | # Copyright (c) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
class WebKitFinder(object):
    """Locates well-known checkout directories (WebKit root, Chromium root,
    depot_tools) relative to this module, via an injected filesystem object."""

    def __init__(self, filesystem):
        self._filesystem = filesystem
        self._dirsep = filesystem.sep
        # Snapshot the search paths once so repeated lookups are stable.
        self._sys_path = sys.path
        self._env_path = os.environ['PATH'].split(os.pathsep)
        # Lazily computed caches, filled in by the accessors below.
        self._webkit_base = None
        self._chromium_base = None
        self._depot_tools = None

    def webkit_base(self):
        """Returns the absolute path to the top of the WebKit tree.

        Raises an AssertionError if the top dir can't be determined."""
        # Note: This code somewhat duplicates the code in
        # scm.find_checkout_root(). However, that code only works if the top
        # of the SCM repository also matches the top of the WebKit tree. Some SVN users
        # (the chromium test bots, for example), might only check out subdirectories like
        # Tools/Scripts. This code will also work if there is no SCM system at all.
        if not self._webkit_base:
            # (A redundant self-assignment of self._webkit_base was removed
            # here; it had no effect.)
            module_path = self._filesystem.abspath(self._filesystem.path_to_module(self.__module__))
            tools_index = module_path.rfind('Tools')
            assert tools_index != -1, "could not find location of this checkout from %s" % module_path
            self._webkit_base = self._filesystem.normpath(module_path[0:tools_index - 1])
        return self._webkit_base

    def chromium_base(self):
        """Returns the Chromium src root, two levels above the WebKit root."""
        if not self._chromium_base:
            self._chromium_base = self._filesystem.dirname(self._filesystem.dirname(self.webkit_base()))
        return self._chromium_base

    def path_from_webkit_base(self, *comps):
        return self._filesystem.join(self.webkit_base(), *comps)

    def path_from_chromium_base(self, *comps):
        return self._filesystem.join(self.chromium_base(), *comps)

    def path_to_script(self, script_name):
        """Returns the relative path to the script from the top of the WebKit tree."""
        # This is intentionally relative in order to force callers to consider what
        # their current working directory is (and change to the top of the tree if necessary).
        return self._filesystem.join("Tools", "Scripts", script_name)

    def layout_tests_dir(self):
        return self.path_from_webkit_base('LayoutTests')

    def perf_tests_dir(self):
        return self.path_from_webkit_base('PerformanceTests')

    def depot_tools_base(self):
        """Returns the depot_tools directory, searching sys.path, $PATH, and
        then ancestor directories; None if not found."""
        if not self._depot_tools:
            # This basically duplicates src/tools/find_depot_tools.py without the side effects
            # (adding the directory to sys.path and importing breakpad).
            self._depot_tools = (self._check_paths_for_depot_tools(self._sys_path) or
                                 self._check_paths_for_depot_tools(self._env_path) or
                                 self._check_upward_for_depot_tools())
        return self._depot_tools

    def _check_paths_for_depot_tools(self, paths):
        """Returns the first entry in *paths* ending in 'depot_tools', or None."""
        for path in paths:
            if path.rstrip(self._dirsep).endswith('depot_tools'):
                return path
        return None

    def _check_upward_for_depot_tools(self):
        """Walks up from the WebKit root looking for a sibling depot_tools
        checkout (identified by its pylint.py); returns None at the root."""
        fs = self._filesystem
        prev_dir = ''
        current_dir = fs.dirname(self._webkit_base)
        while current_dir != prev_dir:
            if fs.exists(fs.join(current_dir, 'depot_tools', 'pylint.py')):
                return fs.join(current_dir, 'depot_tools')
            prev_dir = current_dir
            current_dir = fs.dirname(current_dir)
        # Reached the filesystem root without finding it.
        return None

    def path_from_depot_tools_base(self, *comps):
        return self._filesystem.join(self.depot_tools_base(), *comps)
| gpl-3.0 |
amyvmiwei/kbengine | kbe/src/lib/python/Lib/unittest/test/test_break.py | 81 | 9812 | import gc
import io
import os
import sys
import signal
import weakref
import unittest
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform =="win32", "Test cannot run on Windows")
@unittest.skipIf(sys.platform == 'freebsd6', "Test kills regrtest on freebsd6 "
                 "if threads have been used")
class TestBreak(unittest.TestCase):
    """Tests for unittest's SIGINT (Ctrl-C / "break") handling machinery.

    Each test installs unittest's interrupt handler and delivers a real
    SIGINT to the current process with os.kill, so the whole class is
    skipped on platforms where that is unavailable or unreliable.
    Subclasses re-run the suite with a different pre-existing SIGINT
    handler (see int_handler).
    """
    # SIGINT handler installed in setUp before each test; None keeps whatever
    # handler is already in place.
    int_handler = None

    def setUp(self):
        # Remember the current SIGINT handler so tearDown can restore it.
        self._default_handler = signal.getsignal(signal.SIGINT)
        if self.int_handler is not None:
            signal.signal(signal.SIGINT, self.int_handler)

    def tearDown(self):
        # Restore the handler and wipe unittest's module-level signal state so
        # results registered by one test cannot leak into the next.
        signal.signal(signal.SIGINT, self._default_handler)
        unittest.signals._results = weakref.WeakKeyDictionary()
        unittest.signals._interrupt_handler = None

    def testInstallHandler(self):
        """installHandler() replaces the SIGINT handler and absorbs one SIGINT."""
        default_handler = signal.getsignal(signal.SIGINT)
        unittest.installHandler()
        self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)

        try:
            pid = os.getpid()
            os.kill(pid, signal.SIGINT)
        except KeyboardInterrupt:
            self.fail("KeyboardInterrupt not handled")

        self.assertTrue(unittest.signals._interrupt_handler.called)

    def testRegisterResult(self):
        """registerResult() stores the result in unittest.signals._results."""
        result = unittest.TestResult()
        unittest.registerResult(result)
        for ref in unittest.signals._results:
            if ref is result:
                break
            elif ref is not result:
                self.fail("odd object in result set")
        else:
            self.fail("result not found")

    def testInterruptCaught(self):
        """A single SIGINT is swallowed and flags every registered result to stop."""
        default_handler = signal.getsignal(signal.SIGINT)

        result = unittest.TestResult()
        unittest.installHandler()
        unittest.registerResult(result)

        self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)

        def test(result):
            pid = os.getpid()
            os.kill(pid, signal.SIGINT)
            result.breakCaught = True
            self.assertTrue(result.shouldStop)

        try:
            test(result)
        except KeyboardInterrupt:
            self.fail("KeyboardInterrupt not handled")
        self.assertTrue(result.breakCaught)

    def testSecondInterrupt(self):
        """A second SIGINT, after one is already pending, raises KeyboardInterrupt."""
        # Can't use skipIf decorator because the signal handler may have
        # been changed after defining this method.
        if signal.getsignal(signal.SIGINT) == signal.SIG_IGN:
            self.skipTest("test requires SIGINT to not be ignored")
        result = unittest.TestResult()
        unittest.installHandler()
        unittest.registerResult(result)

        def test(result):
            pid = os.getpid()
            os.kill(pid, signal.SIGINT)
            result.breakCaught = True
            self.assertTrue(result.shouldStop)
            os.kill(pid, signal.SIGINT)
            self.fail("Second KeyboardInterrupt not raised")

        try:
            test(result)
        except KeyboardInterrupt:
            pass
        else:
            self.fail("Second KeyboardInterrupt not raised")
        self.assertTrue(result.breakCaught)

    def testTwoResults(self):
        """A SIGINT stops every registered result, but not unregistered ones."""
        unittest.installHandler()

        result = unittest.TestResult()
        unittest.registerResult(result)
        new_handler = signal.getsignal(signal.SIGINT)

        result2 = unittest.TestResult()
        unittest.registerResult(result2)
        # Registering a second result must not reinstall the handler.
        self.assertEqual(signal.getsignal(signal.SIGINT), new_handler)

        result3 = unittest.TestResult()

        def test(result):
            pid = os.getpid()
            os.kill(pid, signal.SIGINT)

        try:
            test(result)
        except KeyboardInterrupt:
            self.fail("KeyboardInterrupt not handled")

        self.assertTrue(result.shouldStop)
        self.assertTrue(result2.shouldStop)
        self.assertFalse(result3.shouldStop)

    def testHandlerReplacedButCalled(self):
        """If our handler is replaced but still delegated to, it must raise at once."""
        # Can't use skipIf decorator because the signal handler may have
        # been changed after defining this method.
        if signal.getsignal(signal.SIGINT) == signal.SIG_IGN:
            self.skipTest("test requires SIGINT to not be ignored")
        # If our handler has been replaced (is no longer installed) but is
        # called by the *new* handler, then it isn't safe to delay the
        # SIGINT and we should immediately delegate to the default handler
        unittest.installHandler()

        handler = signal.getsignal(signal.SIGINT)
        def new_handler(frame, signum):
            # NOTE(review): parameter order is (frame, signum), the reverse of
            # the documented (signum, frame) handler signature; harmless here
            # since both values are only forwarded unchanged -- confirm intended.
            handler(frame, signum)
        signal.signal(signal.SIGINT, new_handler)

        try:
            pid = os.getpid()
            os.kill(pid, signal.SIGINT)
        except KeyboardInterrupt:
            pass
        else:
            self.fail("replaced but delegated handler doesn't raise interrupt")

    def testRunner(self):
        # Creating a TextTestRunner with the appropriate argument should
        # register the TextTestResult it creates
        runner = unittest.TextTestRunner(stream=io.StringIO())

        result = runner.run(unittest.TestSuite())
        self.assertIn(result, unittest.signals._results)

    def testWeakReferences(self):
        # Calling registerResult on a result should not keep it alive
        result = unittest.TestResult()
        unittest.registerResult(result)

        ref = weakref.ref(result)
        del result

        # For non-reference counting implementations
        gc.collect();gc.collect()
        self.assertIsNone(ref())

    def testRemoveResult(self):
        """removeResult() unregisters a result, so a SIGINT no longer stops it."""
        result = unittest.TestResult()
        unittest.registerResult(result)

        unittest.installHandler()
        self.assertTrue(unittest.removeResult(result))

        # Should this raise an error instead?
        self.assertFalse(unittest.removeResult(unittest.TestResult()))

        try:
            pid = os.getpid()
            os.kill(pid, signal.SIGINT)
        except KeyboardInterrupt:
            pass

        self.assertFalse(result.shouldStop)

    def testMainInstallsHandler(self):
        """TestProgram.runTests installs the break handler only when catchbreak is set."""
        failfast = object()
        test = object()
        verbosity = object()
        result = object()
        default_handler = signal.getsignal(signal.SIGINT)

        class FakeRunner(object):
            # Class-level lists record constructor and run() calls across
            # both Program invocations below.
            initArgs = []
            runArgs = []
            def __init__(self, *args, **kwargs):
                self.initArgs.append((args, kwargs))
            def run(self, test):
                self.runArgs.append(test)
                return result

        class Program(unittest.TestProgram):
            # Bypass TestProgram.__init__ entirely; set just the attributes
            # runTests() reads.
            def __init__(self, catchbreak):
                self.exit = False
                self.verbosity = verbosity
                self.failfast = failfast
                self.catchbreak = catchbreak
                self.testRunner = FakeRunner
                self.test = test
                self.result = None

        # catchbreak=False: runner invoked, SIGINT handler untouched.
        p = Program(False)
        p.runTests()

        self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
                                                     'verbosity': verbosity,
                                                     'failfast': failfast,
                                                     'warnings': None})])
        self.assertEqual(FakeRunner.runArgs, [test])
        self.assertEqual(p.result, result)
        self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)

        # catchbreak=True: same runner behaviour, but the handler is replaced.
        FakeRunner.initArgs = []
        FakeRunner.runArgs = []
        p = Program(True)
        p.runTests()

        self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
                                                     'verbosity': verbosity,
                                                     'failfast': failfast,
                                                     'warnings': None})])
        self.assertEqual(FakeRunner.runArgs, [test])
        self.assertEqual(p.result, result)
        self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)

    def testRemoveHandler(self):
        """removeHandler() restores the previous SIGINT handler and is idempotent."""
        default_handler = signal.getsignal(signal.SIGINT)
        unittest.installHandler()
        unittest.removeHandler()
        self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)

        # check that calling removeHandler multiple times has no ill-effect
        unittest.removeHandler()
        self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)

    def testRemoveHandlerAsDecorator(self):
        """Used as a decorator, removeHandler restores the handler only inside the call."""
        default_handler = signal.getsignal(signal.SIGINT)
        unittest.installHandler()

        @unittest.removeHandler
        def test():
            self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)

        test()
        self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform =="win32", "Test cannot run on Windows")
@unittest.skipIf(sys.platform == 'freebsd6', "Test kills regrtest on freebsd6 "
                 "if threads have been used")
class TestBreakDefaultIntHandler(TestBreak):
    """Re-run the TestBreak suite with Python's default SIGINT handler installed."""
    int_handler = signal.default_int_handler
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform =="win32", "Test cannot run on Windows")
@unittest.skipIf(sys.platform == 'freebsd6', "Test kills regrtest on freebsd6 "
                 "if threads have been used")
class TestBreakSignalIgnored(TestBreak):
    """Re-run the TestBreak suite with SIGINT ignored beforehand (SIG_IGN)."""
    int_handler = signal.SIG_IGN
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform =="win32", "Test cannot run on Windows")
@unittest.skipIf(sys.platform == 'freebsd6', "Test kills regrtest on freebsd6 "
                 "if threads have been used")
class TestBreakSignalDefault(TestBreak):
    """Re-run the TestBreak suite with the OS-default SIGINT disposition (SIG_DFL)."""
    int_handler = signal.SIG_DFL
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| lgpl-3.0 |
Instagram/django | tests/regressiontests/custom_columns_regress/tests.py | 91 | 2919 | from django.test import TestCase
from django.core.exceptions import FieldError
from models import Author, Article
def pks(objects):
    """Return the primary keys of `objects`, so object lists can be compared."""
    keys = []
    for obj in objects:
        keys.append(obj.pk)
    return keys
class CustomColumnRegression(TestCase):
def assertRaisesMessage(self, exc, msg, func, *args, **kwargs):
try:
func(*args, **kwargs)
except Exception, e:
self.assertEqual(msg, str(e))
self.assertTrue(isinstance(e, exc), "Expected %s, got %s" % (exc, type(e)))
def setUp(self):
self.a1 = Author.objects.create(first_name='John', last_name='Smith')
self.a2 = Author.objects.create(first_name='Peter', last_name='Jones')
self.authors = [self.a1, self.a2]
def test_basic_creation(self):
art = Article(headline='Django lets you build Web apps easily', primary_author=self.a1)
art.save()
art.authors = [self.a1, self.a2]
def test_author_querying(self):
self.assertQuerysetEqual(
Author.objects.all().order_by('last_name'),
['<Author: Peter Jones>', '<Author: John Smith>']
)
def test_author_filtering(self):
self.assertQuerysetEqual(
Author.objects.filter(first_name__exact='John'),
['<Author: John Smith>']
)
def test_author_get(self):
self.assertEqual(self.a1, Author.objects.get(first_name__exact='John'))
def test_filter_on_nonexistant_field(self):
self.assertRaisesMessage(
FieldError,
"Cannot resolve keyword 'firstname' into field. Choices are: Author_ID, article, first_name, last_name, primary_set",
Author.objects.filter,
firstname__exact='John'
)
def test_author_get_attributes(self):
a = Author.objects.get(last_name__exact='Smith')
self.assertEqual('John', a.first_name)
self.assertEqual('Smith', a.last_name)
self.assertRaisesMessage(
AttributeError,
"'Author' object has no attribute 'firstname'",
getattr,
a, 'firstname'
)
self.assertRaisesMessage(
AttributeError,
"'Author' object has no attribute 'last'",
getattr,
a, 'last'
)
def test_m2m_table(self):
art = Article.objects.create(headline='Django lets you build Web apps easily', primary_author=self.a1)
art.authors = self.authors
self.assertQuerysetEqual(
art.authors.all().order_by('last_name'),
['<Author: Peter Jones>', '<Author: John Smith>']
)
self.assertQuerysetEqual(
self.a1.article_set.all(),
['<Article: Django lets you build Web apps easily>']
)
self.assertQuerysetEqual(
art.authors.filter(last_name='Jones'),
['<Author: Peter Jones>']
)
| bsd-3-clause |
google/nixysa | third_party/ply-3.1/doc/makedoc.py | 177 | 5862 | #!/usr/local/bin/python
###############################################################################
# Takes a chapter as input and adds internal links and numbering to all
# of the H1, H2, H3, H4 and H5 sections.
#
# Every heading HTML tag (H1, H2 etc) is given an autogenerated name to link
# to. However, if the name is not an autogenerated name from a previous run,
# it will be kept. If it is autogenerated, it might change on subsequent runs
# of this program. Thus if you want to create links to one of the headings,
# then change the heading link name to something that does not look like an
# autogenerated link name.
###############################################################################
import sys
import re
import string
###############################################################################
# Functions
###############################################################################
# Regexs for <a name="..."></a>
alink = re.compile(r"<a *name *= *\"(.*)\"></a>", re.IGNORECASE)
heading = re.compile(r"(_nn\d)", re.IGNORECASE)

def getheadingname(m):
    """Return the anchor name to use for the heading matched by `m`.

    ``m.group(1)`` holds any existing ``<a name="..."></a>`` tag (or None).
    A hand-written anchor name is preserved; a missing anchor, or one that
    looks autogenerated (starts with ``_nn<digit>``), is replaced with a
    fresh ``<filenamebase>_nn<nameindex>`` name built from the module
    globals ``filenamebase`` and ``nameindex``.
    """
    autogeneratedheading = True
    if m.group(1) is not None:
        amatch = alink.match(m.group(1))
        if amatch:
            # A non-autogenerated heading - keep it
            headingname = amatch.group(1)
            autogeneratedheading = heading.match(headingname)
    if autogeneratedheading:
        # The heading name was either non-existent or autogenerated,
        # We can create a new heading / change the existing heading
        headingname = "%s_nn%d" % (filenamebase, nameindex)
    return headingname
###############################################################################
# Main program
###############################################################################
# Command line: exactly one argument, the HTML chapter to renumber in place.
if len(sys.argv) != 2:
    print "usage: makedoc.py filename"
    sys.exit(1)

filename = sys.argv[1]
filenamebase = string.split(filename,".")[0]

# Per-level section counters, plus the anchor counter shared (via module
# globals) with getheadingname().
section = 0
subsection = 0
subsubsection = 0
subsubsubsection = 0
nameindex = 0

name = ""

# Regexs for <h1>,... <h5> sections
h1 = re.compile(r".*?<H1>(<a.*a>)*[\d\.\s]*(.*?)</H1>", re.IGNORECASE)
h2 = re.compile(r".*?<H2>(<a.*a>)*[\d\.\s]*(.*?)</H2>", re.IGNORECASE)
h3 = re.compile(r".*?<H3>(<a.*a>)*[\d\.\s]*(.*?)</H3>", re.IGNORECASE)
h4 = re.compile(r".*?<H4>(<a.*a>)*[\d\.\s]*(.*?)</H4>", re.IGNORECASE)
h5 = re.compile(r".*?<H5>(<a.*a>)*[\d\.\s]*(.*?)</H5>", re.IGNORECASE)

data = open(filename).read()             # Read data
open(filename+".bak","w").write(data)    # Make backup

lines = data.splitlines()
result = [ ]     # This is the result of postprocessing the file
index = "<!-- INDEX -->\n<div class=\"sectiontoc\">\n"   # index contains the index for adding at the top of the file. Also printed to stdout.

# skip: inside a previously generated <!-- INDEX --> block (to be replaced);
# skipspace: swallow blank lines directly after a heading, re-emitting two.
skip = 0
skipspace = 0

for s in lines:
    # Replace an existing index block with the @INDEX@ placeholder.
    if s == "<!-- INDEX -->":
        if not skip:
            result.append("@INDEX@")
            skip = 1
        else:
            skip = 0
        continue;
    if skip:
        continue

    if not s and skipspace:
        continue

    if skipspace:
        result.append("")
        result.append("")
        skipspace = 0

    # <H2>: top-level numbered section; close any deeper open lists.
    m = h2.match(s)
    if m:
        prevheadingtext = m.group(2)
        nameindex += 1
        section += 1
        headingname = getheadingname(m)
        result.append("""<H2><a name="%s"></a>%d. %s</H2>""" % (headingname,section, prevheadingtext))

        if subsubsubsection:
            index += "</ul>\n"
        if subsubsection:
            index += "</ul>\n"
        if subsection:
            index += "</ul>\n"
        if section == 1:
            index += "<ul>\n"

        index += """<li><a href="#%s">%s</a>\n""" % (headingname,prevheadingtext)
        subsection = 0
        subsubsection = 0
        subsubsubsection = 0
        skipspace = 1
        continue

    # <H3>: numbered d.d subsection.
    m = h3.match(s)
    if m:
        prevheadingtext = m.group(2)
        nameindex += 1
        subsection += 1
        headingname = getheadingname(m)
        result.append("""<H3><a name="%s"></a>%d.%d %s</H3>""" % (headingname,section, subsection, prevheadingtext))

        if subsubsubsection:
            index += "</ul>\n"
        if subsubsection:
            index += "</ul>\n"
        if subsection == 1:
            index += "<ul>\n"

        index += """<li><a href="#%s">%s</a>\n""" % (headingname,prevheadingtext)
        subsubsection = 0
        skipspace = 1
        continue

    # <H4>: numbered d.d.d subsubsection.
    m = h4.match(s)
    if m:
        prevheadingtext = m.group(2)
        nameindex += 1
        subsubsection += 1
        subsubsubsection = 0
        headingname = getheadingname(m)
        result.append("""<H4><a name="%s"></a>%d.%d.%d %s</H4>""" % (headingname,section, subsection, subsubsection, prevheadingtext))

        if subsubsubsection:
            index += "</ul>\n"
        if subsubsection == 1:
            index += "<ul>\n"

        index += """<li><a href="#%s">%s</a>\n""" % (headingname,prevheadingtext)
        skipspace = 1
        continue

    # <H5>: numbered d.d.d.d subsubsubsection.
    m = h5.match(s)
    if m:
        prevheadingtext = m.group(2)
        nameindex += 1
        subsubsubsection += 1
        headingname = getheadingname(m)
        result.append("""<H5><a name="%s"></a>%d.%d.%d.%d %s</H5>""" % (headingname,section, subsection, subsubsection, subsubsubsection, prevheadingtext))

        if subsubsubsection == 1:
            index += "<ul>\n"

        index += """<li><a href="#%s">%s</a>\n""" % (headingname,prevheadingtext)
        skipspace = 1
        continue

    # Not a heading: pass the line through unchanged.
    result.append(s)

# Close any index lists still open at end of document.
if subsubsubsection:
    index += "</ul>\n"
if subsubsection:
    index += "</ul>\n"
if subsection:
    index += "</ul>\n"
if section:
    index += "</ul>\n"

index += "</div>\n<!-- INDEX -->\n"

data = "\n".join(result)
data = data.replace("@INDEX@",index) + "\n";

# Write the file back out
open(filename,"w").write(data)
| apache-2.0 |
hkawasaki/kawasaki-aio8-1 | common/djangoapps/student/tests/factories.py | 25 | 3819 | """Provides factories for student models."""
from student.models import (User, UserProfile, Registration,
CourseEnrollmentAllowed, CourseEnrollment,
PendingEmailChange, UserStanding,
)
from course_modes.models import CourseMode
from django.contrib.auth.models import Group, AnonymousUser
from datetime import datetime
import factory
from factory.django import DjangoModelFactory
from uuid import uuid4
from pytz import UTC
# Factories don't have __init__ methods, and are self documenting
# pylint: disable=W0232, C0111
class GroupFactory(DjangoModelFactory):
    """Factory for django.contrib.auth Group objects (get-or-create keyed on name)."""
    FACTORY_FOR = Group
    FACTORY_DJANGO_GET_OR_CREATE = ('name', )

    name = factory.Sequence(u'group{0}'.format)
class UserStandingFactory(DjangoModelFactory):
    """Factory for UserStanding; all fields must be supplied by the caller."""
    FACTORY_FOR = UserStanding

    user = None
    account_status = None
    changed_by = None
class UserProfileFactory(DjangoModelFactory):
    """Factory for UserProfile; normally built via UserFactory's `profile` hook."""
    FACTORY_FOR = UserProfile
    FACTORY_DJANGO_GET_OR_CREATE = ('user', )

    user = None
    # Display name is derived from the owning user's first/last name.
    name = factory.LazyAttribute(u'{0.user.first_name} {0.user.last_name}'.format)
    level_of_education = None
    gender = u'm'
    mailing_address = None
    goals = u'World domination'
class CourseModeFactory(DjangoModelFactory):
    """Factory for CourseMode objects with free honor-code defaults."""
    FACTORY_FOR = CourseMode

    course_id = None
    # BUG FIX: a stray trailing comma previously made this attribute the
    # 1-tuple (u'Honor Code',) instead of the intended display string.
    mode_display_name = u'Honor Code'
    mode_slug = 'honor'
    min_price = 0
    suggested_prices = ''
    currency = 'usd'
class RegistrationFactory(DjangoModelFactory):
    """Factory for account-activation Registration records."""
    FACTORY_FOR = Registration

    user = None
    # NOTE(review): this expression is evaluated once at class-definition
    # time, so every Registration built by this factory shares the same
    # activation_key -- confirm that is intended (a lazy/sequence value
    # would give each instance its own key).
    activation_key = uuid4().hex.decode('ascii')
class UserFactory(DjangoModelFactory):
    """Factory for Django User objects with an attached UserProfile.

    Extra hooks: pass profile__<field>=... to customize the generated
    profile, and groups=<name or list of names> to add the user to groups.
    """
    FACTORY_FOR = User
    FACTORY_DJANGO_GET_OR_CREATE = ('email', 'username')

    username = factory.Sequence(u'robot{0}'.format)
    email = factory.Sequence(u'robot+test+{0}@edx.org'.format)
    # Stored hashed via User.set_password after generation.
    password = factory.PostGenerationMethodCall('set_password', 'test')
    first_name = factory.Sequence(u'Robot{0}'.format)
    last_name = 'Test'
    is_staff = False
    is_active = True
    is_superuser = False
    last_login = datetime(2012, 1, 1, tzinfo=UTC)
    date_joined = datetime(2011, 1, 1, tzinfo=UTC)

    @factory.post_generation
    def profile(obj, create, extracted, **kwargs):  # pylint: disable=unused-argument, no-self-argument
        # Create the related UserProfile once the User row exists; any
        # profile__* kwargs are forwarded to UserProfileFactory.  Building
        # (not creating) with profile kwargs is an error: the profile needs
        # a saved user.
        if create:
            obj.save()
            return UserProfileFactory.create(user=obj, **kwargs)
        elif kwargs:
            raise Exception("Cannot build a user profile without saving the user")
        else:
            return None

    @factory.post_generation
    def groups(self, create, extracted, **kwargs):
        # `groups` may be a single group name or a list of names; each group
        # is generated (created or built to match `create`) and attached.
        if extracted is None:
            return

        if isinstance(extracted, basestring):
            extracted = [extracted]

        for group_name in extracted:
            self.groups.add(GroupFactory.simple_generate(create, name=group_name))
class AnonymousUserFactory(factory.Factory):
    """Factory for Django AnonymousUser instances (no database row involved)."""
    FACTORY_FOR = AnonymousUser
class AdminFactory(UserFactory):
    """UserFactory variant that marks the user as staff."""
    is_staff = True
class CourseEnrollmentFactory(DjangoModelFactory):
    """Factory for CourseEnrollment, enrolling a fresh user in a toy course."""
    FACTORY_FOR = CourseEnrollment

    user = factory.SubFactory(UserFactory)
    course_id = u'edX/toy/2012_Fall'
class CourseEnrollmentAllowedFactory(DjangoModelFactory):
    """Factory for CourseEnrollmentAllowed (pre-authorized enrollment by email)."""
    FACTORY_FOR = CourseEnrollmentAllowed

    email = 'test@edx.org'
    course_id = 'edX/test/2012_Fall'
class PendingEmailChangeFactory(DjangoModelFactory):
    """Factory for PendingEmailChange objects

    user: generated by UserFactory
    new_email: sequence of new+email+{}@edx.org
    activation_key: sequence of integers, padded to 30 characters
    """
    FACTORY_FOR = PendingEmailChange

    user = factory.SubFactory(UserFactory)
    new_email = factory.Sequence(u'new+email+{0}@edx.org'.format)
    activation_key = factory.Sequence(u'{:0<30d}'.format)
| agpl-3.0 |
fangxingli/hue | desktop/core/ext-py/Django-1.6.10/tests/model_inheritance_same_model_name/tests.py | 58 | 1517 | from __future__ import absolute_import
from django.test import TestCase
from model_inheritance.models import Title
class InheritanceSameModelNameTests(TestCase):
    """Two apps both define a model named Copy related to Title; each must get
    its own app_label-qualified reverse accessor on Title."""

    def setUp(self):
        # The Title model has distinct accessors for both
        # model_inheritance.Copy and model_inheritance_same_model_name.Copy
        # models.
        self.title = Title.objects.create(title='Lorem Ipsum')

    def test_inheritance_related_name(self):
        """The model_inheritance app's Copy is reachable via its own accessor."""
        from model_inheritance.models import Copy
        self.assertEqual(
            self.title.attached_model_inheritance_copy_set.create(
                content='Save $ on V1agr@',
                url='http://v1agra.com/',
                title='V1agra is spam',
            ), Copy.objects.get(content='Save $ on V1agr@'))

    def test_inheritance_with_same_model_name(self):
        """The same-named Copy from the other app gets a distinct accessor."""
        from model_inheritance_same_model_name.models import Copy
        self.assertEqual(
            self.title.attached_model_inheritance_same_model_name_copy_set.create(
                content='The Web framework for perfectionists with deadlines.',
                url='http://www.djangoproject.com/',
                title='Django Rocks'
            ), Copy.objects.get(content='The Web framework for perfectionists with deadlines.'))

    def test_related_name_attribute_exists(self):
        # The raw, un-interpolated '%(app_label)s_%(class)s' template string
        # must not itself exist as an attribute on the instance.
        self.assertEqual(hasattr(self.title, 'attached_%(app_label)s_%(class)s_set'), False)
| apache-2.0 |
ioam/svn-history | imagen/patternfn.py | 1 | 10714 | """
Family of two-dimensional functions indexed by x and y.
All functions are written to be valid both for scalar x and y, and for
numpy arrays of x and y (in which case the result is also an array);
the functions therefore have the same mathematical behaviour as numpy.
$Id$
"""
from __future__ import with_statement
__version__='$Revision$'
from math import pi
from numpy.oldnumeric import where,maximum,cos,sqrt,divide,greater_equal,bitwise_xor,exp
from numpy.oldnumeric import arcsin,logical_and,logical_or,less,minimum
from numpy import seterr, log
from contextlib import contextmanager
# CEBALERT: abs() is used in various places in this file, but I don't
# see it on the list of numpy imports. I guess we're mistakenly not
# using numpy's abs...
@contextmanager
def float_error_ignore():
    """
    Many of the functions in this module use Gaussian smoothing, which
    is based on a calculation like exp(divide(x*x,sigma)). When sigma
    is zero the value of this expression should be zero at all points
    in the plane, because such a Gaussian is infinitely small.
    Obtaining the correct answer using finite-precision floating-point
    array computations requires allowing infinite values to be
    returned from divide(), and allowing exp() to underflow silently
    to zero when given an infinite value. In numpy this is achieved
    by using its seterr() function to disable divide-by-zero and
    underflow warnings temporarily while these values are being
    computed.
    """
    oldsettings = seterr(divide='ignore', under='ignore')
    try:
        yield
    finally:
        # BUG FIX: restore the caller's numpy error settings even if the
        # managed block raises; previously an exception left the ignore
        # settings permanently in effect.
        seterr(**oldsettings)
def gaussian(x, y, xsigma, ysigma):
    """
    Two-dimensional oriented Gaussian pattern (i.e., 2D version of a
    bell curve, like a normal distribution but not necessarily summing
    to 1.0).
    """
    if xsigma == 0.0 or ysigma == 0.0:
        # A Gaussian with zero width is infinitely small: zero everywhere.
        return x * 0.0

    with float_error_ignore():
        scaled_x = divide(x, xsigma)
        scaled_y = divide(y, ysigma)
        return exp(-0.5 * scaled_x * scaled_x + -0.5 * scaled_y * scaled_y)
def log_gaussian(x, y, x_sigma, y_sigma, mu):
    """
    Two-dimensional oriented Log Gaussian pattern (i.e., 2D version of a
    bell curve with an independent, movable peak). Much like a normal
    distribution, but not necessarily placing the peak above the center,
    and not necessarily summing to 1.0).
    """
    if x_sigma == 0.0 or y_sigma == 0.0:
        return x * 0.0

    with float_error_ignore():
        u = divide(log(x) - mu, x_sigma * x_sigma)
        v = divide(log(y) - mu, y_sigma * y_sigma)
        return exp(-0.5 * u * u + -0.5 * v * v)
def sigmoid(axis, slope):
    """
    Sigmoid dividing axis into a positive and negative half,
    with a smoothly sloping transition between them (controlled by the slope).

    At default rotation, axis refers to the vertical (y) axis.
    """
    with float_error_ignore():
        gain = -2.0 * slope * axis
        return (2.0 / (1.0 + exp(gain))) - 1.0
def exponential(x, y, xscale, yscale):
    """
    Two-dimensional oriented exponential decay pattern, falling off with
    the scaled radial distance from the origin.
    """
    if xscale == 0.0 or yscale == 0.0:
        return x * 0.0

    with float_error_ignore():
        u = divide(x, xscale)
        v = divide(y, yscale)
        return exp(-sqrt(u * u + v * v))
def gabor(x, y, xsigma, ysigma, frequency, phase):
    """
    Gabor pattern (sine grating multiplied by a circular Gaussian).
    """
    if xsigma == 0.0 or ysigma == 0.0:
        return x * 0.0

    with float_error_ignore():
        u = divide(x, xsigma)
        v = divide(y, ysigma)
        envelope = exp(-0.5 * u * u + -0.5 * v * v)
    # Modulate the Gaussian envelope by a sinusoidal grating along y.
    return envelope * 0.5 * cos(2 * pi * frequency * y + phase)
# JABHACKALERT: Shouldn't this use 'size' instead of 'thickness',
# for consistency with the other patterns? Right now, it has a
# size parameter and ignores it, which is very confusing. I guess
# it's called thickness to match ring, but matching gaussian and disk
# is probably more important.
def line(y, thickness, gaussian_width):
    """
    Infinite-length line with a solid central region, then Gaussian fall-off
    at the edges.
    """
    # Signed distance from the line's edge: negative inside the solid band.
    edge_offset = abs(y) - thickness / 2.0
    variance = gaussian_width * gaussian_width
    if variance == 0.0:
        falloff = y * 0.0
    else:
        with float_error_ignore():
            falloff = exp(divide(-edge_offset * edge_offset, 2 * variance))
    return where(edge_offset <= 0, 1.0, falloff)
def disk(x, y, height, gaussian_width):
    """
    Circular disk with Gaussian fall-off after the solid central region.
    """
    radius = height / 2.0
    r = sqrt(x ** 2 + y ** 2)
    # Positive outside the solid disk, negative/zero inside it.
    overshoot = r - radius
    variance = gaussian_width * gaussian_width
    if variance == 0.0:
        falloff = x * 0.0
    else:
        with float_error_ignore():
            falloff = exp(divide(-overshoot * overshoot, 2 * variance))
    return where(overshoot <= 0, 1.0, falloff)
def ring(x, y, height, thickness, gaussian_width):
    """
    Circular ring (annulus) with Gaussian fall-off after the solid
    ring-shaped region.
    """
    radius = height / 2.0
    half_width = thickness / 2.0
    r = sqrt(x ** 2 + y ** 2)
    outside_outer = r - radius - half_width
    inside_inner = radius - half_width - r
    in_inner_disk = greater_equal(inside_inner, 0.0)
    beyond_outer_disk = greater_equal(outside_outer, 0.0)
    # 1.0 where the point is neither inside the inner disk nor beyond the
    # outer disk (i.e. inside the solid band), 0.0 elsewhere.
    solid = 1.0 - bitwise_xor(in_inner_disk, beyond_outer_disk)
    variance = gaussian_width * gaussian_width
    if variance == 0.0:
        inner_falloff = x * 0.0
        outer_falloff = x * 0.0
    else:
        with float_error_ignore():
            inner_falloff = exp(divide(-inside_inner * inside_inner, 2.0 * variance))
            outer_falloff = exp(divide(-outside_outer * outside_outer, 2.0 * variance))
    return maximum(inner_falloff, maximum(outer_falloff, solid))
def smooth_rectangle(x, y, rec_w, rec_h, gaussian_width_x, gaussian_width_y):
    """
    Rectangle with a solid central region, then Gaussian fall-off at the edges.
    """
    # Signed per-axis distance beyond the rectangle's half-extent.
    dist_x = abs(x) - rec_w / 2.0
    dist_y = abs(y) - rec_h / 2.0
    inside_x = less(dist_x, 0.0)
    inside_y = less(dist_y, 0.0)
    var_x = gaussian_width_x * gaussian_width_x
    var_y = gaussian_width_y * gaussian_width_y
    with float_error_ignore():
        if var_x == 0.0:
            falloff_x = x * 0.0
        else:
            falloff_x = exp(divide(-dist_x * dist_x, 2 * var_x))
        if var_y == 0.0:
            falloff_y = y * 0.0
        else:
            falloff_y = exp(divide(-dist_y * dist_y, 2 * var_y))
    # Per axis: solid inside, Gaussian outside; combine with minimum so both
    # axes must be "on" for the pattern to be on.
    return minimum(maximum(inside_x, falloff_x), maximum(inside_y, falloff_y))
def arc_by_radian(x, y, height, radian_range, thickness, gaussian_width):
    """
    Radial arc with Gaussian fall-off after the solid ring-shaped
    region with the given thickness, with shape specified by the
    (start,end) radian_range.
    """
    # Create a circular ring (copied from the ring function)
    radius = height/2.0
    half_thickness = thickness/2.0

    distance_from_origin = sqrt(x**2+y**2)
    distance_outside_outer_disk = distance_from_origin - radius - half_thickness
    distance_inside_inner_disk = radius - half_thickness - distance_from_origin

    # 1.0 inside the solid band (neither inside the inner disk nor beyond
    # the outer disk), 0.0 elsewhere.
    ring = 1.0-bitwise_xor(greater_equal(distance_inside_inner_disk,0.0),greater_equal(distance_outside_outer_disk,0.0))

    sigmasq = gaussian_width*gaussian_width

    if sigmasq==0.0:
        inner_falloff = x*0.0
        outer_falloff = x*0.0
    else:
        with float_error_ignore():
            inner_falloff = exp(divide(-distance_inside_inner_disk*distance_inside_inner_disk, 2.0*sigmasq))
            outer_falloff = exp(divide(-distance_outside_outer_disk*distance_outside_outer_disk, 2.0*sigmasq))

    output_ring = maximum(inner_falloff,maximum(outer_falloff,ring))

    # Calculate radians (in 4 phases) and cut according to the set range)

    # RZHACKALERT:
    # Function float_error_ignore() cannot catch the exception when
    # both dividend and divisor are 0.0, and when only divisor is 0.0
    # it returns 'Inf' rather than 0.0. In x, y and
    # distance_from_origin, only one point in distance_from_origin can
    # be 0.0 (circle center) and in this point x and y must be 0.0 as
    # well. So here is a hack to avoid the 'invalid value encountered
    # in divide' error by turning 0.0 to 1e-5 in distance_from_origin.
    distance_from_origin += where(distance_from_origin == 0.0, 1e-5, 0)

    with float_error_ignore():
        sines = divide(y, distance_from_origin)
        cosines = divide(x, distance_from_origin)
        arcsines = arcsin(sines)

    # Convert the principal-value arcsin into a full-circle angle, one
    # quadrant (sign combination of sine/cosine) at a time, then sum the
    # mutually exclusive pieces.
    phase_1 = where(logical_and(sines >= 0, cosines >= 0), 2*pi-arcsines, 0)
    phase_2 = where(logical_and(sines >= 0, cosines < 0), pi+arcsines, 0)
    phase_3 = where(logical_and(sines < 0, cosines < 0), pi+arcsines, 0)
    phase_4 = where(logical_and(sines < 0, cosines >= 0), -arcsines, 0)
    arcsines = phase_1 + phase_2 + phase_3 + phase_4

    # Keep the ring only inside the requested radian range; a "wrapped"
    # range (start > end) selects the union of the two pieces across 0.
    if radian_range[0] <= radian_range[1]:
        return where(logical_and(arcsines >= radian_range[0], arcsines <= radian_range[1]),
                     output_ring, 0.0)
    else:
        return where(logical_or(arcsines >= radian_range[0], arcsines <= radian_range[1]),
                     output_ring, 0.0)
def arc_by_center(x, y, arc_box, constant_length, thickness, gaussian_width):
    """
    Arc with Gaussian fall-off after the solid ring-shaped region and specified
    by point of tangency (x and y) and arc width and height.

    This function calculates the start and end radian from the given width and
    height, and then calls arc_by_radian function to draw the curve.
    """
    arc_w=arc_box[0]
    arc_h=abs(arc_box[1])

    if arc_w==0.0: # arc_w=0, don't draw anything
        radius=0.0
        angles=(0.0,0.0)
    elif arc_h==0.0: # draw a horizontal line, width=arc_w
        return smooth_rectangle(x, y, arc_w, thickness, 0.0, gaussian_width)
    else:
        if constant_length:
            # Arc length fixed at arc_w; curvature (arc_h/arc_w) determines
            # both the circle radius and the half-sweep angle.
            curvature=arc_h/arc_w
            radius=arc_w/(2*pi*curvature)
            angle=curvature*(2*pi)/2.0
        else: # constant width
            # Chord fixed at arc_w; radius from the circular-segment
            # (sagitta) formula, half-sweep from the chord/radius ratio.
            radius=arc_h/2.0+arc_w**2.0/(8*arc_h)
            angle=arcsin(arc_w/2.0/radius)
        # Shift the circle center so the arc passes through (x, y), and pick
        # the radian range around the bottom (convex) or top (concave).
        if arc_box[1]<0: # convex shape
            y=y+radius
            angles=(3.0/2.0*pi-angle, 3.0/2.0*pi+angle)
        else: # concave shape
            y=y-radius
            angles=(pi/2.0-angle, pi/2.0+angle)

    return arc_by_radian(x, y, radius*2.0, angles, thickness, gaussian_width)
| bsd-3-clause |
Tehsmash/nova | nova/network/linux_net.py | 5 | 76551 | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implements vlans, bridges, and iptables rules using linux utilities."""
import calendar
import inspect
import os
import re
import time
import netaddr
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import timeutils
import six
from nova import exception
from nova.i18n import _, _LE, _LW
from nova import objects
from nova.openstack.common import fileutils
from nova import paths
from nova.pci import utils as pci_utils
from nova import utils
LOG = logging.getLogger(__name__)
linux_net_opts = [
cfg.MultiStrOpt('dhcpbridge_flagfile',
default=['/etc/nova/nova-dhcpbridge.conf'],
help='Location of flagfiles for dhcpbridge'),
cfg.StrOpt('networks_path',
default=paths.state_path_def('networks'),
help='Location to keep network config files'),
cfg.StrOpt('public_interface',
default='eth0',
help='Interface for public IP addresses'),
cfg.StrOpt('dhcpbridge',
default=paths.bindir_def('nova-dhcpbridge'),
help='Location of nova-dhcpbridge'),
cfg.StrOpt('routing_source_ip',
default='$my_ip',
help='Public IP of network host'),
cfg.IntOpt('dhcp_lease_time',
default=86400,
help='Lifetime of a DHCP lease in seconds'),
cfg.MultiStrOpt('dns_server',
default=[],
help='If set, uses specific DNS server for dnsmasq. Can'
' be specified multiple times.'),
cfg.BoolOpt('use_network_dns_servers',
default=False,
help='If set, uses the dns1 and dns2 from the network ref.'
' as dns servers.'),
cfg.ListOpt('dmz_cidr',
default=[],
help='A list of dmz ranges that should be accepted'),
cfg.MultiStrOpt('force_snat_range',
default=[],
help='Traffic to this range will always be snatted to the '
'fallback ip, even if it would normally be bridged out '
'of the node. Can be specified multiple times.'),
cfg.StrOpt('dnsmasq_config_file',
default='',
help='Override the default dnsmasq settings with this file'),
cfg.StrOpt('linuxnet_interface_driver',
default='nova.network.linux_net.LinuxBridgeInterfaceDriver',
help='Driver used to create ethernet devices.'),
cfg.StrOpt('linuxnet_ovs_integration_bridge',
default='br-int',
help='Name of Open vSwitch bridge used with linuxnet'),
cfg.BoolOpt('send_arp_for_ha',
default=False,
help='Send gratuitous ARPs for HA setup'),
cfg.IntOpt('send_arp_for_ha_count',
default=3,
help='Send this many gratuitous ARPs for HA setup'),
cfg.BoolOpt('use_single_default_gateway',
default=False,
help='Use single default gateway. Only first nic of vm will '
'get default gateway from dhcp server'),
cfg.MultiStrOpt('forward_bridge_interface',
default=['all'],
help='An interface that bridges can forward to. If this '
'is set to all then all traffic will be forwarded. '
'Can be specified multiple times.'),
cfg.StrOpt('metadata_host',
default='$my_ip',
help='The IP address for the metadata API server'),
cfg.IntOpt('metadata_port',
default=8775,
help='The port for the metadata API port'),
cfg.StrOpt('iptables_top_regex',
default='',
help='Regular expression to match the iptables rule that '
'should always be on the top.'),
cfg.StrOpt('iptables_bottom_regex',
default='',
help='Regular expression to match the iptables rule that '
'should always be on the bottom.'),
cfg.StrOpt('iptables_drop_action',
default='DROP',
help='The table that iptables to jump to when a packet is '
'to be dropped.'),
cfg.IntOpt('ovs_vsctl_timeout',
default=120,
help='Amount of time, in seconds, that ovs_vsctl should wait '
'for a response from the database. 0 is to wait forever.'),
cfg.BoolOpt('fake_network',
default=False,
help='If passed, use fake network devices and addresses'),
cfg.IntOpt('ebtables_exec_attempts',
default=3,
help='Number of times to retry ebtables commands on failure.'),
cfg.FloatOpt('ebtables_retry_interval',
default=1.0,
help='Number of seconds to wait between ebtables retries.'),
]
CONF = cfg.CONF
CONF.register_opts(linux_net_opts)
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('use_ipv6', 'nova.netconf')
CONF.import_opt('my_ip', 'nova.netconf')
CONF.import_opt('network_device_mtu', 'nova.objects.network')
# NOTE(vish): Iptables supports chain names of up to 28 characters, and we
# add up to 12 characters to binary_name which is used as a prefix,
# so we limit it to 16 characters.
# (max_chain_name_length - len('-POSTROUTING') == 16)
def get_binary_name():
    """Return the basename of the running executable, capped at 16 chars.

    The cap keeps chain names built from this prefix within iptables'
    28-character chain-name limit (see the note above).
    """
    outermost_frame = inspect.stack()[-1]
    script_path = outermost_frame[1]
    return os.path.basename(script_path)[:16]
binary_name = get_binary_name()
class IptablesRule(object):
    """A single iptables rule.

    Internal helper for IptablesManager; not intended for direct use.
    """
    def __init__(self, chain, rule, wrap=True, top=False):
        self.chain = chain
        self.rule = rule
        self.wrap = wrap
        self.top = top
    def __eq__(self, other):
        return ((self.chain, self.rule, self.top, self.wrap) ==
                (other.chain, other.rule, other.top, other.wrap))
    def __ne__(self, other):
        return not self == other
    def __repr__(self):
        # Wrapped chains carry the binary-name prefix; freshly added
        # rules always start with a zeroed [packet:byte] counter.
        if self.wrap:
            chain = '%s-%s' % (binary_name, self.chain)
        else:
            chain = self.chain
        return '[0:0] -A %s %s' % (chain, self.rule)
class IptablesTable(object):
    """An iptables table: tracked chains plus the rules they contain.

    Accumulates additions and removals in memory; IptablesManager
    renders the contents into iptables-save format when applying.

    FIX: the original assigned raw ``filter()`` results back to
    ``self.rules``/``self.remove_rules``.  On Python 3 ``filter()``
    returns an iterator, so later ``len()``, ``remove()`` and slice
    operations on those attributes would fail; list comprehensions keep
    them as real lists on both Python 2 and 3.
    """
    def __init__(self):
        self.rules = []
        self.remove_rules = []
        self.chains = set()
        self.unwrapped_chains = set()
        self.remove_chains = set()
        self.dirty = True
    def has_chain(self, name, wrap=True):
        """Return True if the (wrapped or unwrapped) chain is tracked."""
        if wrap:
            return name in self.chains
        else:
            return name in self.unwrapped_chains
    def add_chain(self, name, wrap=True):
        """Adds a named chain to the table.

        The chain name is wrapped to be unique for the component creating
        it, so different components of Nova can safely create identically
        named chains without interfering with one another.

        At the moment, its wrapped name is <binary name>-<chain name>,
        so if nova-compute creates a chain named 'OUTPUT', it'll actually
        end up named 'nova-compute-OUTPUT'.
        """
        if wrap:
            self.chains.add(name)
        else:
            self.unwrapped_chains.add(name)
        self.dirty = True
    def remove_chain(self, name, wrap=True):
        """Remove named chain.

        This removal "cascades". All rule in the chain are removed, as are
        all rules in other chains that jump to it.

        If the chain is not found, this is merely logged.
        """
        if wrap:
            chain_set = self.chains
        else:
            chain_set = self.unwrapped_chains
        if name not in chain_set:
            LOG.warning(_LW('Attempted to remove chain %s which does not '
                            'exist'), name)
            return
        self.dirty = True
        # non-wrapped chains and rules need to be dealt with specially,
        # so we keep a list of them to be iterated over in apply()
        if not wrap:
            self.remove_chains.add(name)
        chain_set.remove(name)
        if not wrap:
            self.remove_rules += [r for r in self.rules if r.chain == name]
        self.rules = [r for r in self.rules if r.chain != name]
        if wrap:
            jump_snippet = '-j %s-%s' % (binary_name, name)
        else:
            jump_snippet = '-j %s' % (name,)
        if not wrap:
            self.remove_rules += [r for r in self.rules
                                  if jump_snippet in r.rule]
        self.rules = [r for r in self.rules if jump_snippet not in r.rule]
    def add_rule(self, chain, rule, wrap=True, top=False):
        """Add a rule to the table.

        This is just like what you'd feed to iptables, just without
        the '-A <chain name>' bit at the start.

        However, if you need to jump to one of your wrapped chains,
        prepend its name with a '$' which will ensure the wrapping
        is applied correctly.
        """
        if wrap and chain not in self.chains:
            raise ValueError(_('Unknown chain: %r') % chain)
        if '$' in rule:
            rule = ' '.join(map(self._wrap_target_chain, rule.split(' ')))
        rule_obj = IptablesRule(chain, rule, wrap, top)
        if rule_obj in self.rules:
            LOG.debug("Skipping duplicate iptables rule addition. "
                      "%(rule)r already in %(rules)r",
                      {'rule': rule_obj, 'rules': self.rules})
        else:
            # Reuse the object built for the duplicate check instead of
            # constructing an identical IptablesRule a second time.
            self.rules.append(rule_obj)
            self.dirty = True
    def _wrap_target_chain(self, s):
        # '$name' tokens refer to wrapped chains; expand the prefix.
        if s.startswith('$'):
            return '%s-%s' % (binary_name, s[1:])
        return s
    def remove_rule(self, chain, rule, wrap=True, top=False):
        """Remove a rule from a chain.

        Note: The rule must be exactly identical to the one that was added.
        You cannot switch arguments around like you can with the iptables
        CLI tool.
        """
        try:
            self.rules.remove(IptablesRule(chain, rule, wrap, top))
            if not wrap:
                self.remove_rules.append(IptablesRule(chain, rule, wrap, top))
            self.dirty = True
        except ValueError:
            LOG.warning(_LW('Tried to remove rule that was not there:'
                            ' %(chain)r %(rule)r %(wrap)r %(top)r'),
                        {'chain': chain, 'rule': rule,
                         'top': top, 'wrap': wrap})
    def remove_rules_regex(self, regex):
        """Remove all rules matching regex; return the number removed."""
        if isinstance(regex, six.string_types):
            regex = re.compile(regex)
        num_rules = len(self.rules)
        self.rules = [r for r in self.rules if not regex.match(str(r))]
        removed = num_rules - len(self.rules)
        if removed > 0:
            self.dirty = True
        return removed
    def empty_chain(self, chain, wrap=True):
        """Remove all rules from a chain."""
        chained_rules = [rule for rule in self.rules
                         if rule.chain == chain and rule.wrap == wrap]
        if chained_rules:
            self.dirty = True
        for rule in chained_rules:
            self.rules.remove(rule)
class IptablesManager(object):
    """Wrapper for iptables.
    See IptablesTable for some usage docs
    A number of chains are set up to begin with.
    First, nova-filter-top. It's added at the top of FORWARD and OUTPUT. Its
    name is not wrapped, so it's shared between the various nova workers. It's
    intended for rules that need to live at the top of the FORWARD and OUTPUT
    chains. It's in both the ipv4 and ipv6 set of tables.
    For ipv4 and ipv6, the built-in INPUT, OUTPUT, and FORWARD filter chains
    are wrapped, meaning that the "real" INPUT chain has a rule that jumps to
    the wrapped INPUT chain, etc. Additionally, there's a wrapped chain named
    "local" which is jumped to from nova-filter-top.
    For ipv4, the built-in PREROUTING, OUTPUT, and POSTROUTING nat chains are
    wrapped in the same was as the built-in filter chains. Additionally,
    there's a snat chain that is applied after the POSTROUTING chain.
    """
    def __init__(self, execute=None):
        # `execute` lets tests inject a fake in place of the module-level
        # _execute helper.
        if not execute:
            self.execute = _execute
        else:
            self.execute = execute
        self.ipv4 = {'filter': IptablesTable(),
                     'nat': IptablesTable(),
                     'mangle': IptablesTable()}
        self.ipv6 = {'filter': IptablesTable()}
        self.iptables_apply_deferred = False
        # Add a nova-filter-top chain. It's intended to be shared
        # among the various nova components. It sits at the very top
        # of FORWARD and OUTPUT.
        for tables in [self.ipv4, self.ipv6]:
            tables['filter'].add_chain('nova-filter-top', wrap=False)
            tables['filter'].add_rule('FORWARD', '-j nova-filter-top',
                                      wrap=False, top=True)
            tables['filter'].add_rule('OUTPUT', '-j nova-filter-top',
                                      wrap=False, top=True)
            tables['filter'].add_chain('local')
            tables['filter'].add_rule('nova-filter-top', '-j $local',
                                      wrap=False)
        # Wrap the built-in chains
        builtin_chains = {4: {'filter': ['INPUT', 'OUTPUT', 'FORWARD'],
                              'nat': ['PREROUTING', 'OUTPUT', 'POSTROUTING'],
                              'mangle': ['POSTROUTING']},
                          6: {'filter': ['INPUT', 'OUTPUT', 'FORWARD']}}
        for ip_version in builtin_chains:
            if ip_version == 4:
                tables = self.ipv4
            elif ip_version == 6:
                tables = self.ipv6
            for table, chains in six.iteritems(builtin_chains[ip_version]):
                for chain in chains:
                    tables[table].add_chain(chain)
                    tables[table].add_rule(chain, '-j $%s' % (chain,),
                                           wrap=False)
        # Add a nova-postrouting-bottom chain. It's intended to be shared
        # among the various nova components. We set it as the last chain
        # of POSTROUTING chain.
        self.ipv4['nat'].add_chain('nova-postrouting-bottom', wrap=False)
        self.ipv4['nat'].add_rule('POSTROUTING', '-j nova-postrouting-bottom',
                                  wrap=False)
        # We add a snat chain to the shared nova-postrouting-bottom chain
        # so that it's applied last.
        self.ipv4['nat'].add_chain('snat')
        self.ipv4['nat'].add_rule('nova-postrouting-bottom', '-j $snat',
                                  wrap=False)
        # And then we add a float-snat chain and jump to first thing in
        # the snat chain.
        self.ipv4['nat'].add_chain('float-snat')
        self.ipv4['nat'].add_rule('snat', '-j $float-snat')
    def defer_apply_on(self):
        """Buffer rule changes; nothing reaches iptables until deferral ends."""
        self.iptables_apply_deferred = True
    def defer_apply_off(self):
        """Stop deferring and immediately flush buffered changes."""
        self.iptables_apply_deferred = False
        self.apply()
    def dirty(self):
        """Return True if any tracked table has unapplied changes."""
        for table in six.itervalues(self.ipv4):
            if table.dirty:
                return True
        if CONF.use_ipv6:
            for table in six.itervalues(self.ipv6):
                if table.dirty:
                    return True
        return False
    def apply(self):
        """Apply pending changes unless deferred or nothing is dirty."""
        if self.iptables_apply_deferred:
            return
        if self.dirty():
            self._apply()
        else:
            LOG.debug("Skipping apply due to lack of new rules")
    @utils.synchronized('iptables', external=True)
    def _apply(self):
        """Apply the current in-memory set of iptables rules.
        This will blow away any rules left over from previous runs of the
        same component of Nova, and replace them with our current set of
        rules. This happens atomically, thanks to iptables-restore.
        """
        # For each address family: dump kernel state with <cmd>-save,
        # splice our tables into the dump, and load it back atomically
        # with <cmd>-restore.
        s = [('iptables', self.ipv4)]
        if CONF.use_ipv6:
            s += [('ip6tables', self.ipv6)]
        for cmd, tables in s:
            all_tables, _err = self.execute('%s-save' % (cmd,), '-c',
                                            run_as_root=True,
                                            attempts=5)
            all_lines = all_tables.split('\n')
            for table_name, table in six.iteritems(tables):
                start, end = self._find_table(all_lines, table_name)
                all_lines[start:end] = self._modify_rules(
                    all_lines[start:end], table, table_name)
                table.dirty = False
            self.execute('%s-restore' % (cmd,), '-c', run_as_root=True,
                         process_input='\n'.join(all_lines),
                         attempts=5)
        LOG.debug("IPTablesManager.apply completed with success")
    def _find_table(self, lines, table_name):
        # Locate the '*<table_name>' ... 'COMMIT' span inside the
        # iptables-save output; (0, 0) means "not found / fake input".
        if len(lines) < 3:
            # length only <2 when fake iptables
            return (0, 0)
        try:
            start = lines.index('*%s' % table_name) - 1
        except ValueError:
            # Couldn't find table_name
            return (0, 0)
        end = lines[start:].index('COMMIT') + start + 2
        return (start, end)
    def _modify_rules(self, current_lines, table, table_name):
        # Merge the in-memory `table` into the saved `current_lines`:
        # strip our old rules, re-insert chain declarations and rules at
        # the right offsets, then weed out duplicates and removals.
        # NOTE(review): this method relies on filter() returning a list
        # (Python 2 semantics); under Python 3 the slice assignments,
        # .index() and .reverse() calls below would need list() wrappers.
        unwrapped_chains = table.unwrapped_chains
        chains = sorted(table.chains)
        remove_chains = table.remove_chains
        rules = table.rules
        remove_rules = table.remove_rules
        if not current_lines:
            # Build a minimal fake table so the splice logic still works.
            fake_table = ['#Generated by nova',
                          '*' + table_name, 'COMMIT',
                          '#Completed by nova']
            current_lines = fake_table
        # Remove any trace of our rules
        new_filter = filter(lambda line: binary_name not in line,
                            current_lines)
        top_rules = []
        bottom_rules = []
        if CONF.iptables_top_regex:
            regex = re.compile(CONF.iptables_top_regex)
            temp_filter = filter(lambda line: regex.search(line), new_filter)
            for rule_str in temp_filter:
                new_filter = filter(lambda s: s.strip() != rule_str.strip(),
                                    new_filter)
            top_rules = temp_filter
        if CONF.iptables_bottom_regex:
            regex = re.compile(CONF.iptables_bottom_regex)
            temp_filter = filter(lambda line: regex.search(line), new_filter)
            for rule_str in temp_filter:
                new_filter = filter(lambda s: s.strip() != rule_str.strip(),
                                    new_filter)
            bottom_rules = temp_filter
        # Find the insertion point: just past the ':CHAIN - [0:0]'
        # declarations of the table.
        seen_chains = False
        rules_index = 0
        for rules_index, rule in enumerate(new_filter):
            if not seen_chains:
                if rule.startswith(':'):
                    seen_chains = True
            else:
                if not rule.startswith(':'):
                    break
        if not seen_chains:
            rules_index = 2
        our_rules = top_rules
        bot_rules = []
        for rule in rules:
            rule_str = str(rule)
            if rule.top:
                # rule.top == True means we want this rule to be at the top.
                # Further down, we weed out duplicates from the bottom of the
                # list, so here we remove the dupes ahead of time.
                # We don't want to remove an entry if it has non-zero
                # [packet:byte] counts and replace it with [0:0], so let's
                # go look for a duplicate, and over-ride our table rule if
                # found.
                # ignore [packet:byte] counts at beginning of line
                if rule_str.startswith('['):
                    rule_str = rule_str.split(']', 1)[1]
                dup_filter = filter(lambda s: rule_str.strip() in s.strip(),
                                    new_filter)
                new_filter = filter(lambda s:
                                    rule_str.strip() not in s.strip(),
                                    new_filter)
                # if no duplicates, use original rule
                if dup_filter:
                    # grab the last entry, if there is one
                    dup = dup_filter[-1]
                    rule_str = str(dup)
                else:
                    rule_str = str(rule)
                rule_str.strip()
                our_rules += [rule_str]
            else:
                bot_rules += [rule_str]
        our_rules += bot_rules
        new_filter[rules_index:rules_index] = our_rules
        new_filter[rules_index:rules_index] = [':%s - [0:0]' % (name,)
                                               for name in unwrapped_chains]
        new_filter[rules_index:rules_index] = [':%s-%s - [0:0]' %
                                               (binary_name, name,)
                                               for name in chains]
        commit_index = new_filter.index('COMMIT')
        new_filter[commit_index:commit_index] = bottom_rules
        seen_lines = set()
        def _weed_out_duplicates(line):
            # ignore [packet:byte] counts at beginning of lines
            if line.startswith('['):
                line = line.split(']', 1)[1]
            line = line.strip()
            if line in seen_lines:
                return False
            else:
                seen_lines.add(line)
                return True
        def _weed_out_removes(line):
            # We need to find exact matches here
            if line.startswith(':'):
                # it's a chain, for example, ":nova-billing - [0:0]"
                # strip off everything except the chain name
                line = line.split(':')[1]
                line = line.split('- [')[0]
                line = line.strip()
                for chain in remove_chains:
                    if chain == line:
                        remove_chains.remove(chain)
                        return False
            elif line.startswith('['):
                # it's a rule
                # ignore [packet:byte] counts at beginning of lines
                line = line.split(']', 1)[1]
                line = line.strip()
                for rule in remove_rules:
                    # ignore [packet:byte] counts at beginning of rules
                    rule_str = str(rule)
                    rule_str = rule_str.split(' ', 1)[1]
                    rule_str = rule_str.strip()
                    if rule_str == line:
                        remove_rules.remove(rule)
                        return False
            # Leave it alone
            return True
        # We filter duplicates, letting the *last* occurrence take
        # precedence. We also filter out anything in the "remove"
        # lists.
        new_filter.reverse()
        new_filter = filter(_weed_out_duplicates, new_filter)
        new_filter = filter(_weed_out_removes, new_filter)
        new_filter.reverse()
        # flush lists, just in case we didn't find something
        remove_chains.clear()
        for rule in remove_rules:
            remove_rules.remove(rule)
        return new_filter
# NOTE(jkoelker) This is just a nice little stub point since mocking
# builtins with mox is a nightmare
def write_to_file(file, data, mode='w'):
    """Open *file* with *mode* and write *data* into it."""
    with open(file, mode) as handle:
        handle.write(data)
def is_pid_cmdline_correct(pid, match):
    """Sanity-check that *match* appears in the cmdline of *pid*.

    Pids get recycled, so blindly killing by pid risks hitting an
    unrelated process; requiring an expected substring in
    /proc/<pid>/cmdline guards against that.  Returns False when the
    proc entry cannot be read at all.
    """
    cmdline_path = '/proc/%d/cmdline' % pid
    try:
        with open(cmdline_path) as proc_file:
            return match in proc_file.read()
    except EnvironmentError:
        return False
def metadata_forward():
    """Create forwarding rule for metadata."""
    nat = iptables_manager.ipv4['nat']
    if CONF.metadata_host != '127.0.0.1':
        # Metadata API lives on another host: DNAT traffic for the
        # magic address over to it.
        nat.add_rule('PREROUTING',
                     '-s 0.0.0.0/0 -d 169.254.169.254/32 '
                     '-p tcp -m tcp --dport 80 -j DNAT '
                     '--to-destination %s:%s' %
                     (CONF.metadata_host, CONF.metadata_port))
    else:
        # Metadata API is local: just redirect the port.
        nat.add_rule('PREROUTING',
                     '-s 0.0.0.0/0 -d 169.254.169.254/32 '
                     '-p tcp -m tcp --dport 80 '
                     '-j REDIRECT --to-ports %s' %
                     CONF.metadata_port)
    iptables_manager.apply()
def _iptables_dest(ip):
    """Return the iptables destination matcher for *ip*.

    Loopback addresses are matched by address type so the rule also
    catches traffic addressed to any local address.
    """
    is_loopback = ((netaddr.IPAddress(ip).version == 4 and
                    ip == '127.0.0.1') or ip == '::1')
    if is_loopback:
        return '-m addrtype --dst-type LOCAL'
    return '-d %s' % ip
def metadata_accept():
    """Create the filter accept rule for metadata."""
    rule = ('-p tcp -m tcp --dport %s %s -j ACCEPT' %
            (CONF.metadata_port, _iptables_dest(CONF.metadata_host)))
    # Pick the address family matching the configured metadata host.
    if netaddr.IPAddress(CONF.metadata_host).version == 4:
        table = iptables_manager.ipv4['filter']
    else:
        table = iptables_manager.ipv6['filter']
    table.add_rule('INPUT', rule)
    iptables_manager.apply()
def add_snat_rule(ip_range, is_external=False):
    """Add SNAT rules for *ip_range* to the snat chain and apply them.

    External networks only snat traffic toward the configured
    force_snat_range destinations; internal networks snat everything
    leaving via the public interface.
    """
    if not CONF.routing_source_ip:
        return
    if is_external:
        dest_ranges = CONF.force_snat_range or []
    else:
        dest_ranges = ['0.0.0.0/0']
    for dest_range in dest_ranges:
        rule = ('-s %s -d %s -j SNAT --to-source %s'
                % (ip_range, dest_range, CONF.routing_source_ip))
        if not is_external and CONF.public_interface:
            rule += ' -o %s' % CONF.public_interface
        iptables_manager.ipv4['nat'].add_rule('snat', rule)
    iptables_manager.apply()
def init_host(ip_range, is_external=False):
    """Basic networking setup goes here."""
    # NOTE(devcamcar): Cloud public SNAT entries and the default
    # SNAT rule for outbound traffic.
    add_snat_rule(ip_range, is_external)
    nat = iptables_manager.ipv4['nat']
    if is_external:
        # Keep forced-snat destinations on the bridge via ebtables.
        ebtables_rules = ['PREROUTING -p ipv4 --ip-src %s --ip-dst %s '
                          '-j redirect --redirect-target ACCEPT' %
                          (ip_range, snat_range)
                          for snat_range in CONF.force_snat_range]
        if ebtables_rules:
            ensure_ebtables_rules(ebtables_rules, 'nat')
    # Traffic to the metadata host and the dmz ranges bypasses snat.
    nat.add_rule('POSTROUTING',
                 '-s %s -d %s/32 -j ACCEPT' %
                 (ip_range, CONF.metadata_host))
    for dmz in CONF.dmz_cidr:
        nat.add_rule('POSTROUTING',
                     '-s %s -d %s -j ACCEPT' % (ip_range, dmz))
    # Intra-network traffic (unless DNATed) also bypasses snat.
    nat.add_rule('POSTROUTING',
                 '-s %(range)s -d %(range)s '
                 '-m conntrack ! --ctstate DNAT '
                 '-j ACCEPT' % {'range': ip_range})
    iptables_manager.apply()
def send_arp_for_ip(ip, device, count):
    """Send *count* gratuitous ARPs for *ip* out of *device*."""
    _out, err = _execute('arping', '-U', ip,
                         '-A', '-I', device,
                         '-c', str(count),
                         run_as_root=True, check_exit_code=False)
    if err:
        LOG.debug('arping error for ip %s', ip)
def bind_floating_ip(floating_ip, device):
    """Bind ip to public interface."""
    address = '%s/32' % floating_ip
    _execute('ip', 'addr', 'add', address, 'dev', device,
             run_as_root=True, check_exit_code=[0, 2, 254])
    arp_count = CONF.send_arp_for_ha_count
    if CONF.send_arp_for_ha and arp_count > 0:
        send_arp_for_ip(floating_ip, device, arp_count)
def unbind_floating_ip(floating_ip, device):
    """Unbind a public ip from public interface."""
    address = '%s/32' % floating_ip
    _execute('ip', 'addr', 'del', address, 'dev', device,
             run_as_root=True, check_exit_code=[0, 2, 254])
def ensure_metadata_ip():
    """Sets up local metadata ip."""
    cmd = ('ip', 'addr', 'add', '169.254.169.254/32',
           'scope', 'link', 'dev', 'lo')
    _execute(*cmd, run_as_root=True, check_exit_code=[0, 2, 254])
def ensure_vpn_forward(public_ip, port, private_ip):
    """Sets up forwarding rules for vlan."""
    iptables_manager.ipv4['filter'].add_rule('FORWARD',
                                             '-d %s -p udp '
                                             '--dport 1194 '
                                             '-j ACCEPT' % private_ip)
    # Same DNAT rule goes into both PREROUTING and OUTPUT so local and
    # forwarded traffic both reach the VPN endpoint.
    dnat_rule = ('-d %s -p udp '
                 '--dport %s -j DNAT --to %s:1194' %
                 (public_ip, port, private_ip))
    for chain in ('PREROUTING', 'OUTPUT'):
        iptables_manager.ipv4['nat'].add_rule(chain, dnat_rule)
    iptables_manager.apply()
def ensure_floating_forward(floating_ip, fixed_ip, device, network):
    """Ensure floating ip forwarding rule.

    Removes any stale rules mentioning the floating ip before adding the
    current forwarding set, and installs ebtables isolation when the
    traffic leaves via a device other than the network bridge.
    """
    # NOTE(vish): Make sure we never have duplicate rules for the same ip
    # FIX: raw string — the original non-raw literal relied on Python
    # passing unknown escapes like '\s' through unchanged, which is a
    # deprecated invalid-escape (SyntaxWarning on newer interpreters).
    regex = r'.*\s+%s(/32|\s+|$)' % floating_ip
    num_rules = iptables_manager.ipv4['nat'].remove_rules_regex(regex)
    if num_rules:
        msg = _LW('Removed %(num)d duplicate rules for floating ip %(float)s')
        LOG.warn(msg, {'num': num_rules, 'float': floating_ip})
    for chain, rule in floating_forward_rules(floating_ip, fixed_ip, device):
        iptables_manager.ipv4['nat'].add_rule(chain, rule)
    iptables_manager.apply()
    if device != network['bridge']:
        ensure_ebtables_rules(*floating_ebtables_rules(fixed_ip, network))
def remove_floating_forward(floating_ip, fixed_ip, device, network):
    """Remove forwarding for floating ip."""
    nat = iptables_manager.ipv4['nat']
    for chain, rule in floating_forward_rules(floating_ip, fixed_ip, device):
        nat.remove_rule(chain, rule)
    iptables_manager.apply()
    if device != network['bridge']:
        remove_ebtables_rules(*floating_ebtables_rules(fixed_ip, network))
def floating_ebtables_rules(fixed_ip, network):
    """Makes sure only in-network traffic is bridged."""
    rule = ('PREROUTING --logical-in %s -p ipv4 --ip-src %s '
            '! --ip-dst %s -j redirect --redirect-target ACCEPT' %
            (network['bridge'], fixed_ip, network['cidr']))
    return ([rule], 'nat')
def floating_forward_rules(floating_ip, fixed_ip, device):
    """Build the (chain, rule) pairs that NAT *floating_ip* to *fixed_ip*."""
    snat_rule = '-s %s -j SNAT --to %s' % (fixed_ip, floating_ip)
    if device:
        # Hairpin traffic back to the fixed ip, and snat only on the
        # given device.
        rules = [('float-snat', snat_rule + ' -d %s' % fixed_ip),
                 ('float-snat', snat_rule + ' -o %s' % device)]
    else:
        rules = [('float-snat', snat_rule)]
    dnat = '-d %s -j DNAT --to %s' % (floating_ip, fixed_ip)
    rules.append(('PREROUTING', dnat))
    rules.append(('OUTPUT', dnat))
    rules.append(('POSTROUTING', '-s %s -m conntrack --ctstate DNAT -j SNAT '
                                 '--to-source %s' %
                                 (fixed_ip, floating_ip)))
    return rules
def clean_conntrack(fixed_ip):
    """Delete conntrack entries whose reply destination is *fixed_ip*."""
    cmd = ('conntrack', '-D', '-r', fixed_ip)
    try:
        # Exit code 1 just means "no matching entries".
        _execute(*cmd, run_as_root=True, check_exit_code=[0, 1])
    except processutils.ProcessExecutionError:
        LOG.exception(_LE('Error deleting conntrack entries for %s'), fixed_ip)
def _enable_ipv4_forwarding():
    """Enable the net.ipv4.ip_forward sysctl if it is not already set.

    FIX: the original tested ``stdout.strip() is not '1'`` — an object
    *identity* comparison, which is not a reliable equality test for
    strings (and effectively made the check always true, re-running
    ``sysctl -w`` on every call).  Use ``!=`` to compare values.
    """
    sysctl_key = 'net.ipv4.ip_forward'
    stdout, stderr = _execute('sysctl', '-n', sysctl_key)
    if stdout.strip() != '1':
        _execute('sysctl', '-w', '%s=1' % sysctl_key, run_as_root=True)
@utils.synchronized('lock_gateway', external=True)
def initialize_gateway_device(dev, network_ref):
    """Configure *dev* as the gateway device for *network_ref*.

    Enables IPv4 forwarding and makes the network's dhcp_server address
    the first address on the device, preserving any other addresses and
    'via' routes that were already present.
    """
    if not network_ref:
        return
    _enable_ipv4_forwarding()
    # NOTE(vish): The ip for dnsmasq has to be the first address on the
    # bridge for it to respond to requests properly
    try:
        prefix = network_ref.cidr.prefixlen
    except AttributeError:
        # network_ref may be a plain dict with a string cidr
        prefix = network_ref['cidr'].rpartition('/')[2]
    full_ip = '%s/%s' % (network_ref['dhcp_server'], prefix)
    new_ip_params = [[full_ip, 'brd', network_ref['broadcast']]]
    old_ip_params = []
    out, err = _execute('ip', 'addr', 'show', 'dev', dev,
                        'scope', 'global')
    for line in out.split('\n'):
        fields = line.split()
        if fields and fields[0] == 'inet':
            # 'secondary'/'dynamic' entries carry one extra trailing
            # token before the device name, so slice accordingly.
            if fields[-2] in ('secondary', 'dynamic'):
                ip_params = fields[1:-2]
            else:
                ip_params = fields[1:-1]
            old_ip_params.append(ip_params)
            if ip_params[0] != full_ip:
                new_ip_params.append(ip_params)
    # Only reshuffle addresses when the dhcp address is not already
    # first on the device.
    if not old_ip_params or old_ip_params[0][0] != full_ip:
        old_routes = []
        result = _execute('ip', 'route', 'show', 'dev', dev)
        if result:
            out, err = result
            for line in out.split('\n'):
                fields = line.split()
                if fields and 'via' in fields:
                    # Remember the route so it can be restored after the
                    # addresses are re-added below.
                    old_routes.append(fields)
                    _execute('ip', 'route', 'del', fields[0],
                             'dev', dev, run_as_root=True)
        for ip_params in old_ip_params:
            _execute(*_ip_bridge_cmd('del', ip_params, dev),
                     run_as_root=True, check_exit_code=[0, 2, 254])
        for ip_params in new_ip_params:
            _execute(*_ip_bridge_cmd('add', ip_params, dev),
                     run_as_root=True, check_exit_code=[0, 2, 254])
        for fields in old_routes:
            _execute('ip', 'route', 'add', *fields,
                     run_as_root=True)
        if CONF.send_arp_for_ha and CONF.send_arp_for_ha_count > 0:
            send_arp_for_ip(network_ref['dhcp_server'], dev,
                            CONF.send_arp_for_ha_count)
    if CONF.use_ipv6:
        _execute('ip', '-f', 'inet6', 'addr',
                 'change', network_ref['cidr_v6'],
                 'dev', dev, run_as_root=True)
def get_dhcp_leases(context, network_ref):
    """Return a network's hosts config in dnsmasq leasefile format."""
    host = CONF.host if network_ref['multi_host'] else None
    leases = []
    for fixedip in objects.FixedIPList.get_by_network(context,
                                                      network_ref,
                                                      host=host):
        # NOTE(cfb): Don't return a lease entry if the IP isn't
        #            already leased
        if fixedip.leased:
            leases.append(_host_lease(fixedip))
    return '\n'.join(leases)
def get_dhcp_hosts(context, network_ref, fixedips):
    """Get network's hosts config in dhcp-host format."""
    entries = []
    seen_macs = set()
    for fixedip in fixedips:
        if not fixedip.allocated:
            continue
        # Emit at most one entry per MAC address.
        mac = fixedip.virtual_interface.address
        if mac not in seen_macs:
            entries.append(_host_dhcp(fixedip))
            seen_macs.add(mac)
    return '\n'.join(entries)
def get_dns_hosts(context, network_ref):
    """Get network's DNS hosts in hosts format."""
    fixedips = objects.FixedIPList.get_by_network(context, network_ref)
    entries = [_host_dns(fixedip) for fixedip in fixedips
               if fixedip.allocated]
    return '\n'.join(entries)
def _add_dnsmasq_accept_rules(dev):
    """Allow DHCP and DNS traffic through to dnsmasq."""
    table = iptables_manager.ipv4['filter']
    for port in (67, 53):
        for proto in ('udp', 'tcp'):
            table.add_rule('INPUT',
                           '-i %(dev)s -p %(proto)s -m %(proto)s '
                           '--dport %(port)s -j ACCEPT' %
                           {'dev': dev, 'port': port, 'proto': proto})
    iptables_manager.apply()
def _remove_dnsmasq_accept_rules(dev):
    """Remove DHCP and DNS traffic allowed through to dnsmasq."""
    table = iptables_manager.ipv4['filter']
    for port in (67, 53):
        for proto in ('udp', 'tcp'):
            table.remove_rule('INPUT',
                              '-i %(dev)s -p %(proto)s -m %(proto)s '
                              '--dport %(port)s -j ACCEPT' %
                              {'dev': dev, 'port': port, 'proto': proto})
    iptables_manager.apply()
# NOTE(russellb) Curious why this is needed? Check out this explanation from
# markmc: https://bugzilla.redhat.com/show_bug.cgi?id=910619#c6
def _add_dhcp_mangle_rule(dev):
    """Fill in UDP checksums on DHCP replies leaving *dev*."""
    rule = ('-o %s -p udp -m udp --dport 68 -j CHECKSUM '
            '--checksum-fill' % dev)
    iptables_manager.ipv4['mangle'].add_rule('POSTROUTING', rule)
    iptables_manager.apply()
def _remove_dhcp_mangle_rule(dev):
    """Remove the DHCP checksum-fill rule for *dev*."""
    rule = ('-o %s -p udp -m udp --dport 68 -j CHECKSUM '
            '--checksum-fill' % dev)
    iptables_manager.ipv4['mangle'].remove_rule('POSTROUTING', rule)
    iptables_manager.apply()
def get_dhcp_opts(context, network_ref, fixedips):
    """Get network's hosts config in dhcp-opts format."""
    gateway = network_ref['gateway']
    # NOTE(vish): if we are in multi-host mode and we are not sharing
    #             addresses, then we actually need to hand out the
    #             dhcp server address as the gateway.
    sharing = network_ref['share_address'] or CONF.share_dhcp_address
    if network_ref['multi_host'] and not sharing:
        gateway = network_ref['dhcp_server']
    if not CONF.use_single_default_gateway:
        return '\n'.join([_host_dhcp_opts(None, gateway)])
    hosts = []
    for fixedip in fixedips:
        if not fixedip.allocated:
            continue
        vif_id = fixedip.virtual_interface_id
        if fixedip.default_route:
            hosts.append(_host_dhcp_opts(vif_id, gateway))
        else:
            hosts.append(_host_dhcp_opts(vif_id))
    return '\n'.join(hosts)
def release_dhcp(dev, address, mac_address):
    """Ask dnsmasq (via dhcp_release) to drop a lease.

    Raises NetworkDhcpReleaseFailed if the helper command fails; does
    nothing when the device no longer exists.
    """
    if not device_exists(dev):
        return
    try:
        utils.execute('dhcp_release', dev, address, mac_address,
                      run_as_root=True)
    except processutils.ProcessExecutionError:
        raise exception.NetworkDhcpReleaseFailed(address=address,
                                                 mac_address=mac_address)
def update_dhcp(context, dev, network_ref):
    """Rewrite the dnsmasq hosts file for *dev* and (re)start dnsmasq."""
    host = CONF.host if network_ref['multi_host'] else None
    fixedips = objects.FixedIPList.get_by_network(context,
                                                  network_ref,
                                                  host=host)
    conffile = _dhcp_file(dev, 'conf')
    write_to_file(conffile, get_dhcp_hosts(context, network_ref, fixedips))
    restart_dhcp(context, dev, network_ref, fixedips)
def update_dns(context, dev, network_ref):
    """Rewrite the dnsmasq DNS hosts file for *dev* and (re)start dnsmasq."""
    host = CONF.host if network_ref['multi_host'] else None
    fixedips = objects.FixedIPList.get_by_network(context,
                                                  network_ref,
                                                  host=host)
    hostsfile = _dhcp_file(dev, 'hosts')
    write_to_file(hostsfile, get_dns_hosts(context, network_ref))
    restart_dhcp(context, dev, network_ref, fixedips)
def update_dhcp_hostfile_with_text(dev, hosts_text):
    """Overwrite the dnsmasq hosts file for *dev* with *hosts_text*."""
    write_to_file(_dhcp_file(dev, 'conf'), hosts_text)
def kill_dhcp(dev):
    """Terminate the dnsmasq serving *dev* and remove its iptables rules."""
    pid = _dnsmasq_pid_for(dev)
    if pid:
        # Pids are recycled: only kill when the cmdline still looks like
        # the dnsmasq we started for this device's config file.
        conffile = _dhcp_file(dev, 'conf')
        if not is_pid_cmdline_correct(pid, conffile.split('/')[-1]):
            LOG.debug('Pid %d is stale, skip killing dnsmasq', pid)
        else:
            _execute('kill', '-9', pid, run_as_root=True)
    _remove_dnsmasq_accept_rules(dev)
    _remove_dhcp_mangle_rule(dev)
# NOTE(ja): Sending a HUP only reloads the hostfile, so any
# configuration options (like dchp-range, vlan, ...)
# aren't reloaded.
@utils.synchronized('dnsmasq_start')
def restart_dhcp(context, dev, network_ref, fixedips):
    """(Re)starts a dnsmasq server for a given network.

    If a dnsmasq instance is already running then send a HUP
    signal causing it to reload, otherwise spawn a new instance.

    FIX: ``dns_servers`` previously aliased ``CONF.dns_server`` (a
    MultiStrOpt list) and appended the network's dns1/dns2 entries to
    it, mutating the global config object so per-network DNS servers
    accumulated across every network/restart on this host.  The list is
    now copied before being extended.
    """
    conffile = _dhcp_file(dev, 'conf')
    optsfile = _dhcp_file(dev, 'opts')
    write_to_file(optsfile, get_dhcp_opts(context, network_ref, fixedips))
    os.chmod(optsfile, 0o644)
    _add_dhcp_mangle_rule(dev)
    # Make sure dnsmasq can actually read it (it setuid()s to "nobody")
    os.chmod(conffile, 0o644)
    pid = _dnsmasq_pid_for(dev)
    # if dnsmasq is already running, then tell it to reload
    if pid:
        if is_pid_cmdline_correct(pid, conffile.split('/')[-1]):
            try:
                _execute('kill', '-HUP', pid, run_as_root=True)
                _add_dnsmasq_accept_rules(dev)
                return
            except Exception as exc:
                LOG.error(_LE('kill -HUP dnsmasq threw %s'), exc)
        else:
            LOG.debug('Pid %d is stale, relaunching dnsmasq', pid)
    cmd = ['env',
           'CONFIG_FILE=%s' % jsonutils.dumps(CONF.dhcpbridge_flagfile),
           'NETWORK_ID=%s' % str(network_ref['id']),
           'dnsmasq',
           '--strict-order',
           '--bind-interfaces',
           '--conf-file=%s' % CONF.dnsmasq_config_file,
           '--pid-file=%s' % _dhcp_file(dev, 'pid'),
           '--dhcp-optsfile=%s' % _dhcp_file(dev, 'opts'),
           '--listen-address=%s' % network_ref['dhcp_server'],
           '--except-interface=lo',
           '--dhcp-range=set:%s,%s,static,%s,%ss' %
           (network_ref['label'],
            network_ref['dhcp_start'],
            network_ref['netmask'],
            CONF.dhcp_lease_time),
           '--dhcp-lease-max=%s' % len(netaddr.IPNetwork(network_ref['cidr'])),
           '--dhcp-hostsfile=%s' % _dhcp_file(dev, 'conf'),
           '--dhcp-script=%s' % CONF.dhcpbridge,
           '--no-hosts',
           '--leasefile-ro']
    # dnsmasq currently gives an error for an empty domain,
    # rather than ignoring. So only specify it if defined.
    if CONF.dhcp_domain:
        cmd.append('--domain=%s' % CONF.dhcp_domain)
    # Copy so that appending per-network servers below cannot mutate the
    # global CONF.dns_server list.
    dns_servers = list(CONF.dns_server)
    if CONF.use_network_dns_servers:
        if network_ref.get('dns1'):
            dns_servers.append(network_ref.get('dns1'))
        if network_ref.get('dns2'):
            dns_servers.append(network_ref.get('dns2'))
    if network_ref['multi_host']:
        cmd.append('--addn-hosts=%s' % _dhcp_file(dev, 'hosts'))
    if dns_servers:
        cmd.append('--no-resolv')
    for dns_server in dns_servers:
        cmd.append('--server=%s' % dns_server)
    _execute(*cmd, run_as_root=True)
    _add_dnsmasq_accept_rules(dev)
@utils.synchronized('radvd_start')
def update_ra(context, dev, network_ref):
    """(Re)start radvd on *dev* to advertise the network's IPv6 prefix.

    Writes a radvd config file for the device, kills any radvd instance
    already running against that config, and then launches a fresh radvd.
    """
    conffile = _ra_file(dev, 'conf')
    conf_str = """
interface %s
{
   AdvSendAdvert on;
   MinRtrAdvInterval 3;
   MaxRtrAdvInterval 10;
   prefix %s
   {
      AdvOnLink on;
      AdvAutonomous on;
   };
};
""" % (dev, network_ref['cidr_v6'])
    write_to_file(conffile, conf_str)

    # Make sure radvd can actually read it (it setuid()s to "nobody")
    os.chmod(conffile, 0o644)

    pid = _ra_pid_for(dev)

    # if radvd is already running, then tell it to reload
    if pid:
        if is_pid_cmdline_correct(pid, conffile):
            try:
                # Terminate the old instance; a new one is started below.
                _execute('kill', pid, run_as_root=True)
            except Exception as exc:
                LOG.error(_LE('killing radvd threw %s'), exc)
        else:
            LOG.debug('Pid %d is stale, relaunching radvd', pid)

    # Launch radvd unconditionally (the old instance, if any, was killed).
    cmd = ['radvd',
           '-C', '%s' % _ra_file(dev, 'conf'),
           '-p', '%s' % _ra_file(dev, 'pid')]

    _execute(*cmd, run_as_root=True)
def _host_lease(fixedip):
    """Return a host string for an address in leasefile format.

    Fields are: expiry (seconds since epoch), MAC address, IP address,
    hostname, and client-id ('*' meaning none).
    """
    timestamp = timeutils.utcnow()
    seconds_since_epoch = calendar.timegm(timestamp.utctimetuple())
    return '%d %s %s %s *' % (seconds_since_epoch + CONF.dhcp_lease_time,
                              fixedip.virtual_interface.address,
                              fixedip.address,
                              fixedip.instance.hostname or '*')
def _host_dhcp_network(vif_id):
return 'NW-%s' % vif_id
def _host_dhcp(fixedip):
    """Return a host string for an address in dhcp-host format.

    The entry maps the VIF's MAC address to the instance's (possibly
    truncated) hostname and fixed IP; when single-default-gateway mode is
    on it is additionally tagged with the per-VIF network name.
    """
    # NOTE(cfb): dnsmasq on linux only supports 64 characters in the hostname
    #            field (LP #1238910). Since the . counts as a character we need
    #            to truncate the hostname to only 63 characters.
    hostname = fixedip.instance.hostname
    if len(hostname) > 63:
        # Pass the hostname as a lazy argument instead of eagerly
        # %-interpolating it, matching the logging style used everywhere
        # else in this module.
        LOG.warning(_LW('hostname %s too long, truncating.'), hostname)
        # Keep the first two and the last 60 characters, joined by '-',
        # so the result stays within dnsmasq's limit.
        hostname = (fixedip.instance.hostname[:2] + '-' +
                    fixedip.instance.hostname[-60:])
    if CONF.use_single_default_gateway:
        net = _host_dhcp_network(fixedip.virtual_interface_id)
        return '%s,%s.%s,%s,net:%s' % (fixedip.virtual_interface.address,
                                       hostname,
                                       CONF.dhcp_domain,
                                       fixedip.address,
                                       net)
    else:
        return '%s,%s.%s,%s' % (fixedip.virtual_interface.address,
                                hostname,
                                CONF.dhcp_domain,
                                fixedip.address)
def _host_dns(fixedip):
    """Return a hosts-file line mapping the fixed IP to its FQDN."""
    hostname = fixedip.instance.hostname
    return '%s\t%s.%s' % (fixedip.address, hostname, CONF.dhcp_domain)
def _host_dhcp_opts(vif_id=None, gateway=None):
    """Build a dnsmasq opts-file line for DHCP option 3 (router).

    When *gateway* is falsy the option value is left empty, which tells
    dnsmasq not to hand out a default route.
    """
    # NOTE(vish): 3 is the dhcp option for gateway.
    parts = ['3'] if vif_id is None else [_host_dhcp_network(vif_id), '3']
    if gateway:
        parts.append('%s' % gateway)
    return ','.join(parts)
def _execute(*cmd, **kwargs):
    """Wrapper around utils.execute for fake_network.

    When CONF.fake_network is on, the command is only logged and a dummy
    (stdout, returncode) pair is returned instead of running anything.
    """
    if CONF.fake_network:
        LOG.debug('FAKE NET: %s', ' '.join(map(str, cmd)))
        return 'fake', 0
    else:
        return utils.execute(*cmd, **kwargs)
def device_exists(device):
    """Check if ethernet device exists."""
    sysfs_path = '/sys/class/net/%s' % device
    return os.path.exists(sysfs_path)
def _dhcp_file(dev, kind):
    """Return path to a pid, leases, hosts or conf file for a bridge/device.

    Also makes sure the configured networks directory exists.
    """
    fileutils.ensure_tree(CONF.networks_path)
    path = '%s/nova-%s.%s' % (CONF.networks_path, dev, kind)
    return os.path.abspath(path)
def _ra_file(dev, kind):
    """Return path to a pid or conf file for a bridge/device.

    Also makes sure the configured networks directory exists.
    """
    fileutils.ensure_tree(CONF.networks_path)
    path = '%s/nova-ra-%s.%s' % (CONF.networks_path, dev, kind)
    return os.path.abspath(path)
def _dnsmasq_pid_for(dev):
    """Returns the pid for prior dnsmasq instance for a bridge/device.

    Returns None if no pid file exists.

    If machine has rebooted pid might be incorrect (caller should check).
    """
    pid_file = _dhcp_file(dev, 'pid')
    if os.path.exists(pid_file):
        try:
            with open(pid_file, 'r') as f:
                return int(f.read())
        except (ValueError, IOError):
            # A corrupt or unreadable pid file is treated the same as a
            # missing one.
            return None
    # Falls through and implicitly returns None when no pid file exists.
def _ra_pid_for(dev):
    """Returns the pid for prior radvd instance for a bridge/device.

    Returns None if no pid file exists or if it is unreadable/corrupt.

    If machine has rebooted pid might be incorrect (caller should check).
    """
    pid_file = _ra_file(dev, 'pid')
    if os.path.exists(pid_file):
        try:
            with open(pid_file, 'r') as f:
                return int(f.read())
        except (ValueError, IOError):
            # Mirror _dnsmasq_pid_for: treat a corrupt/empty/unreadable
            # pid file like a missing one instead of letting the caller
            # (update_ra) blow up on int() or open().
            return None
def _ip_bridge_cmd(action, params, device):
"""Build commands to add/del ips to bridges/devices."""
cmd = ['ip', 'addr', action]
cmd.extend(params)
cmd.extend(['dev', device])
return cmd
def _set_device_mtu(dev, mtu=None):
    """Set the device MTU.

    Falls back to CONF.network_device_mtu when *mtu* is not given; does
    nothing at all if neither is set.
    """
    if not mtu:
        mtu = CONF.network_device_mtu
    if mtu:
        # Exit codes 2 and 254 are tolerated (device state races).
        utils.execute('ip', 'link', 'set', dev, 'mtu',
                      mtu, run_as_root=True,
                      check_exit_code=[0, 2, 254])
def _create_veth_pair(dev1_name, dev2_name):
    """Create a pair of veth devices with the specified names,
    deleting any previous devices with those names.
    """
    # Remove stale devices first so 'ip link add' cannot collide.
    for dev in [dev1_name, dev2_name]:
        delete_net_dev(dev)

    utils.execute('ip', 'link', 'add', dev1_name, 'type', 'veth', 'peer',
                  'name', dev2_name, run_as_root=True)
    # Bring both ends up in promiscuous mode and apply the configured MTU.
    for dev in [dev1_name, dev2_name]:
        utils.execute('ip', 'link', 'set', dev, 'up', run_as_root=True)
        utils.execute('ip', 'link', 'set', dev, 'promisc', 'on',
                      run_as_root=True)
        _set_device_mtu(dev)
def _ovs_vsctl(args):
    """Run ovs-vsctl with the configured timeout.

    :param args: list of ovs-vsctl arguments (without the binary name).
    :raises exception.AgentError: if the command fails for any reason.
    """
    full_args = ['ovs-vsctl', '--timeout=%s' % CONF.ovs_vsctl_timeout] + args
    try:
        return utils.execute(*full_args, run_as_root=True)
    except Exception as e:
        LOG.error(_LE("Unable to execute %(cmd)s. Exception: %(exception)s"),
                  {'cmd': full_args, 'exception': e})
        raise exception.AgentError(method=full_args)
def create_ovs_vif_port(bridge, dev, iface_id, mac, instance_id):
    """Add *dev* to the OVS *bridge*, tagging it with Neutron-style
    external-ids, and apply the configured MTU.

    Any pre-existing port of the same name is removed first
    ('--if-exists del-port') so the call is idempotent.
    """
    _ovs_vsctl(['--', '--if-exists', 'del-port', dev, '--',
                'add-port', bridge, dev,
                '--', 'set', 'Interface', dev,
                'external-ids:iface-id=%s' % iface_id,
                'external-ids:iface-status=active',
                'external-ids:attached-mac=%s' % mac,
                'external-ids:vm-uuid=%s' % instance_id])
    _set_device_mtu(dev)
def delete_ovs_vif_port(bridge, dev):
    """Remove *dev* from the OVS *bridge* and delete the net device."""
    _ovs_vsctl(['--', '--if-exists', 'del-port', bridge, dev])
    delete_net_dev(dev)
def ovs_set_vhostuser_port_type(dev):
    """Switch the OVS interface *dev* to the DPDK vhost-user port type."""
    _ovs_vsctl(['--', 'set', 'Interface', dev, 'type=dpdkvhostuser'])
def create_ivs_vif_port(dev, iface_id, mac, instance_id):
    """Add *dev* as an IVS port.

    NOTE: iface_id, mac and instance_id are accepted for signature parity
    with create_ovs_vif_port but are not used by ivs-ctl here.
    """
    utils.execute('ivs-ctl', 'add-port',
                  dev, run_as_root=True)
def delete_ivs_vif_port(dev):
    """Remove *dev* from IVS and then delete the underlying net device."""
    utils.execute('ivs-ctl', 'del-port', dev,
                  run_as_root=True)
    utils.execute('ip', 'link', 'delete', dev,
                  run_as_root=True)
def create_tap_dev(dev, mac_address=None):
    """Create a tap device *dev* (if missing), optionally set its MAC,
    and bring it up.
    """
    if not device_exists(dev):
        try:
            # First, try with 'ip'
            utils.execute('ip', 'tuntap', 'add', dev, 'mode', 'tap',
                          run_as_root=True, check_exit_code=[0, 2, 254])
        except processutils.ProcessExecutionError:
            # Second option: tunctl
            utils.execute('tunctl', '-b', '-t', dev, run_as_root=True)
        if mac_address:
            utils.execute('ip', 'link', 'set', dev, 'address', mac_address,
                          run_as_root=True, check_exit_code=[0, 2, 254])
        utils.execute('ip', 'link', 'set', dev, 'up', run_as_root=True,
                      check_exit_code=[0, 2, 254])
def delete_net_dev(dev):
    """Delete a network device only if it exists."""
    if device_exists(dev):
        try:
            utils.execute('ip', 'link', 'delete', dev, run_as_root=True,
                          check_exit_code=[0, 2, 254])
            LOG.debug("Net device removed: '%s'", dev)
        except processutils.ProcessExecutionError:
            # Log and re-raise so the caller still sees the failure.
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("Failed removing net device: '%s'"), dev)
def delete_bridge_dev(dev):
    """Delete a network bridge (bring it down first), if it exists."""
    if device_exists(dev):
        try:
            utils.execute('ip', 'link', 'set', dev, 'down', run_as_root=True)
            utils.execute('brctl', 'delbr', dev, run_as_root=True)
        except processutils.ProcessExecutionError:
            # Log and re-raise so the caller still sees the failure.
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("Failed removing bridge device: '%s'"), dev)
# Similar to compute virt layers, the Linux network node
# code uses a flexible driver model to support different ways
# of creating ethernet interfaces and attaching them to the network.
# In the case of a network host, these interfaces
# act as gateway/dhcp/vpn/etc. endpoints not VM interfaces.
# Lazily-created singleton driver instance (see _get_interface_driver).
interface_driver = None


def _get_interface_driver():
    """Return the configured interface driver, creating it on first use."""
    global interface_driver
    if not interface_driver:
        interface_driver = importutils.import_object(
            CONF.linuxnet_interface_driver)
    return interface_driver
def plug(network, mac_address, gateway=True):
    """Plug *network* using the configured interface driver."""
    return _get_interface_driver().plug(network, mac_address, gateway)
def unplug(network):
    """Unplug *network* using the configured interface driver."""
    return _get_interface_driver().unplug(network)
def get_dev(network):
    """Return the device name for *network* via the configured driver."""
    return _get_interface_driver().get_dev(network)
class LinuxNetInterfaceDriver(object):
    """Abstract class that defines generic network host API
    for all Linux interface drivers.

    Concrete subclasses must implement plug/unplug/get_dev.
    """

    def plug(self, network, mac_address):
        """Create Linux device, return device name."""
        raise NotImplementedError()

    def unplug(self, network):
        """Destroy Linux device, return device name."""
        raise NotImplementedError()

    def get_dev(self, network):
        """Get device name."""
        raise NotImplementedError()
# plugs interfaces using Linux Bridge
class LinuxBridgeInterfaceDriver(LinuxNetInterfaceDriver):
    """Gateway/endpoint interface driver backed by Linux bridges.

    Plugging creates (or reuses) a bridge — optionally on top of a VLAN
    interface — and installs the iptables forwarding rules appropriate for
    gateway vs. non-gateway operation.
    """

    def plug(self, network, mac_address, gateway=True):
        vlan = network.get('vlan')
        if vlan is not None:
            # VLAN network: build vlan<N> on top of the parent interface,
            # then a bridge on top of the vlan device.
            iface = CONF.vlan_interface or network['bridge_interface']
            LinuxBridgeInterfaceDriver.ensure_vlan_bridge(
                vlan,
                network['bridge'],
                iface,
                network,
                mac_address,
                network.get('mtu'))
            iface = 'vlan%s' % vlan
        else:
            # Flat network: bridge directly on the physical interface.
            iface = CONF.flat_interface or network['bridge_interface']
            LinuxBridgeInterfaceDriver.ensure_bridge(
                network['bridge'],
                iface,
                network, gateway)

        if network['share_address'] or CONF.share_dhcp_address:
            isolate_dhcp_address(iface, network['dhcp_server'])
        # NOTE(vish): applying here so we don't get a lock conflict
        iptables_manager.apply()
        return network['bridge']

    def unplug(self, network, gateway=True):
        vlan = network.get('vlan')
        if vlan is not None:
            iface = 'vlan%s' % vlan
            LinuxBridgeInterfaceDriver.remove_vlan_bridge(vlan,
                                                          network['bridge'])
        else:
            iface = CONF.flat_interface or network['bridge_interface']
            LinuxBridgeInterfaceDriver.remove_bridge(network['bridge'],
                                                     gateway)

        if network['share_address'] or CONF.share_dhcp_address:
            remove_isolate_dhcp_address(iface, network['dhcp_server'])

        iptables_manager.apply()
        return self.get_dev(network)

    def get_dev(self, network):
        return network['bridge']

    @staticmethod
    def ensure_vlan_bridge(vlan_num, bridge, bridge_interface,
                           net_attrs=None, mac_address=None,
                           mtu=None):
        """Create a vlan and bridge unless they already exist."""
        interface = LinuxBridgeInterfaceDriver.ensure_vlan(vlan_num,
                                                           bridge_interface,
                                                           mac_address,
                                                           mtu)
        LinuxBridgeInterfaceDriver.ensure_bridge(bridge, interface, net_attrs)
        return interface

    @staticmethod
    def remove_vlan_bridge(vlan_num, bridge):
        """Delete a bridge and vlan."""
        LinuxBridgeInterfaceDriver.remove_bridge(bridge)
        LinuxBridgeInterfaceDriver.remove_vlan(vlan_num)

    @staticmethod
    @utils.synchronized('lock_vlan', external=True)
    def ensure_vlan(vlan_num, bridge_interface, mac_address=None, mtu=None):
        """Create a vlan unless it already exists."""
        interface = 'vlan%s' % vlan_num
        if not device_exists(interface):
            LOG.debug('Starting VLAN interface %s', interface)
            _execute('ip', 'link', 'add', 'link', bridge_interface,
                     'name', interface, 'type', 'vlan',
                     'id', vlan_num, run_as_root=True,
                     check_exit_code=[0, 2, 254])
            # (danwent) the bridge will inherit this address, so we want to
            # make sure it is the value set from the NetworkManager
            if mac_address:
                _execute('ip', 'link', 'set', interface, 'address',
                         mac_address, run_as_root=True,
                         check_exit_code=[0, 2, 254])
            _execute('ip', 'link', 'set', interface, 'up', run_as_root=True,
                     check_exit_code=[0, 2, 254])
        # NOTE(vish): set mtu every time to ensure that changes to mtu get
        #             propogated
        _set_device_mtu(interface, mtu)
        return interface

    @staticmethod
    @utils.synchronized('lock_vlan', external=True)
    def remove_vlan(vlan_num):
        """Delete a vlan."""
        vlan_interface = 'vlan%s' % vlan_num
        delete_net_dev(vlan_interface)

    @staticmethod
    @utils.synchronized('lock_bridge', external=True)
    def ensure_bridge(bridge, interface, net_attrs=None, gateway=True,
                      filtering=True):
        """Create a bridge unless it already exists.

        :param interface: the interface to create the bridge on.
        :param net_attrs: dictionary with attributes used to create bridge.
        :param gateway: whether or not the bridge is a gateway.
        :param filtering: whether or not to create filters on the bridge.

        If net_attrs is set, it will add the net_attrs['gateway'] to the bridge
        using net_attrs['broadcast'] and net_attrs['cidr']. It will also add
        the ip_v6 address specified in net_attrs['cidr_v6'] if use_ipv6 is set.

        The code will attempt to move any ips that already exist on the
        interface onto the bridge and reset the default gateway if necessary.
        """
        if not device_exists(bridge):
            LOG.debug('Starting Bridge %s', bridge)
            out, err = _execute('brctl', 'addbr', bridge,
                                check_exit_code=False, run_as_root=True)
            # A concurrent creation of the same bridge is not an error.
            if (err and err != "device %s already exists; can't create "
                               "bridge with the same name\n" % (bridge)):
                msg = _('Failed to add bridge: %s') % err
                raise exception.NovaException(msg)

            _execute('brctl', 'setfd', bridge, 0, run_as_root=True)
            # _execute('brctl setageing %s 10' % bridge, run_as_root=True)
            _execute('brctl', 'stp', bridge, 'off', run_as_root=True)
            # (danwent) bridge device MAC address can't be set directly.
            # instead it inherits the MAC address of the first device on the
            # bridge, which will either be the vlan interface, or a
            # physical NIC.
            _execute('ip', 'link', 'set', bridge, 'up', run_as_root=True)

        if interface:
            LOG.debug('Adding interface %(interface)s to bridge %(bridge)s',
                      {'interface': interface, 'bridge': bridge})
            out, err = _execute('brctl', 'addif', bridge, interface,
                                check_exit_code=False, run_as_root=True)
            # Already being a member of the bridge is not an error either.
            if (err and err != "device %s is already a member of a bridge; "
                               "can't enslave it to bridge %s.\n" %
                               (interface, bridge)):
                msg = _('Failed to add interface: %s') % err
                raise exception.NovaException(msg)

            out, err = _execute('ip', 'link', 'set', interface, 'up',
                                check_exit_code=False, run_as_root=True)

            # NOTE(vish): This will break if there is already an ip on the
            #             interface, so we move any ips to the bridge
            # NOTE(danms): We also need to copy routes to the bridge so as
            #              not to break existing connectivity on the interface
            old_routes = []
            out, err = _execute('ip', 'route', 'show', 'dev', interface)
            for line in out.split('\n'):
                fields = line.split()
                if fields and 'via' in fields:
                    old_routes.append(fields)
                    _execute('ip', 'route', 'del', *fields,
                             run_as_root=True)
            out, err = _execute('ip', 'addr', 'show', 'dev', interface,
                                'scope', 'global')
            for line in out.split('\n'):
                fields = line.split()
                if fields and fields[0] == 'inet':
                    # 'secondary'/'dynamic' flags precede the device name in
                    # the 'ip addr' output and must be stripped from params.
                    if fields[-2] in ('secondary', 'dynamic', ):
                        params = fields[1:-2]
                    else:
                        params = fields[1:-1]
                    _execute(*_ip_bridge_cmd('del', params, fields[-1]),
                             run_as_root=True, check_exit_code=[0, 2, 254])
                    _execute(*_ip_bridge_cmd('add', params, bridge),
                             run_as_root=True, check_exit_code=[0, 2, 254])
            # Re-add the routes we removed above, now pointing at the bridge.
            for fields in old_routes:
                _execute('ip', 'route', 'add', *fields,
                         run_as_root=True)

        if filtering:
            # Don't forward traffic unless we were told to be a gateway
            ipv4_filter = iptables_manager.ipv4['filter']
            if gateway:
                for rule in get_gateway_rules(bridge):
                    ipv4_filter.add_rule(*rule)
            else:
                ipv4_filter.add_rule('FORWARD',
                                     ('--in-interface %s -j %s'
                                      % (bridge, CONF.iptables_drop_action)))
                ipv4_filter.add_rule('FORWARD',
                                     ('--out-interface %s -j %s'
                                      % (bridge, CONF.iptables_drop_action)))

    @staticmethod
    @utils.synchronized('lock_bridge', external=True)
    def remove_bridge(bridge, gateway=True, filtering=True):
        """Delete a bridge."""
        if not device_exists(bridge):
            return
        else:
            if filtering:
                ipv4_filter = iptables_manager.ipv4['filter']
                if gateway:
                    for rule in get_gateway_rules(bridge):
                        ipv4_filter.remove_rule(*rule)
                else:
                    # Remove both the plain DROP rule and the configured
                    # drop action (they may differ).
                    drop_actions = ['DROP']
                    if CONF.iptables_drop_action != 'DROP':
                        drop_actions.append(CONF.iptables_drop_action)

                    for drop_action in drop_actions:
                        ipv4_filter.remove_rule('FORWARD',
                                                ('--in-interface %s -j %s'
                                                 % (bridge, drop_action)))
                        ipv4_filter.remove_rule('FORWARD',
                                                ('--out-interface %s -j %s'
                                                 % (bridge, drop_action)))
            delete_bridge_dev(bridge)
# NOTE(cfb): This is a temporary fix to LP #1316621. We really want to call
#            ebtables with --concurrent. In order to do that though we need
#            libvirt to support this. Additionally since ebtables --concurrent
#            will hang indefinitely waiting on the lock we need to teach
#            oslo_concurrency.processutils how to timeout a long running
#            process first. Once those are complete we can replace all of this
#            with calls to ebtables --concurrent and a reasonable timeout.
def _exec_ebtables(*cmd, **kwargs):
    """Run an ebtables command, retrying on lock-contention errors.

    check_exit_code=False suppresses the final raise (and error logging)
    after retries are exhausted or for non-retryable failures.
    """
    check_exit_code = kwargs.pop('check_exit_code', True)

    # List of error strings to re-try.
    retry_strings = (
        'Multiple ebtables programs',
    )

    # We always try at least once
    attempts = CONF.ebtables_exec_attempts
    if attempts <= 0:
        attempts = 1
    count = 1
    while count <= attempts:
        # Updated our counters if needed
        sleep = CONF.ebtables_retry_interval * count
        count += 1
        # NOTE(cfb): ebtables reports all errors with a return code of 255.
        #            As such we can't know if we hit a locking error, or some
        #            other error (like a rule doesn't exist) so we have to
        #            to parse stderr.
        try:
            _execute(*cmd, check_exit_code=[0], **kwargs)
        except processutils.ProcessExecutionError as exc:
            # See if we can retry the error.
            if any(error in exc.stderr for error in retry_strings):
                if count > attempts and check_exit_code:
                    LOG.warning(_LW('%s failed. Not Retrying.'),
                                ' '.join(cmd))
                    raise
                else:
                    # We need to sleep a bit before retrying
                    LOG.warning(_LW("%(cmd)s failed. Sleeping %(time)s "
                                    "seconds before retry."),
                                {'cmd': ' '.join(cmd), 'time': sleep})
                    time.sleep(sleep)
            else:
                # Not eligible for retry
                if check_exit_code:
                    LOG.warning(_LW('%s failed. Not Retrying.'),
                                ' '.join(cmd))
                    raise
                else:
                    return
        else:
            # Success
            return
@utils.synchronized('ebtables', external=True)
def ensure_ebtables_rules(rules, table='filter'):
    """Idempotently install each ebtables rule: delete any existing copy
    (ignoring failures) and then insert it at the top of its chain.
    """
    for rule in rules:
        cmd = ['ebtables', '-t', table, '-D'] + rule.split()
        _exec_ebtables(*cmd, check_exit_code=False, run_as_root=True)
        # Swap the '-D' (delete) flag for '-I' (insert) and re-run.
        cmd[3] = '-I'
        _exec_ebtables(*cmd, run_as_root=True)
@utils.synchronized('ebtables', external=True)
def remove_ebtables_rules(rules, table='filter'):
    """Delete each ebtables rule, ignoring rules that are already gone."""
    for rule in rules:
        cmd = ['ebtables', '-t', table, '-D'] + rule.split()
        _exec_ebtables(*cmd, check_exit_code=False, run_as_root=True)
def isolate_dhcp_address(interface, address):
    """Install ebtables rules isolating the DHCP server *address* on
    *interface*: block ARP for the address and drop forwarded DHCP
    (udp 67:68) traffic in both directions.
    """
    # block arp traffic to address across the interface
    rules = [
        'INPUT -p ARP -i %s --arp-ip-dst %s -j DROP' % (interface, address),
        'OUTPUT -p ARP -o %s --arp-ip-src %s -j DROP' % (interface, address),
        'FORWARD -p IPv4 -i %s --ip-protocol udp '
        '--ip-destination-port 67:68 -j DROP' % interface,
        'FORWARD -p IPv4 -o %s --ip-protocol udp '
        '--ip-destination-port 67:68 -j DROP' % interface,
    ]
    # NOTE(vish): the above is not possible with iptables/arptables
    ensure_ebtables_rules(rules)
def remove_isolate_dhcp_address(interface, address):
    """Remove the ebtables rules installed by isolate_dhcp_address()."""
    # block arp traffic to address across the interface
    rules = [
        'INPUT -p ARP -i %s --arp-ip-dst %s -j DROP' % (interface, address),
        'OUTPUT -p ARP -o %s --arp-ip-src %s -j DROP' % (interface, address),
        'FORWARD -p IPv4 -i %s --ip-protocol udp '
        '--ip-destination-port 67:68 -j DROP' % interface,
        'FORWARD -p IPv4 -o %s --ip-protocol udp '
        '--ip-destination-port 67:68 -j DROP' % interface,
    ]
    remove_ebtables_rules(rules)
    # NOTE(vish): the above is not possible with iptables/arptables
def get_gateway_rules(bridge):
    """Return iptables FORWARD rules that let *bridge* act as a gateway.

    Forwarding is allowed between the bridge and each interface listed in
    CONF.forward_bridge_interface ('all' means unrestricted in/out on the
    bridge); any other traffic touching the bridge is dropped.
    """
    interfaces = CONF.forward_bridge_interface
    if 'all' in interfaces:
        return [('FORWARD', '-i %s -j ACCEPT' % bridge),
                ('FORWARD', '-o %s -j ACCEPT' % bridge)]
    rules = []
    # Iterate the list fetched above rather than re-reading the config
    # option (same value, one lookup, consistent with the 'all' check).
    for iface in interfaces:
        if iface:
            rules.append(('FORWARD', '-i %s -o %s -j ACCEPT' % (bridge,
                                                                iface)))
            rules.append(('FORWARD', '-i %s -o %s -j ACCEPT' % (iface,
                                                                bridge)))
    # Always allow bridge-to-bridge traffic; drop everything else.
    rules.append(('FORWARD', '-i %s -o %s -j ACCEPT' % (bridge, bridge)))
    rules.append(('FORWARD', '-i %s -j %s' % (bridge,
                                              CONF.iptables_drop_action)))
    rules.append(('FORWARD', '-o %s -j %s' % (bridge,
                                              CONF.iptables_drop_action)))
    return rules
# plugs interfaces using Open vSwitch
class LinuxOVSInterfaceDriver(LinuxNetInterfaceDriver):
    """Gateway/endpoint interface driver backed by Open vSwitch internal
    ports on the configured integration bridge.
    """

    def plug(self, network, mac_address, gateway=True):
        dev = self.get_dev(network)
        if not device_exists(dev):
            bridge = CONF.linuxnet_ovs_integration_bridge
            # Create an internal OVS port tagged with Neutron-style
            # external-ids.
            _ovs_vsctl(['--', '--may-exist', 'add-port', bridge, dev,
                        '--', 'set', 'Interface', dev, 'type=internal',
                        '--', 'set', 'Interface', dev,
                        'external-ids:iface-id=%s' % dev,
                        '--', 'set', 'Interface', dev,
                        'external-ids:iface-status=active',
                        '--', 'set', 'Interface', dev,
                        'external-ids:attached-mac=%s' % mac_address])
            _execute('ip', 'link', 'set', dev, 'address', mac_address,
                     run_as_root=True)
            _set_device_mtu(dev, network.get('mtu'))
            _execute('ip', 'link', 'set', dev, 'up', run_as_root=True)
            if not gateway:
                # If we weren't instructed to act as a gateway then add the
                # appropriate flows to block all non-dhcp traffic.
                _execute('ovs-ofctl',
                         'add-flow', bridge, 'priority=1,actions=drop',
                         run_as_root=True)
                _execute('ovs-ofctl', 'add-flow', bridge,
                         'udp,tp_dst=67,dl_dst=%s,priority=2,actions=normal' %
                         mac_address, run_as_root=True)
                # .. and make sure iptbles won't forward it as well.
                iptables_manager.ipv4['filter'].add_rule(
                    'FORWARD',
                    '--in-interface %s -j %s' % (bridge,
                                                 CONF.iptables_drop_action))
                iptables_manager.ipv4['filter'].add_rule(
                    'FORWARD',
                    '--out-interface %s -j %s' % (bridge,
                                                  CONF.iptables_drop_action))
            else:
                for rule in get_gateway_rules(bridge):
                    iptables_manager.ipv4['filter'].add_rule(*rule)

        return dev

    def unplug(self, network):
        dev = self.get_dev(network)
        bridge = CONF.linuxnet_ovs_integration_bridge
        _ovs_vsctl(['--', '--if-exists', 'del-port', bridge, dev])
        return dev

    def get_dev(self, network):
        # Device name is 'gw-' plus the first 11 chars of the network uuid.
        dev = 'gw-' + str(network['uuid'][0:11])
        return dev
# plugs interfaces using Linux Bridge when using NeutronManager
class NeutronLinuxBridgeInterfaceDriver(LinuxNetInterfaceDriver):
    """Linux-bridge driver used with NeutronManager: creates a tap device
    plus a 'brq...' bridge named after the Neutron network uuid.
    """

    BRIDGE_NAME_PREFIX = 'brq'
    GATEWAY_INTERFACE_PREFIX = 'gw-'

    def plug(self, network, mac_address, gateway=True):
        dev = self.get_dev(network)
        bridge = self.get_bridge(network)
        if not gateway:
            # If we weren't instructed to act as a gateway then add the
            # appropriate flows to block all non-dhcp traffic.
            # .. and make sure iptbles won't forward it as well.
            iptables_manager.ipv4['filter'].add_rule(
                'FORWARD',
                ('--in-interface %s -j %s'
                 % (bridge, CONF.iptables_drop_action)))
            iptables_manager.ipv4['filter'].add_rule(
                'FORWARD',
                ('--out-interface %s -j %s'
                 % (bridge, CONF.iptables_drop_action)))
            # Non-gateway mode: no tap/bridge setup needed.
            return bridge
        else:
            for rule in get_gateway_rules(bridge):
                iptables_manager.ipv4['filter'].add_rule(*rule)

        create_tap_dev(dev, mac_address)

        if not device_exists(bridge):
            LOG.debug("Starting bridge %s ", bridge)
            utils.execute('brctl', 'addbr', bridge, run_as_root=True)
            utils.execute('brctl', 'setfd', bridge, str(0), run_as_root=True)
            utils.execute('brctl', 'stp', bridge, 'off', run_as_root=True)
            utils.execute('ip', 'link', 'set', bridge, 'address', mac_address,
                          run_as_root=True, check_exit_code=[0, 2, 254])
            utils.execute('ip', 'link', 'set', bridge, 'up', run_as_root=True,
                          check_exit_code=[0, 2, 254])
            LOG.debug("Done starting bridge %s", bridge)

            # Give the bridge the dhcp server address with the network's
            # prefix length.
            full_ip = '%s/%s' % (network['dhcp_server'],
                                 network['cidr'].rpartition('/')[2])
            utils.execute('ip', 'address', 'add', full_ip, 'dev', bridge,
                          run_as_root=True, check_exit_code=[0, 2, 254])

        return dev

    def unplug(self, network):
        dev = self.get_dev(network)
        if not device_exists(dev):
            return None
        else:
            delete_net_dev(dev)
            return dev

    def get_dev(self, network):
        dev = self.GATEWAY_INTERFACE_PREFIX + str(network['uuid'][0:11])
        return dev

    def get_bridge(self, network):
        bridge = self.BRIDGE_NAME_PREFIX + str(network['uuid'][0:11])
        return bridge
# provide compatibility with existing configs
QuantumLinuxBridgeInterfaceDriver = NeutronLinuxBridgeInterfaceDriver

# Module-level iptables manager shared by the drivers and helpers above.
iptables_manager = IptablesManager()
def set_vf_interface_vlan(pci_addr, mac_addr, vlan=0):
    """Configure an SR-IOV virtual function's MAC and VLAN.

    The VF (identified by its PCI address) is programmed through its
    physical function, then its own interface is brought up when a VLAN
    is assigned, or down when vlan == 0.
    """
    pf_ifname = pci_utils.get_ifname_by_pci_address(pci_addr,
                                                    pf_interface=True)
    vf_ifname = pci_utils.get_ifname_by_pci_address(pci_addr)
    vf_num = pci_utils.get_vf_num_by_pci_address(pci_addr)

    # Set the VF's mac address and vlan
    exit_code = [0, 2, 254]
    port_state = 'up' if vlan > 0 else 'down'
    utils.execute('ip', 'link', 'set', pf_ifname,
                  'vf', vf_num,
                  'mac', mac_addr,
                  'vlan', vlan,
                  run_as_root=True,
                  check_exit_code=exit_code)
    # Bring up/down the VF's interface
    utils.execute('ip', 'link', 'set', vf_ifname,
                  port_state,
                  run_as_root=True,
                  check_exit_code=exit_code)
| apache-2.0 |
MTG/essentia | test/src/unittests/audioproblems/test_truepeakdetector.py | 1 | 4660 | #!/usr/bin/env python
# Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import numpy as np
from math import *
from essentia_test import *
from essentia import array as esarr
class TestTruePeakDetector(TestCase):
    """Unit tests for Essentia's TruePeakDetector algorithm."""

    def testZero(self):
        # An all-zero signal must produce no peak positions.
        self.assertEqualVector(TruePeakDetector()(esarr(np.zeros(512)))[0],
                               esarr([]))

    def testSinc(self):
        # This test asserts that the estimated peak position is better than
        # the sampled one. This test is performed over a sinc wave sampled
        # with different offsets.
        duration = 10  # s
        fs = 1  # hz
        k = 1.5  # amplitude
        oversamplingFactor = 4  # factor of oversampling for the real signal

        nSamples = fs * duration
        time = np.arange(-nSamples / 2, nSamples / 2,
                         2 ** -oversamplingFactor, dtype='float')
        samplingPoints = time[::2 ** oversamplingFactor]

        def shifted_sinc(x, k, offset):
            # Sinc of amplitude k centered at `offset`, evaluated at x.
            xShifted = x - offset
            y = np.zeros(len(xShifted))
            for idx, i in enumerate(xShifted):
                if not i:
                    y[idx] = k
                else:
                    y[idx] = (k * np.sin(np.pi * i) / (np.pi * i))
            return y

        sampledError = 0
        estimatedError = 0
        its = 10
        for offset in np.linspace(0, 1, its):
            yReal = shifted_sinc(time, k, offset)
            realPeak = np.max(yReal)

            y = shifted_sinc(samplingPoints, k, offset)
            sampledPeak = np.max(y)
            sampledError += np.abs(sampledPeak - realPeak)

            _, processed = TruePeakDetector(version=2)(y.astype(np.float32))
            estimatedPeak = np.max(processed)
            estimatedError += np.abs(estimatedPeak - realPeak)

        sampledError /= float(its)
        estimatedError /= float(its)

        # Check that the peak stimation error is reduced.
        assert(estimatedError < sampledError)

    def testInvalidParam(self):
        self.assertConfigureFails(TruePeakDetector(), {'sampleRate': -1})
        self.assertConfigureFails(TruePeakDetector(), {'oversamplingFactor': 0})
        self.assertConfigureFails(TruePeakDetector(), {'quality': 5})

    def testDifferentBitDepths(self):
        # Same recording stored at 16/24/32 bits per sample.
        audio16 = MonoLoader(filename=join(testdata.audio_dir,
                                           'recorded/cat_purrrr.wav'),
                             sampleRate=44100)()
        audio24 = MonoLoader(filename=join(testdata.audio_dir,
                                           'recorded/cat_purrrr24bit.wav'),
                             sampleRate=44100)()
        audio32 = MonoLoader(filename=join(testdata.audio_dir,
                                           'recorded/cat_purrrr32bit.wav'),
                             sampleRate=44100)()

        data = [audio16, audio24, audio32]
        peakDetector = TruePeakDetector()
        results = [peakDetector(x)[0] for x in data]

        # The algorithm should detect the same frames regardless the bit size.
        for version in results[:-1]:
            self.assertEqualVector(results[-1], version)

    def testDCblock(self):
        # This negative peak is hidden by a huge amount of positive dc offset.
        # It should be detected when the optional dc blocker is on.
        oversamplingFactor = 4
        signal = np.zeros(512)
        peakLoc = 256
        signal[peakLoc] = -1.4
        signalWithDC = signal + .5

        withoutDC = TruePeakDetector(
            blockDC=True,
            oversamplingFactor=oversamplingFactor,
            version=2)(signalWithDC.astype(np.float32))[0]

        withDC = TruePeakDetector(
            blockDC=False,
            oversamplingFactor=oversamplingFactor,
            version=2)(signalWithDC.astype(np.float32))[0]

        assert(withDC.size == 0)
        assert(peakLoc in withoutDC)
# Collect all test methods of the case into one suite for the runner.
suite = allTests(TestTruePeakDetector)

if __name__ == '__main__':
    TextTestRunner(verbosity=2).run(suite)
| agpl-3.0 |
iansf/sky_engine | sky/engine/build/scripts/make_names.py | 27 | 3746 | #!/usr/bin/env python
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import hasher
import in_generator
import template_expander
import name_utilities
def _symbol(entry):
if entry['Symbol'] is not None:
return entry['Symbol']
# FIXME: Remove this special case for the ugly x-webkit-foo attributes.
if entry['name'].startswith('-webkit-'):
return entry['name'].replace('-', '_')[1:]
return name_utilities.cpp_name(entry).replace('-', '_')
class MakeNamesWriter(in_generator.Writer):
    """Generates <namespace><suffix>Names.{h,cpp} from a .in name list
    using the MakeNames Jinja templates.
    """

    # Per-entry defaults applied to the parsed .in file.
    defaults = {
        'Conditional': None,  # FIXME: Add support for Conditional.
        'ImplementedAs': None,
        'RuntimeEnabled': None,  # What should we do for runtime-enabled features?
        'Symbol': None,
    }
    # File-level parameter defaults.
    default_parameters = {
        'export': '',
        'namespace': '',
        'suffix': '',
    }
    # Jinja filters available to the templates.
    filters = {
        'cpp_name': name_utilities.cpp_name,
        'enable_conditional': name_utilities.enable_conditional_if_endif,
        'hash': hasher.hash,
        'script_name': name_utilities.script_name,
        'symbol': _symbol,
        'to_macro_style': name_utilities.to_macro_style,
    }

    def __init__(self, in_file_path):
        super(MakeNamesWriter, self).__init__(in_file_path)

        namespace = self.in_file.parameters['namespace'].strip('"')
        suffix = self.in_file.parameters['suffix'].strip('"')
        export = self.in_file.parameters['export'].strip('"')

        assert namespace, 'A namespace is required.'

        # Map output filename -> generator method.
        self._outputs = {
            (namespace + suffix + 'Names.h'): self.generate_header,
            (namespace + suffix + 'Names.cpp'): self.generate_implementation,
        }
        # Context handed to both templates.
        self._template_context = {
            'namespace': namespace,
            'suffix': suffix,
            'export': export,
            'entries': self.in_file.name_dictionaries,
        }

    @template_expander.use_jinja("MakeNames.h.tmpl", filters=filters)
    def generate_header(self):
        return self._template_context

    @template_expander.use_jinja("MakeNames.cpp.tmpl", filters=filters)
    def generate_implementation(self):
        return self._template_context
if __name__ == "__main__":
    # Entry point: drive the writer over the .in files named on argv.
    in_generator.Maker(MakeNamesWriter).main(sys.argv)
| bsd-3-clause |
pgdr/ert | python/tests/gui/ide/test_configuration_line_parser.py | 1 | 3748 | from ert_gui.ide.keywords import ConfigurationLineParser
from ert_gui.ide.keywords.data import Argument
from ecl.test import ExtendedTestCase
class ConfigurationLineParserTest(ExtendedTestCase):
    """Unit tests for ConfigurationLineParser's comment/keyword/argument splitting."""

    def test_comments(self):
        parser = ConfigurationLineParser()

        # (line, expected comment index, expected keyword value or None,
        #  expected uncommented text)
        cases = [
            ("-- comment", 0, None, ""),
            ("     -- comment", 5, None, "     "),
            ("NUM_REALIZATIONS-- comment", 16, "NUM_REALIZATIONS", "NUM_REALIZATIONS"),
            ("NUM_REALIZATIONS -- comment", 17, "NUM_REALIZATIONS", "NUM_REALIZATIONS "),
        ]

        for line, comment_index, keyword_value, uncommented in cases:
            parser.parseLine(line)

            self.assertTrue(parser.hasComment())
            self.assertEqual(parser.commentIndex(), comment_index)

            if keyword_value is None:
                self.assertFalse(parser.hasKeyword())
                self.assertIsNone(parser.keyword())
            else:
                self.assertTrue(parser.hasKeyword())
                self.assertEqual(parser.keyword().value(), keyword_value)

            self.assertEqual(parser.uncommentedText(), uncommented)

    def test_argument_text(self):
        parser = ConfigurationLineParser()

        # (line, expected comment index or None when not checked,
        #  expected uncommented text, expected arguments text)
        cases = [
            ("NUM_REALIZATIONS 25", -1,
             "NUM_REALIZATIONS 25", " 25"),
            ("NUM_REALIZATIONS 25--comment", 19,
             "NUM_REALIZATIONS 25", " 25"),
            ("NUM_REALIZATIONS 25 something_else", None,
             "NUM_REALIZATIONS 25 something_else", " 25 something_else"),
        ]

        for line, comment_index, uncommented, arguments in cases:
            parser.parseLine(line)

            if comment_index is not None:
                self.assertEqual(parser.hasComment(), comment_index >= 0)
                self.assertEqual(parser.commentIndex(), comment_index)

            self.assertTrue(parser.hasKeyword())
            self.assertEqual(parser.keyword().value(), "NUM_REALIZATIONS")
            self.assertEqual(parser.uncommentedText(), uncommented)
            self.assertEqual(parser.argumentsText(), arguments)

    def test_argument_list(self):
        parser = ConfigurationLineParser()

        parser.parseLine("KEYWORD arg1 arg2")

        self.assertEqual(parser.keyword().value(), "KEYWORD")

        arg_list = parser.arguments()
        self.assertEqual(arg_list[0].value(), "arg1")
        self.assertEqual(arg_list[1].value(), "arg2")
| gpl-3.0 |
Fireblend/chromium-crosswalk | tools/telemetry/telemetry/internal/actions/page_action.py | 29 | 4383 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
from telemetry import decorators
GESTURE_SOURCE_DEFAULT = 'DEFAULT'
GESTURE_SOURCE_MOUSE = 'MOUSE'
GESTURE_SOURCE_TOUCH = 'TOUCH'
SUPPORTED_GESTURE_SOURCES = (GESTURE_SOURCE_DEFAULT,
GESTURE_SOURCE_MOUSE,
GESTURE_SOURCE_TOUCH)
class PageActionNotSupported(Exception):
  """Raised when a page action cannot be performed in the current environment."""
  pass
class PageActionFailed(Exception):
  """Raised when a page action was attempted but did not complete successfully."""
  pass
class PageAction(object):
  """Represents an action that a user might try to perform to a page."""

  def WillRunAction(self, tab):
    """Override to do action-specific setup before
    Test.WillRunAction is called."""
    pass

  def RunAction(self, tab):
    # Subclasses must implement the actual action.
    raise NotImplementedError()

  def CleanUp(self, tab):
    # Optional action-specific teardown hook; default is a no-op.
    pass
def EvaluateCallbackWithElement(
    tab, callback_js, selector=None, text=None, element_function=None,
    wait=False, timeout_in_seconds=60):
  """Evaluates the JavaScript callback with the given element.

  The element may be selected via selector, text, or element_function.
  Only one of these arguments must be specified.

  Returns:
    The callback's return value, if any. The return value must be
    convertible to JSON. Note that when wait=True the callback's value is
    discarded and True is returned instead.

  Args:
    tab: A telemetry.core.Tab object.
    callback_js: The JavaScript callback to call (as string).
        The callback receive 2 parameters: the element, and information
        string about what method was used to retrieve the element.
        Example: '''
          function(element, info) {
            if (!element) {
              throw Error('Can not find element: ' + info);
            }
            element.click()
          }'''
    selector: A CSS selector describing the element.
    text: The element must contains this exact text.
    element_function: A JavaScript function (as string) that is used
        to retrieve the element. For example:
        '(function() { return foo.element; })()'.
    wait: Whether to wait for the return value to be true.
    timeout_in_seconds: The timeout for wait (if waiting).
  """
  # count tracks how many of the three selection methods were supplied;
  # exactly one is required.
  count = 0
  info_msg = ''
  if element_function is not None:
    count = count + 1
    info_msg = 'using element_function "%s"' % re.escape(element_function)
  if selector is not None:
    count = count + 1
    info_msg = 'using selector "%s"' % _EscapeSelector(selector)
    element_function = 'document.querySelector(\'%s\')' % _EscapeSelector(
        selector)
  if text is not None:
    count = count + 1
    info_msg = 'using exact text match "%s"' % re.escape(text)
    # Depth-first search of the DOM for a node whose innerHTML equals text.
    element_function = '''
        (function() {
          function _findElement(element, text) {
            if (element.innerHTML == text) {
              return element;
            }

            var childNodes = element.childNodes;
            for (var i = 0, len = childNodes.length; i < len; ++i) {
              var found = _findElement(childNodes[i], text);
              if (found) {
                return found;
              }
            }
            return null;
          }

          return _findElement(document, '%s');
        })()''' % text

  if count != 1:
    raise PageActionFailed(
        'Must specify 1 way to retrieve element, but %s was specified.' % count)

  # Wrap the element lookup and the user callback in one JS expression.
  code = '''
      (function() {
        var element = %s;
        var callback = %s;
        return callback(element, '%s');
      })()''' % (element_function, callback_js, info_msg)

  if wait:
    tab.WaitForJavaScriptExpression(code, timeout_in_seconds)
    return True
  else:
    return tab.EvaluateJavaScript(code)
def _EscapeSelector(selector):
return selector.replace('\'', '\\\'')
@decorators.Cache
def IsGestureSourceTypeSupported(tab, gesture_source_type):
  """Returns whether the browser supports the given synthetic-gesture source.

  The result is memoized via @decorators.Cache, so the browser is only
  queried once per argument combination.
  """
  # TODO(dominikg): remove once support for
  # 'chrome.gpuBenchmarking.gestureSourceTypeSupported' has
  # been rolled into reference build.
  if tab.EvaluateJavaScript("""
      typeof chrome.gpuBenchmarking.gestureSourceTypeSupported ===
          'undefined'"""):
    # Old builds without the API: assume everything is supported except
    # touch input on Mac.
    return (tab.browser.platform.GetOSName() != 'mac' or
            gesture_source_type.lower() != 'touch')

  return tab.EvaluateJavaScript("""
      chrome.gpuBenchmarking.gestureSourceTypeSupported(
          chrome.gpuBenchmarking.%s_INPUT)"""
      % (gesture_source_type.upper()))
| bsd-3-clause |
ahu-odoo/odoo | addons/hw_posbox_homepage/__init__.py | 1894 | 1075 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import controllers
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ecosoft-odoo/odoo | addons/purchase_double_validation/purchase_double_validation_installer.py | 432 | 2315 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class purchase_config_settings(osv.osv_memory):
    """Adds the double-validation threshold to the purchase settings wizard.

    The limit is not stored as a regular setting: it is read from and written
    back into the conditions of the two workflow transitions installed by the
    purchase_double_validation module.
    """
    _inherit = 'purchase.config.settings'
    _columns = {
        'limit_amount': fields.integer('limit to require a second approval',required=True,
            help="Amount after which validation of purchase is required."),
    }
    _defaults = {
        'limit_amount': 5000,
    }

    def get_default_limit_amount(self, cr, uid, fields, context=None):
        # The current threshold lives in the '<' transition condition,
        # e.g. 'amount_total < 5000'; parse the number back out of it.
        ir_model_data = self.pool.get('ir.model.data')
        transition = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_lt')
        field, value = transition.condition.split('<', 1)
        return {'limit_amount': int(value)}

    def set_limit_amount(self, cr, uid, ids, context=None):
        # Rewrite both workflow transition conditions so they stay consistent
        # with the configured threshold ('>=' for waiting, '<' for confirm).
        ir_model_data = self.pool.get('ir.model.data')
        config = self.browse(cr, uid, ids[0], context)
        waiting = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_gt')
        waiting.write({'condition': 'amount_total >= %s' % config.limit_amount})
        confirm = ir_model_data.get_object(cr, uid, 'purchase_double_validation', 'trans_confirmed_double_lt')
        confirm.write({'condition': 'amount_total < %s' % config.limit_amount})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
RyanLucchese/energi | contrib/spendfrom/spendfrom.py | 1 | 10023 | #!/usr/bin/env python
#
# Use the raw transactions API to spend EGIs received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a energid or Energi-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    probe = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(probe)))
    if int(round_tripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
    """Return the default location of the Energi Core data directory"""
    system = platform.system()
    if system == "Darwin":
        return os.path.expanduser("~/Library/Application Support/EnergiCore/")
    if system == "Windows":
        return os.path.join(os.environ['APPDATA'], "EnergiCore")
    return os.path.expanduser("~/.energicore")
def read_bitcoin_config(dbdir):
    """Read the energi.conf file from dbdir, returns dictionary of settings"""
    from ConfigParser import SafeConfigParser

    class FakeSecHead(object):
        # energi.conf has no section headers, but ConfigParser requires one;
        # this wrapper injects a fake "[all]" section before the real content.
        def __init__(self, fp):
            self.fp = fp
            self.sechead = '[all]\n'
        def readline(self):
            if self.sechead:
                try: return self.sechead
                finally: self.sechead = None
            else:
                s = self.fp.readline()
                # Strip trailing '#' comments from each line.
                if s.find('#') != -1:
                    s = s[0:s.find('#')].strip() +"\n"
                return s

    config_parser = SafeConfigParser()
    config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "energi.conf"))))
    return dict(config_parser.items("all"))
def connect_JSON(config):
    """Connect to a Energi Core JSON-RPC server.

    Exits the process (status 1) if the connection fails or if the server's
    testnet setting does not match the requested one.
    """
    testnet = config.get('testnet', '0')
    testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
    if not 'rpcport' in config:
        # Default RPC ports: 19796 for testnet, 9796 for mainnet.
        config['rpcport'] = 19796 if testnet else 9796
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
    try:
        result = ServiceProxy(connect)
        # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
        # but also make sure the energid we're talking to is/isn't testnet:
        if result.getmininginfo()['testnet'] != testnet:
            sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
            sys.exit(1)
        return result
    except:
        sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
        sys.exit(1)
def unlock_wallet(energid):
    """Prompt for a passphrase (if needed) and unlock the wallet.

    Returns True if the wallet is unencrypted or successfully unlocked,
    False if the entered passphrase was wrong.
    """
    info = energid.getinfo()
    if 'unlocked_until' not in info:
        return True # wallet is not encrypted
    t = int(info['unlocked_until'])
    if t <= time.time():
        try:
            passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
            # Unlock only briefly (5 seconds) — just long enough to sign.
            energid.walletpassphrase(passphrase, 5)
        except:
            sys.stderr.write("Wrong passphrase\n")

    info = energid.getinfo()
    return int(info['unlocked_until']) > time.time()
def list_available(energid):
    """Build a per-address summary of spendable funds.

    Returns a dict mapping address -> {"total": Decimal, "outputs": [unspent
    outputs], "account": account name (may be "")}.
    """
    address_summary = dict()

    address_to_account = dict()
    for info in energid.listreceivedbyaddress(0):
        address_to_account[info["address"]] = info["account"]

    unspent = energid.listunspent(0)
    for output in unspent:
        # listunspent doesn't give addresses, so:
        rawtx = energid.getrawtransaction(output['txid'], 1)
        vout = rawtx["vout"][output['vout']]
        pk = vout["scriptPubKey"]

        # This code only deals with ordinary pay-to-energi-address
        # or pay-to-script-hash outputs right now; anything exotic is ignored.
        if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
            continue

        address = pk["addresses"][0]
        if address in address_summary:
            address_summary[address]["total"] += vout["value"]
            address_summary[address]["outputs"].append(output)
        else:
            address_summary[address] = {
                "total" : vout["value"],
                "outputs" : [output],
                "account" : address_to_account.get(address, "")
                }

    return address_summary
def select_coins(needed, inputs):
    """Greedily pick unspent outputs (in order) until 'needed' is covered.

    Returns a tuple (outputs, change): outputs is a list of
    {"txid":..., "vout":...} dicts, change is the gathered amount minus
    needed (negative if the inputs could not cover the target).
    """
    # Feel free to improve this, this is good enough for my simple needs:
    picked = []
    gathered = Decimal("0.0")
    for coin in inputs:
        if gathered >= needed:
            break
        picked.append({"txid": coin["txid"], "vout": coin["vout"]})
        gathered += coin["amount"]
    return (picked, gathered - needed)
def create_tx(energid, fromaddresses, toaddress, amount, fee):
    """Create and sign a raw transaction spending from the given addresses.

    Sends 'amount' to 'toaddress' and returns any change (above BASE_FEE)
    to the last address in 'fromaddresses'. Exits the process on
    insufficient funds or signing failure. Returns the signed tx hex.
    """
    all_coins = list_available(energid)

    total_available = Decimal("0.0")
    needed = amount+fee
    potential_inputs = []
    for addr in fromaddresses:
        if addr not in all_coins:
            continue
        potential_inputs.extend(all_coins[addr]["outputs"])
        total_available += all_coins[addr]["total"]

    if total_available < needed:
        sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
        sys.exit(1)

    #
    # Note:
    # Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
    # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
    # Decimals, I'm casting amounts to float before sending them to energid.
    #
    outputs = { toaddress : float(amount) }
    (inputs, change_amount) = select_coins(needed, potential_inputs)
    if change_amount > BASE_FEE: # don't bother with zero or tiny change
        change_address = fromaddresses[-1]
        if change_address in outputs:
            outputs[change_address] += float(change_amount)
        else:
            outputs[change_address] = float(change_amount)

    rawtx = energid.createrawtransaction(inputs, outputs)
    signed_rawtx = energid.signrawtransaction(rawtx)
    if not signed_rawtx["complete"]:
        sys.stderr.write("signrawtransaction failed\n")
        sys.exit(1)
    txdata = signed_rawtx["hex"]

    return txdata
def compute_amount_in(energid, txinfo):
    """Sum the values of the previous outputs spent by txinfo's inputs.

    Requires one getrawtransaction RPC round-trip per input to look up
    the value of the output being spent.
    """
    result = Decimal("0.0")
    for vin in txinfo['vin']:
        in_info = energid.getrawtransaction(vin['txid'], 1)
        vout = in_info['vout'][vin['vout']]
        result = result + vout['value']
    return result
def compute_amount_out(txinfo):
    """Sum the values of all outputs of a decoded transaction."""
    return sum((vout['value'] for vout in txinfo['vout']), Decimal("0.0"))
def sanity_test_fee(energid, txdata_hex, max_fee):
    """Abort (sys.exit(1)) unless the transaction's implied fee is reasonable.

    The fee is the difference between the sum of the inputs and the sum of
    the outputs of the decoded transaction. Rejects fees above max_fee, and
    rejects fee-less transactions that are large or tiny-amount.
    """
    class FeeError(RuntimeError):
        pass
    try:
        txinfo = energid.decoderawtransaction(txdata_hex)
        total_in = compute_amount_in(energid, txinfo)
        total_out = compute_amount_out(txinfo)
        # BUGFIX: 'fee' was previously an unbound name here (NameError when
        # either no-fee check fired); bind it to the actual fee paid.
        fee = total_in - total_out
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))

        tx_size = len(txdata_hex)/2
        kb = tx_size/1000  # integer division rounds down
        if kb > 1 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
        # Exercise for the reader: compute transaction priority, and
        # warn if this is a very-low-priority transaction

    except FeeError as err:
        sys.stderr.write((str(err)+"\n"))
        sys.exit(1)
def main():
    """Parse command-line options and either list funds or send a payment.

    With no --amount, prints the per-address balances. Otherwise builds,
    sanity-checks, and (unless --dry_run) broadcasts a transaction.
    """
    import optparse

    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--from", dest="fromaddresses", default=None,
                      help="addresses to get energis from")
    parser.add_option("--to", dest="to", default=None,
                      help="address to get send energis to")
    parser.add_option("--amount", dest="amount", default=None,
                      help="amount to send")
    parser.add_option("--fee", dest="fee", default="0.0",
                      help="fee to include")
    parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
                      help="location of energi.conf file with RPC username/password (default: %default)")
    parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
                      help="Use the test network")
    parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
                      help="Don't broadcast the transaction, just create and print the transaction data")

    (options, args) = parser.parse_args()

    check_json_precision()
    config = read_bitcoin_config(options.datadir)
    if options.testnet: config['testnet'] = True
    energid = connect_JSON(config)

    if options.amount is None:
        # No amount given: just list spendable funds per address.
        address_summary = list_available(energid)
        for address,info in address_summary.iteritems():
            n_transactions = len(info['outputs'])
            if n_transactions > 1:
                print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
            else:
                print("%s %.8f %s"%(address, info['total'], info['account']))
    else:
        fee = Decimal(options.fee)
        amount = Decimal(options.amount)
        while unlock_wallet(energid) == False:
            pass # Keep asking for passphrase until they get it right
        txdata = create_tx(energid, options.fromaddresses.split(","), options.to, amount, fee)
        # Cap the acceptable fee at 1% of the sent amount.
        sanity_test_fee(energid, txdata, amount*Decimal("0.01"))
        if options.dry_run:
            print(txdata)
        else:
            txid = energid.sendrawtransaction(txdata)
            print(txid)
| mit |
mateor/pants | src/python/pants/base/deprecated.py | 4 | 7164 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import inspect
import warnings
from functools import wraps
import six
from packaging.version import InvalidVersion, Version
from pants.version import PANTS_SEMVER
class DeprecationApplicationError(Exception):
  """The base exception type thrown for any form of @deprecation application error.

  Subclassed by the more specific Missing/BadRemovalVersion and
  BadDecoratorNesting errors below.
  """
class MissingRemovalVersionError(DeprecationApplicationError):
  """Indicates the required removal_version was not supplied (i.e. was None)."""
class BadRemovalVersionError(DeprecationApplicationError):
  """Indicates the supplied removal_version was not a valid x.y.z semver string."""
class CodeRemovedError(Exception):
  """Indicates that the removal_version is not in the future.

  I.e., that the option/function/module with that removal_version has already been removed.

  Note, the code in question may not actually have been excised from the codebase yet, but
  it may be at any time, and no control paths access it.
  """
class BadDecoratorNestingError(DeprecationApplicationError):
  """Indicates the @deprecated decorator was innermost in a sequence of layered decorators."""
def get_deprecated_tense(removal_version, future_tense='will be', past_tense='was'):
  """Provides the grammatical tense for a given deprecated version vs the current version."""
  removal_is_upcoming = Version(removal_version) >= PANTS_SEMVER
  return future_tense if removal_is_upcoming else past_tense
def validate_removal_semver(removal_version):
  """Validates that removal_version is a valid semver.

  If so, returns that semver.  Raises an error otherwise.

  :param str removal_version: The pantsbuild.pants version which will remove the deprecated entity.
  :rtype: `packaging.version.Version`
  :raises DeprecationApplicationError: if the removal_version parameter is invalid.
  """
  if removal_version is None:
    raise MissingRemovalVersionError('The removal version must be provided.')
  if not isinstance(removal_version, six.string_types):
    raise BadRemovalVersionError('The removal_version must be a version string.')

  try:
    parsed = Version(removal_version)
  except InvalidVersion as e:
    raise BadRemovalVersionError('The given removal version {} is not a valid version: '
                                 '{}'.format(removal_version, e))

  # NB: packaging will see versions like 1.a.0 as 1a0, and are "valid"
  # We explicitly want our versions to be of the form x.y.z.
  if len(parsed.base_version.split('.')) != 3:
    raise BadRemovalVersionError('The given removal version {} is not a valid version: '
                                 '{}'.format(removal_version, removal_version))
  return parsed
def warn_or_error(removal_version, deprecated_entity_description, hint=None, stacklevel=3):
  """Check the removal_version against the current pants version.

  Issues a DeprecationWarning while the removal version is still in the future,
  and raises CodeRemovedError once it is not.

  :param string removal_version: The pantsbuild.pants version at which the deprecated entity
                                 will be/was removed.
  :param string deprecated_entity_description: A short description of the deprecated entity, that
                                               we can embed in warning/error messages.
  :param string hint: A message describing how to migrate from the removed entity.
  :param int stacklevel: The stacklevel to pass to warnings.warn.
  :raises DeprecationApplicationError: if the removal_version parameter is invalid.
  """
  removal_semver = validate_removal_semver(removal_version)

  tense = get_deprecated_tense(removal_version)
  msg = 'DEPRECATED: {} {} removed in version {}.'.format(
    deprecated_entity_description, tense, removal_version)
  if hint:
    msg += '\n {}'.format(hint)

  if removal_semver > PANTS_SEMVER:
    warnings.warn(msg, DeprecationWarning, stacklevel=stacklevel)
  else:
    raise CodeRemovedError(msg)
def deprecated_conditional(predicate,
                           removal_version,
                           entity_description,
                           hint_message=None,
                           stacklevel=4):
  """Marks a certain configuration as deprecated.

  The predicate is called to decide whether the deprecation applies: when it
  returns True a deprecation warning (or error, past the removal version) is
  issued.

  :param () -> bool predicate: A function that returns True if the deprecation warning should be on.
  :param string removal_version: The pants version which will remove the deprecated functionality.
  :param string entity_description: A description of the deprecated entity.
  :param string hint_message: An optional hint pointing to alternatives to the deprecation.
  :param int stacklevel: How far up in the stack do we go to find the calling fn to report
  :raises DeprecationApplicationError if the deprecation is applied improperly.
  """
  validate_removal_semver(removal_version)
  if not predicate():
    return
  warn_or_error(removal_version, entity_description, hint_message, stacklevel=stacklevel)
def deprecated(removal_version, hint_message=None):
  """Marks a function or method as deprecated.

  A removal version must be supplied and it must be greater than the current 'pantsbuild.pants'
  version.

  When choosing a removal version there is a natural tension between the code-base, which benefits
  from short deprecation cycles, and the user-base which may prefer to deal with deprecations less
  frequently.  As a rule of thumb, if the hint message can fully convey corrective action
  succinctly and you judge the impact to be on the small side (effects custom tasks as opposed to
  effecting BUILD files), lean towards the next release version as the removal version; otherwise,
  consider initiating a discussion to win consensus on a reasonable removal version.

  :param str removal_version: The pantsbuild.pants version which will remove the deprecated
                              function.
  :param str hint_message: An optional hint pointing to alternatives to the deprecation.
  :raises DeprecationApplicationError if the @deprecation is applied improperly.
  """
  validate_removal_semver(removal_version)

  def decorate(wrapped):
    # @deprecated must see the raw function, so it has to be the innermost decorator.
    if not inspect.isfunction(wrapped):
      raise BadDecoratorNestingError('The @deprecated decorator must be applied innermost of all '
                                     'decorators.')

    qualified_name = '{}.{}'.format(wrapped.__module__, wrapped.__name__)

    @wraps(wrapped)
    def deprecation_shim(*args, **kwargs):
      warn_or_error(removal_version, qualified_name, hint_message)
      return wrapped(*args, **kwargs)
    return deprecation_shim
  return decorate
def deprecated_module(removal_version, hint_message=None):
  """Marks an entire module as deprecated.

  Add a call to this at the top of the deprecated module, and it will print a warning message
  when the module is imported.

  Arguments are as for deprecated(), above.
  """
  warn_or_error(removal_version, 'module', hint_message)
| apache-2.0 |
jtrobec/pants | src/python/pants/backend/core/tasks/dependees.py | 1 | 3012 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from collections import defaultdict
from pants.backend.core.tasks.target_filter_task_mixin import TargetFilterTaskMixin
from pants.base.build_environment import get_buildroot
from pants.task.console_task import ConsoleTask
class ReverseDepmap(TargetFilterTaskMixin, ConsoleTask):
  """List all targets that depend on any of the input targets."""

  @classmethod
  def register_options(cls, register):
    super(ReverseDepmap, cls).register_options(register)
    register('--transitive', default=False, action='store_true',
             help='List transitive dependees.')
    register('--closed', default=False, action='store_true',
             help='Include the input targets in the output along with the dependees.')

  def __init__(self, *args, **kwargs):
    super(ReverseDepmap, self).__init__(*args, **kwargs)
    # Cache option values once; they are read repeatedly below.
    self._transitive = self.get_options().transitive
    self._closed = self.get_options().closed
    self._spec_excludes = self.get_options().spec_excludes

  def console_output(self, _):
    """Yield the address spec of every dependee of the target roots.

    Scans every BUILD file under the build root to construct a reverse
    dependency map, then walks it from the requested roots.
    """
    address_mapper = self.context.address_mapper
    buildfiles = address_mapper.scan_buildfiles(get_buildroot(),
                                                spec_excludes=self._spec_excludes)
    build_graph = self.context.build_graph
    build_file_parser = self.context.build_file_parser

    # Map each target to the set of targets that directly depend on it.
    dependees_by_target = defaultdict(set)
    for build_file in buildfiles:
      address_map = build_file_parser.parse_build_file(build_file)
      for address in address_map.keys():
        build_graph.inject_address_closure(address)
      for address in address_map.keys():
        target = build_graph.get_target(address)

        # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
        # user vs. targets created by pants at runtime.
        target = self.get_concrete_target(target)
        for dependency in target.dependencies:
          dependency = self.get_concrete_target(dependency)
          dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
      # --closed: echo the input targets themselves before their dependees.
      for root in roots:
        yield root.address.spec

    for dependent in self.get_dependents(dependees_by_target, roots):
      yield dependent.address.spec

  def get_dependents(self, dependees_by_target, roots):
    """Return the (transitive, if configured) dependees of roots, excluding the roots."""
    check = set(roots)
    known_dependents = set()
    # Fixpoint iteration: keep expanding the frontier until no new dependents
    # are found (or after one pass when --transitive is off).
    while True:
      dependents = set(known_dependents)
      for target in check:
        dependents.update(dependees_by_target[target])
      check = dependents - known_dependents
      if not check or not self._transitive:
        return dependents - set(roots)
      known_dependents = dependents

  def get_concrete_target(self, target):
    # Collapse synthetic/derived targets onto the target the user wrote down.
    return target.concrete_derived_from
| apache-2.0 |
DirkdeDraak/easybuild-easyblocks | easybuild/easyblocks/s/scotch.py | 10 | 7651 | ##
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for SCOTCH, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import fileinput
import os
import re
import sys
import shutil
from distutils.version import LooseVersion
import easybuild.tools.toolchain as toolchain
from easybuild.framework.easyblock import EasyBlock
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import copytree
from easybuild.tools.run import run_cmd
class EB_SCOTCH(EasyBlock):
"""Support for building/installing SCOTCH."""
    def configure_step(self):
        """Configure SCOTCH build: locate the template makefile, copy it to a general Makefile.inc and patch it.

        Also changes the working directory to the src dir, where the build runs.
        """
        # pick template makefile
        comp_fam = self.toolchain.comp_family()
        if comp_fam == toolchain.INTELCOMP:  #@UndefinedVariable
            makefilename = 'Makefile.inc.x86-64_pc_linux2.icc'
        elif comp_fam == toolchain.GCC:  #@UndefinedVariable
            makefilename = 'Makefile.inc.x86-64_pc_linux2'
        else:
            raise EasyBuildError("Unknown compiler family used: %s", comp_fam)

        # create Makefile.inc
        try:
            srcdir = os.path.join(self.cfg['start_dir'], 'src')
            src = os.path.join(srcdir, 'Make.inc', makefilename)
            dst = os.path.join(srcdir, 'Makefile.inc')
            shutil.copy2(src, dst)
            self.log.debug("Successfully copied Makefile.inc to src dir.")
        except OSError:
            raise EasyBuildError("Copying Makefile.inc to src dir failed.")

        # the default behaviour of these makefiles is still wrong
        # e.g., compiler settings, and we need -lpthread
        # NOTE: fileinput with inplace=1 redirects sys.stdout into the file,
        # so the writes below rewrite Makefile.inc in place.
        try:
            for line in fileinput.input(dst, inplace=1, backup='.orig.easybuild'):
                # use $CC and the likes since we're at it.
                line = re.sub(r"^CCS\s*=.*$", "CCS\t= $(CC)", line)
                line = re.sub(r"^CCP\s*=.*$", "CCP\t= $(MPICC)", line)
                line = re.sub(r"^CCD\s*=.*$", "CCD\t= $(MPICC)", line)
                # append -lpthread to LDFLAGS
                line = re.sub(r"^LDFLAGS\s*=(?P<ldflags>.*$)", "LDFLAGS\t=\g<ldflags> -lpthread", line)
                sys.stdout.write(line)
        except IOError, err:
            raise EasyBuildError("Can't modify/write Makefile in 'Makefile.inc': %s", err)

        # change to src dir for building
        try:
            os.chdir(srcdir)
            self.log.debug("Changing to src dir.")
        except OSError, err:
            raise EasyBuildError("Failed to change to src dir: %s", err)
def build_step(self):
"""Build by running build_step, but with some special options for SCOTCH depending on the compiler."""
ccs = os.environ['CC']
ccp = os.environ['MPICC']
ccd = os.environ['MPICC']
cflags = "-fPIC -O3 -DCOMMON_FILE_COMPRESS_GZ -DCOMMON_PTHREAD -DCOMMON_RANDOM_FIXED_SEED -DSCOTCH_RENAME"
if self.toolchain.comp_family() == toolchain.GCC: #@UndefinedVariable
cflags += " -Drestrict=__restrict"
else:
cflags += " -restrict -DIDXSIZE64"
if not self.toolchain.mpi_family() in [toolchain.INTELMPI, toolchain.QLOGICMPI]: #@UndefinedVariable
cflags += " -DSCOTCH_PTHREAD"
# actually build
apps = ['scotch', 'ptscotch']
if LooseVersion(self.version) >= LooseVersion('6.0'):
# separate target for esmumps in recent versions
apps.extend(['esmumps', 'ptesmumps'])
for app in apps:
cmd = 'make CCS="%s" CCP="%s" CCD="%s" CFLAGS="%s" %s' % (ccs, ccp, ccd, cflags, app)
run_cmd(cmd, log_all=True, simple=True)
def install_step(self):
"""Install by copying files and creating group library file."""
self.log.debug("Installing SCOTCH")
# copy files to install dir
regmetis = re.compile(r".*metis.*")
try:
for d in ["include", "lib", "bin", "man"]:
src = os.path.join(self.cfg['start_dir'], d)
dst = os.path.join(self.installdir, d)
# we don't need any metis stuff from scotch!
copytree(src, dst, ignore=lambda path, files: [x for x in files if regmetis.match(x)])
except OSError, err:
raise EasyBuildError("Copying %s to installation dir %s failed: %s", src, dst, err)
# create group library file
scotchlibdir = os.path.join(self.installdir, 'lib')
scotchgrouplib = os.path.join(scotchlibdir, 'libscotch_group.a')
try:
line = ' '.join(os.listdir(scotchlibdir))
line = "GROUP (%s)" % line
f = open(scotchgrouplib, 'w')
f.write(line)
f.close()
self.log.info("Successfully written group lib file: %s" % scotchgrouplib)
except (IOError, OSError), err:
raise EasyBuildError("Can't write to file %s: %s", scotchgrouplib, err)
def sanity_check_step(self):
"""Custom sanity check for SCOTCH."""
custom_paths = {
'files': ['bin/%s' % x for x in ["acpl", "amk_fft2", "amk_hy", "amk_p2", "dggath",
"dgord", "dgscat", "gbase", "gmap", "gmk_m2",
"gmk_msh", "gmtst", "gotst", "gpart", "gtst",
"mmk_m2", "mord", "amk_ccc", "amk_grf", "amk_m2",
"atst", "dgmap", "dgpart", "dgtst", "gcv", "gmk_hy",
"gmk_m3", "gmk_ub2", "gord", "gout", "gscat", "mcv",
"mmk_m3", "mtst"]] +
['include/%s.h' % x for x in ["esmumps","ptscotchf", "ptscotch","scotchf",
"scotch"]] +
['lib/lib%s.a' % x for x in ["esmumps","ptscotch", "ptscotcherrexit",
"scotcherr", "scotch_group", "ptesmumps",
"ptscotcherr", "scotch", "scotcherrexit"]],
'dirs':[]
}
super(EB_SCOTCH, self).sanity_check_step(custom_paths=custom_paths)
| gpl-2.0 |
liberorbis/libernext | env/lib/python2.7/site-packages/kombu/async/debug.py | 34 | 1484 | from __future__ import absolute_import
from kombu.five import items
from kombu.utils import reprcall
from kombu.utils.eventio import READ, WRITE, ERR
def repr_flag(flag):
    """Return a compact string for an eventio flag set (R/W/!)."""
    parts = []
    if flag & READ:
        parts.append('R')
    if flag & WRITE:
        parts.append('W')
    if flag & ERR:
        parts.append('!')
    return ''.join(parts)
def _rcb(obj):
if obj is None:
return '<missing>'
if isinstance(obj, str):
return obj
if isinstance(obj, tuple):
cb, args = obj
return reprcall(cb.__name__, args=args)
return obj.__name__
def repr_active(h):
    """Describe all readers and writers registered on hub *h*."""
    entries = repr_readers(h) + repr_writers(h)
    return ', '.join(entries)
def repr_events(h, events):
    """Describe a list of ``(fd, flags)`` poll events for hub *h*."""
    descriptions = []
    for fd, flags in events:
        cb = callback_for(h, fd, flags, '(GONE)')
        descriptions.append(
            '{0}({1})->{2}'.format(_rcb(cb), fd, repr_flag(flags)))
    return ', '.join(descriptions)
def repr_readers(h):
    """Describe each registered reader as ``(fd)callback->flags``."""
    flags = repr_flag(READ | ERR)
    return ['({0}){1}->{2}'.format(fd, _rcb(cb), flags)
            for fd, cb in items(h.readers)]
def repr_writers(h):
    """Describe each registered writer as ``(fd)callback->flags``."""
    flags = repr_flag(WRITE)
    return ['({0}){1}->{2}'.format(fd, _rcb(cb), flags)
            for fd, cb in items(h.writers)]
def callback_for(h, fd, flag, *default):
    """Return the callback registered on hub *h* for *fd* and *flag*.

    If no callback is registered and a *default* is given, the default
    is returned instead of raising ``KeyError``.
    """
    try:
        if flag & READ:
            return h.readers[fd]
        if flag & WRITE:
            # consolidated fds share a single callback
            return (h.consolidate_callback if fd in h.consolidate
                    else h.writers[fd])
    except KeyError:
        if not default:
            raise
        return default[0]
| gpl-2.0 |
Aravinthu/odoo | addons/account_asset/models/account_invoice.py | 1 | 5524 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models
from odoo.tools import DEFAULT_SERVER_DATE_FORMAT as DF
from odoo.addons import decimal_precision as dp
class AccountInvoice(models.Model):
    _inherit = 'account.invoice'

    @api.multi
    def action_cancel(self):
        """Cancel the invoices and deactivate any assets created from them."""
        res = super(AccountInvoice, self).action_cancel()
        assets = self.env['account.asset.asset'].sudo().search(
            [('invoice_id', 'in', self.ids)])
        assets.write({'active': False})
        return res

    @api.multi
    def action_move_create(self):
        """Create the account moves, then spawn assets for each invoice's lines."""
        result = super(AccountInvoice, self).action_move_create()
        for inv in self:
            ctx = dict(self.env.context)
            # Within an invoice's context, 'default_type' is the type of the
            # invoice, not of the asset; drop it so asset creation does not
            # pick it up as a default value.
            ctx.pop('default_type', None)
            inv.invoice_line_ids.with_context(ctx).asset_create()
        return result
class AccountInvoiceLine(models.Model):
    _inherit = 'account.invoice.line'

    # category driving asset / deferred-revenue creation for this line
    asset_category_id = fields.Many2one('account.asset.category', string='Asset Category')
    asset_start_date = fields.Date(string='Asset Start Date', compute='_get_asset_date', readonly=True, store=True)
    asset_end_date = fields.Date(string='Asset End Date', compute='_get_asset_date', readonly=True, store=True)
    asset_mrr = fields.Float(string='Monthly Recurring Revenue', compute='_get_asset_date', readonly=True, digits=dp.get_precision('Account'), store=True)

    @api.one
    @api.depends('asset_category_id', 'invoice_id.date_invoice')
    def _get_asset_date(self):
        """Compute asset start/end dates and the monthly recurring revenue
        (MRR) from the asset category and the invoice date."""
        self.asset_mrr = 0
        self.asset_start_date = False
        self.asset_end_date = False
        cat = self.asset_category_id
        if cat:
            if cat.method_number == 0 or cat.method_period == 0:
                # local imports: neither UserError nor _ is imported at module level
                from odoo import _
                from odoo.exceptions import UserError
                raise UserError(_('The number of depreciations or the period length of your asset category cannot be null.'))
            months = cat.method_number * cat.method_period
            # MRR only makes sense for customer invoices/refunds
            if self.invoice_id.type in ['out_invoice', 'out_refund']:
                self.asset_mrr = self.price_subtotal_signed / months
            if self.invoice_id.date_invoice:
                # asset period starts on the first day of the invoice's month
                start_date = datetime.strptime(self.invoice_id.date_invoice, DF).replace(day=1)
                end_date = (start_date + relativedelta(months=months, days=-1))
                self.asset_start_date = start_date.strftime(DF)
                self.asset_end_date = end_date.strftime(DF)

    @api.one
    def asset_create(self):
        """Create (and optionally auto-validate) an asset for this line,
        using the defaults of its asset category."""
        if self.asset_category_id:
            vals = {
                'name': self.name,
                'code': self.invoice_id.number or False,
                'category_id': self.asset_category_id.id,
                'value': self.price_subtotal_signed,
                'partner_id': self.invoice_id.partner_id.id,
                'company_id': self.invoice_id.company_id.id,
                'currency_id': self.invoice_id.company_currency_id.id,
                'date': self.invoice_id.date_invoice,
                'invoice_id': self.invoice_id.id,
            }
            # apply the category's onchange defaults before creating
            changed_vals = self.env['account.asset.asset'].onchange_category_id_values(vals['category_id'])
            vals.update(changed_vals['value'])
            asset = self.env['account.asset.asset'].create(vals)
            if self.asset_category_id.open_asset:
                asset.validate()
        return True

    @api.onchange('asset_category_id')
    def onchange_asset_category_id(self):
        # Customer and vendor invoices both take the account from the
        # category (the two original branches were identical).
        if self.invoice_id.type in ('out_invoice', 'in_invoice') and self.asset_category_id:
            self.account_id = self.asset_category_id.account_asset_id.id

    @api.onchange('uom_id')
    def _onchange_uom_id(self):
        """Re-apply the category account after a unit-of-measure change."""
        result = super(AccountInvoiceLine, self)._onchange_uom_id()
        self.onchange_asset_category_id()
        return result

    @api.onchange('product_id')
    def _onchange_product_id(self):
        """Default the asset category from the product template."""
        vals = super(AccountInvoiceLine, self)._onchange_product_id()
        if self.product_id:
            if self.invoice_id.type == 'out_invoice':
                self.asset_category_id = self.product_id.product_tmpl_id.deferred_revenue_category_id
            elif self.invoice_id.type == 'in_invoice':
                self.asset_category_id = self.product_id.product_tmpl_id.asset_category_id
        return vals

    def _set_additional_fields(self, invoice):
        """Fill in the asset category when a line is created from an
        invoice (e.g. bank statement reconciliation)."""
        if not self.asset_category_id:
            if invoice.type == 'out_invoice':
                self.asset_category_id = self.product_id.product_tmpl_id.deferred_revenue_category_id.id
            elif invoice.type == 'in_invoice':
                self.asset_category_id = self.product_id.product_tmpl_id.asset_category_id.id
            self.onchange_asset_category_id()
        super(AccountInvoiceLine, self)._set_additional_fields(invoice)

    def get_invoice_line_account(self, type, product, fpos, company):
        """Prefer the product's asset-category account over the default."""
        return product.asset_category_id.account_asset_id or super(AccountInvoiceLine, self).get_invoice_line_account(type, product, fpos, company)
| agpl-3.0 |
notriddle/servo | tests/wpt/web-platform-tests/tools/third_party/h2/examples/fragments/server_https_setup_fragment.py | 25 | 3875 | # -*- coding: utf-8 -*-
"""
Server HTTPS Setup
~~~~~~~~~~~~~~~~~~
This example code fragment demonstrates how to set up a HTTP/2 server that
negotiates HTTP/2 using NPN and ALPN. For the sake of maximum explanatory value
this code uses the synchronous, low-level sockets API: however, if you're not
using sockets directly (e.g. because you're using asyncio), you should focus on
the set up required for the SSLContext object. For other concurrency libraries
you may need to use other setup (e.g. for Twisted you'll need to use
IProtocolNegotiationFactory).
This code requires Python 3.5 or later.
"""
import h2.config
import h2.connection
import socket
import ssl
def establish_tcp_connection():
    """
    Accept a single server-side TCP connection on port 443.

    The exact mechanics are unimportant to this example: bind, listen,
    and return the first accepted client socket.
    """
    listener = socket.socket()
    listener.bind(('', 443))
    listener.listen(5)
    client_sock, _addr = listener.accept()
    return client_sock
def get_http2_ssl_context():
    """
    Build an SSLContext suitably configured for HTTP/2 (RFC 7540,
    section 9.2).  If you're working with Python TLS directly, this is
    the exact setup you want.
    """
    # Start from the standard library's sensible server-side defaults.
    ctx = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
    # RFC 7540 Section 9.2: HTTP/2 requires TLS 1.2 or higher, so
    # disable TLS 1.1 and everything below it.
    ctx.options |= (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
                    | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
    # RFC 7540 Section 9.2.1: TLS compression must be disabled.
    ctx.options |= ssl.OP_NO_COMPRESSION
    # RFC 7540 Section 9.2.2: the cipher-suite blacklist effectively
    # allows only AES-GCM and ChaCha20 suites with ephemeral keys.
    ctx.set_ciphers("ECDHE+AESGCM:ECDHE+CHACHA20:DHE+AESGCM:DHE+CHACHA20")
    # Negotiate via ALPN (mandatory) and NPN (optional, may be absent);
    # this setup also allows falling back to HTTP/1.1.
    ctx.set_alpn_protocols(["h2", "http/1.1"])
    try:
        ctx.set_npn_protocols(["h2", "http/1.1"])
    except NotImplementedError:
        pass
    return ctx
def negotiate_tls(tcp_conn, context):
    """
    Wrap *tcp_conn* in TLS using *context* and confirm that HTTP/2 was
    negotiated, raising RuntimeError otherwise.
    """
    tls_conn = context.wrap_socket(tcp_conn, server_side=True)
    # The negotiated protocol is only known once the handshake is
    # complete; always prefer the ALPN result, falling back to NPN.
    protocol = tls_conn.selected_alpn_protocol()
    if protocol is None:
        protocol = tls_conn.selected_npn_protocol()
    if protocol != "h2":
        raise RuntimeError("Didn't negotiate HTTP/2!")
    return tls_conn
def main():
    """Run the example end to end: TLS setup, TCP accept, TLS handshake,
    and HTTP/2 connection initiation."""
    # Step 1: Set up your TLS context.
    context = get_http2_ssl_context()
    # Step 2: Receive a TCP connection.
    connection = establish_tcp_connection()
    # Step 3: Wrap the connection in TLS and validate that we negotiated HTTP/2
    tls_connection = negotiate_tls(connection, context)
    # Step 4: Create a server-side H2 connection.
    config = h2.config.H2Configuration(client_side=False)
    http2_connection = h2.connection.H2Connection(config=config)
    # Step 5: Initiate the connection
    http2_connection.initiate_connection()
    tls_connection.sendall(http2_connection.data_to_send())
    # The TCP, TLS, and HTTP/2 handshakes are now complete. You can enter your
    # main loop now.
FAForever/client | src/modvault/uimodwidget.py | 1 | 2094 |
import urllib.request, urllib.error, urllib.parse
from PyQt5 import QtCore, QtWidgets
import modvault
import util
FormClass, BaseClass = util.THEME.loadUiType("modvault/uimod.ui")
class UIModWidget(FormClass, BaseClass):
    """Dialog that lets the user enable/disable installed UI-only mods."""
    # HTML template used to render a mod's name/description in the info pane
    FORMATTER_UIMOD = str(util.THEME.readfile("modvault/uimod.qthtml"))
    def __init__(self, parent, *args, **kwargs):
        BaseClass.__init__(self, *args, **kwargs)
        self.setupUi(self)
        # NOTE(review): attribute shadows QWidget.parent(); rename when convenient
        self.parent = parent
        util.THEME.stylesheets_reloaded.connect(self.load_stylesheet)
        self.load_stylesheet()
        self.setWindowTitle("Ui Mod Manager")
        self.doneButton.clicked.connect(self.doneClicked)
        self.modList.itemEntered.connect(self.hoverOver)
        # index installed UI-only mods by their display name
        allmods = modvault.getInstalledMods()
        self.uimods = {}
        for mod in allmods:
            if mod.ui_only:
                self.uimods[mod.totalname] = mod
                self.modList.addItem(mod.totalname)
        # pre-select the mods that are currently active
        names = [mod.totalname for mod in modvault.getActiveMods(uimods=True)]
        for name in names:
            l = self.modList.findItems(name, QtCore.Qt.MatchExactly)
            if l:
                l[0].setSelected(True)
        # show the first mod's description, if any mods are installed
        if len(self.uimods) != 0:
            self.hoverOver(self.modList.item(0))
    def load_stylesheet(self):
        # re-applied whenever the theme emits stylesheets_reloaded
        self.setStyleSheet(util.THEME.readstylesheet("client/client.css"))
    @QtCore.pyqtSlot()
    def doneClicked(self):
        # Persist the current selection as the active UI-mod set, then close.
        selected_mods = [self.uimods[str(item.text())] for item in self.modList.selectedItems()]
        succes = modvault.setActiveMods(selected_mods, False)
        if not succes:
            QtWidgets.QMessageBox.information(None, "Error", "Could not set the active UI mods. Maybe something is "
                                                             "wrong with your game.prefs file. Please send your log.")
        self.done(1)
    @QtCore.pyqtSlot(QtWidgets.QListWidgetItem)
    def hoverOver(self, item):
        # Update the description pane for the hovered mod.
        mod = self.uimods[str(item.text())]
        self.modInfo.setText(self.FORMATTER_UIMOD.format(name=mod.totalname, description=mod.description))
| gpl-3.0 |
CUCWD/edx-platform | lms/djangoapps/verify_student/tests/test_utils.py | 13 | 5914 | # -*- coding: utf-8 -*-
"""
Tests for verify_student utility functions.
"""
from datetime import datetime, timedelta
import ddt
import unittest
import pytz
from mock import patch
from pytest import mark
from django.conf import settings
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification, SSOVerification, ManualVerification
from lms.djangoapps.verify_student.utils import verification_for_datetime, most_recent_verification
from student.tests.factories import UserFactory
FAKE_SETTINGS = {
"DAYS_GOOD_FOR": 10,
}
@ddt.ddt
@patch.dict(settings.VERIFY_STUDENT, FAKE_SETTINGS)
@mark.django_db
class TestVerifyStudentUtils(unittest.TestCase):
    """
    Tests for utility functions in verify_student.
    """
    shard = 4
    def test_verification_for_datetime(self):
        # Walk verification_for_datetime() through an attempt's lifecycle:
        # no attempts, before creation, inside the validity window
        # (DAYS_GOOD_FOR comes from the patched FAKE_SETTINGS above),
        # after expiry, and with two attempts in the same window.
        user = UserFactory.create()
        now = datetime.now(pytz.UTC)
        # No attempts in the query set, so should return None
        query = SoftwareSecurePhotoVerification.objects.filter(user=user)
        result = verification_for_datetime(now, query)
        self.assertIs(result, None)
        # Should also return None if no deadline specified
        query = SoftwareSecurePhotoVerification.objects.filter(user=user)
        result = verification_for_datetime(None, query)
        self.assertIs(result, None)
        # Make an attempt
        attempt = SoftwareSecurePhotoVerification.objects.create(user=user)
        # Before the created date, should get no results
        before = attempt.created_at - timedelta(seconds=1)
        query = SoftwareSecurePhotoVerification.objects.filter(user=user)
        result = verification_for_datetime(before, query)
        self.assertIs(result, None)
        # Immediately after the created date, should get the attempt
        after_created = attempt.created_at + timedelta(seconds=1)
        query = SoftwareSecurePhotoVerification.objects.filter(user=user)
        result = verification_for_datetime(after_created, query)
        self.assertEqual(result, attempt)
        # If no deadline specified, should return first available
        query = SoftwareSecurePhotoVerification.objects.filter(user=user)
        result = verification_for_datetime(None, query)
        self.assertEqual(result, attempt)
        # Immediately before the expiration date, should get the attempt
        expiration = attempt.created_at + timedelta(days=settings.VERIFY_STUDENT["DAYS_GOOD_FOR"])
        before_expiration = expiration - timedelta(seconds=1)
        query = SoftwareSecurePhotoVerification.objects.filter(user=user)
        result = verification_for_datetime(before_expiration, query)
        self.assertEqual(result, attempt)
        # Immediately after the expiration date, should not get the attempt
        # (the attempt is backdated so that its window has already closed)
        attempt.created_at = attempt.created_at - timedelta(days=settings.VERIFY_STUDENT["DAYS_GOOD_FOR"])
        attempt.save()
        after = datetime.now(pytz.UTC) + timedelta(days=1)
        query = SoftwareSecurePhotoVerification.objects.filter(user=user)
        result = verification_for_datetime(after, query)
        self.assertIs(result, None)
        # Create a second attempt in the same window
        second_attempt = SoftwareSecurePhotoVerification.objects.create(user=user)
        # Now we should get the newer attempt
        deadline = second_attempt.created_at + timedelta(days=1)
        query = SoftwareSecurePhotoVerification.objects.filter(user=user)
        result = verification_for_datetime(deadline, query)
        self.assertEqual(result, second_attempt)
    # Each tuple: (create photo?, create sso?, create manual?,
    #              which verification is created first, expected winner)
    @ddt.data(
        (False, False, False, None, None),
        (True, False, False, None, 'photo'),
        (False, True, False, None, 'sso'),
        (False, False, True, None, 'manual'),
        (True, True, True, 'photo', 'sso'),
        (True, True, True, 'sso', 'photo'),
        (True, True, True, 'manual', 'photo')
    )
    @ddt.unpack
    def test_most_recent_verification(
            self,
            create_photo_verification,
            create_sso_verification,
            create_manual_verification,
            first_verification,
            expected_verification):
        # most_recent_verification() must pick the newest record across the
        # three verification tables, ordered by the 'created_at' field.
        user = UserFactory.create()
        photo_verification = None
        sso_verification = None
        manual_verification = None
        if not first_verification:
            if create_photo_verification:
                photo_verification = SoftwareSecurePhotoVerification.objects.create(user=user)
            if create_sso_verification:
                sso_verification = SSOVerification.objects.create(user=user)
            if create_manual_verification:
                manual_verification = ManualVerification.objects.create(user=user)
        elif first_verification == 'photo':
            photo_verification = SoftwareSecurePhotoVerification.objects.create(user=user)
            sso_verification = SSOVerification.objects.create(user=user)
        elif first_verification == 'sso':
            sso_verification = SSOVerification.objects.create(user=user)
            photo_verification = SoftwareSecurePhotoVerification.objects.create(user=user)
        else:
            manual_verification = ManualVerification.objects.create(user=user)
            photo_verification = SoftwareSecurePhotoVerification.objects.create(user=user)
        most_recent = most_recent_verification(
            SoftwareSecurePhotoVerification.objects.all(),
            SSOVerification.objects.all(),
            ManualVerification.objects.all(),
            'created_at'
        )
        if not expected_verification:
            self.assertEqual(most_recent, None)
        elif expected_verification == 'photo':
            self.assertEqual(most_recent, photo_verification)
        elif expected_verification == 'sso':
            self.assertEqual(most_recent, sso_verification)
        else:
            self.assertEqual(most_recent, manual_verification)
| agpl-3.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.4.3/Lib/copy_reg.py | 12 | 6244 | """Helper to provide extensibility for pickle/cPickle.
This is only useful to add pickle support for extension types defined in
C, not for instances of user-defined classes.
"""
from types import ClassType as _ClassType
__all__ = ["pickle", "constructor",
"add_extension", "remove_extension", "clear_extension_cache"]
dispatch_table = {}
def pickle(ob_type, pickle_function, constructor_ob=None):
    # Register pickle_function as the reduction function for ob_type.
    # Old-style (classic) classes are rejected: pickle handles those itself.
    if type(ob_type) is _ClassType:
        raise TypeError("copy_reg is not intended for use with classes")
    if not callable(pickle_function):
        raise TypeError("reduction functions must be callable")
    dispatch_table[ob_type] = pickle_function
    # The constructor_ob function is a vestige of safe for unpickling.
    # There is no reason for the caller to pass it anymore.
    if constructor_ob is not None:
        constructor(constructor_ob)
def constructor(object):
    """Check that *object* can serve as a reconstructor (must be callable)."""
    if callable(object):
        return
    raise TypeError("constructors must be callable")
# Example: provide pickling support for complex numbers.
try:
    complex
except NameError:
    # complex may be compiled out of this interpreter
    pass
else:
    def pickle_complex(c):
        # reduce a complex number to (constructor, (real, imag))
        return complex, (c.real, c.imag)
    pickle(complex, pickle_complex, complex)
# Support for pickling new-style objects
def _reconstructor(cls, base, state):
if base is object:
obj = object.__new__(cls)
else:
obj = base.__new__(cls, state)
base.__init__(obj, state)
return obj
_HEAPTYPE = 1<<9
# Python code for object.__reduce_ex__ for protocols 0 and 1
def _reduce_ex(self, proto):
    """Implement object.__reduce_ex__ for pickle protocols 0 and 1.

    Finds the outermost non-heap (C-implemented) base class, captures its
    constructor state, and returns a (_reconstructor, args[, dict]) tuple
    usable by pickle.
    """
    assert proto < 2
    # Locate the first base that is not a heap type; it determines how the
    # instance must be reconstructed.
    for base in self.__class__.__mro__:
        if hasattr(base, '__flags__') and not base.__flags__ & _HEAPTYPE:
            break
    else:
        base = object # not really reachable
    if base is object:
        state = None
    else:
        if base is self.__class__:
            # call form instead of "raise TypeError, msg": identical in
            # Python 2.4 but also valid in later Python versions
            raise TypeError("can't pickle %s objects" % base.__name__)
        state = base(self)
    args = (self.__class__, base, state)
    try:
        getstate = self.__getstate__
    except AttributeError:
        if getattr(self, "__slots__", None):
            raise TypeError("a class that defines __slots__ without "
                            "defining __getstate__ cannot be pickled")
        try:
            dict = self.__dict__
        except AttributeError:
            dict = None
    else:
        dict = getstate()
    if dict:
        return _reconstructor, args, dict
    else:
        return _reconstructor, args
# Helper for __reduce_ex__ protocol 2
def __newobj__(cls, *args):
    # Trampoline recorded in protocol-2 pickles: recreate via cls.__new__.
    return cls.__new__(cls, *args)
def _slotnames(cls):
"""Return a list of slot names for a given class.
This needs to find slots defined by the class and its bases, so we
can't simply return the __slots__ attribute. We must walk down
the Method Resolution Order and concatenate the __slots__ of each
class found there. (This assumes classes don't modify their
__slots__ attribute to misrepresent their slots after the class is
defined.)
"""
# Get the value from a cache in the class if possible
names = cls.__dict__.get("__slotnames__")
if names is not None:
return names
# Not cached -- calculate the value
names = []
if not hasattr(cls, "__slots__"):
# This class has no slots
pass
else:
# Slots found -- gather slot names from all base classes
for c in cls.__mro__:
if "__slots__" in c.__dict__:
names += [name for name in c.__dict__["__slots__"]
if name not in ("__dict__", "__weakref__")]
# Cache the outcome in the class if at all possible
try:
cls.__slotnames__ = names
except:
pass # But don't die if we can't
return names
# A registry of extension codes. This is an ad-hoc compression
# mechanism. Whenever a global reference to <module>, <name> is about
# to be pickled, the (<module>, <name>) tuple is looked up here to see
# if it is a registered extension code for it. Extension codes are
# universal, so that the meaning of a pickle does not depend on
# context. (There are also some codes reserved for local use that
# don't have this restriction.) Codes are positive ints; 0 is
# reserved.
_extension_registry = {} # key -> code
_inverted_registry = {} # code -> key
_extension_cache = {} # code -> object
# Don't ever rebind those names: cPickle grabs a reference to them when
# it's initialized, and won't see a rebinding.
def add_extension(module, name, code):
    """Register an extension code for the (module, name) pair.

    Codes must be in 1..0x7fffffff; re-registering an identical mapping is
    a no-op, but conflicting registrations raise ValueError.
    """
    code = int(code)
    if not 1 <= code <= 0x7fffffff:
        # call form instead of "raise ValueError, msg": identical in
        # Python 2.4 but also valid in later Python versions
        raise ValueError("code out of range")
    key = (module, name)
    if (_extension_registry.get(key) == code and
        _inverted_registry.get(code) == key):
        return # Redundant registrations are benign
    if key in _extension_registry:
        raise ValueError("key %s is already registered with code %s" %
                         (key, _extension_registry[key]))
    if code in _inverted_registry:
        raise ValueError("code %s is already in use for key %s" %
                         (code, _inverted_registry[code]))
    _extension_registry[key] = code
    _inverted_registry[code] = key
def remove_extension(module, name, code):
    """Unregister an extension code.  For testing only."""
    key = (module, name)
    # only remove a mapping that matches exactly in both directions
    if (_extension_registry.get(key) != code or
            _inverted_registry.get(code) != key):
        raise ValueError("key %s is not registered with code %s" %
                         (key, code))
    del _extension_registry[key]
    del _inverted_registry[code]
    _extension_cache.pop(code, None)
def clear_extension_cache():
    # Drop all memoized extension objects (the registry mappings are kept).
    _extension_cache.clear()
# Standard extension code assignments
# Reserved ranges
# First Last Count Purpose
# 1 127 127 Reserved for Python standard library
# 128 191 64 Reserved for Zope
# 192 239 48 Reserved for 3rd parties
# 240 255 16 Reserved for private use (will never be assigned)
# 256 Inf Inf Reserved for future assignment
# Extension codes are assigned by the Python Software Foundation.
| mit |
LookThisCode/DeveloperBus | Season 2013/Brazil/Projects/QueroMe-master/openid/fetchers.py | 1 | 13915 | # -*- test-case-name: openid.test.test_fetchers -*-
"""
This module contains the HTTP fetcher interface and several implementations.
"""
__all__ = ['fetch', 'getDefaultFetcher', 'setDefaultFetcher', 'HTTPResponse',
'HTTPFetcher', 'createHTTPFetcher', 'HTTPFetchingError',
'HTTPError']
import urllib2
import time
import cStringIO
import sys
import openid
# Try to import httplib2 for caching support
# http://bitworking.org/projects/httplib2/
try:
import httplib2
except ImportError:
# httplib2 not available
httplib2 = None
# try to import pycurl, which will let us use CurlHTTPFetcher
try:
import pycurl
except ImportError:
pycurl = None
USER_AGENT = "python-openid/%s (%s)" % (openid.__version__, sys.platform)
MAX_RESPONSE_KB = 1024
def fetch(url, body=None, headers=None):
    """Fetch *url* using the default fetcher; most users need only this.

    @raises Exception: any exceptions that may be raised by the default fetcher
    """
    return getDefaultFetcher().fetch(url, body, headers)
def createHTTPFetcher():
    """Create a default HTTP fetcher instance: Curl when pycurl is
    importable, urllib2 otherwise."""
    if pycurl is not None:
        return CurlHTTPFetcher()
    return Urllib2Fetcher()
# Contains the currently set HTTP fetcher. If it is set to None, the
# library will call createHTTPFetcher() to set it. Do not access this
# variable outside of this module.
_default_fetcher = None
def getDefaultFetcher():
    """Return the default fetcher, lazily creating one if none is set.

    @return: the default fetcher
    @rtype: HTTPFetcher
    """
    global _default_fetcher
    if _default_fetcher is None:
        # installs a (wrapped) fetcher as a side effect
        setDefaultFetcher(createHTTPFetcher())
    return _default_fetcher
def setDefaultFetcher(fetcher, wrap_exceptions=True):
    """Install *fetcher* as the default HTTP fetcher.

    @param fetcher: the fetcher to use as the default HTTP fetcher
    @type fetcher: HTTPFetcher

    @param wrap_exceptions: when true (the default), the fetcher is
        wrapped so that any exception it raises surfaces as
        HTTPFetchingError, which is easier for callers to catch.
        Unwrapped fetchers are useful for debugging fetch failures or
        when the fetcher raises well-known exceptions you want to catch.
    @type wrap_exceptions: bool
    """
    global _default_fetcher
    if fetcher is not None and wrap_exceptions:
        fetcher = ExceptionWrappingFetcher(fetcher)
    _default_fetcher = fetcher
def usingCurl():
    """Whether the currently set HTTP fetcher is a Curl HTTP fetcher."""
    current = getDefaultFetcher()
    return isinstance(current, CurlHTTPFetcher)
class HTTPResponse(object):
    """Simple value object describing an HTTP response.

    @ivar final_url: the URL after following any redirects
    @ivar status: the integer HTTP status code
    @ivar headers: response headers as a {str: str} mapping
    @ivar body: the response body
    """
    headers = None
    status = None
    body = None
    final_url = None

    def __init__(self, final_url=None, status=None, headers=None, body=None):
        self.final_url = final_url
        self.status = status
        self.headers = headers
        self.body = body

    def __repr__(self):
        return "<%s status %s for %s>" % (
            type(self).__name__, self.status, self.final_url)
class HTTPFetcher(object):
    """
    Interface for openid HTTP fetchers.  Only relevant if you need to
    implement a new fetcher yourself.
    """

    def fetch(self, url, body=None, headers=None):
        """
        Perform an HTTP GET (or POST, when *body* is given), following
        redirects along the way.

        @param headers: HTTP headers to include with the request
        @type headers: {str:str}

        @return: an object representing the server's HTTP response.
            Network or protocol errors raise an exception; HTTP error
            responses such as 404 or 500 do not.
        @rtype: L{HTTPResponse}

        @raise Exception: different implementations raise different
            errors depending on the underlying HTTP library.
        """
        raise NotImplementedError
def _allowedURL(url):
return url.startswith('http://') or url.startswith('https://')
class HTTPFetchingError(Exception):
    """Wrapper for any exception raised by the underlying fetcher when
    the ExceptionWrappingFetcher is in use.

    @ivar why: the original exception that caused this one
    """

    def __init__(self, why=None):
        Exception.__init__(self, why)
        self.why = why
class ExceptionWrappingFetcher(HTTPFetcher):
    """Fetcher that wraps another fetcher, converting any exception it
    raises into HTTPFetchingError.

    @cvar uncaught_exceptions: Exceptions that should be exposed to the
        user if they are raised by the fetch call
    """
    uncaught_exceptions = (SystemExit, KeyboardInterrupt, MemoryError)
    def __init__(self, fetcher):
        self.fetcher = fetcher
    def fetch(self, *args, **kwargs):
        try:
            return self.fetcher.fetch(*args, **kwargs)
        except self.uncaught_exceptions:
            raise
        except:
            exc_cls, exc_inst = sys.exc_info()[:2]
            if exc_inst is None:
                # string exceptions (old Python 2) carry no instance;
                # fall back to the "class" (the string itself)
                exc_inst = exc_cls
            raise HTTPFetchingError(why=exc_inst)
class Urllib2Fetcher(HTTPFetcher):
    """An C{L{HTTPFetcher}} that uses urllib2.
    """
    # Parameterized for the benefit of testing frameworks, see
    # http://trac.openidenabled.com/trac/ticket/85
    urlopen = staticmethod(urllib2.urlopen)
    def fetch(self, url, body=None, headers=None):
        # Only http(s) URLs may be fetched; a non-None body makes this a POST.
        if not _allowedURL(url):
            raise ValueError('Bad URL scheme: %r' % (url,))
        if headers is None:
            headers = {}
        headers.setdefault(
            'User-Agent',
            "%s Python-urllib/%s" % (USER_AGENT, urllib2.__version__,))
        req = urllib2.Request(url, data=body, headers=headers)
        try:
            f = self.urlopen(req)
            try:
                return self._makeResponse(f)
            finally:
                f.close()
        except urllib2.HTTPError, why:
            # HTTP error statuses (4xx/5xx) are returned as normal
            # responses rather than raised
            try:
                return self._makeResponse(why)
            finally:
                why.close()
    def _makeResponse(self, urllib2_response):
        # Translate a urllib2 response (or HTTPError, which quacks the
        # same) into an HTTPResponse, truncating the body at MAX_RESPONSE_KB.
        resp = HTTPResponse()
        resp.body = urllib2_response.read(MAX_RESPONSE_KB * 1024)
        resp.final_url = urllib2_response.geturl()
        resp.headers = dict(urllib2_response.info().items())
        # HTTPError carries a 'code'; a plain successful response does not
        if hasattr(urllib2_response, 'code'):
            resp.status = urllib2_response.code
        else:
            resp.status = 200
        return resp
class HTTPError(HTTPFetchingError):
    """
    Raised by C{L{CurlHTTPFetcher}} when an exceptional situation is
    encountered while fetching a URL.
    """
    pass
# XXX: define what we mean by paranoid, and make sure it is.
class CurlHTTPFetcher(HTTPFetcher):
    """
    An C{L{HTTPFetcher}} that uses pycurl for fetching.
    See U{http://pycurl.sourceforge.net/}.
    """
    ALLOWED_TIME = 20 # seconds, total budget including redirects

    def __init__(self):
        HTTPFetcher.__init__(self)
        if pycurl is None:
            raise RuntimeError('Cannot find pycurl library')

    def _parseHeaders(self, header_file):
        """Parse the raw response-header buffer into a {name: value} dict
        with lower-cased (case-insensitive) header names."""
        header_file.seek(0)
        # Remove the status line from the beginning of the input
        unused_http_status_line = header_file.readline().lower()
        if unused_http_status_line.startswith('http/1.1 100 '):
            # skip an interim "100 Continue" response and its blank line
            unused_http_status_line = header_file.readline()
            unused_http_status_line = header_file.readline()
        lines = [line.strip() for line in header_file]
        # and the blank line from the end
        empty_line = lines.pop()
        if empty_line:
            # bug fix: report the offending line itself, not the stale
            # 'line' loop variable left over from the comprehension above
            raise HTTPError("No blank line at end of headers: %r" % (empty_line,))
        headers = {}
        for line in lines:
            try:
                name, value = line.split(':', 1)
            except ValueError:
                raise HTTPError(
                    "Malformed HTTP header line in response: %r" % (line,))
            value = value.strip()
            # HTTP headers are case-insensitive
            name = name.lower()
            headers[name] = value
        return headers

    def _checkURL(self, url):
        # XXX: document that this can be overridden to match desired policy
        # XXX: make sure url is well-formed and routeable
        return _allowedURL(url)

    def fetch(self, url, body=None, headers=None):
        """Fetch *url* (POST when *body* is given), following redirects,
        within an overall budget of ALLOWED_TIME seconds."""
        stop = int(time.time()) + self.ALLOWED_TIME
        off = self.ALLOWED_TIME
        if headers is None:
            headers = {}
        headers.setdefault('User-Agent',
                           "%s %s" % (USER_AGENT, pycurl.version,))
        header_list = []
        if headers is not None:
            for header_name, header_value in headers.iteritems():
                header_list.append('%s: %s' % (header_name, header_value))
        c = pycurl.Curl()
        try:
            c.setopt(pycurl.NOSIGNAL, 1)
            if header_list:
                c.setopt(pycurl.HTTPHEADER, header_list)
            # Presence of a body indicates that we should do a POST
            if body is not None:
                c.setopt(pycurl.POST, 1)
                c.setopt(pycurl.POSTFIELDS, body)
            while off > 0:
                if not self._checkURL(url):
                    raise HTTPError("Fetching URL not allowed: %r" % (url,))
                data = cStringIO.StringIO()
                def write_data(chunk):
                    # returning 0 makes curl abort: truncates responses
                    # larger than MAX_RESPONSE_KB
                    if data.tell() > 1024*MAX_RESPONSE_KB:
                        return 0
                    else:
                        return data.write(chunk)
                response_header_data = cStringIO.StringIO()
                c.setopt(pycurl.WRITEFUNCTION, write_data)
                c.setopt(pycurl.HEADERFUNCTION, response_header_data.write)
                c.setopt(pycurl.TIMEOUT, off)
                # NOTE(review): relies on openid.urinorm being importable as
                # an attribute of the already-imported openid package
                c.setopt(pycurl.URL, openid.urinorm.urinorm(url))
                c.perform()
                response_headers = self._parseHeaders(response_header_data)
                code = c.getinfo(pycurl.RESPONSE_CODE)
                if code in [301, 302, 303, 307]:
                    url = response_headers.get('location')
                    if url is None:
                        raise HTTPError(
                            'Redirect (%s) returned without a location' % code)
                    # Redirects are always GETs
                    c.setopt(pycurl.POST, 0)
                    # There is no way to reset POSTFIELDS to empty and
                    # reuse the connection, but we only use it once.
                else:
                    resp = HTTPResponse()
                    resp.headers = response_headers
                    resp.status = code
                    resp.final_url = url
                    resp.body = data.getvalue()
                    return resp
                off = stop - int(time.time())
            raise HTTPError("Timed out fetching: %r" % (url,))
        finally:
            c.close()
class HTTPLib2Fetcher(HTTPFetcher):
    """A fetcher that uses C{httplib2} for performing HTTP
    requests. This implementation supports HTTP caching.
    @see: http://bitworking.org/projects/httplib2/
    """
    def __init__(self, cache=None):
        """@param cache: An object suitable for use as an C{httplib2}
        cache. If a string is passed, it is assumed to be a
        directory name.
        """
        if httplib2 is None:
            raise RuntimeError('Cannot find httplib2 library. '
                               'See http://bitworking.org/projects/httplib2/')
        super(HTTPLib2Fetcher, self).__init__()
        # The httplib2 object that actually performs the HTTP requests.
        self.httplib2 = httplib2.Http(cache)
        # Like the other fetchers, we want exceptions raised on errors
        # rather than synthetic error status codes.
        self.httplib2.force_exception_to_status_code = False
    def fetch(self, url, body=None, headers=None):
        """Perform an HTTP request
        @raises Exception: Any exception that can be raised by httplib2
        @see: C{L{HTTPFetcher.fetch}}
        """
        # A (truthy) body implies a POST; otherwise a plain GET.
        method = 'POST' if body else 'GET'
        if headers is None:
            headers = {}
        # httplib2 does not verify that the URL scheme is HTTP(S), so
        # enforce that here before issuing the request.
        if not url.startswith(('http://', 'https://')):
            raise ValueError('URL is not a HTTP URL: %r' % (url,))
        httplib2_response, content = self.httplib2.request(
            url, method, body=body, headers=headers)
        # Translate the httplib2 response into our HTTPResponse
        # abstraction.  Error responses (e.g. 400) come back without a
        # "content-location" header; in that case fall back to the
        # original URL, sanity-checking that no redirect happened.
        try:
            final_url = httplib2_response['content-location']
        except KeyError:
            # We're assuming that no redirects occurred
            assert not httplib2_response.previous
            # And this should never happen for a successful response
            assert httplib2_response.status != 200
            final_url = url
        return HTTPResponse(
            body=content,
            final_url=final_url,
            headers=dict(httplib2_response.items()),
            status=httplib2_response.status,
        )
| apache-2.0 |
ThinkingBridge/platform_external_chromium_org | chrome/test/functional/chromeos_device_policy.py | 41 | 8712 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import pyauto_functional # Must come before pyauto (and thus, policy_base).
import policy_base
class ChromeosDevicePolicy(policy_base.PolicyTestBase):
  """Tests various ChromeOS device policies."""
  # Cache user credentials for easy lookup.
  private_info = policy_base.PolicyTestBase.GetPrivateInfo()
  credentials = (private_info['prod_enterprise_test_user'],
                 private_info['prod_enterprise_executive_user'],
                 private_info['prod_enterprise_sales_user'])
  # Parallel lists: _usernames[i] and _passwords[i] belong to credentials[i].
  _usernames = [credential['username'] for credential in credentials]
  _passwords = [credential['password'] for credential in credentials]
  def LoginAsGuest(self):
    """Logs in as guest, asserting logged-out before and logged-in after."""
    self.assertFalse(self.GetLoginInfo()['is_logged_in'],
                     msg='Expected to be logged out.')
    policy_base.PolicyTestBase.LoginAsGuest(self)
    self.assertTrue(self.GetLoginInfo()['is_logged_in'],
                    msg='Expected to be logged in.')
  def _Login(self, user_index, expect_success):
    """Logs in as user |user_index|, asserting the outcome matches
    |expect_success|."""
    self.assertFalse(self.GetLoginInfo()['is_logged_in'],
                     msg='Expected to be logged out.')
    policy_base.PolicyTestBase.Login(self,
                                     self._usernames[user_index],
                                     self._passwords[user_index])
    if expect_success:
      self.assertTrue(self.GetLoginInfo()['is_logged_in'],
                      msg='Expected to be logged in.')
    else:
      self.assertFalse(self.GetLoginInfo()['is_logged_in'],
                       msg='Expected to not be logged in.')
  def _CheckGuestModeAvailableInLoginWindow(self):
    """Returns whether the guest sign-in element is visible in the OOBE
    login window."""
    return self.ExecuteJavascriptInOOBEWebUI(
        """window.domAutomationController.send(
               !document.getElementById('guestSignin').hidden);
        """)
  def _CheckGuestModeAvailableInAccountPicker(self):
    """Returns whether the account picker's pod row contains a guest pod."""
    return self.ExecuteJavascriptInOOBEWebUI(
        """window.domAutomationController.send(
               !!document.getElementById('pod-row').getPodWithUsername_(''));
        """)
  def _CheckPodVisible(self, username):
    """Returns whether a visible pod exists for |username|."""
    javascript = """
        var pod = document.getElementById('pod-row').getPodWithUsername_('%s');
        window.domAutomationController.send(!!pod && !pod.hidden);
        """
    return self.ExecuteJavascriptInOOBEWebUI(javascript % username)
  def _WaitForPodVisibility(self, username, visible):
    """Waits until the pod for |username| is (in)visible, failing on
    timeout."""
    self.assertTrue(
        self.WaitUntil(function=lambda: self._CheckPodVisible(username),
                       expect_retval=visible),
        msg='Expected pod for user %s to %s be visible.' %
            (username, '' if visible else 'not'))
  def testGuestModeEnabled(self):
    """Checks that guest mode login can be enabled/disabled."""
    self.SetDevicePolicy({'guest_mode_enabled': True})
    self.assertTrue(self._CheckGuestModeAvailableInLoginWindow(),
                    msg='Expected guest mode to be available.')
    self.LoginAsGuest()
    self.Logout()
    self.SetDevicePolicy({'guest_mode_enabled': False})
    self.assertFalse(self._CheckGuestModeAvailableInLoginWindow(),
                     msg='Expected guest mode to not be available.')
    # Log in as a regular so that the pod row contains at least one pod and the
    # account picker is shown.
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    self.SetDevicePolicy({'guest_mode_enabled': True})
    self.assertTrue(self._CheckGuestModeAvailableInAccountPicker(),
                    msg='Expected guest mode to be available.')
    self.LoginAsGuest()
    self.Logout()
    self.SetDevicePolicy({'guest_mode_enabled': False})
    self.assertFalse(self._CheckGuestModeAvailableInAccountPicker(),
                     msg='Expected guest mode to not be available.')
  def testShowUserNamesOnSignin(self):
    """Checks that the account picker can be enabled/disabled."""
    # Log in as a regular user so that the pod row contains at least one pod and
    # the account picker can be shown.
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    # NOTE(review): _WaitForLoginScreenId is not defined in this class --
    # presumably inherited from policy_base.PolicyTestBase; verify.
    self.SetDevicePolicy({'show_user_names': False})
    self._WaitForLoginScreenId('gaia-signin')
    self.SetDevicePolicy({'show_user_names': True})
    self._WaitForLoginScreenId('account-picker')
  def testUserWhitelistAndAllowNewUsers(self):
    """Checks that login can be (dis)allowed by whitelist and allow-new-users.
    The test verifies that these two interrelated policies behave as documented
    in the chrome/browser/policy/proto/chrome_device_policy.proto file. Cases
    for which the current behavior is marked as "broken" are intentionally
    omitted since the broken behavior should be fixed rather than protected by
    tests.
    """
    # No whitelist
    self.SetDevicePolicy({'allow_new_users': True})
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    # Empty whitelist
    self.SetDevicePolicy({'user_whitelist': []})
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    self.SetDevicePolicy({'allow_new_users': True,
                          'user_whitelist': []})
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    # Populated whitelist
    self.SetDevicePolicy({'user_whitelist': [self._usernames[0]]})
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    self._Login(user_index=1, expect_success=False)
    self.SetDevicePolicy({'allow_new_users': True,
                          'user_whitelist': [self._usernames[0]]})
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    self._Login(user_index=1, expect_success=True)
    self.Logout()
    # New users not allowed, populated whitelist
    self.SetDevicePolicy({'allow_new_users': False,
                          'user_whitelist': [self._usernames[0]]})
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    self._Login(user_index=1, expect_success=False)
  def testUserWhitelistInAccountPicker(self):
    """Checks that setting a whitelist removes non-whitelisted user pods."""
    # Disable the account picker so that the login form is shown and the Login()
    # automation call can be used.
    self.PrepareToWaitForLoginFormReload()
    self.SetDevicePolicy({'show_user_names': False})
    self.WaitForLoginFormReload()
    # Log in to populate the list of existing users.
    self._Login(user_index=0, expect_success=True)
    self.Logout()
    self._Login(user_index=1, expect_success=True)
    self.Logout()
    # Enable the account picker.
    self.SetDevicePolicy({'show_user_names': True})
    self._WaitForLoginScreenId('account-picker')
    # Check pod visibility with and without a whitelist.
    self._WaitForPodVisibility(username=self._usernames[0], visible=True)
    self._WaitForPodVisibility(username=self._usernames[1], visible=True)
    self.SetDevicePolicy({'show_user_names': True,
                          'user_whitelist': [self._usernames[1]]})
    self._WaitForPodVisibility(username=self._usernames[0], visible=False)
    self._WaitForPodVisibility(username=self._usernames[1], visible=True)
    self.SetDevicePolicy({'show_user_names': True})
    self._WaitForPodVisibility(username=self._usernames[0], visible=True)
    self._WaitForPodVisibility(username=self._usernames[1], visible=True)
  # Two distinct zones used to observe persistence/reset across sessions.
  _timezones = ['America/Barbados', 'Europe/Helsinki']
  def testTimezoneSettingWithoutPolicy(self):
    """Without timezone policy, timezone changes by user are persistent."""
    self.SetDevicePolicy(refresh=False)
    for timezone in self._timezones:
      self._Login(user_index=1, expect_success=True)
      self.SetTimezone(timezone)
      self.assertEqual(timezone, self.GetTimeInfo()['timezone'])
      self.Logout()
      # The user's choice must survive logout.
      self.assertEqual(timezone, self.GetTimeInfo()['timezone'])
  def testTimezoneSettingWithPolicy(self):
    """With timezone policy, timezone changes by user are reset on logout."""
    self.SetDevicePolicy({'timezone': self._timezones[0]}, refresh=True)
    # Timezones are set on startup, i.e. everytime when loading the login
    # screen. Something like a browser restart may work, too.
    self._Login(user_index=1, expect_success=True)
    self.Logout()
    self.assertEqual(self._timezones[0], self.GetTimeInfo()['timezone'])
    self._Login(user_index=1, expect_success=True)
    self.SetTimezone(self._timezones[1])
    self.assertEqual(self._timezones[1], self.GetTimeInfo()['timezone'])
    self.Logout()
    self.assertEqual(self._timezones[0], self.GetTimeInfo()['timezone'])
# Entry point: run these device-policy tests through the pyauto harness.
if __name__ == '__main__':
  pyauto_functional.Main()
| bsd-3-clause |
Weasyl/weasyl | libweasyl/libweasyl/alembic/versions/0dd45a91821c_remove_unused_premium_leftovers.py | 1 | 1462 | """Remove unused premium leftovers
Revision ID: 0dd45a91821c
Revises: fb60c8528489
Create Date: 2019-09-16 16:44:00.647210
"""
# revision identifiers, used by Alembic.
revision = '0dd45a91821c'
down_revision = 'fb60c8528489'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Strip the 'd' flag from login.settings (apparently the premium marker;
    # downgrade() restores it from profile.config) and drop the two tables
    # that backed the removed premium feature.
    op.execute("UPDATE login SET settings = replace(settings, 'd', '') WHERE settings ~ 'd'")
    op.drop_table('userpremium')
    op.drop_table('premiumpurchase')
def downgrade():
    # Recreate the two premium tables with their pre-revision schema.
    op.create_table('premiumpurchase',
    sa.Column('token', sa.VARCHAR(), autoincrement=False, nullable=False),
    sa.Column('email', sa.VARCHAR(length=254), autoincrement=False, nullable=False),
    sa.Column('terms', sa.SMALLINT(), autoincrement=False, nullable=False),
    sa.PrimaryKeyConstraint('token', name=u'premiumpurchase_pkey')
    )
    op.create_table('userpremium',
    sa.Column('userid', sa.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('unixtime', sa.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('terms', sa.SMALLINT(), autoincrement=False, nullable=False),
    sa.ForeignKeyConstraint(['userid'], [u'login.userid'], name=u'userpremium_userid_fkey', onupdate=u'CASCADE', ondelete=u'CASCADE'),
    sa.PrimaryKeyConstraint('userid', name=u'userpremium_pkey')
    )
    # Restore the 'd' settings flag for users whose profile.config carries it.
    op.execute("UPDATE login SET settings = login.settings || 'd' FROM profile WHERE login.settings !~ 'd' AND login.userid = profile.userid AND profile.config ~ 'd'")
| apache-2.0 |
Argonne-National-Laboratory/ProMC | protobuf/gtest/test/gtest_shuffle_test.py | 3023 | 12549 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
# Filter used by the FILTERED_* runs below.
TEST_FILTER = 'A*.A:A*.B:C*'
# Test lists, lazily populated (at most once each) by CalculateTestLists()
# and shared by every test method in GTestShuffleUnitTest.
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
  """Returns the flag that makes Google Test also run disabled tests."""
  return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
  """Returns a --gtest_filter flag selecting *test_filter*."""
  return '--gtest_filter={0}'.format(test_filter)
def RepeatFlag(n):
  """Returns a --gtest_repeat flag for *n* iterations."""
  return '--gtest_repeat={0}'.format(n)
def ShuffleFlag():
  """Returns the flag that enables test shuffling."""
  return '--gtest_shuffle'
def RandomSeedFlag(n):
  """Returns a --gtest_random_seed flag with seed *n*."""
  return '--gtest_random_seed={0}'.format(n)
def RunAndReturnOutput(extra_env, args):
  """Runs the test program and returns its output.

  extra_env entries override (or extend) the inherited environment.
  """
  env = dict(os.environ)
  env.update(extra_env)
  return gtest_test_utils.Subprocess([COMMAND] + args, env=env).output
def GetTestsForAllIterations(extra_env, args):
  """Runs the test program and returns a list of test lists.

  Args:
    extra_env: a map from environment variables to their values
    args: command line flags to pass to gtest_shuffle_test_

  Returns:
    A list where the i-th element is the list of tests run in the i-th
    test iteration.
  """
  iterations = []
  current = None
  for raw_line in RunAndReturnOutput(extra_env, args).split('\n'):
    if raw_line.startswith('----'):
      # A dashed separator marks the start of a new iteration.
      current = []
      iterations.append(current)
    else:
      name = raw_line.strip()
      if name:
        current.append(name)  # 'TestCaseName.TestName'
  return iterations
def GetTestCases(tests):
  """Returns the distinct test case names found in the given full test names.

  Args:
    tests: a list of full test names ('TestCase.TestName')

  Returns:
    A list of test case names in first-seen order; every duplicate
    occurrence (consecutive or not) is dropped.
  """
  seen = []
  for full_name in tests:
    case = full_name.split('.')[0]
    if case not in seen:
      seen.append(case)
  return seen
def CalculateTestLists():
  """Calculates the list of tests run under different flags."""
  # Each module-level list doubles as a cache: it is filled on the first
  # call and the 'if not' guards make subsequent calls (one per test
  # method, via setUp) no-ops, so each expensive subprocess run of
  # gtest_shuffle_test_ happens at most once.
  if not ALL_TESTS:
    ALL_TESTS.extend(
        GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
  if not ACTIVE_TESTS:
    ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
  if not FILTERED_TESTS:
    FILTERED_TESTS.extend(
        GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
  if not SHARDED_TESTS:
    SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [])[0])
  # Shuffled variants all use random seed 1 so they are comparable with
  # each other and reproducible across runs.
  if not SHUFFLED_ALL_TESTS:
    SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
        {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
  if not SHUFFLED_ACTIVE_TESTS:
    SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
  if not SHUFFLED_FILTERED_TESTS:
    SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
  if not SHUFFLED_SHARDED_TESTS:
    SHUFFLED_SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
  """Tests test shuffling."""
  def setUp(self):
    # Populates the module-level test lists (memoized; see
    # CalculateTestLists), so each test method can compare them.
    CalculateTestLists()
  def testShufflePreservesNumberOfTests(self):
    """Shuffling must not add or drop tests."""
    self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
    self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
    self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
    self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
  def testShuffleChangesTestOrder(self):
    """Shuffled order must differ from the unshuffled order."""
    self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
    self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
    self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
                 SHUFFLED_FILTERED_TESTS)
    self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
                 SHUFFLED_SHARDED_TESTS)
  def testShuffleChangesTestCaseOrder(self):
    """Shuffling must also reorder whole test cases, not just tests."""
    self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
                 GetTestCases(SHUFFLED_ALL_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
        GetTestCases(SHUFFLED_ACTIVE_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
        GetTestCases(SHUFFLED_FILTERED_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
        GetTestCases(SHUFFLED_SHARDED_TESTS))
  def testShuffleDoesNotRepeatTest(self):
    """Each test appears exactly once in a shuffled run."""
    for test in SHUFFLED_ALL_TESTS:
      self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
                       '%s appears more than once' % (test,))
  def testShuffleDoesNotCreateNewTest(self):
    """Shuffled runs contain only tests from the unshuffled run."""
    for test in SHUFFLED_ALL_TESTS:
      self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
  def testShuffleIncludesAllTests(self):
    """No test from the unshuffled run is lost by shuffling."""
    for test in ALL_TESTS:
      self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
    for test in ACTIVE_TESTS:
      self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
    for test in FILTERED_TESTS:
      self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
    for test in SHARDED_TESTS:
      self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
  def testShuffleLeavesDeathTestsAtFront(self):
    # Death tests must all run before any non-death test; once a
    # non-death test is seen, no death test may follow.
    non_death_test_found = False
    for test in SHUFFLED_ACTIVE_TESTS:
      if 'DeathTest.' in test:
        self.assert_(not non_death_test_found,
                     '%s appears after a non-death test' % (test,))
      else:
        non_death_test_found = True
  def _VerifyTestCasesDoNotInterleave(self, tests):
    """Asserts that all tests belonging to a test case appear contiguously."""
    test_cases = []
    for test in tests:
      [test_case, _] = test.split('.')
      if test_cases and test_cases[-1] != test_case:
        test_cases.append(test_case)
        self.assertEqual(1, test_cases.count(test_case),
                         'Test case %s is not grouped together in %s' %
                         (test_case, tests))
  def testShuffleDoesNotInterleaveTestCases(self):
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
  def testShuffleRestoresOrderAfterEachIteration(self):
    # Get the test lists in all 3 iterations, using random seed 1, 2,
    # and 3 respectively. Google Test picks a different seed in each
    # iteration, and this test depends on the current implementation
    # picking successive numbers. This dependency is not ideal, but
    # makes the test much easier to write.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
    # Make sure running the tests with random seed 1 gets the same
    # order as in iteration 1 above.
    [tests_with_seed1] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])
    self.assertEqual(tests_in_iteration1, tests_with_seed1)
    # Make sure running the tests with random seed 2 gets the same
    # order as in iteration 2 above. Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 2.
    [tests_with_seed2] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(2)])
    self.assertEqual(tests_in_iteration2, tests_with_seed2)
    # Make sure running the tests with random seed 3 gets the same
    # order as in iteration 3 above. Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 3.
    [tests_with_seed3] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(3)])
    self.assertEqual(tests_in_iteration3, tests_with_seed3)
  def testShuffleGeneratesNewOrderInEachIteration(self):
    """Repeated iterations must not reuse the same shuffled order."""
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
    self.assert_(tests_in_iteration1 != tests_in_iteration2,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration1 != tests_in_iteration3,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration2 != tests_in_iteration3,
                 tests_in_iteration2)
  def testShuffleShardedTestsPreservesPartition(self):
    # If we run M tests on N shards, the same M tests should be run in
    # total, regardless of the random seeds used by the shards.
    [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '0'},
                                        [ShuffleFlag(), RandomSeedFlag(1)])
    [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '1'},
                                        [ShuffleFlag(), RandomSeedFlag(20)])
    [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '2'},
                                        [ShuffleFlag(), RandomSeedFlag(25)])
    sorted_sharded_tests = tests1 + tests2 + tests3
    sorted_sharded_tests.sort()
    sorted_active_tests = []
    sorted_active_tests.extend(ACTIVE_TESTS)
    sorted_active_tests.sort()
    self.assertEqual(sorted_active_tests, sorted_sharded_tests)
# Entry point: delegate to the Google Test python-test harness.
if __name__ == '__main__':
  gtest_test_utils.Main()
| lgpl-3.0 |
denovogroup/pox | pox/info/packet_dump.py | 46 | 2821 | # Copyright 2012 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A simple component that dumps packet_in info to the log.
Use --verbose for really verbose dumps.
Use --show to show all packets.
"""
from pox.core import core
import pox.openflow.libopenflow_01 as of
import pox.lib.packet as pkt
from pox.lib.util import dpidToStr
log = core.getLogger()
# Module-level configuration, populated by launch() below.
_verbose = None          # if true, log packet.dump() instead of a one-line summary
_max_length = None       # truncate log messages longer than this many characters
_types = None            # set of lowercased packet-class names to show or hide
_show_by_default = None  # True: show unless matched by _types; False: hide unless matched
def _handle_PacketIn (event):
  """Log a summary (or verbose dump) of a packet_in, honoring the
  show/hide packet-type configuration established by launch()."""
  packet = event.parsed
  show = _show_by_default
  # Walk the packet's header chain looking for a layer whose class name
  # matches the configured type set; a match flips the default decision.
  # BUG FIX: an unconditional 'return' used to sit after the 'if' block
  # below, making the hasattr/p.next lines unreachable and aborting the
  # walk after the first header layer.
  p = packet
  while p:
    if p.__class__.__name__.lower() in _types:
      if _show_by_default:
        # This packet type is hidden
        return
      else:
        # This packet type should be shown
        show = True
        break
    if not hasattr(p, 'next'): break
    p = p.next
  if not show: return
  # (A dead 'msg = dpidToStr(event.dpid) + ": "' assignment was removed;
  # the dpid already goes into the logger name below.)
  msg = ""
  if _verbose:
    msg += packet.dump()
  else:
    # One-line summary: a [ClassName] tag per header layer, ending with
    # the raw payload size if the chain terminates in a string.
    p = packet
    while p:
      if isinstance(p, basestring):
        msg += "[%s bytes]" % (len(p),)
        break
      msg += "[%s]" % (p.__class__.__name__,)
      p = p.next
  if _max_length:
    if len(msg) > _max_length:
      msg = msg[:_max_length-3]
      msg += "..."
  core.getLogger("dump:" + dpidToStr(event.dpid)).debug(msg)
def launch (verbose = False, max_length = 110, full_packets = True,
            hide = False, show = False):
  """Configure the module flags and attach the PacketIn listener.

  hide/show accept either a boolean or a string of packet-type names
  separated by spaces, commas or '|' (e.g. "arp,ipv4").
  """
  global _verbose, _max_length, _types, _show_by_default
  _verbose = verbose
  _max_length = max_length
  # --show given with no argument (True), or neither --hide nor --show
  # given at all: show every packet regardless of type filtering.
  force_show = (show is True) or (hide is False and show is False)
  if isinstance(hide, basestring):
    hide = hide.replace(',', ' ').replace('|', ' ')
    hide = set([p.lower() for p in hide.split()])
  else:
    hide = set()
  if isinstance(show, basestring):
    show = show.replace(',', ' ').replace('|', ' ')
    show = set([p.lower() for p in show.split()])
  else:
    show = set()
  if hide and show:
    raise RuntimeError("Can't both show and hide packet types")
  if show:
    _types = show
  else:
    _types = hide
  # A hide-list means "show by default, hide the listed types"; a
  # show-list means the opposite.
  _show_by_default = not not hide
  if force_show:
    _show_by_default = force_show
  if full_packets:
    # Send full packets to controller
    core.openflow.miss_send_len = 0xffff
  core.openflow.addListenerByName("PacketIn", _handle_PacketIn)
  log.info("Packet dumper running")
| apache-2.0 |
cawka/pybindgen | pybindgen/typehandlers/base.py | 6 | 55840 | ## -*- python -*-
## pylint: disable-msg=W0142,R0921
"""
Base classes for all parameter/return type handlers,
and base interfaces for wrapper generators.
"""
from pybindgen.typehandlers import codesink
import warnings
from pybindgen.typehandlers import ctypeparser
import sys
# Python 2/3 compatibility: a tuple of string types usable with isinstance().
PY3 = (sys.version_info[0] >= 3)
if PY3:
    import types
    string_types = str,
else:
    string_types = basestring
import logging
logger = logging.getLogger("pybindgen.typehandlers")
# 'all' became a builtin in Python 2.5; provide a fallback for older
# interpreters.
try:
    all
except NameError: # for compatibility with Python < 2.5
    def all(iterable):
        "Returns True if all elements are true"
        for element in iterable:
            if not element:
                return False
        return True
# 'set' became a builtin later than the oldest supported interpreters;
# fall back to the legacy 'sets' module when it is missing.
try:
    set
except NameError:
    from sets import Set as set # Python 2.3 fallback
class CodegenErrorBase(Exception):
    """Base class of the pybindgen code-generation exception hierarchy."""
    pass
class NotSupportedError(CodegenErrorBase):
    """Raised when an interface configuration is declared that is not
    supported or not implemented by the code generator."""
class CodeGenerationError(CodegenErrorBase):
    """Raised when wrapper code generation fails for some reason."""
class TypeLookupError(CodegenErrorBase):
    """Raised when lookup of a type handler fails."""
class TypeConfigurationError(CodegenErrorBase):
    """Raised when a type handler does not find some information it
    needs, such as ownership transfer semantics."""
def join_ctype_and_name(ctype, name):
    """
    Utility method that joins a C type and a variable name into
    a single string

    >>> join_ctype_and_name('void*', 'foo')
    'void *foo'
    >>> join_ctype_and_name('void *', 'foo')
    'void *foo'
    >>> join_ctype_and_name("void**", "foo")
    'void **foo'
    >>> join_ctype_and_name("void **", "foo")
    'void **foo'
    >>> join_ctype_and_name('C*', 'foo')
    'C *foo'
    """
    if not ctype.endswith('*'):
        # Non-pointer type: a plain space-separated join.
        return " ".join([ctype, name])
    # Split the type into its base part and the trailing run of '*'s.
    base = ctype.rstrip('*')
    stars = ctype[len(base):]
    if not base:
        # Degenerate input such as '*' or '**': there is no base type.
        raise ValueError((ctype, name))
    if base.endswith(' '):
        # 'void *' already separates base and stars; just append the name.
        return base + stars + name
    # 'void*' needs a space inserted between the base and the stars.
    return base + ' ' + stars + name
class CodeBlock(object):
'''An intelligent code block that keeps track of cleanup actions.
This object is to be used by TypeHandlers when generating code.'''
class CleanupHandle(object):
"""Handle for some cleanup code"""
__slots__ = ['code_block', 'position']
def __init__(self, code_block, position):
"""Create a handle given code_block and position"""
self.code_block = code_block
self.position = position
def __cmp__(self, other):
comp = cmp(self.code_block, other.code_block)
if comp:
return comp
return cmp(self.position, other.position)
def cancel(self):
"""Cancel the cleanup code"""
self.code_block.remove_cleanup_code(self)
def get_position(self):
"returns the cleanup code relative position"
return self.position
def __init__(self, error_return, declarations, predecessor=None):
'''
CodeBlock constructor
>>> block = CodeBlock("return NULL;", DeclarationsScope())
>>> block.write_code("foo();")
>>> cleanup1 = block.add_cleanup_code("clean1();")
>>> cleanup2 = block.add_cleanup_code("clean2();")
>>> cleanup3 = block.add_cleanup_code("clean3();")
>>> cleanup2.cancel()
>>> block.write_error_check("error()", "error_clean()")
>>> block.write_code("bar();")
>>> block.write_cleanup()
>>> print block.sink.flush().rstrip()
foo();
if (error()) {
error_clean()
clean3();
clean1();
return NULL;
}
bar();
clean3();
clean1();
:param error_return: code that is generated on error conditions
(detected by write_error_check()); normally
it returns from the wrapper function,
e.g. return NULL;
:param predecessor: optional predecessor code block; a
predecessor is used to search for additional
cleanup actions.
'''
assert isinstance(declarations, DeclarationsScope)
assert predecessor is None or isinstance(predecessor, CodeBlock)
self.sink = codesink.MemoryCodeSink()
self.predecessor = predecessor
self._cleanup_actions = {}
self._last_cleanup_position = 0
self.error_return = error_return
self.declarations = declarations
def clear(self):
self._cleanup_actions = {}
self._last_cleanup_position = 0
self.sink = codesink.MemoryCodeSink()
def declare_variable(self, type_, name, initializer=None, array=None):
"""
Calls declare_variable() on the associated DeclarationsScope object.
"""
if ':' in name:
raise ValueError("invalid variable name: %s " % name)
return self.declarations.declare_variable(type_, name, initializer, array)
def write_code(self, code):
'''Write out some simple code'''
self.sink.writeln(code)
def indent(self, level=4):
'''Add a certain ammount of indentation to all lines written
from now on and until unindent() is called'''
self.sink.indent(level)
def unindent(self):
'''Revert indentation level to the value before last indent() call'''
self.sink.unindent()
def add_cleanup_code(self, cleanup_code):
'''Add a chunk of code used to cleanup previously allocated resources
Returns a handle used to cancel the cleanup code
'''
self._last_cleanup_position += 1
handle = self.CleanupHandle(self, self._last_cleanup_position)
self._cleanup_actions[handle.get_position()] = cleanup_code
return handle
def remove_cleanup_code(self, handle):
'''Remove cleanup code previously added with add_cleanup_code()
'''
assert isinstance(handle, self.CleanupHandle)
del self._cleanup_actions[handle.get_position()]
def get_cleanup_code(self):
    """Return a new list with all cleanup actions, in reverse order of
    registration, followed by the predecessor block's cleanup actions
    (if any predecessor was given)."""
    cleanup = [code for _, code in
               sorted(self._cleanup_actions.items(), reverse=True)]
    if self.predecessor is not None:
        cleanup.extend(self.predecessor.get_cleanup_code())
    return cleanup
def write_error_check(self, failure_expression, failure_cleanup=None):
    """Write an if-block that tests for a possible error and, on failure,
    cleans up and returns.

    :param failure_expression: C boolean expression that is true when
        an error occurred
    :param failure_cleanup: optional extra cleanup code written only in
        the failure branch, before all previously registered cleanup code.
    """
    sink = self.sink
    sink.writeln("if (%s) {" % (failure_expression,))
    sink.indent()
    if failure_cleanup is not None:
        sink.writeln(failure_cleanup)
    self.write_error_return()
    sink.unindent()
    sink.writeln("}")
def write_cleanup(self):
    """Emit all currently registered cleanup code into the sink."""
    writeln = self.sink.writeln
    for action in self.get_cleanup_code():
        writeln(action)
def write_error_return(self):
    """Emit the cleanup code followed by the error-return statement."""
    self.write_cleanup()
    self.sink.writeln(self.error_return)
class ParseTupleParameters(object):
    "Object to keep track of PyArg_ParseTuple (or similar) parameters"

    def __init__(self):
        """
        >>> tuple_params = ParseTupleParameters()
        >>> tuple_params.add_parameter('i', ['&foo'], 'foo')
        1
        >>> tuple_params.add_parameter('s', ['&bar'], 'bar', optional=True)
        2
        >>> tuple_params.get_parameters()
        ['"i|s"', '&foo', '&bar']
        >>> tuple_params.get_keywords()
        ['foo', 'bar']

        >>> tuple_params = ParseTupleParameters()
        >>> tuple_params.add_parameter('i', ['&foo'], 'foo')
        1
        >>> tuple_params.add_parameter('s', ['&bar'], 'bar', prepend=True)
        2
        >>> tuple_params.get_parameters()
        ['"si"', '&bar', '&foo']
        >>> tuple_params.get_keywords()
        ['bar', 'foo']

        >>> tuple_params = ParseTupleParameters()
        >>> tuple_params.add_parameter('i', ['&foo'])
        1
        >>> tuple_params.get_keywords() is None
        True
        """
        # each item is a tuple (template, param_values, param_name, optional)
        self._parse_tuple_items = []

    def clear(self):
        """Discard all registered parameters."""
        self._parse_tuple_items = []

    def add_parameter(self, param_template, param_values, param_name=None,
                      prepend=False, optional=False):
        """Register a new parameter specification.

        :param param_template: template item, see documentation for
            PyArg_ParseTuple for more information
        :param param_values: list of parameters, see documentation
            for PyArg_ParseTuple for more information
        :param param_name: optional Python keyword name for the parameter
        :param prepend: whether this parameter should be parsed first
        :param optional: whether the parameter is optional; note that after
            the first optional parameter, all remaining parameters must
            also be optional
        :returns: the number of parameters registered so far
        """
        assert isinstance(param_values, list)
        assert isinstance(param_template, string_types)
        entry = (param_template, param_values, param_name, optional)
        position = 0 if prepend else len(self._parse_tuple_items)
        self._parse_tuple_items.insert(position, entry)
        return len(self._parse_tuple_items)

    def is_empty(self):
        """True if no parameters have been registered."""
        return self.get_parameters() == ['""']

    def get_parameters(self):
        """
        Return a list of arguments to pass into a PyArg_ParseTuple-style
        function call, the first element being the template string.

        :raises ValueError: if an optional parameter is followed by a
            non-optional one.
        """
        template = []
        seen_optional = False
        for (param_template, dummy,
             param_name, optional) in self._parse_tuple_items:
            if seen_optional and not optional:
                raise ValueError("Error: optional parameter followed by a non-optional one (%r)"
                                 " (debug: self._parse_tuple_parameters=%r)" % (param_name, self._parse_tuple_items))
            if optional and not seen_optional:
                # '|' marks the start of the optional parameters
                template.append('|')
                seen_optional = True
            template.append(param_template)
        params = ['"%s"' % (''.join(template),)]
        for entry in self._parse_tuple_items:
            params.extend(entry[1])
        return params

    def get_keywords(self):
        """
        Return the list of keywords (parameter names), or None if none of
        the parameters had a name; should only be called if names were
        given for all parameters or none of them.
        """
        keywords = [name for (_, _, name, _) in self._parse_tuple_items
                    if name is not None]
        if not keywords:
            return None
        if len(keywords) != len(self._parse_tuple_items):
            raise ValueError("mixing parameters with and without keywords")
        return keywords
class BuildValueParameters(object):
    "Object to keep track of Py_BuildValue (or similar) parameters"

    def __init__(self):
        """
        >>> bld = BuildValueParameters()
        >>> bld.add_parameter('i', [123, 456])
        >>> bld.add_parameter('s', ["hello"])
        >>> bld.get_parameters()
        ['"is"', 123, 456, 'hello']

        >>> bld = BuildValueParameters()
        >>> bld.add_parameter('i', [123])
        >>> bld.add_parameter('s', ["hello"], prepend=True)
        >>> bld.get_parameters()
        ['"si"', 'hello', 123]
        """
        # each item is a tuple (template, param_values, cleanup_handle)
        self._build_value_items = []

    def clear(self):
        """Discard all registered parameters."""
        self._build_value_items = []

    def add_parameter(self, param_template, param_values,
                      prepend=False, cancels_cleanup=None):
        """
        Add a new parameter to the Py_BuildValue (or similar) statement.

        :param param_template: template item, see documentation for
            Py_BuildValue for more information
        :param param_values: list of C expressions to use as value, see
            documentation for Py_BuildValue for more information
        :param prepend: whether this parameter should come first in the
            tuple being built
        :param cancels_cleanup: optional handle to a cleanup action that
            is removed after the call.  Typically this is used for 'N'
            parameters, which already consume an object reference
        """
        entry = (param_template, param_values, cancels_cleanup)
        position = 0 if prepend else len(self._build_value_items)
        self._build_value_items.insert(position, entry)

    def get_parameters(self, force_tuple_creation=False):
        """Return a list of arguments to pass into a Py_BuildValue-style
        function call, the first element being the template string.

        :param force_tuple_creation: if True, Py_BuildValue is
            instructed to always create a tuple, even for zero or 1
            values.
        """
        fmt = ''.join([item[0] for item in self._build_value_items])
        if force_tuple_creation:
            template = '"(%s)"' % (fmt,)
        else:
            template = '"%s"' % (fmt,)
        params = [template]
        for item in self._build_value_items:
            params.extend(item[1])
        return params

    def get_cleanups(self):
        """Return the list of cleanup-action handles (one per parameter)."""
        return [item[2] for item in self._build_value_items]
class DeclarationsScope(object):
    """Manages variable declarations in a given scope."""

    def __init__(self, parent_scope=None):
        """
        Constructor

        >>> scope = DeclarationsScope()
        >>> scope.declare_variable('int', 'foo')
        'foo'
        >>> scope.declare_variable('char*', 'bar')
        'bar'
        >>> scope.declare_variable('int', 'foo')
        'foo2'
        >>> scope.declare_variable('int', 'foo', '1')
        'foo3'
        >>> scope.declare_variable('const char *', 'kwargs', '{"hello", NULL}', '[]')
        'kwargs'
        >>> print(scope.get_code_sink().flush().rstrip())
        int foo;
        char *bar;
        int foo2;
        int foo3 = 1;
        const char *kwargs[] = {"hello", NULL};

        :param parent_scope: optional 'parent scope'; if given,
            declarations in this scope will avoid clashing with names
            in the parent scope, and vice versa.
        """
        self._declarations = codesink.MemoryCodeSink()
        ## name -> number of variables with that name prefix
        if parent_scope is None:
            self.declared_variables = {}
        else:
            assert isinstance(parent_scope, DeclarationsScope)
            # share the counters dict so names never clash across scopes
            self.declared_variables = parent_scope.declared_variables

    def clear(self):
        """Reset the scope: discard the generated declarations and all
        name counters."""
        self._declarations = codesink.MemoryCodeSink()
        self.declared_variables.clear()

    def _allocate_name(self, name):
        """Allocate a unique variable name based on `name`, appending a
        numeric suffix in case of conflict.  (Shared helper for
        declare_variable() and reserve_variable().)"""
        num = self.declared_variables.get(name, 0) + 1
        self.declared_variables[name] = num
        if num == 1:
            return name
        return "%s%i" % (name, num)

    def declare_variable(self, type_, name, initializer=None, array=None):
        """Add code to declare a variable. Returns the actual variable
        name used (uses 'name' as base, with a number in case of conflict.)

        :param type_: C type name of the variable
        :param name: base name of the variable; actual name used can be
            slightly different in case of name conflict.
        :param initializer: optional, value to initialize the variable with
        :param array: optional, array size specifiction, e.g. '[]', or '[100]'
        """
        varname = self._allocate_name(name)
        decl = join_ctype_and_name(type_, varname)
        if array is not None:
            decl += array
        if initializer is not None:
            decl += ' = ' + initializer
        self._declarations.writeln(decl + ';')
        return varname

    def reserve_variable(self, name):
        """Reserve a variable name, to be used later (no declaration is
        emitted).

        :param name: base name of the variable; actual name used can be
            slightly different in case of name conflict.
        """
        return self._allocate_name(name)

    def get_code_sink(self):
        """Returns the internal MemoryCodeSink that holds all declarations."""
        return self._declarations
class ReverseWrapperBase(object):
    """Generic base for all reverse wrapper generators.

    Reverse wrappers all have the following general structure in common:

    1. 'declarations' -- variable declarations; for compatibility with
       older C compilers it is very important that all declarations
       come before any simple statement.  Declarations can be added
       with the add_declaration() method on the 'declarations'
       attribute.  Two standard declarations are always predeclared:
       '<return-type> retval', unless return-type is void, and 'PyObject
       \\*py_retval';

    2. 'code before call' -- this is a code block dedicated to contain
       all code that is needed before calling into Python; code can be
       freely added to it by accessing the 'before_call' (a CodeBlock
       instance) attribute;

    3. 'call into python' -- this is realized by a
       PyObject_CallMethod(...) or similar Python API call; the list
       of parameters used in this call can be customized by accessing
       the 'build_params' (a BuildValueParameters instance) attribute;

    4. 'code after call' -- this is a code block dedicated to contain
       all code that must come after calling into Python; code can be
       freely added to it by accessing the 'after_call' (a CodeBlock
       instance) attribute;

    5. A 'return retval' statement (or just 'return' if return_value is void)
    """

    ## subclasses that never need the interpreter lock may set this to
    ## True to suppress generation of the GIL ensure/release code
    NO_GIL_LOCKING = False

    def __init__(self, return_value, parameters, error_return=None):
        '''
        Base constructor

        :param return_value: type handler for the return value
        :param parameters: a list of type handlers for the parameters
        :param error_return: optional C statement used on error paths;
            defaults to the return-value handler's get_c_error_return()
        '''
        assert isinstance(return_value, TypeHandler)
        assert isinstance(parameters, list)
        assert all([isinstance(param, Parameter) for param in parameters])

        self.return_value = return_value
        self.parameters = parameters

        if error_return is None:
            error_return = return_value.get_c_error_return()
        self.error_return = error_return

        self.declarations = DeclarationsScope()
        self.before_call = CodeBlock(error_return, self.declarations)
        # after_call chains to before_call so its error path also runs
        # cleanup actions registered before the call
        self.after_call = CodeBlock(error_return, self.declarations,
                                    predecessor=self.before_call)
        self.build_params = BuildValueParameters()
        self.parse_params = ParseTupleParameters()
        self._generate_gil_code()

    def set_error_return(self, error_return):
        """Change the error-return statement used by both code blocks."""
        self.error_return = error_return
        self.before_call.error_return = error_return
        self.after_call.error_return = error_return

    def reset_code_generation_state(self):
        """Reset all code blocks and parameter lists so that code
        generation can be done again from scratch."""
        self.declarations.clear()
        self.before_call.clear()
        self.after_call.clear()
        self.build_params.clear()
        self.parse_params.clear()
        self._generate_gil_code()

    def _generate_gil_code(self):
        # Emit code to acquire the GIL before calling into Python, and a
        # cleanup action to release it again on every exit path.
        if self.NO_GIL_LOCKING:
            return
        ## reverse wrappers are called from C/C++ code, when the Python GIL may not be held...
        gil_state_var = self.declarations.declare_variable('PyGILState_STATE', '__py_gil_state')
        self.before_call.write_code('%s = (PyEval_ThreadsInitialized() ? PyGILState_Ensure() : (PyGILState_STATE) 0);'
                                    % gil_state_var)
        self.before_call.add_cleanup_code('if (PyEval_ThreadsInitialized())\n'
                                          ' PyGILState_Release(%s);' % gil_state_var)

    def generate_python_call(self):
        """Generates the code (into self.before_call) to call into
        Python, storing the result in the variable 'py_retval'; should
        also check for call error.
        """
        raise NotImplementedError

    def generate(self, code_sink, wrapper_name, decl_modifiers=('static',),
                 decl_post_modifiers=()):
        """Generate the wrapper

        :param code_sink: a CodeSink object that will receive the code
        :param wrapper_name: C/C++ identifier of the function/method to generate
        :param decl_modifiers: list of C/C++ declaration modifiers, e.g. 'static'
        :param decl_post_modifiers: modifiers written after the parameter list
        """
        assert isinstance(decl_modifiers, (list, tuple))
        assert all([isinstance(mod, string_types) for mod in decl_modifiers])
        #import sys
        #print("generate", self, file=sys.stderr)

        py_retval = self.declarations.declare_variable('PyObject*', 'py_retval')
        # a second generate() without reset would yield 'py_retval2'
        assert py_retval == "py_retval", "py_retval already declared: "\
            "generating the same wrapper twice without a reset() in between?"
        if self.return_value.ctype != 'void' \
                and not self.return_value.REQUIRES_ASSIGNMENT_CONSTRUCTOR \
                and not self.return_value.NO_RETVAL_DECL:
            self.declarations.declare_variable(self.return_value.ctype, 'retval')

        ## convert the input parameters
        for param in self.parameters:
            param.convert_c_to_python(self)

        ## generate_python_call should include something like
        ## self.after_call.write_error_check('py_retval == NULL')
        self.generate_python_call()

        ## convert the return value(s)
        self.return_value.convert_python_to_c(self)

        if self.parse_params.is_empty():
            # nothing to parse back: the Python callee must return None
            self.before_call.write_error_check('py_retval != Py_None',
                                               'PyErr_SetString(PyExc_TypeError, "function/method should return None");')
        else:
            ## parse the return value
            ## this ensures that py_retval is always a tuple
            self.before_call.write_code('py_retval = Py_BuildValue((char*) "(N)", py_retval);')

            parse_tuple_params = ['py_retval']
            parse_params = self.parse_params.get_parameters()
            assert parse_params[0][0] == '"'
            # PyArg_ParseTuple wants a non-const char* format string
            parse_params[0] = '(char *) ' + parse_params[0]
            parse_tuple_params.extend(parse_params)
            self.before_call.write_error_check('!PyArg_ParseTuple(%s)' %
                                               (', '.join(parse_tuple_params),),
                                               failure_cleanup='PyErr_Print();')

        ## cleanup and return
        self.after_call.write_cleanup()
        if self.return_value.ctype == 'void':
            self.after_call.write_code('return;')
        else:
            self.after_call.write_code('return retval;')

        ## now write out the wrapper function itself

        ## open function
        retline = list(decl_modifiers)
        retline.append(self.return_value.ctype)
        code_sink.writeln(' '.join(retline))

        params_list = ', '.join([join_ctype_and_name(param.ctype, param.name)
                                 for param in self.parameters])
        code_sink.writeln("%s(%s)%s" % (wrapper_name, params_list,
                                        ' '.join([''] + list(decl_post_modifiers))))

        ## body
        code_sink.writeln('{')
        code_sink.indent()
        self.declarations.get_code_sink().flush_to(code_sink)
        code_sink.writeln()
        self.before_call.sink.flush_to(code_sink)
        self.after_call.sink.flush_to(code_sink)

        ## close function
        code_sink.unindent()
        code_sink.writeln('}')
class ForwardWrapperBase(object):
    """Generic base for all forward wrapper generators.

    Forward wrappers all have the following general structure in common:

    1. 'declarations' -- variable declarations; for compatibility
       with older C compilers it is very important that all
       declarations come before any simple statement.
       Declarations can be added with the add_declaration()
       method on the 'declarations' attribute.  Two standard
       declarations are always predeclared: '<return-type>
       retval', unless return-type is void, and 'PyObject
       \\*py_retval';

    2. 'code before parse' -- code before the
       PyArg_ParseTupleAndKeywords call; code can be freely added to
       it by accessing the 'before_parse' (a CodeBlock instance)
       attribute;

    3. A PyArg_ParseTupleAndKeywords call; uses items from the
       parse_params object;

    4. 'code before call' -- this is a code block dedicated to contain
       all code that is needed before calling the C function; code can be
       freely added to it by accessing the 'before_call' (a CodeBlock
       instance) attribute;

    5. 'call into C' -- this is realized by a C/C++ call; the list of
       parameters that should be used is in the 'call_params' wrapper
       attribute;

    6. 'code after call' -- this is a code block dedicated to contain
       all code that must come after calling into Python; code can be
       freely added to it by accessing the 'after_call' (a CodeBlock
       instance) attribute;

    7. A py_retval = Py_BuildValue(...) call; this call can be
       customized, so that out/inout parameters can add additional
       return values, by accessing the 'build_params' (a
       BuildValueParameters instance) attribute;

    8. Cleanup and return.

    Object constructors cannot return values, and so the step 7 is to
    be omitted for them.
    """

    ## values for the force_parse constructor parameter
    PARSE_TUPLE = 1
    PARSE_TUPLE_AND_KEYWORDS = 2

    HAVE_RETURN_VALUE = False # change to true if the wrapper
                              # generates a return value even if the
                              # return_value attribute is None

    def __init__(self, return_value, parameters,
                 parse_error_return, error_return,
                 force_parse=None, no_c_retval=False,
                 unblock_threads=False):
        '''
        Base constructor

        :param return_value: type handler for the return value
        :param parameters: a list of type handlers for the parameters
        :param parse_error_return: statement to return an error during parameter parsing
        :param error_return: statement to return an error after parameter parsing
        :param force_parse: force generation of code to parse parameters even if there are none
        :param no_c_retval: force the wrapper to not have a C return value
        :param unblock_threads: generate code to unblock python threads during the C function call
        '''
        assert isinstance(return_value, ReturnValue) or return_value is None
        assert isinstance(parameters, list)
        assert all([isinstance(param, Parameter) for param in parameters])

        self.return_value = return_value
        self.parameters = parameters
        self.declarations = DeclarationsScope()
        self.before_parse = CodeBlock(parse_error_return, self.declarations)
        # each block chains to the previous one, so error paths run all
        # cleanup actions registered up to that point
        self.before_call = CodeBlock(parse_error_return, self.declarations,
                                     predecessor=self.before_parse)
        self.after_call = CodeBlock(error_return, self.declarations,
                                    predecessor=self.before_call)
        self.build_params = BuildValueParameters()
        self.parse_params = ParseTupleParameters()
        self.call_params = []
        self.force_parse = force_parse
        self.meth_flags = []
        self.unblock_threads = unblock_threads
        self.no_c_retval = no_c_retval
        self.overload_index = None
        # may be set to True, or to a custom deprecation message string
        self.deprecated = False

        # The following 3 variables describe the C wrapper function
        # prototype; do not confuse with the python function/method!
        self.wrapper_actual_name = None # name of the wrapper function/method
        self.wrapper_return = None # C type expression for the wrapper return
        self.wrapper_args = None # list of arguments to the wrapper function

        self._init_code_generation_state()

    def _init_code_generation_state(self):
        # Pre-declare the standard 'py_retval'/'retval' variables and
        # reserve the wrapper's own parameter names.
        if self.return_value is not None or self.HAVE_RETURN_VALUE:
            self.declarations.declare_variable('PyObject*', 'py_retval')
        # NOTE(review): if HAVE_RETURN_VALUE is True while return_value
        # is None (and no_c_retval is False), the attribute accesses
        # below would raise AttributeError; presumably subclasses never
        # hit that combination -- verify before relying on it.
        if (not self.no_c_retval and (self.return_value is not None or self.HAVE_RETURN_VALUE)
                and self.return_value.ctype != 'void'
                and not self.return_value.REQUIRES_ASSIGNMENT_CONSTRUCTOR
                and not self.return_value.NO_RETVAL_DECL):
            self.declarations.declare_variable(str(self.return_value.type_traits.ctype_no_const_no_ref), 'retval')
        # 'args'/'kwargs' are the wrapper's own C parameters; reserving
        # the names keeps generated locals from clashing with them
        self.declarations.reserve_variable('args')
        self.declarations.reserve_variable('kwargs')

    def reset_code_generation_state(self):
        """Reset all code blocks and parameter lists so that code
        generation can be done again from scratch."""
        self.declarations.clear()
        self.before_parse.clear()
        self.before_call.clear()
        self.after_call.clear()
        self.build_params.clear()
        self.parse_params.clear()
        self.call_params = []
        self.meth_flags = []
        self._init_code_generation_state()

    def set_parse_error_return(self, parse_error_return):
        """Change the error-return statement used while parsing parameters
        (affects both the before_parse and before_call blocks)."""
        self.before_parse.error_return = parse_error_return
        self.before_call.error_return = parse_error_return

    def generate_call(self):
        """Generates the code (into self.before_call) to call into
        Python, storing the result in the variable 'py_retval'; should
        also check for call error.
        """
        raise NotImplementedError

    def _before_call_hook(self):
        """
        Optional hook that lets subclasses add code after all
        parameters are parsed, but before the C function/method call.
        Subclasses may add code to self.before_call.
        """
        pass

    def _before_return_hook(self):
        """
        Optional hook that lets subclasses add code after the function's
        C return value is processed, but before the python wrapper
        return value (py_retval) is built.  Subclasses may add code to
        self.after_call, which will be placed before py_retval is
        created.
        """
        pass

    def write_open_wrapper(self, code_sink, add_static=False):
        """Write the wrapper function's prototype and opening brace,
        using the wrapper_actual_name/wrapper_return/wrapper_args
        attributes (which must have been filled in beforehand)."""
        assert self.wrapper_actual_name is not None
        assert self.wrapper_return is not None
        assert isinstance(self.wrapper_args, list)
        if add_static:
            code_sink.writeln("static " + self.wrapper_return)
        else:
            code_sink.writeln(self.wrapper_return)
        code_sink.writeln("%s(%s)" % (self.wrapper_actual_name, ', '.join(self.wrapper_args)))
        code_sink.writeln('{')
        code_sink.indent()

    def write_close_wrapper(self, code_sink):
        """Write the wrapper function's closing brace."""
        code_sink.unindent()
        code_sink.writeln('}')

    def generate_body(self, code_sink, gen_call_params=()):
        """Generate the wrapper function body

        code_sink -- a CodeSink object that will receive the code
        """
        if self.unblock_threads:
            # save/restore the thread state around the C call so other
            # Python threads can run while it executes
            py_thread_state = self.declarations.declare_variable("PyThreadState*", "py_thread_state", "NULL")
            self.after_call.write_code(
                "\nif (%s)\n"
                " PyEval_RestoreThread(%s);\n" % (py_thread_state, py_thread_state))

        ## convert the input parameters
        for param in self.parameters:
            try:
                param.convert_python_to_c(self)
            except NotImplementedError:
                raise CodeGenerationError(
                    'convert_python_to_c method of parameter %s not implemented'
                    % (param.ctype,))

        if self.deprecated:
            # self.deprecated may be True or a custom message string
            if isinstance(self.deprecated, string_types):
                msg = self.deprecated
            else:
                msg = "Deprecated"
            self.before_call.write_error_check( 'PyErr_Warn(PyExc_DeprecationWarning, (char *) "%s")' % msg)

        self._before_call_hook()

        if self.unblock_threads:
            self.before_call.write_code(
                "\nif (PyEval_ThreadsInitialized ())\n"
                " %s = PyEval_SaveThread();\n"
                % (py_thread_state, ))

        self.generate_call(*gen_call_params)

        params = self.parse_params.get_parameters()
        assert params[0][0] == '"'
        params_empty = (params == ['""'])
        # PyArg_ParseTuple* wants a non-const char* format string
        params[0] = '(char *) ' + params[0]
        keywords = self.parse_params.get_keywords()
        if not params_empty or self.force_parse != None:
            self.meth_flags.append("METH_VARARGS")
            if keywords is None \
                    and self.force_parse != self.PARSE_TUPLE_AND_KEYWORDS:
                param_list = ['args'] + params
                self.before_parse.write_error_check('!PyArg_ParseTuple(%s)' %
                                                    (', '.join(param_list),))
            else:
                if keywords is None:
                    keywords = []
                # NULL-terminated array of keyword names
                keywords_var = self.declarations.declare_variable(
                    'const char *', 'keywords',
                    '{' + ', '.join(['"%s"' % kw for kw in keywords] + ['NULL']) + '}',
                    '[]')
                param_list = ['args', 'kwargs', params[0], '(char **) ' + keywords_var] + params[1:]
                self.before_parse.write_error_check('!PyArg_ParseTupleAndKeywords(%s)' %
                                                    (', '.join(param_list),))
                self.meth_flags.append("METH_KEYWORDS")
        else:
            self.meth_flags.append("METH_NOARGS")

        ## convert the return value(s)
        if self.return_value is None and not self.HAVE_RETURN_VALUE:
            assert self.build_params.get_parameters() == ['""'], \
                "this wrapper is not supposed to return values"
            self._before_return_hook()
            self.after_call.write_cleanup()
        else:
            if self.return_value is not None:
                try:
                    self.return_value.convert_c_to_python(self)
                except NotImplementedError:
                    raise CodeGenerationError(
                        'convert_c_to_python method of return value %s not implemented'
                        % (self.return_value.ctype,))

            self._before_return_hook()

            params = self.build_params.get_parameters()
            if params:
                if params == ['""']:
                    # no output values: return None
                    self.after_call.write_code('Py_INCREF(Py_None);')
                    self.after_call.write_code('py_retval = Py_None;')
                else:
                    assert params[0][0] == '"'
                    params[0] = "(char *) " + params[0]
                    self.after_call.write_code('py_retval = Py_BuildValue(%s);' %
                                               (', '.join(params),))

            ## cleanup and return
            self.after_call.write_cleanup()
            self.after_call.write_code('return py_retval;')

        ## now write out the wrapper function body itself
        self.declarations.get_code_sink().flush_to(code_sink)
        code_sink.writeln()
        self.before_parse.sink.flush_to(code_sink)
        self.before_call.sink.flush_to(code_sink)
        self.after_call.sink.flush_to(code_sink)

    def get_py_method_def_flags(self):
        """
        Get a list of PyMethodDef flags that should be used for this wrapper.
        """
        flags = set(self.meth_flags)
        if flags:
            return list(flags)
        # flags not computed yet: run a trial code-generation pass into a
        # null sink just to collect them, then reset the state
        tmp_sink = codesink.NullCodeSink()
        try:
            # try:
            #     self.generate_body(tmp_sink)
            # except CodegenErrorBase:
            #     return []
            # else:
            #     return list(set(self.meth_flags))
            self.generate_body(tmp_sink)
            return list(set(self.meth_flags))
        finally:
            self.reset_code_generation_state()
class TypeTransformation(object):
    """
    Type transformations are used to register handling of special
    types that are a simple transformation over another type that is
    already registered.  Only the original type is registered; the
    transformation merely performs the necessary adjustments over the
    original type handler to make it handle the transformed type as
    well.

    This is typically how smart-pointer templated types are supported.
    """

    def get_untransformed_name(self, name):
        """
        Given a transformed name, return the original C type name.
        E.g., for a smart pointer transformation, MySmartPointer::

            get_untransformed_name('MySmartPointer<Foo>') -> 'Foo\\*'
        """
        raise NotImplementedError

    def create_type_handler(self, type_handler_class, *args, **kwargs):
        """
        Given a type_handler class, create an instance with proper
        customization.

        :param type_handler_class: type handler class
        :param args: positional arguments
        :param kwargs: keyword arguments
        """
        raise NotImplementedError

    def transform(self, type_handler, declarations, code_block, value):
        """
        Transform a value expression of the original type into an
        equivalent value expression of the transformed type.

        Example, with the transformation 'T\\*' -> 'boost::shared_ptr<T>'::

            transform(wrapper, 'foo') ->
                'boost::shared_ptr<%s>(foo)' % type_handler.untransformed_ctype
        """
        raise NotImplementedError

    def untransform(self, type_handler, declarations, code_block, value):
        """
        Transform a value expression of the transformed type back into
        an equivalent value expression of the original type.

        Example, with the transformation 'T\\*' -> 'boost::shared_ptr<T>'::

            untransform(wrapper, 'foo') -> 'foo->get_pointer()'
        """
        raise NotImplementedError
class NullTypeTransformation(object):
    """
    Null type transformation; every operation returns its input unchanged.
    """

    def get_untransformed_name(self, name):
        "Return `name` unchanged (identity transformation)."
        return name

    def create_type_handler(self, type_handler_class, *args, **kwargs):
        "Instantiate the handler class directly, with no customization."
        return type_handler_class(*args, **kwargs)

    def transform(self, type_handler, declarations, code_block, value):
        "Return `value` unchanged (identity transformation)."
        return value

    def untransform(self, type_handler, declarations, code_block, value):
        "Return `value` unchanged (identity transformation)."
        return value
class TypeHandler(object):
    """Common base for return-value and parameter type handlers; wraps a
    parsed C type (ctypeparser.TypeTraits) plus an optional type
    transformation."""

    ## whether the handler supports type transformations (smart pointers etc.)
    SUPPORTS_TRANSFORMATIONS = False

    def __init__(self, ctype, is_const=False):
        """
        :param ctype: C type name (string), a ctypeparser.TypeTraits
            instance, or None (handler with no associated type)
        :param is_const: deprecated; put 'const' in the C type instead
        """
        if ctype is None:
            self.ctype = None
            self.untransformed_ctype = None
            self.type_traits = None
        else:
            if isinstance(ctype, ctypeparser.TypeTraits):
                self.type_traits = ctype
                if is_const:
                    warnings.warn("is_const is deprecated, put a 'const' in the C type instead.", DeprecationWarning)
                    # for pointer/reference types const applies to the
                    # pointed-to target, otherwise to the value itself
                    if self.type_traits.type_is_pointer or self.type_traits.type_is_reference:
                        self.type_traits.make_target_const()
                    else:
                        self.type_traits.make_const()
            elif isinstance(ctype, string_types):
                if is_const:
                    warnings.warn("is_const is deprecated, put a 'const' in the C type instead.", DeprecationWarning)
                    self.type_traits = ctypeparser.TypeTraits('const %s' % ctype)
                else:
                    self.type_traits = ctypeparser.TypeTraits(ctype)
            else:
                raise TypeError
            # normalized textual form of the (possibly const-adjusted) type
            self.ctype = str(self.type_traits.ctype)
            self.untransformed_ctype = self.ctype
        self.transformation = NullTypeTransformation()

    def _get_ctype_no_const(self):
        return str(self.type_traits.ctype_no_const)
    # read-only view of the C type with top-level const removed
    ctype_no_const = property(_get_ctype_no_const)

    def set_tranformation(self, transformation, untransformed_ctype):
        # deprecated misspelled alias, kept for backward compatibility
        warnings.warn("Typo: set_tranformation -> set_transformation", DeprecationWarning, stacklevel=2)
        return self.set_transformation(transformation, untransformed_ctype)

    def set_transformation(self, transformation, untransformed_ctype):
        "Set the type transformation to use in this type handler"
        assert isinstance(transformation, TypeTransformation)
        assert untransformed_ctype != self.ctype
        # a transformation may only be set once, and only on handlers
        # that declare support for transformations
        assert isinstance(self.transformation, NullTypeTransformation)
        assert self.SUPPORTS_TRANSFORMATIONS
        self.transformation = transformation
        self.untransformed_ctype = untransformed_ctype
class ReturnValueMeta(type):
    "Metaclass for automatically registering return value type handlers"
    def __init__(mcs, name, bases, dict_):
        "metaclass __init__"
        type.__init__(mcs, name, bases, dict_)
        if __debug__:
            # diagnose handler classes that fail to define an iterable CTYPES
            try:
                iter(mcs.CTYPES)
            except (TypeError, AttributeError):
                sys.stderr.write("ERROR: missing CTYPES on class %s.%s\n" % (mcs.__module__, mcs.__name__))
        # register every C type name this handler class declares
        for ctype in mcs.CTYPES:
            return_type_matcher.register(ctype, mcs)
class _ReturnValue(TypeHandler):
    '''Abstract base class for all classes dedicated to handle
    specific return value types'''

    ## list of C type names it can handle
    CTYPES = []

    ## whether it supports type transformations
    SUPPORTS_TRANSFORMATIONS = False

    ## when True, the wrapper generators do not pre-declare a plain
    ## 'retval' variable for this type (see ReverseWrapperBase.generate)
    REQUIRES_ASSIGNMENT_CONSTRUCTOR = False

    ## when True, no 'retval' variable is declared at all
    NO_RETVAL_DECL = False

    #@classmethod
    def new(cls, *args, **kwargs):
        """
        Creates a return-value handler for a given C type, dispatching
        to the concrete handler class registered in return_type_matcher.

        >>> import inttype
        >>> isinstance(ReturnValue.new('int'), inttype.IntReturn)
        True
        """
        if cls is ReturnValue:
            # called as ReturnValue.new("typename", ...): look up the
            # concrete handler registered for this ctype
            ctype = args[0]
            type_handler_class, transformation, type_traits = \
                return_type_matcher.lookup(ctype)
            assert type_handler_class is not None
            if transformation is None:
                # pass the parsed type traits instead of the raw string
                args = list(args)
                args[0] = type_traits
                args = tuple(args)
                try:
                    return type_handler_class(*args, **kwargs)
                except TypeError:
                    # py2/py3-compatible way to get the active exception
                    ex = sys.exc_info()[1]
                    warnings.warn("Exception %r in type handler %s constructor" % (str(ex), type_handler_class))
                    raise
            else:
                return transformation.create_type_handler(type_handler_class, *args, **kwargs)
        else:
            # called on a concrete subclass: instantiate directly
            return cls(*args, **kwargs)
    new = classmethod(new)

    def __init__(self, ctype, is_const=False):
        '''
        Creates a return value object

        Keyword Arguments:

        :param ctype: actual C/C++ type being used
        '''
        if type(self) is ReturnValue:
            raise TypeError('ReturnValue is an abstract class; use ReturnValue.new(...)')
        super(_ReturnValue, self).__init__(ctype, is_const)
        # C expression naming the value to be returned by the wrapper
        self.value = 'retval'

    def get_c_error_return(self):
        '''Return a "return <value>" code string, for use in case of error'''
        raise NotImplementedError

    def convert_python_to_c(self, wrapper):
        '''
        Writes code to convert the Python return value into the C "retval" variable.
        '''
        raise NotImplementedError
        #assert isinstance(wrapper, ReverseWrapperBase)

    def convert_c_to_python(self, wrapper):
        '''
        Writes code to convert the C return value the Python return.
        '''
        raise NotImplementedError
        #assert isinstance(wrapper, ReverseWrapperBase)
# Create the public ReturnValue class, attaching the registration
# metaclass in a way that works on both Python 2 and Python 3.
if PY3:
    ReturnValue = types.new_class("ReturnValue", (_ReturnValue,), dict(metaclass=ReturnValueMeta))
else:
    class ReturnValue(_ReturnValue):
        __metaclass__ = ReturnValueMeta
# Assigned after class creation (the metaclass already ran with the
# inherited CTYPES = []); NotImplemented presumably marks ReturnValue
# itself as abstract/not directly registrable.
ReturnValue.CTYPES = NotImplemented
class PointerReturnValue(ReturnValue):
    """Base class for all pointer-to-something handlers"""

    ## list of C type names it can handle
    CTYPES = []

    def __init__(self, ctype, is_const=False, caller_owns_return=None):
        """
        :param ctype: actual C/C++ type being used
        :param is_const: deprecated; put 'const' in the C type instead
        :param caller_owns_return: ownership flag for the returned
            pointer; stored as-is, interpretation is left to subclasses
            -- TODO confirm exact semantics
        """
        super(PointerReturnValue, self).__init__(ctype, is_const)
        # NOTE(review): attribute is spelled 'call_owns_return' (not
        # 'caller_owns_return'); subclasses presumably read this exact
        # name -- verify before renaming.
        self.call_owns_return = caller_owns_return
# Assigned after class creation (the metaclass already ran with
# CTYPES = []); presumably marks this base class as abstract.
PointerReturnValue.CTYPES = NotImplemented
class ParameterMeta(type):
    "Metaclass for automatically registering parameter type handlers"
    def __init__(mcs, name, bases, dict_):
        "metaclass __init__"
        type.__init__(mcs, name, bases, dict_)
        if __debug__:
            # A handler class missing CTYPES entirely raises AttributeError
            # (not TypeError); catch both so the diagnostic is actually
            # printed -- consistent with ReturnValueMeta above.
            try:
                iter(mcs.CTYPES)
            except (TypeError, AttributeError):
                sys.stderr.write("ERROR: missing CTYPES on class %s\n" % mcs)
        # register every C type name this handler class declares
        for ctype in mcs.CTYPES:
            param_type_matcher.register(ctype, mcs)
class _Parameter(TypeHandler):
    '''Abstract base class for all classes dedicated to handle specific parameter types'''
    ## bit mask values
    DIRECTION_IN = 1
    DIRECTION_OUT = 2
    DIRECTION_INOUT = DIRECTION_IN|DIRECTION_OUT
    ## list of possible directions for this type
    DIRECTIONS = NotImplemented
    ## whether it supports type transformations
    SUPPORTS_TRANSFORMATIONS = False
    ## list of C type names it can handle
    CTYPES = []
    def _direction_value_to_name(cls, value):
        """Return the symbolic name of a DIRECTION_* value (for error messages)."""
        if value == cls.DIRECTION_IN:
            return "DIRECTION_IN"
        elif value == cls.DIRECTION_OUT:
            return "DIRECTION_OUT"
        elif value == cls.DIRECTION_INOUT:
            return "DIRECTION_INOUT"
        else:
            return "(invalid %r)" % value
    _direction_value_to_name = classmethod(_direction_value_to_name)
    #@classmethod
    def new(cls, *args, **kwargs):
        """
        Factory method.  Called on the abstract Parameter class, it looks up
        the concrete handler class registered for the given C type name (via
        param_type_matcher) and instantiates it; called on a concrete
        subclass, it simply instantiates that subclass.

        >>> import inttype
        >>> isinstance(Parameter.new('int', 'name'), inttype.IntParam)
        True
        """
        if cls is Parameter:
            # support calling Parameter("typename", ...)
            ctype = args[0]
            type_handler_class, transformation, type_traits = \
                param_type_matcher.lookup(ctype)
            assert type_handler_class is not None
            if transformation is None:
                # Replace the raw type-name argument with the parsed type
                # traits before handing off to the handler constructor.
                args = list(args)
                args[0] = type_traits
                args = tuple(args)
                try:
                    return type_handler_class(*args, **kwargs)
                except TypeError:
                    _, ex, _ = sys.exc_info()
                    warnings.warn("Exception %r in type handler %s constructor" % (str(ex), type_handler_class))
                    raise
            else:
                # A registered type transformation matched; let it build
                # the handler instance.
                return transformation.create_type_handler(type_handler_class, *args, **kwargs)
        else:
            return cls(*args, **kwargs)
    new = classmethod(new)
    def __init__(self, ctype, name, direction=DIRECTION_IN, is_const=False, default_value=None):
        '''
        Creates a parameter object
        :param ctype: actual C/C++ type being used
        :param name: parameter name
        :param direction: direction of the parameter transfer, valid values
                          are DIRECTION_IN, DIRECTION_OUT, and
                          DIRECTION_IN|DIRECTION_OUT
        :param is_const: whether the parameter is const-qualified
        :param default_value: default value of the parameter, or None
        :raise TypeError: if instantiated directly on the abstract Parameter class
        '''
        if type(self) is Parameter:
            raise TypeError('Parameter is an abstract class; use Parameter.new(...)')
        super(_Parameter, self).__init__(ctype, is_const)
        self.name = name
        # Fail fast when a handler is asked for a direction it cannot support.
        assert direction in self.DIRECTIONS, \
            "Error: requested direction %s for type handler %r (ctype=%r), but it only supports directions %r"\
            % (self._direction_value_to_name(direction), type(self), self.ctype,
               [self._direction_value_to_name(d) for d in self.DIRECTIONS])
        self.direction = direction
        self.value = name
        self.default_value = default_value
    def convert_c_to_python(self, wrapper):
        '''Write some code before calling the Python method.'''
        #assert isinstance(wrapper, ReverseWrapperBase)
        raise NotImplementedError
    def convert_python_to_c(self, wrapper):
        '''Write some code before calling the C method.'''
        #assert isinstance(wrapper, ReverseWrapperBase)
        raise NotImplementedError
if PY3:
    # Python 3: apply the metaclass with types.new_class, since the
    # `__metaclass__` attribute syntax used below is Python 2 only.
    Parameter = types.new_class("Parameter", (_Parameter,), dict(metaclass=ParameterMeta))
else:
    class Parameter(_Parameter):
        __metaclass__ = ParameterMeta
# Force concrete subclasses to declare their own CTYPES list; the metaclass
# reports classes whose CTYPES is not iterable at class-creation time.
Parameter.CTYPES = NotImplemented
class PointerParameter(Parameter):
    """Base class for all pointer-to-something handlers"""
    CTYPES = []
    def __init__(self, ctype, name, direction=Parameter.DIRECTION_IN, is_const=False, default_value=None,
                 transfer_ownership=False):
        """
        :param transfer_ownership: whether ownership of the pointed-to
            object is transferred when the parameter is passed
        """
        super(PointerParameter, self).__init__(ctype, name, direction, is_const, default_value)
        self.transfer_ownership = transfer_ownership
# Like Parameter, force concrete subclasses to define their own CTYPES.
PointerParameter.CTYPES = NotImplemented
class TypeMatcher(object):
    """
    Type matcher object: maps C type names to classes that handle
    those types.
    """
    def __init__(self):
        """Constructor"""
        self._types = {}            # normalized C type name -> handler class
        self._transformations = []  # registered TypeTransformation objects
        self._type_aliases = {}     # normalized "to" name -> "from" name
        self._type_aliases_rev = {} # normalized "from" name -> "to" name
    def register_transformation(self, transformation):
        "Register a type transformation object"
        assert isinstance(transformation, TypeTransformation)
        self._transformations.append(transformation)
    def register(self, name, type_handler):
        """Register a new handler class for a given C type
        :param name: C type name
        :param type_handler: class to handle this C type
        :raise ValueError: if the normalized type name is already registered
        """
        name = ctypeparser.normalize_type_string(name)
        if name in self._types:
            raise ValueError("return type %s already registered" % (name,))
        self._types[name] = type_handler
    def _raw_lookup_with_alias_support(self, name):
        """Look up `name` in the registry, following type aliases in both
        directions; raises KeyError when nothing matches."""
        already_tried = []
        return self._raw_lookup_with_alias_support_recursive(name, already_tried)
    def _raw_lookup_with_alias_support_recursive(self, name, already_tried):
        # Direct hit first; otherwise chase registered aliases, using
        # `already_tried` to avoid looping on circular alias chains.
        try:
            return self._types[name]
        except KeyError:
            aliases_to_try = []
            try:
                aliases_to_try.append(self._type_aliases[name])
            except KeyError:
                pass
            try:
                aliases_to_try.append(self._type_aliases_rev[name])
            except KeyError:
                pass
            # NOTE(review): the loop returns on the first untried alias, so
            # if that branch raises KeyError a second alias is never tried;
            # confirm whether that is intentional before changing it.
            for alias in aliases_to_try:
                if alias in already_tried:
                    continue
                already_tried.append(name)
                #if 'Time' in name or 'Time' in alias:
                #    import sys
                #    print >> sys.stderr, "**** trying name %r in place of %r" % (alias, name)
                return self._raw_lookup_with_alias_support_recursive(alias, already_tried)
            raise KeyError
    def lookup(self, name):
        """
        lookup(name) -> type_handler, type_transformation, type_traits
        :param name: C type name, possibly transformed (e.g. MySmartPointer<Foo> looks up Foo*)
        :returns: a handler with the given ctype name, or raises KeyError.
        Supports type transformations.
        :raise TypeLookupError: if no handler matches, even after trying
            all registered type transformations
        """
        logger.debug("TypeMatcher.lookup(%r)", name)
        given_type_traits = ctypeparser.TypeTraits(name)
        # Lookups ignore const/modifier qualifiers on the type name.
        noconst_name = str(given_type_traits.ctype_no_modifiers)
        tried_names = [noconst_name]
        try:
            rv = self._raw_lookup_with_alias_support(noconst_name), None, given_type_traits
        except KeyError:
            logger.debug("try to lookup type handler for %r => failure", name)
            ## Now try all the type transformations
            for transf in self._transformations:
                untransformed_name = transf.get_untransformed_name(name)
                if untransformed_name is None:
                    continue
                untransformed_type_traits = ctypeparser.TypeTraits(untransformed_name)
                untransformed_name = str(untransformed_type_traits.ctype_no_modifiers)
                try:
                    rv = self._raw_lookup_with_alias_support(untransformed_name), transf, untransformed_type_traits
                except KeyError as ex:
                    logger.debug("try to lookup type handler for %r => failure (%r)", untransformed_name, str(ex))
                    tried_names.append(untransformed_name)
                    continue
                else:
                    logger.debug("try to lookup type handler for %r => success (%r)", untransformed_name, rv)
                    return rv
            else:
                # for-else: no transformation matched either.
                #if 'Time' in name:
                #    existing = [k for k in self._types.iterkeys() if 'Time' in k]
                #    existing.sort()
                #    raise TypeLookupError((tried_names, existing, self._type_aliases))
                #else:
                raise TypeLookupError(tried_names)
        else:
            logger.debug("try to lookup type handler for %r => success (%r)", name, rv)
            return rv
    def items(self):
        "Returns an iterator over all registered items"
        return iter(self._types.items())
    def add_type_alias(self, from_type_name, to_type_name):
        """Record that `to_type_name` is an alias for `from_type_name`;
        lookups on either name can then resolve through the other."""
        from_type_name_normalized = str(ctypeparser.TypeTraits(from_type_name).ctype)
        to_type_name_normalized = str(ctypeparser.TypeTraits(to_type_name).ctype)
        self._type_aliases[to_type_name_normalized] = from_type_name_normalized
        self._type_aliases_rev[from_type_name_normalized] = to_type_name_normalized
# Singleton matchers used to resolve a C type name to its handler class when
# the type appears as a return value or as a parameter, respectively.
return_type_matcher = TypeMatcher()
param_type_matcher = TypeMatcher()
def add_type_alias(from_type_name, to_type_name):
    """Register a type alias with both global matchers (return values and
    parameters), so either name resolves to the same handler."""
    for matcher in (return_type_matcher, param_type_matcher):
        matcher.add_type_alias(from_type_name, to_type_name)
| lgpl-2.1 |
rishig/zulip | zerver/logging_handlers.py | 1 | 6618 | # System documented in https://zulip.readthedocs.io/en/latest/subsystems/logging.html
import logging
import platform
import os
import subprocess
import traceback
from typing import Any, Dict, Optional
from django.conf import settings
from django.http import HttpRequest
from django.views.debug import get_exception_reporter_filter
from zerver.lib.logging_util import find_log_caller_module
from zerver.lib.queue import queue_json_publish
from version import ZULIP_VERSION
def try_git_describe() -> Optional[str]:
    """Return the output of `git describe` for this checkout, or None when
    git (or the repository) is unavailable or the command fails."""
    repo_root = os.path.join(os.path.dirname(__file__), '..')
    describe_cmd = ['git', 'describe', '--tags', '--always', '--dirty', '--long']
    try:  # nocoverage
        return subprocess.check_output(
            describe_cmd,
            stderr=subprocess.PIPE,
            cwd=repo_root,
        ).strip().decode('utf-8')
    except Exception:  # nocoverage
        return None
def add_deployment_metadata(report: Dict[str, Any]) -> None:
    """Attach deployment/version information to an error report dict.

    Records the `git describe` output (if available), the ZULIP_VERSION
    constant, and the contents of the top-level `version` file when present.
    """
    report['git_described'] = try_git_describe()
    report['zulip_version_const'] = ZULIP_VERSION
    version_path = os.path.join(os.path.dirname(__file__), '../version')
    if os.path.exists(version_path):
        # Use a context manager so the file handle is closed promptly; the
        # previous `open(...).read()` leaked the handle until GC.
        with open(version_path) as f:  # nocoverage
            report['zulip_version_file'] = f.read().strip()
def add_request_metadata(report: Dict[str, Any], request: HttpRequest) -> None:
    """Attach request-specific details (path, client address, user, host,
    and scrubbed request parameters) to an error report dict.

    Best-effort: any failure while extracting a piece of metadata degrades
    to a sensible placeholder rather than raising.
    """
    report['has_request'] = True
    report['path'] = request.path
    report['method'] = request.method
    # BUG FIX: these three lines previously ended with stray trailing commas,
    # which wrapped each value in a 1-tuple; store the plain values.
    report['remote_addr'] = request.META.get('REMOTE_ADDR', None)
    report['query_string'] = request.META.get('QUERY_STRING', None)
    report['server_name'] = request.META.get('SERVER_NAME', None)
    try:
        from django.contrib.auth.models import AnonymousUser
        user_profile = request.user
        if isinstance(user_profile, AnonymousUser):
            user_full_name = None
            user_email = None
        else:
            user_full_name = user_profile.full_name
            user_email = user_profile.email
    except Exception:
        # Unexpected exceptions here should be handled gracefully
        traceback.print_exc()
        user_full_name = None
        user_email = None
    report['user_email'] = user_email
    report['user_full_name'] = user_full_name
    exception_filter = get_exception_reporter_filter(request)
    try:
        # For POST requests, the exception filter scrubs sensitive
        # parameters (e.g. passwords) before they enter the report.
        report['data'] = request.GET if request.method == 'GET' else \
            exception_filter.get_post_parameters(request)
    except Exception:
        # exception_filter.get_post_parameters will throw
        # RequestDataTooBig if there's a really big file uploaded
        report['data'] = {}
    try:
        report['host'] = request.get_host().split(':')[0]
    except Exception:
        # request.get_host() will throw a DisallowedHost
        # exception if the host is invalid
        report['host'] = platform.node()
class AdminNotifyHandler(logging.Handler):
    """An logging handler that sends the log/exception to the queue to be
    turned into an email and/or a Zulip message for the server admins.
    """
    # adapted in part from django/utils/log.py
    def __init__(self) -> None:
        logging.Handler.__init__(self)
    def emit(self, record: logging.LogRecord) -> None:
        """Build an error report dict from `record` and dispatch it to the
        server admins, either directly (staging) or via the queue."""
        report = {}  # type: Dict[str, Any]
        # This parameter determines whether Zulip should attempt to
        # send Zulip messages containing the error report. If there's
        # syntax that makes the markdown processor throw an exception,
        # we really don't want to send that syntax into a new Zulip
        # message in exception handler (that's the stuff of which
        # recursive exception loops are made).
        #
        # We initialize is_bugdown_rendering_exception to `True` to
        # prevent the infinite loop of zulip messages by ERROR_BOT if
        # the outer try block here throws an exception before we have
        # a chance to check the exception for whether it comes from
        # bugdown.
        is_bugdown_rendering_exception = True
        try:
            report['node'] = platform.node()
            report['host'] = platform.node()
            add_deployment_metadata(report)
            if record.exc_info:
                # An exception was attached: capture the full traceback.
                stack_trace = ''.join(traceback.format_exception(*record.exc_info))
                message = str(record.exc_info[1])
                is_bugdown_rendering_exception = record.msg.startswith('Exception in Markdown parser')
            else:
                stack_trace = 'No stack trace available'
                message = record.getMessage()
                if '\n' in message:
                    # Some exception code paths in queue processors
                    # seem to result in super-long messages
                    stack_trace = message
                    message = message.split('\n')[0]
                is_bugdown_rendering_exception = False
            report['stack_trace'] = stack_trace
            report['message'] = message
            report['logger_name'] = record.name
            report['log_module'] = find_log_caller_module(record)
            report['log_lineno'] = record.lineno
            if hasattr(record, "request"):
                add_request_metadata(report, record.request)  # type: ignore # record.request is added dynamically
        except Exception:
            # Never let report-building itself crash the logging call.
            report['message'] = "Exception in preparing exception report!"
            logging.warning(report['message'], exc_info=True)
            report['stack_trace'] = "See /var/log/zulip/errors.log"
        if settings.DEBUG_ERROR_REPORTING:  # nocoverage
            logging.warning("Reporting an error to admins...")
            logging.warning("Reporting an error to admins: {} {} {} {} {}" .format(
                record.levelname, report['logger_name'], report['log_module'],
                report['message'], report['stack_trace']))
        try:
            if settings.STAGING_ERROR_NOTIFICATIONS:
                # On staging, process the report directly so it can happen inside this
                # try/except to prevent looping
                from zerver.lib.error_notify import notify_server_error
                notify_server_error(report, is_bugdown_rendering_exception)
            else:
                queue_json_publish('error_reports', dict(
                    type = "server",
                    report = report,
                ))
        except Exception:
            # If this breaks, complain loudly but don't pass the traceback up the stream
            # However, we *don't* want to use logging.exception since that could trigger a loop.
            logging.warning("Reporting an exception triggered an exception!", exc_info=True)
| apache-2.0 |
baldengineers/mapper | tf2/prefabs/ramp.py | 1 | 32561 | import os
import math
def rotatePoint(centerPoint, point, angle):
    """Rotate `point` about `centerPoint` by `angle` degrees (positive angles
    rotate counter-clockwise in standard math orientation) and return the
    rotated point as an (x, y) tuple."""
    theta = math.radians(angle)
    cos_t = math.cos(theta)
    sin_t = math.sin(theta)
    # Translate to the origin, rotate, then translate back.
    dx = point[0] - centerPoint[0]
    dy = point[1] - centerPoint[1]
    rx = dx * cos_t - dy * sin_t
    ry = dx * sin_t + dy * cos_t
    return (rx + centerPoint[0], ry + centerPoint[1])
def createTile(posx, posy, id_num, world_id_num, entity_num, placeholder_list, rotation, level):
looplist = '1'
values=[]#Values are all of the lines of a prefab that have the vertex coords
f = open('tf2/prefab_template/ramp.txt', 'r+')
lines = f.readlines() #gathers each line of the prefab and puts numbers them
if rotation == 0:
x1 = posx*1*512
y1 = posy*-1*512
z1 = level*448 + 64
x2 = posx*1*512 + (512)
y2 = posy*-1*512
z2 = level*448 + 64
x3 = posx*1*512 + (512)
y3 = posy*-1*512 + (-512)
z3 = level*448 + 64
x4 = posx*1*512
y4 = posy*-1*512 + (-512)
z4 = level*448 + 0
x5 = posx*1*512 + (512)
y5 = posy*-1*512 + (-512)
z5 = level*448 + 0
x6 = posx*1*512 + (512)
y6 = posy*-1*512
z6 = level*448 + 0
x7 = posx*1*512
y7 = posy*-1*512
z7 = level*448 + 64
x8 = posx*1*512
y8 = posy*-1*512 + (-512)
z8 = level*448 + 64
x9 = posx*1*512
y9 = posy*-1*512 + (-512)
z9 = level*448 + 0
x10 = posx*1*512 + (512)
y10 = posy*-1*512
z10 = level*448 + 0
x11 = posx*1*512 + (512)
y11 = posy*-1*512 + (-512)
z11 = level*448 + 0
x12 = posx*1*512 + (512)
y12 = posy*-1*512 + (-512)
z12 = level*448 + 64
x13 = posx*1*512 + (512)
y13 = posy*-1*512
z13 = level*448 + 64
x14 = posx*1*512
y14 = posy*-1*512
z14 = level*448 + 64
x15 = posx*1*512
y15 = posy*-1*512
z15 = level*448 + 0
x16 = posx*1*512 + (512)
y16 = posy*-1*512 + (-512)
z16 = level*448 + 0
x17 = posx*1*512
y17 = posy*-1*512 + (-512)
z17 = level*448 + 0
x18 = posx*1*512
y18 = posy*-1*512 + (-512)
z18 = level*448 + 64
x19 = posx*1*512 + (-64)
y19 = posy*-1*512
z19 = level*448 + 64
x20 = posx*1*512 + (-64)
y20 = posy*-1*512 + (-512)
z20 = level*448 + 64
x21 = posx*1*512 + (512)
y21 = posy*-1*512 + (-512)
z21 = level*448 + 64
x22 = posx*1*512 + (512)
y22 = posy*-1*512
z22 = level*448 + 64
x23 = posx*1*512 + (512)
y23 = posy*-1*512 + (-512)
z23 = level*448 + 64
x24 = posx*1*512 + (512)
y24 = posy*-1*512 + (-512)
z24 = level*448 + 448
x25 = posx*1*512 + (-64)
y25 = posy*-1*512
z25 = level*448 + 64
x26 = posx*1*512 + (512)
y26 = posy*-1*512
z26 = level*448 + 64
x27 = posx*1*512 + (512)
y27 = posy*-1*512
z27 = level*448 + 448
x28 = posx*1*512 + (512)
y28 = posy*-1*512 + (-512)
z28 = level*448 + 64
x29 = posx*1*512 + (-64)
y29 = posy*-1*512 + (-512)
z29 = level*448 + 64
x30 = posx*1*512 + (512)
y30 = posy*-1*512 + (-512)
z30 = level*448 + 448
x31 = posx*1*512 + (-64)
y31 = posy*-1*512 + (-512)
z31 = level*448 + 64
x32 = posx*1*512 + (-64)
y32 = posy*-1*512
z32 = level*448 + 64
x33 = posx*1*512 + (512)
y33 = posy*-1*512
z33 = level*448 + 448
#INSERT_ROT_0_PY_LIST
elif rotation == 1:
x1 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 270)[0])
y1 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 270)[1])
z1 = level*448 + 64
x2 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[0])
y2 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[1])
z2 = level*448 + 64
x3 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y3 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z3 = level*448 + 64
x4 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[0])
y4 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[1])
z4 = level*448 + 0
x5 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y5 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z5 = level*448 + 0
x6 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[0])
y6 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[1])
z6 = level*448 + 0
x7 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 270)[0])
y7 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 270)[1])
z7 = level*448 + 64
x8 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[0])
y8 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[1])
z8 = level*448 + 64
x9 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[0])
y9 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[1])
z9 = level*448 + 0
x10 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[0])
y10 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[1])
z10 = level*448 + 0
x11 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y11 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z11 = level*448 + 0
x12 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y12 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z12 = level*448 + 64
x13 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[0])
y13 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[1])
z13 = level*448 + 64
x14 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 270)[0])
y14 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 270)[1])
z14 = level*448 + 64
x15 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 270)[0])
y15 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 270)[1])
z15 = level*448 + 0
x16 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y16 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z16 = level*448 + 0
x17 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[0])
y17 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[1])
z17 = level*448 + 0
x18 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[0])
y18 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 270)[1])
z18 = level*448 + 64
x19 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 270)[0])
y19 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 270)[1])
z19 = level*448 + 64
x20 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 270)[0])
y20 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 270)[1])
z20 = level*448 + 64
x21 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y21 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z21 = level*448 + 64
x22 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[0])
y22 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[1])
z22 = level*448 + 64
x23 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y23 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z23 = level*448 + 64
x24 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y24 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z24 = level*448 + 448
x25 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 270)[0])
y25 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 270)[1])
z25 = level*448 + 64
x26 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[0])
y26 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[1])
z26 = level*448 + 64
x27 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[0])
y27 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[1])
z27 = level*448 + 448
x28 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y28 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z28 = level*448 + 64
x29 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 270)[0])
y29 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 270)[1])
z29 = level*448 + 64
x30 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[0])
y30 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 270)[1])
z30 = level*448 + 448
x31 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 270)[0])
y31 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 270)[1])
z31 = level*448 + 64
x32 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 270)[0])
y32 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 270)[1])
z32 = level*448 + 64
x33 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[0])
y33 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 270)[1])
z33 = level*448 + 448
#INSERT_ROT_1_PY_LIST
elif rotation == 2:
x1 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 180)[0])
y1 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 180)[1])
z1 = level*448 + 64
x2 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[0])
y2 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[1])
z2 = level*448 + 64
x3 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y3 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z3 = level*448 + 64
x4 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[0])
y4 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[1])
z4 = level*448 + 0
x5 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y5 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z5 = level*448 + 0
x6 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[0])
y6 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[1])
z6 = level*448 + 0
x7 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 180)[0])
y7 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 180)[1])
z7 = level*448 + 64
x8 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[0])
y8 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[1])
z8 = level*448 + 64
x9 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[0])
y9 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[1])
z9 = level*448 + 0
x10 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[0])
y10 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[1])
z10 = level*448 + 0
x11 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y11 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z11 = level*448 + 0
x12 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y12 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z12 = level*448 + 64
x13 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[0])
y13 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[1])
z13 = level*448 + 64
x14 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 180)[0])
y14 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 180)[1])
z14 = level*448 + 64
x15 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 180)[0])
y15 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 180)[1])
z15 = level*448 + 0
x16 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y16 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z16 = level*448 + 0
x17 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[0])
y17 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[1])
z17 = level*448 + 0
x18 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[0])
y18 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 180)[1])
z18 = level*448 + 64
x19 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 180)[0])
y19 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 180)[1])
z19 = level*448 + 64
x20 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 180)[0])
y20 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 180)[1])
z20 = level*448 + 64
x21 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y21 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z21 = level*448 + 64
x22 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[0])
y22 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[1])
z22 = level*448 + 64
x23 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y23 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z23 = level*448 + 64
x24 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y24 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z24 = level*448 + 448
x25 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 180)[0])
y25 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 180)[1])
z25 = level*448 + 64
x26 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[0])
y26 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[1])
z26 = level*448 + 64
x27 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[0])
y27 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[1])
z27 = level*448 + 448
x28 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y28 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z28 = level*448 + 64
x29 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 180)[0])
y29 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 180)[1])
z29 = level*448 + 64
x30 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[0])
y30 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 180)[1])
z30 = level*448 + 448
x31 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 180)[0])
y31 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 180)[1])
z31 = level*448 + 64
x32 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 180)[0])
y32 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 180)[1])
z32 = level*448 + 64
x33 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[0])
y33 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 180)[1])
z33 = level*448 + 448
#INSERT_ROT_2_PY_LIST
elif rotation == 3:
x1 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 90)[0])
y1 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 90)[1])
z1 = level*448 + 64
x2 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[0])
y2 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[1])
z2 = level*448 + 64
x3 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y3 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z3 = level*448 + 64
x4 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[0])
y4 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[1])
z4 = level*448 + 0
x5 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y5 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z5 = level*448 + 0
x6 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[0])
y6 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[1])
z6 = level*448 + 0
x7 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 90)[0])
y7 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 90)[1])
z7 = level*448 + 64
x8 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[0])
y8 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[1])
z8 = level*448 + 64
x9 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[0])
y9 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[1])
z9 = level*448 + 0
x10 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[0])
y10 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[1])
z10 = level*448 + 0
x11 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y11 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z11 = level*448 + 0
x12 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y12 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z12 = level*448 + 64
x13 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[0])
y13 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[1])
z13 = level*448 + 64
x14 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 90)[0])
y14 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 90)[1])
z14 = level*448 + 64
x15 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 90)[0])
y15 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512), 90)[1])
z15 = level*448 + 0
x16 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y16 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z16 = level*448 + 0
x17 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[0])
y17 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[1])
z17 = level*448 + 0
x18 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[0])
y18 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512, posy*-1*512 + (-512)), 90)[1])
z18 = level*448 + 64
x19 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 90)[0])
y19 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 90)[1])
z19 = level*448 + 64
x20 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 90)[0])
y20 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 90)[1])
z20 = level*448 + 64
x21 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y21 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z21 = level*448 + 64
x22 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[0])
y22 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[1])
z22 = level*448 + 64
x23 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y23 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z23 = level*448 + 64
x24 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y24 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z24 = level*448 + 448
x25 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 90)[0])
y25 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 90)[1])
z25 = level*448 + 64
x26 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[0])
y26 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[1])
z26 = level*448 + 64
x27 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[0])
y27 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[1])
z27 = level*448 + 448
x28 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y28 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z28 = level*448 + 64
x29 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 90)[0])
y29 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 90)[1])
z29 = level*448 + 64
x30 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[0])
y30 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512 + (-512)), 90)[1])
z30 = level*448 + 448
x31 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 90)[0])
y31 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512 + (-512)), 90)[1])
z31 = level*448 + 64
x32 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 90)[0])
y32 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (-64), posy*-1*512), 90)[1])
z32 = level*448 + 64
x33 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[0])
y33 = int(rotatePoint((posx*512+256,posy*-1*512-256), (posx*1*512 + (512), posy*-1*512), 90)[1])
z33 = level*448 + 448
#INSERT_ROT_3_PY_LIST
var_count = 33
values = "".join(lines)#converting list to string
ogvalues = "".join(lines)
normal_list,axislist,negaxislist,vaxis,uaxis=[],['1 0 0 1','0 1 0 1','0 0 1 1'],['-1 0 0 1','0 -1 0 1','0 0 -1 1'],0,0
def evaluate(coords):
dist_x,dist_y,dist_z = abs(coords[0]),abs(coords[1]),abs(coords[2]),
if dist_x >= dist_y and dist_x >= dist_z:
return axislist[0]
if dist_y >= dist_z:
return axislist[1]
return axislist[2]
def get_normal(coord_list):
vector_a = (coord_list[1][0]-coord_list[0][0],coord_list[1][1]-coord_list[0][1],coord_list[1][2]-coord_list[0][2])
vector_b = (coord_list[2][0]-coord_list[0][0],coord_list[2][1]-coord_list[0][1],coord_list[2][2]-coord_list[0][2])
normal = (vector_a[1]*vector_b[2]-vector_a[2]*vector_b[1],vector_a[2]*vector_b[0]-vector_a[0]*vector_b[2],vector_a[0]*vector_b[1]-vector_a[1]*vector_b[0])
return normal
for normal_num in range(1,var_count+1,3):
normal_list=[]
for i in range(3):
normal_list.append([])
for var in ["x", "y", "z"]:
normal_list[i].append(eval(var+str(normal_num+i)))
coords = get_normal(normal_list)
response = evaluate(coords)
if response == axislist[0]:
uaxis = axislist[1]
else:
uaxis = axislist[0]
if response == axislist[2]:
vaxis = negaxislist[1]
else:
vaxis = negaxislist[2]
values = values.replace('AXIS_REPLACE_U',uaxis,1)
values = values.replace('AXIS_REPLACE_V',vaxis,1)
for i in range(ogvalues.count("world_idnum")):
values = values.replace('world_idnum', str(world_id_num), 1)
world_id_num += 1
for var in ["x", "y", "z"]:
for count in range(1,var_count+1):
string = var + str(count)
string_var = str(eval(var + str(count)))
if var == "z":
values = values.replace(string + ")",string_var + ")") #we need to do this or else it will mess up on 2 digit numbers
else:
values = values.replace(string + " ",string_var + " ")
for i in range(ogvalues.count('id_num')):
values = values.replace('id_num', str(id_num), 1)
id_num = id_num+1
if "ROTATION_RIGHT" in values:
if rotation == 0:
values = values.replace("ROTATION_RIGHT","0 0 0",1)
elif rotation == 1:
values = values.replace("ROTATION_RIGHT","0 270 0",1)
elif rotation == 2:
values = values.replace("ROTATION_RIGHT","0 180 0",1)
elif rotation == 3:
values = values.replace("ROTATION_RIGHT","0 90 0",1)
if "ROTATION_UP" in values:
if rotation == 0:
values = values.replace("ROTATION_UP","0 90 0",1)
elif rotation == 1:
values = values.replace("ROTATION_UP","0 0 0",1)
elif rotation == 2:
values = values.replace("ROTATION_UP","0 270 0",1)
elif rotation == 3:
values = values.replace("ROTATION_UP","0 180 0",1)
if "ROTATION_LEFT" in values:
if rotation == 0:
values = values.replace("ROTATION_LEFT","0 180 0",1)
elif rotation == 1:
values = values.replace("ROTATION_LEFT","0 90 0",1)
elif rotation == 2:
values = values.replace("ROTATION_LEFT","0 0 0",1)
elif rotation == 3:
values = values.replace("ROTATION_LEFT","0 270 0",1)
if "ROTATION_DOWN" in values:
if rotation == 0:
values = values.replace("ROTATION_DOWN","0 270 0",1)
elif rotation == 1:
values = values.replace("ROTATION_DOWN","0 180 0",1)
elif rotation == 2:
values = values.replace("ROTATION_DOWN","0 90 0",1)
elif rotation == 3:
values = values.replace("ROTATION_DOWN","0 0 0",1)
values = values.replace('"[0 0 0 1] 0.25"','"[1 1 1 1] 0.25"')
values = values.replace('"[0 0 1 0] 0.25"','"[1 1 1 1] 0.25"')
values = values.replace('"[0 1 0 0] 0.25"','"[1 1 1 1] 0.25"')
values = values.replace('"[1 0 0 0] 0.25"','"[1 1 1 1] 0.25"')
#INSERT_ENT_CODE
return values, id_num, world_id_num | gpl-3.0 |
jaredweiss/nupic | nupic/regions/ImageSensorFilters/Lines.py | 15 | 2799 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
## @file
"""
import random
import numpy
from PIL import Image
from PIL import ImageDraw
from nupic.regions.ImageSensorFilters.BaseFilter import BaseFilter, uint
class Lines(BaseFilter):

  """
  Adds a random number of randomly-placed lines to the image.

  Each line gets a random gray level in [0, 255]; despite the original
  description ("dark lines"), nothing restricts the color to dark values.
  """

  def __init__(self, difficulty = 0.5, seed=None, reproducible=False):
    """
    @param difficulty -- Value between 0.0 and 1.0 that controls how many lines to add in image.
    @param seed -- Seed value for random number generator, to produce
      reproducible results.
    @param reproducible -- Whether to seed the random number generator based
      on a hash of the image pixels upon each call to process().
      'seed' and 'reproducible' cannot be used together.
    """

    BaseFilter.__init__(self, seed, reproducible)

    self.difficulty = difficulty
    # Maximum number of lines to add; the actual count is
    # int(difficulty * maxLines).
    self.maxLines = 10

  def process(self, image):
    """
    @param image -- The image to process.

    Returns a single image, or a list containing one or more images.
    """

    BaseFilter.process(self, image)

    s = min(image.size)
    # NOTE(review): endpoints are drawn from [0, s] inclusive, so a
    # coordinate can land one pixel outside the smaller dimension; PIL
    # clips drawing to the canvas, so this is harmless -- confirm intent
    # before tightening to s - 1.
    sizeRange = [0, s]

    # Rebuild the image so the original is left untouched: copy the
    # luminance band through uint() and re-attach the alpha band.
    imageArray = numpy.array(image.split()[0].getdata())
    newImage = Image.new("LA", image.size)
    newImage.putdata([uint(p) for p in imageArray])
    newImage.putalpha(image.split()[1])

    # Hoisted out of the loop: one Draw wrapper serves every line
    # (the original recreated it per iteration for no benefit).
    draw = ImageDraw.Draw(newImage)
    for i in xrange(int(self.difficulty*self.maxLines)):
      # Generate random line
      start = (random.randint(sizeRange[0], sizeRange[1]),
               random.randint(sizeRange[0], sizeRange[1]))
      end = (random.randint(sizeRange[0], sizeRange[1]),
             random.randint(sizeRange[0], sizeRange[1]))
      # Generate random color
      color = random.randint(0,255)
      # Add the line to the image
      draw.line((start, end), fill=color)

    return newImage
| gpl-3.0 |
lache/RacingKingLee | monitor/engine.win64/2.74/python/lib/encodings/iso8859_4.py | 272 | 13376 | """ Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ISO 8859-4 codec backed by the module's charmap tables."""

    def encode(self,input,errors='strict'):
        # Map each character through encoding_table; 'errors' follows the
        # standard codecs error scheme ('strict', 'replace', 'ignore', ...).
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        # Map each byte through decoding_table (a 256-char lookup string).
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; charmap encoding is stateless, so no buffering."""

    def encode(self, input, final=False):
        # [0] drops the consumed-length element of the (bytes, length) pair;
        # 'final' needs no special handling for a stateless charmap.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; charmap decoding is stateless, so no buffering."""

    def decode(self, input, final=False):
        # [0] drops the consumed-length element of the (str, length) pair.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer: Codec's encode mixed into codecs.StreamWriter."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader: Codec's decode mixed into codecs.StreamReader."""
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record the codec registry uses for 'iso8859-4'."""
    entry = {
        'name': 'iso8859-4',
        'encode': Codec().encode,
        'decode': Codec().decode,
        'incrementalencoder': IncrementalEncoder,
        'incrementaldecoder': IncrementalDecoder,
        'streamreader': StreamReader,
        'streamwriter': StreamWriter,
    }
    return codecs.CodecInfo(**entry)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
'\u0138' # 0xA2 -> LATIN SMALL LETTER KRA
'\u0156' # 0xA3 -> LATIN CAPITAL LETTER R WITH CEDILLA
'\xa4' # 0xA4 -> CURRENCY SIGN
'\u0128' # 0xA5 -> LATIN CAPITAL LETTER I WITH TILDE
'\u013b' # 0xA6 -> LATIN CAPITAL LETTER L WITH CEDILLA
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\u0160' # 0xA9 -> LATIN CAPITAL LETTER S WITH CARON
'\u0112' # 0xAA -> LATIN CAPITAL LETTER E WITH MACRON
'\u0122' # 0xAB -> LATIN CAPITAL LETTER G WITH CEDILLA
'\u0166' # 0xAC -> LATIN CAPITAL LETTER T WITH STROKE
'\xad' # 0xAD -> SOFT HYPHEN
'\u017d' # 0xAE -> LATIN CAPITAL LETTER Z WITH CARON
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\u0105' # 0xB1 -> LATIN SMALL LETTER A WITH OGONEK
'\u02db' # 0xB2 -> OGONEK
'\u0157' # 0xB3 -> LATIN SMALL LETTER R WITH CEDILLA
'\xb4' # 0xB4 -> ACUTE ACCENT
'\u0129' # 0xB5 -> LATIN SMALL LETTER I WITH TILDE
'\u013c' # 0xB6 -> LATIN SMALL LETTER L WITH CEDILLA
'\u02c7' # 0xB7 -> CARON
'\xb8' # 0xB8 -> CEDILLA
'\u0161' # 0xB9 -> LATIN SMALL LETTER S WITH CARON
'\u0113' # 0xBA -> LATIN SMALL LETTER E WITH MACRON
'\u0123' # 0xBB -> LATIN SMALL LETTER G WITH CEDILLA
'\u0167' # 0xBC -> LATIN SMALL LETTER T WITH STROKE
'\u014a' # 0xBD -> LATIN CAPITAL LETTER ENG
'\u017e' # 0xBE -> LATIN SMALL LETTER Z WITH CARON
'\u014b' # 0xBF -> LATIN SMALL LETTER ENG
'\u0100' # 0xC0 -> LATIN CAPITAL LETTER A WITH MACRON
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\u012e' # 0xC7 -> LATIN CAPITAL LETTER I WITH OGONEK
'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u0116' # 0xCC -> LATIN CAPITAL LETTER E WITH DOT ABOVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\u012a' # 0xCF -> LATIN CAPITAL LETTER I WITH MACRON
'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
'\u0145' # 0xD1 -> LATIN CAPITAL LETTER N WITH CEDILLA
'\u014c' # 0xD2 -> LATIN CAPITAL LETTER O WITH MACRON
'\u0136' # 0xD3 -> LATIN CAPITAL LETTER K WITH CEDILLA
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\u0172' # 0xD9 -> LATIN CAPITAL LETTER U WITH OGONEK
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0168' # 0xDD -> LATIN CAPITAL LETTER U WITH TILDE
'\u016a' # 0xDE -> LATIN CAPITAL LETTER U WITH MACRON
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\u0101' # 0xE0 -> LATIN SMALL LETTER A WITH MACRON
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\u012f' # 0xE7 -> LATIN SMALL LETTER I WITH OGONEK
'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\u0117' # 0xEC -> LATIN SMALL LETTER E WITH DOT ABOVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\u012b' # 0xEF -> LATIN SMALL LETTER I WITH MACRON
'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
'\u0146' # 0xF1 -> LATIN SMALL LETTER N WITH CEDILLA
'\u014d' # 0xF2 -> LATIN SMALL LETTER O WITH MACRON
'\u0137' # 0xF3 -> LATIN SMALL LETTER K WITH CEDILLA
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\u0173' # 0xF9 -> LATIN SMALL LETTER U WITH OGONEK
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\u0169' # 0xFD -> LATIN SMALL LETTER U WITH TILDE
'\u016b' # 0xFE -> LATIN SMALL LETTER U WITH MACRON
'\u02d9' # 0xFF -> DOT ABOVE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| mit |
jelugbo/ddi | common/djangoapps/pipeline_js/views.py | 162 | 1215 | """
Views for returning XModule JS (used by requirejs)
"""
import json
from django.conf import settings
from django.http import HttpResponse
from staticfiles.storage import staticfiles_storage
from edxmako.shortcuts import render_to_response
def get_xmodule_urls():
    """
    Return the list of static-file URLs that serve all the XModule JS.

    In DEBUG mode the individual source files are served (with .coffee
    names mapped to their compiled .js counterparts); otherwise only the
    single pipelined bundle is served.
    """
    module_js = settings.PIPELINE_JS['module-js']
    if settings.DEBUG:
        paths = [name.replace(".coffee", ".js")
                 for name in module_js['source_filenames']]
    else:
        paths = [module_js['output_filename']]
    return [staticfiles_storage.url(p) for p in paths]
def xmodule_js_files(request):
    """
    View that returns the XModule URLs as a JSON array; meant to be used
    as an API.
    """
    payload = json.dumps(get_xmodule_urls())
    return HttpResponse(payload, content_type="application/json")
def requirejs_xmodule(request):
    """
    View that renders a requirejs-wrapped Javascript file which loads
    every XModule URL; meant to be loaded via requireJS.
    """
    context = {"urls": get_xmodule_urls()}
    return render_to_response(
        "xmodule.js",
        context,
        content_type="text/javascript",
    )
| agpl-3.0 |
RobertABT/heightmap | build/matplotlib/examples/axes_grid/scatter_hist.py | 8 | 1562 | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
# Demo: scatter plot with marginal histograms attached via axes_grid1's
# make_axes_locatable.  Sample data: two independent standard normals.
x = np.random.randn(1000)
y = np.random.randn(1000)

fig, axScatter = plt.subplots(figsize=(5.5,5.5))

# the scatter plot, forced to a 1:1 aspect ratio so x and y scales match:
axScatter.scatter(x, y)
axScatter.set_aspect(1.)

# create new axes on the right and on the top of the current axes
# The first argument of the new_vertical(new_horizontal) method is
# the height (width) of the axes to be created in inches.
# sharex/sharey keep the marginal histograms aligned with the scatter axes.
divider = make_axes_locatable(axScatter)
axHistx = divider.append_axes("top", 1.2, pad=0.1, sharex=axScatter)
axHisty = divider.append_axes("right", 1.2, pad=0.1, sharey=axScatter)

# make some labels invisible (they duplicate the scatter axes' labels)
plt.setp(axHistx.get_xticklabels() + axHisty.get_yticklabels(),
         visible=False)

# now determine nice limits by hand: symmetric range that covers the
# largest absolute sample value, rounded up to a whole bin.
binwidth = 0.25
xymax = np.max( [np.max(np.fabs(x)), np.max(np.fabs(y))] )
lim = ( int(xymax/binwidth) + 1) * binwidth

bins = np.arange(-lim, lim + binwidth, binwidth)
axHistx.hist(x, bins=bins)
# the y-marginal histogram is drawn sideways so it reads against the y axis
axHisty.hist(y, bins=bins, orientation='horizontal')

# the xaxis of axHistx and yaxis of axHisty are shared with axScatter,
# thus there is no need to manually adjust the xlim and ylim of these
# axis.

#axHistx.axis["bottom"].major_ticklabels.set_visible(False)
for tl in axHistx.get_xticklabels():
    tl.set_visible(False)
axHistx.set_yticks([0, 50, 100])

#axHisty.axis["left"].major_ticklabels.set_visible(False)
for tl in axHisty.get_yticklabels():
    tl.set_visible(False)
axHisty.set_xticks([0, 50, 100])

plt.draw()
plt.show()
| mit |
damonkohler/sl4a | python/src/Lib/compiler/symbols.py | 51 | 14427 | """Module symbol-table generator"""
from compiler import ast
from compiler.consts import SC_LOCAL, SC_GLOBAL, SC_FREE, SC_CELL, SC_UNKNOWN
from compiler.misc import mangle
import types
import sys
MANGLE_LEN = 256
class Scope:
    """Symbol table for one lexical scope (module, class, or function).

    Name sets are stored as dictionaries mapping name -> 1, the Python-2
    era set idiom used throughout this module.
    """
    # XXX how much information do I need about each name?

    def __init__(self, name, module, klass=None):
        # name: display name of the scope; module: the enclosing
        # ModuleScope; klass: name of the enclosing class, if any,
        # used for private-name mangling.
        self.name = name
        self.module = module
        self.defs = {}      # names bound in this scope
        self.uses = {}      # names referenced in this scope
        self.globals = {}   # names declared 'global' here
        self.params = {}    # formal parameters (also recorded in defs)
        self.frees = {}     # free variables (bound in an enclosing scope)
        self.cells = {}     # names children need as cell variables
        self.children = []  # directly nested Scope objects
        # nested is true if the class could contain free variables,
        # i.e. if it is nested within another function.
        self.nested = None
        # presumably set by the visitor when a yield is seen -- confirm
        self.generator = None
        self.klass = None
        if klass is not None:
            # Strip leading underscores, matching CPython's name-mangling
            # rules for class names like '__Foo'.
            for i in range(len(klass)):
                if klass[i] != '_':
                    self.klass = klass[i:]
                    break

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.name)

    def mangle(self, name):
        # Apply private-name mangling only when inside a class body.
        if self.klass is None:
            return name
        return mangle(name, self.klass)

    def add_def(self, name):
        # Record a binding occurrence (assignment, def, import, ...).
        self.defs[self.mangle(name)] = 1

    def add_use(self, name):
        # Record a reference occurrence.
        self.uses[self.mangle(name)] = 1

    def add_global(self, name):
        # Record an explicit 'global' declaration; the name also becomes
        # a definition at module level.
        name = self.mangle(name)
        if name in self.uses or name in self.defs:
            pass # XXX warn about global following def/use
        if name in self.params:
            raise SyntaxError, "%s in %s is global and parameter" % \
                  (name, self.name)
        self.globals[name] = 1
        self.module.add_def(name)

    def add_param(self, name):
        # Parameters count as local definitions as well.
        name = self.mangle(name)
        self.defs[name] = 1
        self.params[name] = 1

    def get_names(self):
        # Union of defined, used, and global names (duplicates collapsed
        # via a temporary dict).
        d = {}
        d.update(self.defs)
        d.update(self.uses)
        d.update(self.globals)
        return d.keys()

    def add_child(self, child):
        self.children.append(child)

    def get_children(self):
        return self.children

    def DEBUG(self):
        # Dump the scope's name sets to stderr (developer aid only).
        print >> sys.stderr, self.name, self.nested and "nested" or ""
        print >> sys.stderr, "\tglobals: ", self.globals
        print >> sys.stderr, "\tcells: ", self.cells
        print >> sys.stderr, "\tdefs: ", self.defs
        print >> sys.stderr, "\tuses: ", self.uses
        print >> sys.stderr, "\tfrees:", self.frees

    def check_name(self, name):
        """Return scope of name.

        The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.
        Unresolvable names in a nested scope come back as SC_UNKNOWN;
        at top level they default to SC_GLOBAL.
        """
        if name in self.globals:
            return SC_GLOBAL
        if name in self.cells:
            return SC_CELL
        if name in self.defs:
            return SC_LOCAL
        if self.nested and (name in self.frees or name in self.uses):
            return SC_FREE
        if self.nested:
            return SC_UNKNOWN
        else:
            return SC_GLOBAL

    def get_free_vars(self):
        # Free variables: known frees plus any name used but neither
        # defined locally nor declared global.  Only meaningful for
        # nested scopes.
        if not self.nested:
            return ()
        free = {}
        free.update(self.frees)
        for name in self.uses.keys():
            if name not in self.defs and name not in self.globals:
                free[name] = 1
        return free.keys()

    def handle_children(self):
        # Resolve each child's free variables against this scope; names
        # that turn out to be globals are pushed back down the tree.
        for child in self.children:
            frees = child.get_free_vars()
            globals = self.add_frees(frees)
            for name in globals:
                child.force_global(name)

    def force_global(self, name):
        """Force name to be global in scope.

        Some child of the current node had a free reference to name.
        When the child was processed, it was labelled a free
        variable. Now that all its enclosing scope have been
        processed, the name is known to be a global or builtin. So
        walk back down the child chain and set the name to be global
        rather than free.

        Be careful to stop if a child does not think the name is
        free.
        """
        self.globals[name] = 1
        if name in self.frees:
            del self.frees[name]
        for child in self.children:
            if child.check_name(name) == SC_FREE:
                child.force_global(name)

    def add_frees(self, names):
        """Process list of free vars from nested scope.

        Returns a list of names that are either 1) declared global in the
        parent or 2) undefined in a top-level parent.  In either case,
        the nested scope should treat them as globals.
        """
        child_globals = []
        for name in names:
            sc = self.check_name(name)
            if self.nested:
                # Nested scope: pass unknown/free names further up; a
                # local here becomes a cell so the child can close over it.
                if sc == SC_UNKNOWN or sc == SC_FREE \
                   or isinstance(self, ClassScope):
                    self.frees[name] = 1
                elif sc == SC_GLOBAL:
                    child_globals.append(name)
                elif isinstance(self, FunctionScope) and sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
            else:
                # Top-level scope: locals become cells, everything else
                # (except existing cells) resolves to global.
                if sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
        return child_globals

    def get_cell_vars(self):
        return self.cells.keys()
class ModuleScope(Scope):
    """Top-level scope of a module."""

    def __init__(self):
        # A module is its own enclosing module, hence the second 'self'.
        Scope.__init__(self, "global", self)
class FunctionScope(Scope):
    """Scope for a function body; all behavior is inherited from Scope."""
    pass
class GenExprScope(Scope):
    """Scope for a generator expression."""
    # Alias so __init__ can reach the base initializer despite mangling.
    __super_init = Scope.__init__

    # Class-level id intended to number generator-expression scopes.
    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        # NOTE(review): augmented assignment through 'self' reads the class
        # attribute but writes an *instance* attribute, so the class counter
        # never advances and every scope is numbered 1.  The likely intent
        # was 'GenExprScope.__counter += 1' -- confirm before changing;
        # the number only appears in the scope's display name.
        self.__counter += 1
        self.__super_init("generator expression<%d>"%i, module, klass)
        # '.0' is the implicit first parameter: the outermost iterable.
        self.add_param('.0')

    def get_names(self):
        keys = Scope.get_names(self)
        return keys
class LambdaScope(FunctionScope):
    """Scope for a lambda, named 'lambda.<n>'."""
    # Alias so __init__ can reach the base initializer despite mangling.
    __super_init = Scope.__init__

    # Class-level id intended to number lambda scopes.
    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        # NOTE(review): 'self.__counter += 1' writes an instance attribute,
        # so the class-level counter never advances and every lambda scope
        # is numbered 1 -- confirm intent; the number is display-only.
        self.__counter += 1
        self.__super_init("lambda.%d" % i, module, klass)
class ClassScope(Scope):
    """Scope for a class body; the class's own name drives mangling."""
    # Alias so __init__ can reach the base initializer despite mangling.
    __super_init = Scope.__init__

    def __init__(self, name, module):
        # The class name serves both as scope name and as mangling context.
        self.__super_init(name, module, name)
class SymbolVisitor:
    """AST walker that builds a Scope for every namespace-introducing
    node (module, function, class, lambda, generator expression) and
    records each name definition and use in the appropriate scope.

    The resulting node->Scope mapping is kept in ``self.scopes``.
    """

    def __init__(self):
        # maps scope-introducing AST node -> Scope instance
        self.scopes = {}
        # name of the class currently being visited, or None
        self.klass = None

    # nodes that define new scopes

    def visitModule(self, node):
        scope = self.module = self.scopes[node] = ModuleScope()
        self.visit(node.node, scope)
    visitExpression = visitModule

    def visitFunction(self, node, parent):
        if node.decorators:
            # decorators are evaluated in the enclosing scope
            self.visit(node.decorators, parent)
        # the function's name is bound in the enclosing scope
        parent.add_def(node.name)
        for n in node.defaults:
            # default values are also evaluated in the enclosing scope
            self.visit(n, parent)
        scope = FunctionScope(node.name, self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def visitGenExpr(self, node, parent):
        scope = GenExprScope(self.module, self.klass);
        # a genexp nested inside any function-like scope is itself nested
        if parent.nested or isinstance(parent, FunctionScope) \
           or isinstance(parent, GenExprScope):
            scope.nested = 1
        self.scopes[node] = scope
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def visitGenExprInner(self, node, scope):
        for genfor in node.quals:
            self.visit(genfor, scope)
        self.visit(node.expr, scope)

    def visitGenExprFor(self, node, scope):
        # the for-target binds names in the genexp scope
        self.visit(node.assign, scope, 1)
        self.visit(node.iter, scope)
        for if_ in node.ifs:
            self.visit(if_, scope)

    def visitGenExprIf(self, node, scope):
        self.visit(node.test, scope)

    def visitLambda(self, node, parent, assign=0):
        # Lambda is an expression, so it could appear in an expression
        # context where assign is passed.  The transformer should catch
        # any code that has a lambda on the left-hand side.
        assert not assign

        for n in node.defaults:
            # defaults are evaluated in the enclosing scope
            self.visit(n, parent)
        scope = LambdaScope(self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def _do_args(self, scope, args):
        # argument lists may contain nested tuples (Python 2 tuple
        # parameter unpacking); recurse to register every leaf name
        for name in args:
            if type(name) == types.TupleType:
                self._do_args(scope, name)
            else:
                scope.add_param(name)

    def handle_free_vars(self, scope, parent):
        # link the finished child scope into the tree and resolve its
        # free variables against this parent
        parent.add_child(scope)
        scope.handle_children()

    def visitClass(self, node, parent):
        # the class name is bound in the enclosing scope
        parent.add_def(node.name)
        for n in node.bases:
            # base-class expressions are evaluated in the enclosing scope
            self.visit(n, parent)
        scope = ClassScope(node.name, self.module)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        if node.doc is not None:
            scope.add_def('__doc__')
        scope.add_def('__module__')
        self.scopes[node] = scope
        # track the innermost class name while visiting the body
        prev = self.klass
        self.klass = node.name
        self.visit(node.code, scope)
        self.klass = prev
        self.handle_free_vars(scope, parent)

    # name can be a def or a use
    # XXX a few calls and nodes expect a third "assign" arg that is
    # true if the name is being used as an assignment.  only
    # expressions contained within statements may have the assign arg.

    def visitName(self, node, scope, assign=0):
        if assign:
            scope.add_def(node.name)
        else:
            scope.add_use(node.name)

    # operations that bind new names

    def visitFor(self, node, scope):
        # the loop target is an assignment
        self.visit(node.assign, scope, 1)
        self.visit(node.list, scope)
        self.visit(node.body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    def visitFrom(self, node, scope):
        for name, asname in node.names:
            if name == "*":
                # star-imports bind no statically known names
                continue
            scope.add_def(asname or name)

    def visitImport(self, node, scope):
        for name, asname in node.names:
            i = name.find(".")
            if i > -1:
                # "import a.b" binds only the top-level package name "a"
                name = name[:i]
            scope.add_def(asname or name)

    def visitGlobal(self, node, scope):
        for name in node.names:
            scope.add_global(name)

    def visitAssign(self, node, scope):
        """Propagate assignment flag down to child nodes.

        The Assign node doesn't itself contains the variables being
        assigned to.  Instead, the children in node.nodes are visited
        with the assign flag set to true.  When the names occur in
        those nodes, they are marked as defs.

        Some names that occur in an assignment target are not bound by
        the assignment, e.g. a name occurring inside a slice.  The
        visitor handles these nodes specially; they do not propagate
        the assign flag to their children.
        """
        for n in node.nodes:
            self.visit(n, scope, 1)
        self.visit(node.expr, scope)

    def visitAssName(self, node, scope, assign=1):
        scope.add_def(node.name)

    def visitAssAttr(self, node, scope, assign=0):
        # attribute assignment binds no name; the target expression is
        # only a use
        self.visit(node.expr, scope, 0)

    def visitSubscript(self, node, scope, assign=0):
        # subscript assignment binds no name either
        self.visit(node.expr, scope, 0)
        for n in node.subs:
            self.visit(n, scope, 0)

    def visitSlice(self, node, scope, assign=0):
        self.visit(node.expr, scope, 0)
        if node.lower:
            self.visit(node.lower, scope, 0)
        if node.upper:
            self.visit(node.upper, scope, 0)

    def visitAugAssign(self, node, scope):
        # If the LHS is a name, then this counts as assignment.
        # Otherwise, it's just use.
        self.visit(node.node, scope)
        if isinstance(node.node, ast.Name):
            self.visit(node.node, scope, 1) # XXX worry about this
        self.visit(node.expr, scope)

    # prune if statements if tests are false
    _const_types = types.StringType, types.IntType, types.FloatType

    def visitIf(self, node, scope):
        for test, body in node.tests:
            if isinstance(test, ast.Const):
                if type(test.value) in self._const_types:
                    if not test.value:
                        # constant-false branch: skip it entirely
                        continue
            self.visit(test, scope)
            self.visit(body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    # a yield statement signals a generator

    def visitYield(self, node, scope):
        scope.generator = 1
        self.visit(node.value, scope)
def list_eq(l1, l2):
    """Return True when l1 and l2 hold the same elements, order ignored."""
    a, b = sorted(l1), sorted(l2)
    return a == b
if __name__ == "__main__":
    # Self-test: for every file named on the command line, compare the
    # scopes computed by SymbolVisitor with those produced by the
    # standard symtable module; exit non-zero on the first mismatch.
    import sys
    from compiler import parseFile, walk
    import symtable

    def get_names(syms):
        # all symbol names, minus compiler-generated ones such as
        # '_[1]' (list comprehensions) and '.0' (genexp argument)
        return [s for s in [s.get_name() for s in syms.get_symbols()]
                if not (s.startswith('_[') or s.startswith('.'))]

    for file in sys.argv[1:]:
        print file
        f = open(file)
        buf = f.read()
        f.close()
        # reference result from the interpreter's own symbol table
        syms = symtable.symtable(buf, file, "exec")
        mod_names = get_names(syms)
        # result from this module's visitor
        tree = parseFile(file)
        s = SymbolVisitor()
        walk(tree, s)

        # compare module-level symbols
        names2 = s.scopes[tree].get_names()
        if not list_eq(mod_names, names2):
            print
            print "oops", file
            print sorted(mod_names)
            print sorted(names2)
            sys.exit(-1)

        # collect all non-module scopes for matching by name
        d = {}
        d.update(s.scopes)
        del d[tree]
        scopes = d.values()
        del d

        for s in syms.get_symbols():
            if s.is_namespace():
                # match scopes by name; a duplicated name cannot be
                # matched unambiguously, so it is skipped
                l = [sc for sc in scopes
                     if sc.name == s.get_name()]
                if len(l) > 1:
                    print "skipping", s.get_name()
                else:
                    if not list_eq(get_names(s.get_namespace()),
                                   l[0].get_names()):
                        print s.get_name()
                        print sorted(get_names(s.get_namespace()))
                        print sorted(l[0].get_names())
                        sys.exit(-1)
| apache-2.0 |
mdsafwan/Deal-My-Stuff | advertisements/migrations/0007_auto_20151115_1859.py | 1 | 1758 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: re-declares the foreign keys on
    # the advertisement-related models with explicit db_column names.

    dependencies = [
        ('advertisements', '0006_auto_20151115_1408'),
    ]

    operations = [
        # Advertisement has two FKs to login.user_details (buyer and
        # seller); related_name disambiguates the reverse relations.
        migrations.AlterField(
            model_name='advertisement',
            name='Buyer_User_ID',
            field=models.ForeignKey(related_name='Buyer_User_ID', db_column=b'Buyer_User_ID', to='login.user_details'),
        ),
        migrations.AlterField(
            model_name='advertisement',
            name='Seller_User_ID',
            field=models.ForeignKey(related_name='Seller_User_ID', db_column=b'Seller_User_ID', to='login.user_details'),
        ),
        # Each concrete product type points at its category row.
        migrations.AlterField(
            model_name='book',
            name='Product_ID',
            field=models.ForeignKey(to='advertisements.category', db_column=b'Product_ID'),
        ),
        # Category links back to the advertisement it belongs to.
        migrations.AlterField(
            model_name='category',
            name='Advertisement_ID',
            field=models.ForeignKey(to='advertisements.advertisement', db_column=b'Advertisement_ID'),
        ),
        migrations.AlterField(
            model_name='electronic_gadget',
            name='Product_ID',
            field=models.ForeignKey(to='advertisements.category', db_column=b'Product_ID'),
        ),
        migrations.AlterField(
            model_name='household_item',
            name='Product_ID',
            field=models.ForeignKey(to='advertisements.category', db_column=b'Product_ID'),
        ),
        migrations.AlterField(
            model_name='vehicle',
            name='Product_ID',
            field=models.ForeignKey(to='advertisements.category', db_column=b'Product_ID'),
        ),
    ]
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.